Example #1
    def msysCompile( self, bOutOfSource = True ):
        """run configure and make for Autotools based stuff"""
        config = os.path.join( self.workdir, self.instsrcdir, "configure" )
        build  = os.path.join( self.workdir )
        if( bOutOfSource ):
            # otherwise $srcdir is very long and a conftest may fail (as is
            # the case with libgmp-4.2.4)
            config = os.path.join( "..", self.instsrcdir, "configure" )
            build  = os.path.join( build, self.instsrcdir + "-build" )
            utils.cleanDirectory( build )
        else:
            build  = os.path.join( build, self.instsrcdir )

        sh = os.path.join( self.msysdir, "bin", "sh.exe" )

        # todo use msysexecute
        cmd = "%s --login -c \"cd %s && %s %s && make -j2" % \
              ( sh, self.__toMSysPath( build ), self.__toMSysPath( config ), \
                self.msysConfigureFlags() )
        if utils.verbose() > 1:
            cmd += " VERBOSE=1"
        cmd += "\""
        if utils.verbose() > 0:
            print "msys compile: %s" % cmd
        if not utils.system(cmd):
            utils.die( "msys compile failed. cmd: %s" % cmd )
        return True
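The helper self.__toMSysPath used above is not shown in these examples; a minimal sketch of what it presumably does, assuming standard MSYS conventions where "C:\foo\bar" becomes "/c/foo/bar":

import os

def toMSysPath(path):
    # assumed behavior: lower-case the drive letter and switch to forward slashes
    drive, rest = os.path.splitdrive(path)
    if drive:
        path = "/" + drive[0].lower() + rest
    return path.replace("\\", "/")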
Example #2
 def test_all(self, res, ruleStr, cond, dic):
     unknown = []
     perm = []
     for l in cond:
         if l in dic.keys():
             unknown.append(l)
     combos = self.rec_p(len(unknown))
     for p in combos:
         for i, v in enumerate(p):
             dic[unknown[i]] = v
         if res == self.compute(cond, dic):
             perm.append(p)
     if len(perm) == 0:
         if res == True:
             ut.exit_m(
                 "Incoherence: '{:s}' is {} and '{:s}' cannot be {}".format(
                     ruleStr, res, cond, res))
         else:
             return False
     elif len(perm) == 1:
         ut.verbose("Only one combination is possible")
         chosen_opt = perm[0]
     else:
         if ut.OPT_C == 0:
             return False
         chosen_opt = self.user_choice(ruleStr, cond, res, unknown, perm)
     for i, v in enumerate(chosen_opt):
         l = unknown[i]
         self.facts[l] = v
         for fact in self.objectivesFacts:
             if fact.fact == l:
                 self.objectivesFacts.remove(fact)
                 self.addNodeCheck(fact)
     return True
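self.rec_p is not shown here; judging from its use above, it enumerates every True/False assignment for the given number of unknown facts. A minimal standard-library equivalent, offered as an assumption:

from itertools import product

def rec_p(n):
    # all 2**n possible truth-value tuples for n unknown facts
    return list(product([True, False], repeat=n))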
Example #3
 def resolve(self, fact):
     for rule in fact.rules:
         dic = self.getFactUnknown()
         ruleStr = str("{} {} {}".format(rule.rule[0], rule.rule[2],
                                         rule.rule[1]))
         if rule.rule[2] == '<=>':
             res = self.compute(rule.rule[0], dic)
             res2 = self.compute(rule.rule[1], dic)
             if res != None and res2 != None and res != res2:
              ut.exit_m("Incoherence: Rule = " + rule.rule[0] +
                        rule.rule[2] + rule.rule[1] + "\n Dic = " + str(dic))
             elif res == None and res2 != None:
                 if self.test_all(res2, ruleStr, rule.rule[0], dic) == True:
                     return True
             elif res2 == None and res != None:
                 if self.test_all(res, ruleStr, rule.rule[1], dic) == True:
                     return True
         elif len(rule.rule[1]) > 1:
             res = self.compute(rule.rule[0], dic)
             if res != None and self.test_all(res, ruleStr, rule.rule[1],
                                              dic) == True:
                 return True
         else:
             res = self.compute(rule.rule[0], dic)
             if res == True:
                 ut.verbose("Rule {} implies that {} is True.".format(
                     ruleStr, fact.fact))
                 self.facts[fact.fact] = True
                 self.objectivesFacts.remove(fact)
                 return True
     return False
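The indexing above implies that rule.rule[0] holds the left-hand expression, rule.rule[1] the right-hand side, and rule.rule[2] the operator. The concrete values below only illustrate that assumption:

# e.g. for the rule "A + B => C":
# rule.rule = ("A + B", "C", "=>")
# "{} {} {}".format(rule.rule[0], rule.rule[2], rule.rule[1])  # -> "A + B => C"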
Example #4
    def svnFetch( self, repo ):
        """getting sources from a custom svn repo"""
        if utils.verbose() > 1:
            print "base svnFetch called"
        if ( self.noFetch ):
            if utils.verbose() > 0:
                print "skipping svn fetch/update (--offline)"
            return True

        utils.svnFetch( repo, self.svndir )
        return True
Example #5
    def enterBuildDir(self):
        utils.debug( "EmergeBase.enterBuildDir called", 2 )

        if ( not os.path.exists( self.buildDir() ) ):
            os.makedirs( self.buildDir() )
            if utils.verbose() > 0:
                print("creating: %s" % self.buildDir())

        os.chdir( self.buildDir() )
        if utils.verbose() > 0:
            print("entering: %s" % self.buildDir())
Example #6
    def setDirectories( self, rootdir, imagedir, workdir, instsrcdir, instdestdir, infoobject ):
        self.subinfo = infoobject

        if self.COMPILER   == "msvc2005" or self.COMPILER == "msvc2008" or self.COMPILER == "msvc2010":
            self.cmakeMakefileGenerator = "NMake Makefiles"
            self.cmakeMakeProgramm      = "nmake"
        elif self.COMPILER == "mingw" or self.COMPILER == "mingw4":
            self.cmakeMakefileGenerator = "MinGW Makefiles"
            self.cmakeMakeProgramm      = "mingw32-make"
        else:
            utils.die( "KDECOMPILER: %s not understood" % self.COMPILER )

        if self.MAKE_PROGRAM:
            self.cmakeMakeProgramm = self.MAKE_PROGRAM
            utils.debug( "set custom make program: %s" % self.MAKE_PROGRAM, 1 )

        if utils.verbose() > 1:
            print "BuildType: %s" % self.BUILDTYPE
        self.buildType = self.BUILDTYPE

        self.noFetch = utils.envAsBool( "EMERGE_OFFLINE" )
        self.noCopy = utils.envAsBool( "EMERGE_NOCOPY")
        self.noFast = utils.envAsBool( "EMERGE_NOFAST", default=True )
        self.noClean = utils.envAsBool( "EMERGE_NOCLEAN" )
        self.forced = utils.envAsBool( "EMERGE_FORCED" )
        self.buildTests = utils.envAsBool( "EMERGE_BUILDTESTS" )

        self.buildNameExt    = None

        self.rootdir         = rootdir
        self.workdir         = workdir
        self.imagedir        = imagedir
        self.instsrcdir      = instsrcdir
        self.instdestdir     = instdestdir

        self.kdesvndir       = self.KDESVNDIR
        self.kdesvnserver    = self.KDESVNSERVER
        self.kdesvnuser      = self.KDESVNUSERNAME
        self.kdesvnpass      = self.KDESVNPASSWORD
        self.svndir = None # set in self.kdeSvnFetch

        if utils.verbose() > 1 and self.kdeSvnPath():
            print "noCopy       : %s" % self.noCopy
            print "kdeSvnPath() : %s" % self.kdeSvnPath().replace("/", "\\")

        if not ( self.noCopy and self.kdeSvnPath() ) :
            if self.kdeSvnPath():
                self.sourcePath = "..\\%s" % self.kdeSvnPath().split('/')[-1]
            else:
                self.sourcePath = "..\\%s" % self.instsrcdir
        else:
            self.sourcePath = "%s" % os.path.join(self.kdesvndir, self.kdeSvnPath() ).replace("/", "\\")
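utils.envAsBool is used throughout but never shown; a plausible minimal version, an assumption about its semantics including the default handling seen with EMERGE_NOFAST:

import os

def envAsBool(name, default=False):
    # treat common truthy strings as True; fall back to the default when unset
    value = os.getenv(name)
    if value is None:
        return default
    return value.strip().lower() in ("1", "true", "yes", "on")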
Example #7
    def kdeConfigureInternal( self, buildType, kdeCustomDefines ):
        """Using cmake"""
        builddir = "%s" % ( self.COMPILER )

        buildtype = ""
        if( not buildType == None ):
            buildtype = "-DCMAKE_BUILD_TYPE=%s" % buildType
            builddir = "%s-%s" % ( builddir, buildType )

        if( not self.buildNameExt == None ):
            builddir = "%s-%s" % ( builddir, self.buildNameExt )

        os.chdir( self.workdir )
        if ( not os.path.exists( builddir) ):
            os.mkdir( builddir )

        if not self.noClean:
            utils.cleanDirectory( builddir )
        os.chdir( builddir )

        command = r"""cmake -G "%s" %s %s %s""" % \
              ( self.cmakeMakefileGenerator, \
                self.kdeDefaultDefines(), \
                kdeCustomDefines, \
                buildtype )

        if utils.verbose() > 0:
            print "configuration command: %s" % command
        if not utils.system(command):
            utils.die( "while CMake'ing. cmd: %s" % command )
        return True
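For illustration, with COMPILER set to "msvc2008" and buildType "RelWithDebInfo", the assembled command looks roughly like the following; the two define lists are placeholders for whatever kdeDefaultDefines() and kdeCustomDefines contain:

# cmake -G "NMake Makefiles" <default defines> <custom defines> -DCMAKE_BUILD_TYPE=RelWithDebInfo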
Example #8
 def manifest( self ):
     """installer compatibility: make the manifest files that make up the installers
     install database"""
     if utils.verbose() > 1:
         print "base manifest called"
     utils.createManifestDir(self.imagedir, self.category, self.package, self.version )
     return True
Example #9
 def enterSourceDir(self):
     if ( not os.path.exists( self.sourceDir() ) ):
         return False
     utils.warning("entering the source directory!")
     os.chdir( self.sourceDir() )
     if utils.verbose() > 0:
         print("entering: %s" % self.sourceDir())
Example #10
    def unpack( self ):
        if self.buildTarget == "svnHEAD":
            if utils.verbose() >= 1:
                print "libassuan unpack called"
            # do the svn fetch/update
            repo = "svn://cvs.gnupg.org/libassuan/trunk"
            self.svnFetch( repo )

            utils.cleanDirectory( self.workdir )

            # now copy the tree below destdir/trunk to workdir
            srcdir = os.path.join( self.svndir, "trunk" )
            destdir = os.path.join( self.workdir, "libassuan" )
            utils.copySrcDirToDestDir( srcdir, destdir )

            os.chdir( self.workdir )
            self.system( "cd %s && patch -p0 < %s" % ( self.workdir, os.path.join( self.packagedir, "libassuan.diff" ) ) )
            self.system( "cd %s && patch -p0 < %s" % ( self.workdir, os.path.join( self.packagedir, "libassuan-cmake.diff" ) ) )

            return True
        else:
            base.baseclass.unpack( self ) or utils.die( "unpack failed" )
            os.chdir( self.workdir )
            shutil.move("libassuan-1.0.5", "libassuan")
            self.system( "cd %s && patch -p0 < %s" % ( self.workdir, os.path.join( self.packagedir, "libassuan.diff" ) ) )
            self.system( "cd %s && patch -p0 < %s" % ( self.workdir, os.path.join( self.packagedir, "libassuan-cmake.diff" ) ) )
            self.system( "cd %s && patch -p0 < %s" % ( self.workdir, os.path.join( self.packagedir, "libassuan-unistd.diff" ) ) )
            return True
Example #11
 def unmerge( self ):
     """unmergeing the files from the filesystem"""
     if utils.verbose() > 1:
         print "base unmerge called"
     utils.unmerge( self.rootdir, self.package, self.forced )
     portage.remInstalled( self.category, self.package, self.version )
     return True
Example #12
 def install( self ):
     """installing binary tarballs"""
     if utils.verbose() > 1:
         print "base install called"
     srcdir = os.path.join( self.workdir, self.instsrcdir )
     destdir = os.path.join( self.imagedir, self.instdestdir )
     utils.copySrcDirToDestDir( srcdir, destdir )
     return True
Example #13
    def kdeSvnFetch( self, svnpath, packagedir ):
        """svnpath is the part of the repo url after /home/kde, for example
        ""trunk/kdesupport/", which leads to the package itself
        without the package"""

        if utils.verbose() > 1:
            print "kdeSvnFetch called. svnpath: %s dir: %s" % ( svnpath, packagedir )

        if ( self.noFetch ):
            if utils.verbose() > 0:
                print "skipping svn fetch/update (--offline)"
            return True

        svndir = self.kdesvndir
        if ( not os.path.exists( svndir ) ):
            os.mkdir( svndir )

        repourl = self.kdesvnserver + "/home/kde/"

        for tmpdir in svnpath.split( "/" ):
            if ( tmpdir == "" ):
                continue
            if utils.verbose() > 1:
                print "  svndir: %s" % svndir
                print "  dir to checkout: %s" % tmpdir
                print "  repourl", repourl

            self.kdesinglecheckout( repourl, svndir, tmpdir, False )
            svndir = os.path.join( svndir, tmpdir )
            repourl = repourl + tmpdir + "/"

        if utils.verbose() > 0:
            print "dir in which to really checkout: %s" % svndir
            print "dir to really checkout: %s" % packagedir
        self.kdesinglecheckout( repourl, svndir, packagedir, True )

        svndir = os.path.join( self.kdesvndir, svnpath ).replace( "/", "\\" )
        #repo = self.kdesvnserver + "/home/kde/" + svnpath + dir
        #utils.svnFetch( repo, svndir, self.kdesvnuser, self.kdesvnpass )
        if utils.verbose() > 1:
            print "kdesvndir", self.kdesvndir
            print "svndir", svndir
        self.svndir = os.path.join( svndir, packagedir )

        return True
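As a worked trace (the package name "strigi" is hypothetical), kdeSvnFetch("trunk/kdesupport/", "strigi") walks the path one component at a time before the final recursive checkout:

# kdesinglecheckout(server + "/home/kde/", kdesvndir, "trunk", False)
# kdesinglecheckout(server + "/home/kde/trunk/", kdesvndir + "\\trunk", "kdesupport", False)
# kdesinglecheckout(server + "/home/kde/trunk/kdesupport/", kdesvndir + "\\trunk\\kdesupport", "strigi", True)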
Example #14
 def make(self):
     self.enterSourceDir()
     cmd  = "bjam stage"
     cmd += self.subinfo.options.configure.defines
     if utils.verbose() >= 1:
         print cmd
     os.system(cmd) and utils.die(
             "command: %s failed" % (cmd))
     return True
Example #15
    def backwardChaining(self):

        self.checkInconsistency()

        # Add the facts that have no rules to nodeChecked
        for fact in self.factNodes:
            if not fact.rules:
                self.addNodeCheck(fact)

        # Recursively add the new objectives
        for fact in self.graph:
            if fact not in self.nodeChecked and fact not in self.objectivesFacts:
                self.objectivesFacts.append(fact)
                self.getObjectivesRecursiveRules(fact.rules)

        self.checkInconsistency()

        for f in self.factNodes:
            if self.facts[f.fact] == True or f in self.nodeChecked:
                ut.verbose("We know that {} is {}.".format(
                    f.fact, self.facts[f.fact]))

        # Now that we have the list, resolve each fact's equations starting from the bottom of the list
        while (self.objectivesFacts):
            i = len(self.objectivesFacts) - 1
            while (i >= 0 and self.objectivesFacts):
                self.checkInconsistency()
                res = self.resolve(self.objectivesFacts[i])
                self.checkInconsistency()
                if res == True:
                    # We found a True, so start over from the beginning
                    i = len(self.objectivesFacts) - 1
                elif i == 0:
                    # Since no solution was found, remove the last fact from the list
                    l = len(self.objectivesFacts) - 1
                    f = self.objectivesFacts[l]
                    ut.verbose("Therefore {} is {}.".format(
                        f.fact, self.facts[f.fact]))
                    self.addNodeCheck(f)
                    self.objectivesFacts.remove(f)
                else:
                    i -= 1

        self.checkInconsistency()
Example #16
def main():
    DEFAULTS = {
        "key": "./serviceAccountKey.json",
        "collection": "problems",
    }

    # Only allow python 3
    if sys.version_info < (3, 0):
        print("Sry, only Python 3 is supported")
        exit(0)

    parser = argparse.ArgumentParser(
        description='Upload data to cloud firestore from a CSV file',
        epilog="Made by László Baráth (Sasszem), 2018")
    parser.add_argument('source', help='source CSV file')
    parser.add_argument('--key',
                        help='Account key JSON file. Defaults to "{}"'.format(
                            DEFAULTS["key"]),
                        default=DEFAULTS["key"])
    parser.add_argument(
        '--collection',
        help='Collection to upload to. Defaults to "{}"'.format(
            DEFAULTS["collection"]),
        default=DEFAULTS["collection"])
    parser.add_argument(
        '--nofresh',
        help='Do not remove old entries from target collection',
        action="store_false",
        dest="fresh")
    parser.add_argument('-v',
                        '--verbose',
                        help='Enable additional logging',
                        action="store_true",
                        dest="verbose")

    args = parser.parse_args()
    utils.verbose(args.verbose)

    db = utils.make_connection(args.key)

    collection = utils.get_collection(db, args.collection, args.fresh)

    read_to_db(args.source, collection)
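A hypothetical invocation matching the parser above (the script file name is a placeholder):

# python upload.py problems.csv --key ./serviceAccountKey.json --collection problems -v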
Example #17
 def make( self ):
     """implements the make step for cmake projects"""
     self.boost = portage.getPackageInstance('win32libs', 'boost-headers')
     self.subinfo.targetInstSrc[ self.subinfo.buildTarget ] = os.path.join(self.boost.sourceDir(),"libs",self.subinfo.targetInstSrc[ self.subinfo.buildTarget ],"build")
     
     self.enterSourceDir()
     cmd  = "bjam"
     cmd += self.configureOptions(self.subinfo.options.configure.defines)
     if utils.verbose() >= 1:
         print(cmd)
     return self.system(cmd)
Example #18
    def msysExecute( self, path, cmd, args ):
        sh = os.path.join( self.msysdir, "bin", "sh.exe" )

        cmd = "%s --login -c \"cd %s && %s %s" % \
              ( sh, self.__toMSysPath( path ), self.__toMSysPath( cmd ), args )

        cmd += "\""
        if utils.verbose() > 0:
            print "msys execute: %s" % cmd
        if not utils.system(cmd):
            utils.die( "msys execute failed. cmd: %s" % cmd )
        return True
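The "todo use msysexecute" note in Example #1 suggests msysCompile could delegate to this helper. A rough sketch under the assumption that msysConfigureFlags() composes cleanly into the shell command (the VERBOSE handling from msysCompile is omitted):

    def msysCompileViaExecute(self, build):
        # runs: cd <build> && ../<srcdir>/configure <flags> && make -j2
        config = os.path.join("..", self.instsrcdir, "configure")
        return self.msysExecute(build, config,
                                self.msysConfigureFlags() + " && make -j2")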
Example #19
def main():
    if utils.verbose() > 0:
        print "KDEROOT:     ", ROOTDIR
        print "KDECOMPILER: ", COMPILER
        print "DOWNLOADDIR: ", DOWNLOADDIR
        print "KDESVNDIR:   ", KDESVNDIR
        print "KDESVNSERVER:", KDESVNSERVER
# this would fail, MSYSDIR undefined. But this module is deprecated anyway.
#       print "MSYSDIR:", MSYSDIR

    test = baseclass()
    test.system( "dir" )
Example #20
    def settab(self, tabdict=None):
        #print("=========matrixdbfactory:settab========")
        #print(tabdict)
        #print("\n")
        if tabdict is None:
            return None
        for key in tabdict.keys():
            utils.verbose("  writting object: " + str(key), file=sys.stdout)
            #clear any existing table entries before adding new entries
            df = dbfactory(self._dbsession)
            df.cleartab(tabdict[key].keys(), [key])
            for tab in tabdict[key].keys():
                dbsession = self._dbsession.loadSession(tab)
                if hasattr(dbobject, tab):
                    tabcls = getattr(dbobject, tab)
                else:
                    continue

                for record in tabdict[key][tab]:
                    if tabcls.isValid(key, record):
                        create_or_update(dbsession, tabcls, key, record)
Example #21
 def __adjustPath(self, directory):
     """return adjusted path"""
     if not self.subinfo.options.useShortPathes:
         return directory
     path = c_char_p(directory)
     length = windll.kernel32.GetShortPathNameA(path, 0, 0)
     if length == 0:
         return directory
     buf = create_string_buffer('\000' * (length + 1))
     windll.kernel32.GetShortPathNameA(path, byref(buf), length+1) # ignore function result...
     if utils.verbose() > 0:
         print("converting " + directory + " to " + buf.value)
     return buf.value
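On Python 3, c_char_p and GetShortPathNameA would need bytes; a unicode-based sketch using the wide-character API, an adaptation rather than part of the original code:

from ctypes import windll, create_unicode_buffer

def shortPath(directory):
    # the first call returns the required buffer size, the second fills it
    length = windll.kernel32.GetShortPathNameW(directory, None, 0)
    if length == 0:
        return directory
    buf = create_unicode_buffer(length)
    windll.kernel32.GetShortPathNameW(directory, buf, length)
    return buf.value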
Example #22
    def kdesinglecheckout( self, repourl, ownpath, codir, doRecursive = False ):
        """in ownpath try to checkout codir from repourl
        if codir exists and doRecursive is false, simply return,
        if codir does not exist, but ownpath/.svn exists,
        do a svn update codir
        else do svn co repourl/codir
        if doRecursive is false, add -N to the svn command """

        if ( os.path.exists( os.path.join( ownpath, codir ) ) and not doRecursive ):
            if utils.verbose() > 0:
                print "ksco exists:", ownpath, codir
            return

        if ( doRecursive ):
            recFlag = ""
        else:
            recFlag = "--depth=files"

        svnInstallDir = os.path.join(self.rootdir, 'dev-utils', 'svn', 'bin')
        if not os.path.exists(svnInstallDir):
            utils.die("required subversion package not installed")

        if ( os.path.exists( os.path.join( ownpath, codir, ".svn" ) ) ):
            # svn up
            svncmd = "%s/svn update %s %s" % (svnInstallDir, recFlag, codir )
        else:
            #svn co
            svncmd = "%s/svn checkout %s %s" % (svnInstallDir, recFlag, repourl + codir )

        if utils.verbose() > 1:
            print "kdesinglecheckout:pwd ", ownpath
            print "kdesinglecheckout:   ", svncmd
        os.chdir( ownpath )
        with utils.LockFile(utils.svnLockFileName()):
            if not utils.system(svncmd):
                utils.die( "while checking out. cmd: %s" % svncmd )
Example #23
File: ast2doc.py Project: oschuett/ast2doc
def document_all_modules(packages, ast_dir, output_dir, sym_lookup_table):

    # init
    modules_lists = {'__ALL__': []}
    modules_description = {}
    privates_referenced = {}

    # scan packages
    src_root = path.normpath(path.commonprefix(packages.keys()))
    for d, p in packages.iteritems():
        # d: dir hosting a PACKAGE file, p: basically the eval()uation of that PACKAGE file
        rel_path = path.relpath(d, src_root)
        modules_lists[rel_path] = []

        # scan PACKAGE-owned module files (the 'files' key is contributed by the scan_packages() function)
        for f in p['files']:
            mod_name = f.rsplit(".", 1)[0]
            ast_file = path.join(ast_dir, mod_name + ".ast")
            if (path.isfile(ast_file)):
                print("Reading ast: " + ast_file)
                ast = utils.read_ast(ast_file)
                if (ast['tag'] == 'module'):
                    if (utils.verbose()):
                        print '>>>> Module: %s [%s]' % (mod_name, rel_path)

                    # lists of modules per PACKAGE, needed by the landing page
                    modules_lists[rel_path].append(mod_name)
                    modules_lists['__ALL__'].append(mod_name)
                    modules_description[mod_name] = ast['descr'][0] if ast[
                        'descr'] else missing_description  # Only 1st \brief is retained here

                    # dump the current module HTML documentation
                    body, my_privates_referenced = render_module(
                        ast, rel_path, ast_dir, output_dir, sym_lookup_table)
                    printout(body,
                             output_dir,
                             mod_name=mod_name,
                             jscript=[
                                 'packages_modules.json', 'js/common.js',
                                 'js/updateURL.js', 'js/highlightArgument.js',
                                 jquery_url
                             ],
                             custom_script=jquery_function % mod_name)
                    if my_privates_referenced:
                        privates_referenced[
                            mod_name.upper()] = my_privates_referenced

    return modules_lists, modules_description, privates_referenced
Example #24
    def kdeMakeInternal( self, buildType ):
        """Using the *make program"""
        builddir = "%s" % ( self.COMPILER )

        if( not buildType == None ):
            builddir = "%s-%s" % ( builddir, buildType )
        if( not self.buildNameExt == None ):
            builddir = "%s-%s" % ( builddir, self.buildNameExt )

        os.chdir( os.path.join( self.workdir, builddir ) )
        command = self.cmakeMakeProgramm
        # adding Targets later
        if utils.verbose() > 1:
            command += " VERBOSE=1"
        if not utils.system(command):
            utils.die( "while Make'ing. cmd: %s" % command )
        return True
Example #25
    def msysInstall( self, bOutOfSource = True ):
        """run make install for Autotools based stuff"""
        install = os.path.join( self.imagedir, self.instdestdir )
        build  = os.path.join( self.workdir )
        if( bOutOfSource ):
            build  = os.path.join( build, self.instsrcdir + "-build" )
        else:
            build  = os.path.join( build, self.instsrcdir )

        sh = os.path.join( self.msysdir, "bin", "sh.exe" )

        # todo use msysexecute
        cmd = "%s --login -c \"cd %s && make -j2 install DESTDIR=%s\"" % \
              ( sh, self.__toMSysPath( build ), self.__toMSysPath( install ) )
        if utils.verbose() > 0:
            print "msys install: %s" % cmd
        if not utils.system(cmd):
            utils.die( "msys install failed. cmd: %s" % cmd )
        return True
Example #26
 def make(self):
     if self.subinfo.defaultTarget == '1.44.0':
         return True
     cmd  = "build.bat "
     if compiler.isMinGW():
         cmd += "gcc"
     else:
         if compiler.isMSVC2005():
             cmd += "vc8"
         elif compiler.isMSVC2008():
             cmd += "vc9"
         elif compiler.isMSVC2010():
             cmd += "vc10"
     if utils.verbose() >= 1:
         print cmd
     utils.system(cmd, cwd = os.path.join(portage.getPackageInstance('win32libs-bin',
             'boost-headers').sourceDir(),"tools","build","v2","engine")) or utils.die(
             "command: %s failed" % (cmd))
     return True
Example #27
    def kdeTest( self ):
        """running cmake based unittests"""
        builddir = "%s" % ( self.COMPILER )

        if( not self.buildType == None ):
            builddir = "%s-%s" % ( builddir, self.buildType )

        if( not self.buildNameExt == None ):
            builddir = "%s-%s" % ( builddir, self.buildNameExt )

        os.chdir( self.workdir )
        os.chdir( builddir )

        if utils.verbose() > 0:
            print "builddir: " + builddir

        if not utils.system( "%s test" % ( self.cmakeMakeProgramm ) ):
            utils.die( "while testing. cmd: %s" % "%s test" % ( self.cmakeMakeProgramm ) )
        return True
Example #28
    def kdeInstallInternal( self, buildType ):
        """Using *make install"""
        builddir = "%s" % ( self.COMPILER )

        if( not buildType == None ):
            builddir = "%s-%s" % ( builddir, buildType )

        if( not self.buildNameExt == None ):
            builddir = "%s-%s" % ( builddir, self.buildNameExt )

        os.chdir( self.workdir )
        os.chdir( builddir )

        if utils.verbose() > 0:
            print "builddir: " + builddir

        fastString = ""
        if not self.noFast:
            fastString = "/fast"
        if not utils.system( "%s DESTDIR=%s install%s" % ( self.cmakeMakeProgramm, self.imagedir, fastString ) ):
            utils.die( "while installing. cmd: %s" % "%s DESTDIR=%s install" % ( self.cmakeMakeProgramm, self.imagedir ) )
        return True
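The "/fast" suffix selects the CMake-generated install/fast target, which skips the dependency re-check a plain install performs; for example (the DESTDIR path is a placeholder):

# nmake DESTDIR=C:\kde\image install/fast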
Example #29
 def install(self):
     self.enterSourceDir()
     cmd  = "bjam install"
     cmd += self.subinfo.options.configure.defines
     if utils.verbose() >= 1:
         print cmd
     os.system(cmd) and utils.die(
             "command: %s failed" % (cmd))
     shutil.copytree(os.path.join(self.imageDir(), "lib"),
                      os.path.join(self.imageDir(), "bin"),
                      ignore=shutil.ignore_patterns('*.a','*.lib'))
     shutil.move(os.path.join(self.imageDir(), "include", "boost-1_44",
                 "boost"),
                 os.path.join(self.imageDir(),"include","boost"))
     shutil.rmtree(os.path.join(self.imageDir(),"include","boost-1_44"))
     if self.isTargetBuild():
         shutil.rmtree(os.path.join(self.imageDir(), "lib"))
         shutil.rmtree(os.path.join(self.imageDir(), "bin"))
     #disable autolinking
     f = open(os.path.join(self.imageDir(),"include", "boost", "config", "user.hpp"), 'a')
     f.write('#define BOOST_ALL_NO_LIB\n')
     f.close()
     return True
Example #30
    def qmerge( self ):
        """mergeing the imagedirectory into the filesystem"""
        if utils.verbose() > 1:
            print "base qmerge called"
        for pkgtype in ['bin', 'lib', 'doc', 'src']:
            script = os.path.join( self.packagedir, "post-install-%s.cmd" ) % pkgtype
            scriptName = "post-install-%s-%s-%s.cmd" % ( self.package, self.version, pkgtype )
            destscript = os.path.join( self.imagedir, "manifest", scriptName )
            if not os.path.exists( os.path.join( self.imagedir, "manifest" ) ):
                os.mkdir( os.path.join( self.imagedir, "manifest" ) )
            if os.path.exists( script ):
                shutil.copyfile( script, destscript )

        utils.mergeImageDirToRootDir( self.imagedir, self.rootdir )
        # run post-install scripts
        for pkgtype in ['bin', 'lib', 'doc', 'src']:
            scriptName = "post-install-%s-%s-%s.cmd" % ( self.package, self.version, pkgtype )
            script = os.path.join( self.rootdir, "manifest", scriptName )
            if os.path.exists( script ):
                cmd = "cd %s && %s" % ( self.rootdir, script )
                if not utils.system(cmd):
                    utils.warning("%s failed!" % cmd )
        portage.addInstalled( self.category, self.package, self.version )
        return True
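As a concrete trace (package name and version are hypothetical), qmerge for package "qt" version "4.5.2" handles each post-install script like this:

# <packagedir>\post-install-bin.cmd
#   -> copied to <imagedir>\manifest\post-install-qt-4.5.2-bin.cmd before the merge
#   -> run as <rootdir>\manifest\post-install-qt-4.5.2-bin.cmd after the merge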
Example #31
        exit ( 1 )
    else:
        packageName = i
        break

if stayQuiet == True:
    utils.setVerbose(0)

# get KDEROOT from env
KDEROOT = os.getenv( "KDEROOT" )
utils.debug( "buildAction: %s" % mainBuildAction )
utils.debug( "doPretend: %s" % doPretend, 1 )
utils.debug( "packageName: %s" % packageName )
utils.debug( "buildType: %s" % os.getenv( "EMERGE_BUILDTYPE" ) )
utils.debug( "buildTests: %s" % utils.envAsBool( "EMERGE_BUILDTESTS" ) )
utils.debug( "verbose: %d" % utils.verbose(), 1 )
utils.debug( "trace: %s" % os.getenv( "EMERGE_TRACE" ), 1 )
utils.debug( "KDEROOT: %s\n" % KDEROOT, 1 )
utils.debug_line()

def unset_var( varname ):
    if not os.getenv( varname ) == None:
        print
        utils.warning( "%s found as environment variable. you cannot override emerge"\
                       " with this - unsetting %s locally" % ( varname, varname ) )
        os.environ[ varname ] = ""

unset_var( "CMAKE_INCLUDE_PATH" )
unset_var( "CMAKE_LIBRARY_PATH" )
unset_var( "CMAKE_FIND_PREFIX" )
unset_var( "CMAKE_INSTALL_PREFIX" )
Example #32
def train_sequenceset(trainer, args, block_sizes, rs, save_root, orig_cp=None):

    rs.sequence_type = args.sequence_type
    rs.enable_shuffling = args.enable_shuffling
    rs.save_um_distances = args.save_um_distances

    rs.classes_templates = trainer.dataset.patterns

    # Define a family of models, all based on the same architecture template
    trainer.assign_model(deepcopy(trainer.network_tmpl))

    # rs.eval_orig will contain, for each test, an analysis of the ultrametric distance between the labels and the predictions
    eval_orig = trainer.evaluate_hierarchical()
    if args.save_um_distances:
        rs.eval_orig = [
            eval_orig,
        ]

    # Counting the number of correct responses per class before training
    rs.acc_orig = np.array(
        [[eval_orig[0][0], 0]]
    )  # Will contain the accuracy through training and the number of train samples seen, dim 1 of diagnos_original contains the accuracies at different levels
    nbr_test_samples = trainer.dataset.class_sz_test * (
        trainer.dataset.branching**trainer.dataset.depth
    )  # number of test examples
    rs.lbls_htmp_orig = np.zeros((args.test_nbr, trainer.n_classes))

    classes_correct = np.zeros(len(trainer.dataset.test_data))
    for k in range(nbr_test_samples):
        classes_correct[int(
            eval_orig[1][k][0]
        )] += 1  # eval_orig[1][k][0] is the network's prediction for the k-th test example

    rs.classes_pred_orig = np.array(
        [[classes_correct, 0]]
    )  # This array will store the network's predictions during training

    #-----------------------------------#
    #---- Data sequence generation -----#
    #-----------------------------------#
    verbose('Data generation...', args.verbose)
    trainer.make_train_sequence(
    )  # Store rates (if not a random process) and data for training

    if orig_cp is not None:
        save_folder = orig_cp.subfolder
        trainer.train_sequence = orig_cp.train_sequence

        rs.parameters["Timescales"] = trainer.rates_vector.tolist()
        rs.train_labels_orig = trainer.train_sequence
        verbose(
            'Original sequence checkpoint provided. Skipping simulation for original sequence.',
            args.verbose, 0)

    else:
        rs.parameters["Timescales"] = trainer.rates_vector.tolist()
        rs.train_labels_orig = trainer.train_sequence
        save_folder = "T{0:.3f}_Memory{1:d}_{2:s}_{3:s}".format(
            rs.T, rs.memory_sz,
            datetime.now().strftime("%y%m%d"),
            str(abs(hash(tuple(rs.train_labels_orig)))))

        verbose('...done\n', args.verbose, 2)

        for test_id in range(args.test_nbr):
            training_range = (
                test_id * args.test_stride, (test_id + 1) * args.test_stride
            )  #Part of the sequence on which the training will be done
            verbose('Training network on original sequence...', args.verbose,
                    2)

            trainer.train(mem_sz=trainer.memory_size,
                          training_range=training_range,
                          verbose_lvl=args.verbose)

            verbose('...done\nComputing performance for original sequence...',
                    args.verbose, 2)

            eval_orig = trainer.evaluate_hierarchical()
            if args.save_um_distances:
                rs.eval_orig.append(eval_orig)
            rs.lbls_htmp_orig[test_id, :] = get_lbl_distr(
                trainer.train_sequence, training_range[0], training_range[1],
                trainer.n_classes)

            verbose('...done\n', args.verbose, 2)

            original_accuracy_current = eval_orig[0][
                0]  # Recover the standard accuracy
            original_accuracy_current = np.array(
                [[original_accuracy_current,
                  (test_id + 1) * args.test_stride]])
            rs.acc_orig = np.append(rs.acc_orig,
                                    original_accuracy_current,
                                    axis=0)

            classes_correct = np.zeros(len(trainer.dataset.test_data))
            for k in range(nbr_test_samples):
                classes_correct[int(eval_orig[1][k][0])] += 1
            classes_correct = np.array(
                [[classes_correct, (test_id + 1) * args.test_stride]])
            rs.classes_pred_orig = np.append(rs.classes_pred_orig,
                                             classes_correct,
                                             axis=0)

            verbose(
                'Accuracy on original sequence at pos {seq_pos:d} ({n_test_spls:d} test images): {acc:.2f}%'
                .format(seq_pos=training_range[1],
                        n_test_spls=nbr_test_samples,
                        acc=original_accuracy_current[0][0]), args.verbose)

        rs.classes_count = [0 for k in range(len(trainer.dataset.train_data))]
        for k in rs.train_labels_orig:
            rs.classes_count[k] += 1

        data_saver.save_orig_results(rs, os.path.join(save_root, save_folder))

    if rs.enable_shuffling:
        verbose("--- Start shuffle training ---", args.verbose)
        # Shuffle the training sequence in blocks of a chosen length (use a block length that divides the
        # sequence length so the full sequence is trained on; handling the remainder with one smaller block is not implemented # TODO)

        trainer.assign_model(deepcopy(trainer.network_tmpl))
        eval_shfl = trainer.evaluate_hierarchical()
        acc_shfl = np.array(
            [[eval_shfl[0][0], 0]]
        )  # Will contain the accuracy through training and the number of train samples seen, the first dim of diagnos_shuffle contains the accuracies at different levels

        # Counting the number of correct responses per class before training
        classes_correct = np.zeros(len(trainer.dataset.test_data))
        for k in range(nbr_test_samples):
            classes_correct[int(
                eval_shfl[1][k][0]
            )] += 1  # eval_shfl[1][k][0] is the network's prediction for the k-th test example

        rs.train_labels_shfl = {block_size: [] for block_size in block_sizes}
        rs.classes_pred_shfl = {
            block_size: np.array([[classes_correct, 0]])
            for block_size in block_sizes
        }  # These arrays will store the network's predictions during training
        if args.save_um_distances:
            rs.eval_shfl = {
                block_size: [
                    eval_shfl,
                ]
                for block_size in block_sizes
            }
        rs.acc_shfl = {block_size: acc_shfl for block_size in block_sizes}
        rs.lbls_htmp_shfl = {}

        # trainer.make_train_sequence()  #Stock rates (if not a random process) and data for training
        for block_size_shuffle in block_sizes:

            rs.lbls_htmp_shfl[block_size_shuffle] = np.zeros(
                (args.test_nbr, trainer.n_classes))

            for test_id in range(args.test_nbr):
                trainer.assign_model(deepcopy(trainer.network_tmpl))
                training_range = (test_id * args.test_stride,
                                  (test_id + 1) * args.test_stride)
                shuffled_sequence = trainer.shuffle_block_partial(
                    block_size_shuffle, training_range[1])

                #trainer.train(seq=shuffled_sequence, mem_sz=trainer.memory_size, lr=args.lr, momentum=0.5, training_range=training_range)
                trainer.train(mem_sz=trainer.memory_size,
                              training_range=(0, training_range[1]),
                              seq=shuffled_sequence,
                              verbose_lvl=args.verbose)
                eval_shfl = trainer.evaluate_hierarchical()
                if args.save_um_distances:
                    rs.eval_shfl[block_size_shuffle].append(eval_shfl)
                shuffle_accuracy_current = eval_shfl[0][
                    0]  # Recover the standard accuracy
                shuffle_accuracy_current = np.array([[
                    shuffle_accuracy_current, (test_id + 1) * args.test_stride
                ]])
                rs.acc_shfl[block_size_shuffle] = np.append(
                    rs.acc_shfl[block_size_shuffle],
                    shuffle_accuracy_current,
                    axis=0)

                rs.lbls_htmp_shfl[block_size_shuffle][
                    test_id, :] = get_lbl_distr(trainer.train_sequence,
                                                training_range[0],
                                                training_range[1],
                                                trainer.n_classes)

                classes_correct = np.zeros(len(trainer.dataset.test_data))
                for k in range(nbr_test_samples):
                    classes_correct[int(eval_shfl[1][k][0])] += 1
                classes_correct = np.array(
                    [[classes_correct, (test_id + 1) * args.test_stride]])
                rs.classes_pred_shfl[block_size_shuffle] = np.append(
                    rs.classes_pred_shfl[block_size_shuffle],
                    classes_correct,
                    axis=0)
                verbose(
                    'Accuracy on shuffled sequence (block size {block_size:d}) at pos {seq_pos:d} ({n_test_spls:d} test images): {acc:.2f}%'
                    .format(block_size=block_size_shuffle,
                            seq_pos=training_range[1],
                            n_test_spls=nbr_test_samples,
                            acc=shuffle_accuracy_current[0][0]), args.verbose)

            rs.train_labels_shfl[block_size_shuffle] = shuffled_sequence

            data_saver.save_shuffle_results(
                rs, os.path.join(save_root, save_folder), block_size_shuffle)

    return rs
Example #33
def run(args):
    # Global parameters
    step = 1  # Energy step
    args.test_stride = int(
        args.sequence_length / args.test_nbr
    )  # Number of sequence samples the model learns on between two evaluation steps
    systime = time.time()
    random.seed(systime)

    # This will control whether we run any shuffling scenario or not (those are demanding in computational resources)
    if ((not args.block_size_shuffle_list)
            or args.block_size_shuffle_list == [0]
            or args.sequence_type == 'uniform'):
        args.enable_shuffling = False
    else:
        args.enable_shuffling = True
    # 0 is passed as a dummy block size by our slurm batch script; it should be removed
    if 0 in args.block_size_shuffle_list:
        args.block_size_shuffle_list.remove(0)

    device = torch.device('cuda') if args.cuda else torch.device('cpu')

    #------------------------------#
    #----- DATASET GENERATION -----#
    #------------------------------#

    verbose(
        'Generating dataset {0:s} - data_seq_size={1:d}'.format(
            args.data_origin, args.artif_seq_size), args.verbose, 0)

    dataset = ds.Dataset(data_origin=args.data_origin,
                         data_sz=args.artif_seq_size,
                         tree_depth=args.artif_tree_depth,
                         class_sz_train=1000,
                         class_sz_test=400,
                         ratio_type='linear',
                         ratio_value=args.data_flips_rate,
                         noise_level=3,
                         shuffle_classes=args.artif_shuffle_classes)

    verbose('Done generating dataset {0:s}'.format(args.data_origin),
            args.verbose, 0)

    #----------------------------------#
    #----- PREPARING FILE OUTPUTS -----#
    #----------------------------------#

    cl_strategy = 'EWC' if args.ewc else '1toM'

    if args.orig_path != "":
        verbose("Attempting to run simulations from checkpoint", args.verbose,
                0)
        orig_checkpoint = OrigCP(args.orig_path)
        save_root = orig_checkpoint.root
        if 'blocks' in args.sequence_type:
            args.T = float(0)
        verbose("Save root set to {:s}".format(orig_checkpoint.root), 0)

    else:
        orig_checkpoint = None
        verbose(
            "Running simulations from scratch (default, no checkpoint used)",
            args.verbose, 0)
        save_root = os.path.join(
            paths['simus'],
            "{cl_strat:s}/{data_origin:s}_{n_classes:d}/{nnarchi:s}{hidlay_width:s}/{seq_type:s}_length{seq_length:d}_batches{batch_size:d}_lr{lr:f}_optim{optimizer:s}"
            .format(cl_strat=cl_strategy,
                    data_origin=args.data_origin,
                    n_classes=dataset.num_classes,
                    nnarchi=args.nnarchi,
                    hidlay_width="x".join(
                        [str(el) for el in args.hidden_sizes]),
                    seq_type=args.sequence_type,
                    seq_length=args.sequence_length,
                    batch_size=args.batch_sz,
                    lr=args.lr,
                    optimizer=args.optimizer))
        if args.nonlin == 'relu':
            save_root += "_nonlinRelu"
        if dataset.data_origin == 'artificial':
            save_root += "_seqlen{patterns_size:d}_ratio{bitflips:d}".format(
                patterns_size=args.artif_seq_size,
                bitflips=int(dataset.data_sz * dataset.ratio_value))
        if 'blocks' in args.sequence_type:
            args.T = float(0)
            if args.sequence_type == 'random_blocks2_2freq':
                save_root += "_splitlengths" + str(
                    args.split_length_list[0]) + "_" + str(
                        args.split_length_list[1])
            else:
                save_root += "_splitlength" + str(args.split_length_list[0])

        if (args.artif_shuffle_classes == 0):
            save_root += "_noclassreshuffle"

    verbose("Output directory for this simulation set: {:s}".format(save_root),
            args.verbose, 0)

    if args.sequence_type == 'uniform':
        args.T = 0.0

    verbose(
        'Instantiating network and trainer (sequence generation with {0:s}, length {1:d})...'
        .format(args.sequence_type, args.sequence_length), args.verbose, 0)

    #------------------------------#
    #----- MODEL (CLASSIFIER) -----#
    #------------------------------#
    # neuralnet models are now subclasses of ContinualLearner and can all implement CL strategies such as EWC

    if args.nnarchi == 'FCL':
        model = neuralnet.Net_FCL(dataset, args.hidden_sizes, args.nonlin)
    elif args.nnarchi == 'CNN':
        model = neuralnet.Net_CNN(dataset)
    else:
        model = neuralnet.resnetN(type=args.resnettype, dataset=dataset)
    model.to(device)

    #-----------------------------------#
    #----- CL-STRATEGY: ALLOCATION -----#
    #-----------------------------------#

    # Elastic Weight Consolidation (EWC)
    if isinstance(model, neuralnet.ContinualLearner):
        model.ewc_lambda = args.ewc_lambda if args.ewc else 0
        if args.ewc:
            if args.fisher_n is None or args.fisher_n < args.batch_sz:
                model.fisher_n = args.batch_sz
            else:
                model.fisher_n = args.fisher_n
            model.online = True
            model.gamma = args.gamma
            model.emp_FI = args.emp_fi

    #----------------------------------#
    #----- SEQUENCE-BASED TRAINER -----#
    #----------------------------------#

    trainer = Trainer(dataset=dataset,
                      network=model,
                      training_type=args.sequence_type,
                      memory_sampling='reservoir sampling',
                      memory_sz=args.memory_sz,
                      lr=args.lr,
                      momentum=0.5,
                      criterion=args.loss_fn,
                      optimizer=args.optimizer,
                      batch_sz=args.batch_sz,
                      preprocessing=False,
                      device=device,
                      min_visit=args.min_state_visit,
                      sequence_length=args.sequence_length,
                      energy_step=step,
                      proba_transition=args.proba_transition,
                      T=args.T,
                      dynamic_T_thr=args.T_adaptive,
                      split_length_list=args.split_length_list)

    verbose('...done', args.verbose, 0)

    rs = ResultSet()
    rs.parameters = {
        "Save root": save_root,
        "Temperature": args.T,
        "Tree Depth": dataset.depth,
        "Tree Branching": dataset.branching,
        "Flips ratio": args.data_flips_rate,
        "Sequence Length": args.sequence_length,
        "Minibatches Size": args.batch_sz,
        "Number of tests": args.test_nbr,
        "Energy Step": step,
        "Replay Memory Size": args.memory_sz,
        "Learning rate": args.lr,
        "Loss function": args.loss_fn,
        "Optimizer": args.optimizer,
        "Continual learner": "EWC" if args.ewc is True else "None",
        "Dataset": args.data_origin,
        "Random Seed": systime,
        "device_type": 'GPU' if args.cuda else 'CPU',
        "NN architecture": args.nnarchi,
        "Split total length": args.split_length_list[0],
        "Original command":
        str(sys.argv
            )  # We store the original command for this set of simulations
    }
    rs.T = trainer.T
    rs.memory_sz = args.memory_sz

    if args.orig_path != "":
        # Let's check that the parameters match
        for param in [
                k for k in orig_checkpoint.parameters.keys()
                if k not in ("Random Seed", "device_type", "Original command",
                             "Timescales")
        ]:
            assert orig_checkpoint.parameters[param] == rs.parameters[
                param], "Orig checkpoint option - MISMATCH of parameter {:s}".format(
                    param)

    train_sequenceset(trainer, args, args.block_size_shuffle_list, rs,
                      save_root, orig_checkpoint)
Example #34
    def doPackaging( self, pkg_name, pkg_version = str( datetime.date.today() ).replace('-', ''), packSources = True, special = False ):
        """packaging according to the gnuwin32 packaging rules
        this requires the kdewin-packager"""

        # FIXME: add a test for the installer later
        dstpath = self.packageDestinationDir(withBuildType=False)
        binpath = os.path.join( self.imagedir, self.instdestdir )
        tmp = os.path.join( binpath, "kde" )

        patchlevel = os.getenv( "EMERGE_PKGPATCHLVL" )
        if patchlevel:
            pkg_version += "-" + patchlevel

        if( os.path.exists( tmp ) ):
            binpath = tmp

        if not utils.test4application( "kdewin-packager" ):
            utils.die( "kdewin-packager not found - please make sure it is in your path" )

        for pkgtype in ['bin', 'lib', 'doc', 'src']:
            script = os.path.join( self.packagedir, "post-install-%s.cmd" ) % pkgtype
            scriptName = "post-install-%s-%s-%s.cmd" % ( self.package, self.version, pkgtype )
            destscript = os.path.join( self.imagedir, "manifest", scriptName )
            if os.path.exists( script ):
                if not os.path.exists( os.path.join( self.imagedir, "manifest" ) ):
                    os.mkdir( os.path.join( self.imagedir, "manifest" ) )
                shutil.copyfile( script, destscript )

        if ( packSources and not ( self.noCopy and self.kde.kdeSvnPath() ) ):
            srcpath = os.path.join( self.workdir, self.instsrcdir )
            cmd = "-name %s -root %s -srcroot %s -version %s -destdir %s" % \
                  ( pkg_name, binpath, srcpath, pkg_version, dstpath )
        elif packSources and self.noCopy and self.kde.kdeSvnPath():
            srcpath = os.path.join( self.kde.kdesvndir, self.kde.kdeSvnPath() ).replace( "/", "\\" )
            if not os.path.exists( srcpath ):
                srcpath = self.kde.sourcePath
            cmd = "-name %s -root %s -srcroot %s -version %s -destdir %s" % \
                  ( pkg_name, binpath, srcpath, pkg_version, dstpath )
        else:
            cmd = "-name %s -root %s -version %s -destdir %s" % \
                  ( pkg_name, binpath, pkg_version, dstpath )
        xmltemplate = os.path.join(self.packagedir, pkg_name+"-package.xml")
        if os.path.exists(xmltemplate):
            cmd = "kdewin-packager.exe " + cmd + " -template " + xmltemplate + " -notes " + \
                    "%s/%s:%s:unknown " % ( self.category, self.package, self.version ) + "-compression 2 "
        else:
            cmd = "kdewin-packager.exe " + cmd + " -notes " + \
                    "%s/%s:%s:unknown " % ( self.category, self.package, self.version ) + "-compression 2 "

        if( not self.createCombinedPackage ):
            if( self.compiler == "mingw"):
                cmd += " -type mingw "
            elif self.compiler == "mingw4":
                cmd += " -type mingw4 "
            elif self.compiler == "msvc2005":
                cmd += " -type msvc "
            elif self.compiler == "msvc2008":
                cmd += " -type vc90 "
            elif self.compiler == "msvc2010":
                cmd += " -type vc100 "
            elif self.compiler == "msvc2011":
                cmd += " -type vc110 "
            else:
                cmd += " -type unknown "

        if special:
            cmd += " -special"
        if utils.verbose():
            print "running %s" % cmd
        if not utils.system(cmd):
            utils.die( "while packaging. cmd: %s" % cmd )
        return True
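For illustration (all names, versions, and paths are placeholders), a command composed for a mingw build without combined packaging would look roughly like:

# kdewin-packager.exe -name qca -root <binpath> -srcroot <srcpath> -version 2.0.2-1 -destdir <dstpath> -notes win32libs/qca:2.0.2:unknown -compression 2  -type mingw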
Example #35
File: app.py Project: unixzii/Cyanstack
from tornado.web import Application, StaticFileHandler
from tornado.ioloop import IOLoop
import settings
import handlers
import utils


if __name__ == "__main__":
    utils.set_verbose_enabled(True)

    app = Application(
        [
            (r"/", handlers.IndexHandler),
            (r"/actions/fetch_articles", handlers.FetchArticlesHandler),
            (r"/static/(.*)", StaticFileHandler, {"path": settings.APP_SETTINGS["static_path"]}),
            (r".*", handlers.NotFoundHandler),
        ],
        "",
        None,
        template_path=settings.APP_SETTINGS["template_path"],
        static_hash_cache=False,
        compiled_template_cache=False
    )
    app.listen(80)
    utils.verbose("Server started!!!")
    IOLoop.instance().start()
Example #36
 def make_package( self ):
     """overload this function with the package specific packaging instructions"""
     if utils.verbose() > 1:
         print "currently only supported for some internal packages"
     return True