Example #1
    def update(self):
        """
        Set the title of the main window.
        Set the titles on the page tabs.
        Show/hide the reports window.

        Args:
            title: the window title
        """
        gtk.Window.set_title(
            self,
            Utils.parse_template(
                MAIN_WINDOW_TITLE_TMPL,
                basename=os.path.basename(self.get_page().get_file_path()),
                dirname=os.path.dirname(self.get_page().get_file_path()),
                new_flowgraph_title=NEW_FLOGRAPH_TITLE,
                read_only=self.get_page().get_read_only(),
                saved=self.get_page().get_saved(),
                platform_name=self._platform.get_name(),
            ),
        )
        # set tab titles
        for page in self._get_pages():
            page.set_markup(
                Utils.parse_template(
                    PAGE_TITLE_MARKUP_TMPL,
                    # get filename and strip out file extension
                    title=os.path.splitext(os.path.basename(page.get_file_path()))[0],
                    read_only=page.get_read_only(),
                    saved=page.get_saved(),
                    new_flowgraph_title=NEW_FLOGRAPH_TITLE,
                )
            )
        # show/hide notebook tabs
        self.notebook.set_show_tabs(len(self._get_pages()) > 1)
Example #2
 def handle_MODE(self, line, line_split):
     nickname, username, hostname = Utils.hostmask_split(line_split[0])
     modes = Utils.remove_colon(Utils.get_index(line_split, 3) or "")
     arguments = line_split[4:]
     mode_count = (len(modes) - modes.count("+")) - modes.count("-")
     recipient_name = Utils.get_index(line_split, 2)
     channel = self.get_channel(recipient_name)
     user = self.get_user_by_nickname(recipient_name)
     recipient = channel or user
     
     if recipient:
         current_index = 0
         add_mode = True
         for char in modes:
             if char == "+":
                 add_mode = True
             elif char == "-":
                 add_mode = False
             else:
                 argument = None
                 if mode_count - current_index == len(arguments):
                     argument = arguments.pop(0)
                 if add_mode:
                     recipient.add_mode(char, argument)
                 else:
                     recipient.remove_mode(char, argument)
                 current_index += 1
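A note on the walk above: each '+' or '-' flips the add/remove state, and each remaining mode character optionally consumes one argument. A standalone sketch of the same state machine, simplified so that every mode character takes the next argument when one is left (parse_modes and the sample values are hypothetical, not part of the original bot):

def parse_modes(modes, arguments):
    # Flip the add/remove flag on '+'/'-'; pair every other character
    # with the next argument, if any remain.
    changes = []
    args = list(arguments)
    add_mode = True
    for char in modes:
        if char == "+":
            add_mode = True
        elif char == "-":
            add_mode = False
        else:
            argument = args.pop(0) if args else None
            changes.append(("+" if add_mode else "-", char, argument))
    return changes

print(parse_modes("+o-b", ["alice", "*!*@spam.example"]))
# [('+', 'o', 'alice'), ('-', 'b', '*!*@spam.example')]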
Example #3
	def onOK( self, event ):
		race = Model.race
		if not race or not race.startTime:
			return
			
		secondsNew = self.timeMsEdit.GetSeconds()
		secondsOld = (race.startTime - race.startTime.replace(hour=0, minute=0, second=0)).total_seconds()
		dTime = secondsNew - secondsOld
		
		if dTime == 0:
			return
		
		if dTime > 0.0 and not Utils.MessageOKCancel( self,
				_('Are you sure you want to change the Race Start to later?\n(you can always undo).'), _('Are you sure?') ):
			return
		
		undo.pushState()
		for rider in race.riders.itervalues():
			if getattr(rider, 'firstTime', None) is not None:
				rider.firstTime -= dTime
		
			# Adjust all the recorded times to account for the new start time.
			for k in xrange(len(rider.times)):
				rider.times[k] -= dTime
		
		race.numTimeInfo.adjustAllTimes( -dTime )
		race.startTime += datetime.timedelta( seconds = dTime )
		race.setChanged()
		Utils.refresh()
		
		self.EndModal( wx.ID_OK )
Example #4
    def run(self):
        config = Config.get()
        #create dictionary of article ids to a dictionary with cluster numbers and vectors representing them
        articleDict = Utils.read_features(config.FILE_NAME_NUMBERED_CLUSTERS, config.FILE_NAME_NUMBERED_VECS)

        #for each cluster number i, collect the ids (keys) of the articles assigned to that cluster
        for i in range(0, config.NUM_CLUSTERS):
            keys = []
            for article in articleDict:
                if int(articleDict[article]['cluster']) == i:
                    keys.append(article)
            #grab those articles' vectors from articleDict
            vectors = np.array([articleDict[vID]['vector'] for vID in keys])

            #cluster vectors
            preStateLabels = list(KMeans(6,
                                  random_state=42).fit(vectors).labels_)
            #append cluster number to cluster so that sub-clusters are of the form [larger][smaller] - eg cluster 4 has subclusters 40, 41, 42
            stateLabels = []
            for label in preStateLabels:
                newlabel = str(i) + str(label)
                stateLabels.append(newlabel)

            #TODO: add a Utils.append_tsv helper rather than reusing write_tsv
            Utils.append_tsv(config.FILE_NAME_STATE_CLUSTERS,
                             ("index", "stateCluster"), keys, stateLabels)
        #TODO: the code thus far creates all sub-clusters; they still need to be integrated
        #TODO: determine the best number of clusters for each sub-cluster; it seems
        #like it should vary (maybe based on the number of points?)
        #then make sure those sub-clusters get borders created for them
        #then create and color those polygons in xml
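The sub-cluster labels above concatenate parent and child numbers ([larger][smaller], e.g. 40, 41, 42 under cluster 4). A runnable sketch of that labeling on fabricated data, assuming numpy and scikit-learn are available:

import numpy as np
from sklearn.cluster import KMeans

rng = np.random.RandomState(42)
# Toy stand-in for articleDict: ids mapped to a parent cluster and a vector.
articleDict = {vid: {'cluster': vid % 2, 'vector': rng.rand(8)} for vid in range(200)}

for i in range(2):  # parent cluster numbers
    keys = [vid for vid in articleDict if int(articleDict[vid]['cluster']) == i]
    vectors = np.array([articleDict[vid]['vector'] for vid in keys])
    labels = KMeans(n_clusters=6, random_state=42).fit(vectors).labels_
    stateLabels = [str(i) + str(label) for label in labels]  # e.g. '05' = child 5 of parent 0
    print(i, sorted(set(stateLabels)))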
Example #5
    def saveModel ( self, saveAsFlag = False ):
        """
        in: bool saveAsFlag
        returns the file name on success, None otherwise
        """


        if saveAsFlag or not self.theModelEditor.modelHasName or not self.theModelEditor.isNameConfirmed:
            aFileName = self.__getFileSelection( self.theModelEditor.saveDirName )

            # Convert All FileExtensions to Lower Case here
            if aFileName is not None and aFileName != '':
                aFileName = self.theModelEditor.filenameFormatter(aFileName)
                        
                if os.path.splitext(aFileName)[0] == '': 
                    utils.showPopupMessage(
                        utils.OK_MODE,
                        "No file name specified",
                        ME_ERROR ) 
        else:
            aFileName = self.theModelEditor.theModelFileName

        if not aFileName:
            return

        # call modeleditor savemodel

        if self.theModelEditor.changesSaved and aFileName == self.theModelEditor.theModelFileName and not self.theModelEditor.getMode() == ME_RUN_MODE:
            return aFileName
        self.theModelEditor.saveModel( aFileName )
        if self.theModelEditor.changesSaved:
            return aFileName
Example #6
 def updateConfig(self):
     assert type(self.entries) is type(self.__class__.entries), \
         _('Entries type %s invalid, expected %s') % (str(type(self.entries)),
                                                      str(type(self.__class__.entries)))
     self.config.set(self.resourcepath[cat_section],
                     self.resourcepath[cat_option], pprint.pformat(self.entries))
     Utils.writeConfig(self.config)
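The idea in Example #6 is to persist a Python container by rendering it with pprint.pformat into a config option; reading it back would typically pair with ast.literal_eval. A minimal round-trip sketch (the section and option names are made up):

import ast
import pprint
try:
    import configparser                      # Python 3
except ImportError:
    import ConfigParser as configparser      # Python 2

config = configparser.RawConfigParser()
config.add_section('resources')
entries = {'icons': ['open.png', 'save.png'], 'depth': 2}

# Write: pformat renders the container as a parseable Python literal.
config.set('resources', 'entries', pprint.pformat(entries))

# Read: literal_eval safely turns the stored text back into the container.
restored = ast.literal_eval(config.get('resources', 'entries'))
assert restored == entries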
Example #7
	def __init__( self, parent, excelLink = None ):
		#img_filename = os.path.join( Utils.getImageFolder(), '20100718-Excel_icon.png' )
		#img = wx.Bitmap(img_filename) if img_filename and os.path.exists(img_filename) else wx.NullBitmap
		img = wx.Bitmap(os.path.join( Utils.getImageFolder(), '20100718-Excel_icon.png' ))
		
		prewizard = wiz.PreWizard()
		prewizard.SetExtraStyle( wiz.WIZARD_EX_HELPBUTTON )
		prewizard.Create( parent, wx.ID_ANY, _('Link Excel Info'), img )
		self.wizard = prewizard
		self.wizard.Bind( wiz.EVT_WIZARD_PAGE_CHANGING, self.onPageChanging )
		self.wizard.Bind( wiz.EVT_WIZARD_HELP,
			lambda evt: Utils.showHelp('Menu-DataMgmt.html#link-to-external-excel-data') )
		
		self.fileNamePage = FileNamePage( self.wizard )
		self.sheetNamePage = SheetNamePage( self.wizard )
		self.headerNamesPage = HeaderNamesPage( self.wizard )
		self.summaryPage = SummaryPage( self.wizard )
		
		wiz.WizardPageSimple_Chain( self.fileNamePage, self.sheetNamePage )
		wiz.WizardPageSimple_Chain( self.sheetNamePage, self.headerNamesPage )
		wiz.WizardPageSimple_Chain( self.headerNamesPage, self.summaryPage )

		self.excelLink = excelLink
		if excelLink:
			if excelLink.fileName:
				self.fileNamePage.setFileName( excelLink.fileName )
			if excelLink.sheetName:
				self.sheetNamePage.setExpectedSheetName( excelLink.sheetName )
			if excelLink.fieldCol:
				self.headerNamesPage.setExpectedFieldCol( excelLink.fieldCol )

		self.wizard.GetPageAreaSizer().Add( self.fileNamePage )
		self.wizard.SetPageSize( wx.Size(500,200) )
		self.wizard.FitToPage( self.fileNamePage )
Example #8
def show(e, color="white"):
        x,y = getPosition(e)
        x = int(round(x))
        y = int(round(y))
        sprite = getSprite(e)
        Utils.goto(x+2, y+2)
        Utils.write(sprite+'\n', color)
Example #9
def train(sparkContext):
	Utils.logMessage("\nClassification model started")
	pd.read_table(pv.processedFile, sep=',', encoding='utf-8').to_csv(pv.processedFile, header=False, index=False, encoding='utf-8')
	truncatedAccounts = sparkContext.textFile(pv.processedFile).take(pv.truncateLineCount - 1)
	rawData = sparkContext.parallelize(truncatedAccounts).map(countByFeatures).map(lambda item: LabeledPoint(item[0], Vectors.dense(item[2:])))

	trainWithParam(sparkContext, rawData, 0.7, 'entropy', 4, 16)
Example #10
	def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
		if not (src or bld or dir):
			raise StopIteration
		if self.id & 3 != DIR:
			raise StopIteration
		in_pat = Utils.to_list(in_pat)
		ex_pat = Utils.to_list(ex_pat)
		prune_pat = Utils.to_list(prune_pat)
		def accept_name(node, name):
			for pat in ex_pat:
				if fnmatch.fnmatchcase(name, pat):
					return False
			for pat in in_pat:
				if fnmatch.fnmatchcase(name, pat):
					return True
			return False
		def is_prune(node, name):
			for pat in prune_pat:
				if fnmatch.fnmatchcase(name, pat):
					return True
			return False
		ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
		if flat:
			return " ".join([x.relpath_gen(self) for x in ret])
		return ret
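The accept_name/is_prune callbacks are plain fnmatch passes, with exclusion patterns taking precedence over inclusion patterns. The same filtering in a standalone form (the pattern lists are illustrative):

import fnmatch

in_pat = ['*.c', '*.h']
ex_pat = ['*~', '.#*']
prune_pat = ['.git', 'build']

def accept_name(name):
    # Exclusion wins over inclusion.
    for pat in ex_pat:
        if fnmatch.fnmatchcase(name, pat):
            return False
    for pat in in_pat:
        if fnmatch.fnmatchcase(name, pat):
            return True
    return False

def is_prune(name):
    return any(fnmatch.fnmatchcase(name, pat) for pat in prune_pat)

print([n for n in ['main.c', 'main.c~', 'util.h', 'notes.txt'] if accept_name(n)])
# ['main.c', 'util.h']
print(is_prune('.git'))  # True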
Example #11
	def find_resource(self, lst):
		if isinstance(lst, str):
			lst = Utils.split_path(lst)
		if len(lst) == 1:
			parent = self
		else:
			parent = self.find_dir(lst[:-1])
			if not parent:
				return None
		self.__class__.bld.rescan(parent)
		name = lst[-1]
		node = parent.childs.get(name, None)
		if node:
			tp = node.id & 3
			if tp == FILE or tp == BUILD:
				return node
			else:
				return None
		tree = self.__class__.bld
		if name not in tree.cache_dir_contents[parent.id]:
			return None
		path = parent.abspath() + os.sep + name
		try:
			st = Utils.h_file(path)
		except IOError:
			return None
		child = self.__class__(name, parent, FILE)
		tree.node_sigs[0][child.id] = st
		return child
Example #12
    def add_block(self, category, block=None, treestore=None, categories=None):
        """
        Add a block with category to this selection window.
        Add only the category when block is None.

        Args:
            category: the category list or path string
            block: the block object or None
        """
        if treestore is None: treestore = self.treestore
        if categories is None: categories = self._categories

        if isinstance(category, (str, unicode)): category = category.split('/')
        category = tuple(filter(lambda x: x, category)) #tuple is hashable
        #add category and all sub categories
        for i, cat_name in enumerate(category):
            sub_category = category[:i+1]
            if sub_category not in categories:
                iter = treestore.insert_before(categories[sub_category[:-1]], None)
                treestore.set_value(iter, NAME_INDEX, '[ %s ]'%cat_name)
                treestore.set_value(iter, KEY_INDEX, '')
                treestore.set_value(iter, DOC_INDEX, Utils.parse_template(CAT_MARKUP_TMPL, cat=cat_name))
                categories[sub_category] = iter
        #add block
        if block is None: return
        iter = treestore.insert_before(categories[category], None)
        treestore.set_value(iter, NAME_INDEX, block.get_name())
        treestore.set_value(iter, KEY_INDEX, block.get_key())
        treestore.set_value(iter, DOC_INDEX, Utils.parse_template(DOC_MARKUP_TMPL, doc=block.get_doc()))
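The category handling above turns a '/'-separated path into hashable tuple prefixes and creates each missing level exactly once, keyed off its parent prefix. The prefix walk in isolation, using a plain dict in place of the gtk treestore:

def add_category(categories, category):
    # categories maps tuple paths to nodes; () is the pre-seeded root.
    if isinstance(category, str):
        category = category.split('/')
    category = tuple(c for c in category if c)  # tuples are hashable
    for i in range(len(category)):
        sub_category = category[:i + 1]
        if sub_category not in categories:
            parent = categories[sub_category[:-1]]
            categories[sub_category] = {'parent': parent, 'name': sub_category[-1]}
    return category

categories = {(): {'parent': None, 'name': 'root'}}
add_category(categories, 'Sources/Audio')
print(sorted(categories))  # [(), ('Sources',), ('Sources', 'Audio')]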
Example #13
    def getGraphics(tileType):

        # Pick the main image at random #
        ImgDecider = random.randint(0, len(Tile.Images_Main[tileType]) - 1)
        image_main = Tile.Images_Main[tileType][ImgDecider]
        if len(image_main) == 1: image_main = Tile.Images_Main[tileType] # a single character means the entry was a single image string, not a list

        # Pick the overlay image at random #
        ImgDecider = random.randint(0, len(Tile.Images_Overlay[tileType]) - 1)
        image_overlay = Tile.Images_Overlay[tileType][ImgDecider]
        if len(image_overlay) == 1: image_overlay = Tile.Images_Overlay[tileType] # same single-image check for the overlay

        colorTypes = ['Bg','Fg']
        tileColors = []      

        passed = False
        
        # Build the background (Bg) and foreground (Fg) colors, in that order,
        # retrying until the two colors contrast with each other #
        while not passed:
            tileColors = []
            for colorType in colorTypes:
                Color = XMLTileData[tileType][Tile.getTag(colorType + 'Color')]

                if "[" in str(Color) and "]" in str(Color):
                    Color = Color[1:-1].split(',') # strip the brackets
                    for i in range(len(Color)):
                        Color[i] = int(Color[i][2:], 16) # parse each gradient stop from its "0xFF" string form
                    gradient = Utils.new_gradient(Color)
                    Color = gradient[random.randint(0, len(gradient) - 1)] # pick a random color from the gradient defined in Tiles.xml

                tileColors.append(Color)

            if Tile.isContrasting(tileColors[0], tileColors[1]): passed = True
            
        return [tileColors[0], tileColors[1], image_main, image_overlay]
Example #14
def apply_intltool_po(self):
	try:
		self.meths.remove('apply_core')
	except ValueError:
		pass
	self.default_install_path = '${LOCALEDIR}'
	appname=getattr(self,'appname','set_your_app_name')
	podir=getattr(self,'podir','')
	def install_translation(task):
		out=task.outputs[0]
		filename=out.name
		(langname,ext)=os.path.splitext(filename)
		inst_file=langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo'
		self.bld.install_as(os.path.join(self.install_path,inst_file),out,self.env,self.chmod)
	linguas=self.path.find_resource(os.path.join(podir,'LINGUAS'))
	if linguas:
		file=open(linguas.abspath())
		langs=[]
		for line in file.readlines():
			if not line.startswith('#'):
				langs+=line.split()
		file.close()
		re_linguas=re.compile('[-a-zA-Z_@.]+')
		for lang in langs:
			if re_linguas.match(lang):
				node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po'))
				task=self.create_task('po')
				task.set_inputs(node)
				task.set_outputs(node.change_ext('.mo'))
				if self.bld.is_install:
					task.install = install_translation
	else:
		Utils.pprint('RED', "Error: no LINGUAS file found in po directory")
Example #15
 def symlink_as(self, path, src, env=None, cwd=None):
     if sys.platform == "win32":
         return
     if not path:
         raise Utils.WafError("where do you want to install %r? (%r?)" % (src, path))
     tgt = self.get_install_path(path, env)
     dir, name = os.path.split(tgt)
     Utils.check_dir(dir)
     if self.is_install > 0:
         link = False
         if not os.path.islink(tgt):
             link = True
         elif os.readlink(tgt) != src:
             link = True
         if link:
             try:
                 os.remove(tgt)
             except OSError:
                 pass
             info("* symlink %s (-> %s)" % (tgt, src))
             os.symlink(src, tgt)
         return 0
     else:
         try:
             info("* removing %s" % (tgt))
             os.remove(tgt)
             return 0
         except OSError:
             return 1
Example #16
def main():
    if len(sys.argv) < 3:
        sys.stderr.write("usage: %s SERVER_NAME VOLUME_FILE\n" % os.path.basename(sys.argv[0]))
        sys.exit(-1)

    serverName = sys.argv[1]
    volumeFile = sys.argv[2]

    lines = Utils.readFile(volumeFile, lines=True)
    volumeNameList = [line.strip() for line in lines]
    if not volumeNameList:
        sys.exit(0)

    lines = Utils.readFile(Globals.CIFS_VOLUME_FILE, lines=True)
    cifsVolumeList = [line.strip().split(":")[0] for line in lines if line.strip()]
    runningCifsVolumeList = set(cifsVolumeList).intersection(set(volumeNameList))

    if not runningCifsVolumeList:
        sys.exit(0)

    tempFileName = Utils.getTempFileName()
    try:
        fp = open(tempFileName, "w")
        fp.write("%s\n" % serverName)
        fp.close()
    except IOError, e:
        Utils.log("Failed to write server name to file %s: %s" % (tempFileName, str(e)))
        sys.stderr.write("Failed to write server name to file %s: %s\n" % (tempFileName, str(e)))
        sys.exit(3)
Example #17
	def getReferenceName( self, lastName, firstName ):
		key = (Utils.removeDiacritic(lastName).lower(), Utils.removeDiacritic(firstName).lower())
		try:
			return self.aliasLookup[key]
		except KeyError:
			self.aliasLookup[key] = (lastName, firstName)
			return lastName, firstName
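Example #17 memoizes a diacritic- and case-insensitive key so the first spelling seen becomes canonical. The same pattern in self-contained form; remove_diacritic below is a simplified stand-in for Utils.removeDiacritic:

import unicodedata

def remove_diacritic(s):
    # Strip combining marks after NFKD decomposition.
    return ''.join(c for c in unicodedata.normalize('NFKD', s)
                   if not unicodedata.combining(c))

aliasLookup = {}

def getReferenceName(lastName, firstName):
    key = (remove_diacritic(lastName).lower(), remove_diacritic(firstName).lower())
    try:
        return aliasLookup[key]
    except KeyError:
        aliasLookup[key] = (lastName, firstName)  # first spelling becomes canonical
        return lastName, firstName

print(getReferenceName(u'Gagn\xe9', u'Ren\xe9'))  # ('Gagné', 'René')
print(getReferenceName(u'Gagne', u'Rene'))        # same canonical pair as above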
Example #18
    def file_roll(self, suffix):

        name = self.get_tag_str() + '_' + suffix + ".txt"
        name = Utils.clean_string(name)
        logfile = os.path.join(self.get_data_dir(), name)

        if os.path.exists(logfile):
            name = self.get_tag_str() + suffix + ".txt"
            name = Utils.clean_string(name)
            logdir = os.path.join(self.get_data_dir(), 'old')

            if not os.path.exists(logdir):
                os.makedirs(logdir)

            moddate = Utils.get_file_date_string(logfile)

            oldname = (self.get_tag_str() + '_' + moddate + '_'
                       + suffix + ".txt")
            oldname = Utils.clean_string(oldname)

            oldlogfile = os.path.join(logdir, oldname)

            os.rename(logfile, oldlogfile)

        return logfile
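The rotation above keeps the live log path stable and moves the previous file into an old/ directory stamped with its modification date. A compact sketch of that scheme, approximating Utils.get_file_date_string with time.strftime over the file's mtime:

import os
import time

def roll_logfile(logfile):
    # If the log exists, move it aside as <dir>/old/<base>_<mtime><ext>.
    if os.path.exists(logfile):
        logdir = os.path.join(os.path.dirname(logfile), 'old')
        if not os.path.exists(logdir):
            os.makedirs(logdir)
        moddate = time.strftime('%Y%m%d-%H%M%S',
                                time.localtime(os.path.getmtime(logfile)))
        base, ext = os.path.splitext(os.path.basename(logfile))
        os.rename(logfile, os.path.join(logdir, '%s_%s%s' % (base, moddate, ext)))
    return logfile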
Example #19
 def create_shapes(self):
     """Precalculate relative coordinates."""
     Element.create_shapes(self)
     self._sink_rot = None
     self._source_rot = None
     self._sink_coor = None
     self._source_coor = None
     #get the source coordinate
     try:
         connector_length = self.get_source().get_connector_length()
     except Exception:
         return
     self.x1, self.y1 = Utils.get_rotated_coordinate((connector_length, 0), self.get_source().get_rotation())
     #get the sink coordinate
     connector_length = self.get_sink().get_connector_length() + CONNECTOR_ARROW_HEIGHT
     self.x2, self.y2 = Utils.get_rotated_coordinate((-connector_length, 0), self.get_sink().get_rotation())
     #build the arrow
     self.arrow = [(0, 0),
         Utils.get_rotated_coordinate((-CONNECTOR_ARROW_HEIGHT, -CONNECTOR_ARROW_BASE/2), self.get_sink().get_rotation()),
         Utils.get_rotated_coordinate((-CONNECTOR_ARROW_HEIGHT, CONNECTOR_ARROW_BASE/2), self.get_sink().get_rotation()),
     ]
     self._update_after_move()
     if not self.get_enabled(): self._arrow_color = Colors.CONNECTION_DISABLED_COLOR
     elif not self.is_valid(): self._arrow_color = Colors.CONNECTION_ERROR_COLOR
     else: self._arrow_color = Colors.CONNECTION_ENABLED_COLOR
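For the axis-aligned rotations used above, Utils.get_rotated_coordinate can be nothing more than a 2-D rotation with exact integer cosine/sine values. A sketch of that assumed behavior:

def get_rotated_coordinate(coor, rotation):
    # rotation is a multiple of 90 degrees, so cos/sin are exact integers.
    assert rotation in (0, 90, 180, 270)
    cos_r = {0: 1, 90: 0, 180: -1, 270: 0}[rotation]
    sin_r = {0: 0, 90: 1, 180: 0, 270: -1}[rotation]
    x, y = coor
    return (x * cos_r - y * sin_r, x * sin_r + y * cos_r)

print(get_rotated_coordinate((20, 0), 90))    # (0, 20)
print(get_rotated_coordinate((-20, 0), 180))  # (20, 0)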
Example #20
def apply_copy(self):
	Utils.def_attrs(self, fun=copy_func)
	self.default_install_path = 0

	lst = self.to_list(self.source)
	self.meths.remove('apply_core')

	for filename in lst:
		node = self.path.find_resource(filename)
		if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)

		target = self.target
		if not target or len(lst)>1: target = node.name

		# TODO the file path may be incorrect
		newnode = self.path.find_or_declare(target)

		tsk = self.create_task('copy')
		tsk.set_inputs(node)
		tsk.set_outputs(newnode)
		tsk.fun = self.fun
		tsk.chmod = self.chmod

		if not tsk.env:
			tsk.debug()
			raise Utils.WafError('task without an environment')
Example #21
 def locate_links(self, road_name, function_class_numeric):
     # find (lon, lat) of (from_node, to_node) of all links on the given highway
     print "Begin locating links on " + road_name
     
     link_loc = {}
 
     if function_class_numeric == 1:
         sql = "select link_id, from_node_id, to_node_id from links where function_class_numeric=1 and name_default like '%" + road_name + "%'"
     else:
         sql = "select link_id, from_node_id, to_node_id from links where function_class_numeric in (3,4) and upper(name_default) like '%" + road_name + "%'"
     self.cursor.execute(sql)
     nodes = self.cursor.fetchall()
     for (link_id, from_node_id, to_node_id) in nodes:
         sql = "select ST_AsText(geom) from nodes where node_id =" + str(from_node_id)
         self.cursor.execute(sql)
         from_node_pos = self.cursor.fetchall()[0][0]
         from_node_loc = Utils.extract_loc_from_geometry(from_node_pos)
         
         sql = "select ST_AsText(geom) from nodes where node_id =" + str(to_node_id)
         self.cursor.execute(sql)
         to_node_pos = self.cursor.fetchall()[0][0]
         to_node_loc = Utils.extract_loc_from_geometry(to_node_pos)
         
         link_loc[link_id] = (from_node_loc, to_node_loc)
     
     print "Link locating finished, there are " + str(len(link_loc)) + " links on " + road_name
     return link_loc
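Example #21 assembles SQL by string concatenation; with a DB-API cursor the same queries can be parameterized, which sidesteps quoting bugs. A sketch under that assumption, using %s placeholders as in psycopg2:

def locate_links(cursor, road_name, function_class_numeric):
    # Parameterized variant of the queries in Example #21.
    if function_class_numeric == 1:
        sql = ("select link_id, from_node_id, to_node_id from links "
               "where function_class_numeric=1 and name_default like %s")
    else:
        sql = ("select link_id, from_node_id, to_node_id from links "
               "where function_class_numeric in (3,4) and upper(name_default) like %s")
    cursor.execute(sql, ('%' + road_name + '%',))
    return cursor.fetchall()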
Example #22
def main():
    if len(sys.argv) != 3:
        sys.stderr.write("usage: %s VOLUME_NAME BRICK_NAME\n" % os.path.basename(sys.argv[0]))
        sys.exit(-1)

    volumeName = sys.argv[1]
    brickName = sys.argv[2]
    # glusterfs-3.3 config change from /etc/glusterd to /var/lib/glusterd
    pidFile = "/var/lib/glusterd/vols/%s/run/%s.pid" % (volumeName, brickName.replace(":", "").replace("/", "-"))
    total, free = getBrickSpace(brickName)
    if pidFile[-5] == '-':
        pidFile = pidFile[:-5]+pidFile[-4:]
    if not os.path.exists(pidFile):
        print "OFFLINE", total, free
        sys.exit(0)

    lines = Utils.readFile(pidFile)
    if not lines:
        print "UNKNOWN", total, free
        sys.exit(0)
    try:
        pidString = lines[0]
        os.getpgid(int(pidString))
        print "ONLINE", total, free
    except ValueError, e:
        Utils.log("invalid pid %s in file %s: %s" % (pidString, pidFile, str(e)))
        print "UNKNOWN", total, free
Example #23
def writeVolumeCifsConfiguration(volumeName, owner, allowhosts):
    volumeFile = "%s/%s.smbconf" % (Globals.VOLUME_CONF_DIR, volumeName)
    try:
        fp = open(volumeFile, "w")
        fp.write("[%s]\n" % volumeName)
        fp.write("   comment = %s volume served by Gluster\n" % volumeName)
        fp.write("   path = %s/%s\n" % (Globals.CIFS_EXPORT_DIR, volumeName))
        fp.write("   guest ok = yes\n")
        fp.write("   public = yes\n")
        fp.write("   writable = yes\n")
#  #######  junlili - modified smbconf file  #########
        fp.write("   admin users = %s\n" % (owner))
        fp.write("   valid users = %s\n" % (owner))
        fp.write("   allow hosts = %s\n" % (allowhosts))
        fp.write("   forceuser = %s\n" % (owner))
#  ###############
#  ####### liub     - modified smbconf file  #########
        fp.write("   browseable  = no\n")
        filepath="   include=/etc/glustermg/volumes/users/%s-%s.smbconf\n"% (volumeName,"%U")
        fp.write(filepath)
#################################################
        fp.close()
    except IOError, e:
        Utils.log("Failed to write file %s: %s" % (volumeFile, str(e)))
        return False
    return True
Example #24
	def find_iter(self, in_pat=None, ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
		"""find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
		if in_pat is None:
			in_pat = ['*']

		if not (src or bld or dir):
			raise StopIteration

		if self.id & 3 != DIR:
			raise StopIteration

		in_pat = Utils.to_list(in_pat)
		ex_pat = Utils.to_list(ex_pat)
		prune_pat = Utils.to_list(prune_pat)

		def accept_name(node, name):
			for pat in ex_pat:
				if fnmatch.fnmatchcase(name, pat):
					return False
			for pat in in_pat:
				if fnmatch.fnmatchcase(name, pat):
					return True
			return False

		def is_prune(node, name):
			for pat in prune_pat:
				if fnmatch.fnmatchcase(name, pat):
					return True
			return False

		ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
		if flat:
			return " ".join([x.relpath_gen(self) for x in ret])

		return ret
Example #25
def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE', install=0, before=[], after=[], decider=None, rule=None, scan=None):
	action = action or rule
	if isinstance(action, str):
		act = Task.simple_task_type(name, action, color=color)
	else:
		act = Task.task_type_from_func(name, action, color=color)
	act.ext_in = tuple(Utils.to_list(ext_in))
	act.ext_out = tuple(Utils.to_list(ext_out))
	act.before = Utils.to_list(before)
	act.after = Utils.to_list(after)
	act.scan = scan
	def x_file(self, node):
		if decider:
			ext = decider(self, node)
		else:
			ext = ext_out
		if isinstance(ext, str):
			out_source = node.change_ext(ext)
			if reentrant:
				self.allnodes.append(out_source)
		elif isinstance(ext, list):
			out_source = [node.change_ext(x) for x in ext]
			if reentrant:
				for i in xrange((reentrant is True) and len(out_source) or reentrant):
					self.allnodes.append(out_source[i])
		else:
			raise Utils.WafError("do not know how to process %s" % str(ext))
		tsk = self.create_task(name, node, out_source)
		if node.__class__.bld.is_install:
			tsk.install = install
	declare_extension(act.ext_in, x_file)
Example #26
    def renameCompName2(self, old_value, new_value):
        # XXX This is ugly but has to do until a better
        # XXX strategy is conceived.
        # XXX The problem is that the useful logic is in the
        # XXX companion which is not available for the clipboard
        # XXX The source's ctrl needs to be renamed for a companion
        # XXX to be created.

        # Rename references to ctrl in parameters of property
        oldCtrlSrcRef = Utils.srcRefFromCtrlName(old_value)
        newCtrlSrcRef = Utils.srcRefFromCtrlName(new_value)

        for idx in range(len(self.params)):
            segs = self.params[idx].split(oldCtrlSrcRef)
            if len(segs) > 1:
                lst = [segs[0]]
                for s in segs[1:]:
                    if s and s[0] in string.letters+string.digits+'_':
                        lst[-1] = lst[-1] + s
                    else:
                        lst.append(s)
                self.params[idx] = newCtrlSrcRef.join(lst)

            # Handle case where _init_coll_* methods are used as parameters
            param = self.params[idx]
            if param.startswith('self.'+coll_init):
                nameEnd = param.rfind('_')
                name = param[16:nameEnd]
                if name == old_value:
                    self.params[idx] = 'self.'+coll_init+new_value+param[nameEnd:]

        PerLineParser.renameCompName2(self, old_value, new_value)
Example #27
    def run(self):
        config = Config.get()
        logger = logging.getLogger('workload')
        sampleRegions = Utils.read_features(config.getSample("GeneratedFiles", "clusters_with_id"))
        vecs = Utils.read_features(config.get("ExternalFiles", "vecs_with_id"))
        knn = FastKnn.FastKnn(config.getSample("ExternalFiles",
                                               "vecs_with_id"))
        assert(knn.exists())
        knn.read()
        ids = []
        clusters = []
        for i, (id, row) in enumerate(vecs.items()):
            if i % 10000 == 0:
                logger.info('interpolating coordinates for point %d of %d' % (i, len(vecs)))
            if id in sampleRegions:
                cluster = sampleRegions[id]['cluster']
            else:
                sums = defaultdict(float)
                if len(row['vector']) == 0: continue
                hood = knn.neighbors(row['vector'], 5)
                if not hood: continue
                for (id2, score) in hood:
                    c = sampleRegions[id2].get('cluster')
                    if c is not None:
                        sums[c] += score
                cluster = max(sums, key=sums.get)
            ids.append(id)
            clusters.append(cluster)

        Utils.write_tsv(config.get("GeneratedFiles", "clusters_with_id"),
                        ("index", "cluster"), ids, clusters)
Example #28
def postDelete(volumeName, brickList,deleteFlag = False):
    params = []
    params.append(volumeName)
    params.append(brickList)
    params.append(deleteFlag)
    for brick in brickList:
        if brick.strip() == '':
            continue
        cmd = 'python ' + BACKEND_SCRIPT + 'clear_volume_directory.py'
        server_dir = brick.split(":/")
        if len(server_dir) != 2:
            break
        cmd += ' /' + server_dir[1].strip()
        status,output = Utils.executeOnServer(server_dir[0].strip(), cmd)
        if status == -1:
            params = []
            params.append(volumeName)
            params.append(server_dir[0].strip())
            params.append(output)
            code, reval = '26104', 'Volume {0} deleted from cluster, however following error(s) occurred:\nerror when connecting to remote host {1} from localhost:{2}'
        elif status == -2:
            code,reval = '26059', 'Volume {0} deleted from cluster, however following error(s) occurred:\nError when using pub key to connect remote server {1}.{2}'
        elif status == 1:
            if re.match('exist', output) or re.match('exists', output) or re.match('exist\n',output):
                code, reval = '20053', 'volume {0}  does not exist.\n'
            else:
                code, reval = '23101', 'Volume {0} deleted from cluster on server {1}, however following error(s) occurred:\n{2}'
        if status:
            result = Utils.errorCode(code, reval, params)
            raise web.HTTPError(status = "500 Internal Server Error", data = result)
    return ''
Example #29
	def loop(self):
		m=self.master
		while 1:
			tsk=m.ready.get()
			if m.stop:
				m.out.put(tsk)
				continue
			try:
				tsk.generator.bld.printout(tsk.display())
				if tsk.__class__.stat:ret=tsk.__class__.stat(tsk)
				else:ret=tsk.call_run()
			except Exception,e:
				tsk.err_msg=Utils.ex_stack()
				tsk.hasrun=EXCEPTION
				m.error_handler(tsk)
				m.out.put(tsk)
				continue
			if ret:
				tsk.err_code=ret
				tsk.hasrun=CRASHED
			else:
				try:
					tsk.post_run()
				except Utils.WafError:
					pass
				except Exception:
					tsk.err_msg=Utils.ex_stack()
					tsk.hasrun=EXCEPTION
				else:
					tsk.hasrun=SUCCESS
			if tsk.hasrun!=SUCCESS:
				m.error_handler(tsk)
			m.out.put(tsk)
Example #30
    def appendTagRouteOld(self, tagRoute, value=None):
        if not self._responseTag:
            return False
        if not tagRoute:
            return False

        parentTagE = self._responseTag

        tagNameList = tagRoute.split(".")
        newTagRoute = tagNameList.pop(-1)

        for i in range(len(tagNameList), 0, -1):
            tagE = self.getElementsByTagRoute(".".join(["response"] + tagNameList[:i]))
            if tagE:
                parentTagE = tagE[0]
                break
            newTagRoute = tagNameList[i-1] + "." + newTagRoute

        newTagE = self.createTagRoute(newTagRoute, value)
        if not newTagE:
            return False
        try:
            parentTagE.appendChild(newTagE)
        except xml.dom.HierarchyRequestErr, e:
            Utils.log("error occurred: %s" % str(e))
            return False
        return True
Example #31
0
 def _show_info(self, *args, **kwargs):
     Utils.pprint_dict_in_order(
         self.module_metadata,
         ("display_name", "name", "description", "devices", "authors", "references"),
     )
     Utils.print_info()
Example #32
    def help_global(self, guild_id, prefix):
        line_cogs = ""
        all_lines = []
        all_cogs = {
            "Birthday": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'birthday_help_description')
            },
            "Bancommand": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'bancommand_help_description')
            },
            "Configuration": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'config_help_description')
            },
            "Gallery": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'gallery_help_description')
            },
            "Help": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'help_help_description')
            },
            "Highlight": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'highlight_help_description')
            },
            "Invitation": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'invitation_help_description')
            },
            "Link": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'link_help_description')
            },
            "Loader": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'loader_help_description')
            },
            "Logs": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'logs_help_description')
            },
            "Nickname": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'nickname_help_description')
            },
            "Moderation": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'moderation_help_description')
            },
            "RoleDM": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'roleDM_help_description')
            },
            "Source": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'source_help_description')
            },
            "Turing": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'turing_help_description')
            },
            "Timer": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'timer_help_description')
            },
            "Utip": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'utip_help_description')
            },
            "Vote": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'vote_help_description')
            },
            "Welcome": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'welcome_help_description')
            },
            "Rules": {
                "status": 0,
                "desc": Utils.get_text(guild_id, 'rules_help_description')
            }
        }
        for name in all_cogs.keys():
            if Utils.is_loaded(name.lower(), guild_id):
                all_cogs[name]["status"] = 1

        for cog, dicog in all_cogs.items():
            emoji = ":white_check_mark:" if dicog["status"] else ":x:"
            line = f"-  **{cog}** {emoji}  - *{dicog['desc']}*\n"
            if (len(line_cogs) + len(line) > 1024):
                all_lines.append(line_cogs)
                line_cogs = ""
            line_cogs += line
        all_lines.append(line_cogs)

        infos = self.bot.user
        colour = discord.Colour.from_rgb(176, 255, 176)
        embed = discord.Embed(colour=colour,
                              title=Utils.get_text(guild_id,
                                                   'help_global_title'))
        embed.description = Utils.get_text(guild_id, 'help_help_description_2')
        embed.add_field(name=Utils.get_text(guild_id,
                                            'help_global_field_general'),
                        value=Utils.get_text(
                            guild_id, 'help_global_field_general_value'),
                        inline=False)
        num = 0
        for line_cogs in all_lines:
            num += 1
            embed.add_field(name=Utils.get_text(
                guild_id,
                'help_global_field_available').format(num, len(all_lines)),
                            value=line_cogs,
                            inline=False)
        embed.set_author(icon_url=infos.avatar_url, name=str(infos))
        embed.timestamp = datetime.today()
        return embed
Example #33
0
 def loadRecent(self, recent):
     filename = Utils.getRecent(recent)
     if filename is None: return
     self.load(filename)
Example #34
 def loadConfig(self):
     self.controllerSet(Utils.getStr("Connection", "controller"))
     Pendant.port = Utils.getInt("Connection", "pendantport", Pendant.port)
     GCode.LOOP_MERGE = Utils.getBool("File", "dxfloopmerge")
     self.loadHistory()
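Examples #33/#34 rely on typed getters (getStr, getInt, getBool) that fall back to a default when the option is missing or malformed. A sketch of what such helpers over ConfigParser might look like; the names mirror the calls above but the implementation is an assumption:

try:
    import configparser
except ImportError:
    import ConfigParser as configparser

config = configparser.RawConfigParser()

def getStr(section, option, default=''):
    try:
        return config.get(section, option)
    except (configparser.NoSectionError, configparser.NoOptionError):
        return default

def getInt(section, option, default=0):
    try:
        return int(config.get(section, option))
    except (configparser.Error, ValueError):
        return default

def getBool(section, option, default=False):
    try:
        return config.get(section, option).strip().lower() in ('1', 'true', 'yes', 'on')
    except configparser.Error:
        return default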
Example #35
    def __init__(self, extra_package_path=None):
        super(ISAFInterpreter, self).__init__()
        PrinterThread().start()
        self.current_module = None
        self.raw_prompt_template = None
        self.module_prompt_template = None
        self.prompt_hostname = 'ISAF'
        self.show_sub_commands = ('info', 'options', 'devices', 'all', 'Credentials', 'Exploits', 'Scanners',
                                  'Discovery')
        self.global_commands = sorted(['use ', 'exec ', 'help', 'exit', 'show ', 'search '])
        self.module_commands = ['run', 'back', 'set ', 'unset ', 'gset ', 'gunset ', 'check', 'connect']
        self.module_commands.extend(self.global_commands)
        self.module_commands.sort()
        self.extra_modules_dir = None
        self.extra_modules_dirs = None
        self.extra_modules = []
        self.extra_package_path = extra_package_path
        self.import_extra_package()
        self.modules = Utils.index_modules()
        self.modules += self.extra_modules
        self.modules_count = Counter()
        [self.modules_count.update(module.split('.')) for module in self.modules]
        self.main_modules_dirs = [module for module in os.listdir(Utils.MODULES_DIR) if not module.startswith("__")]
        self.__parse_prompt()

        self.banner = Fore.BLUE + """ 
             ▄█     ▄████████    ▄████████    ▄████████ 
            ███    ███    ███   ███    ███   ███    ███ 
            ███▌   ███    █▀    ███    ███   ███    █▀  
            ███▌   ███          ███    ███  ▄███▄▄▄     
            ███▌ ▀███████████ ▀███████████ ▀▀███▀▀▀     
            ███           ███   ███    ███   ███        
            ███     ▄█    ███   ███    ███   ███        
            █▀    ▄████████▀    ███    █▀    ███""" \
                      + Fore.GREEN + " v{version} \n" \
                      + Fore.LIGHTYELLOW_EX + """
             Industrial Security Auditing Framework
               D0ubl3G <d0ubl3g[at]protonmail.com>\n""" \
                      + Fore.RED + """
                           -> WARNING <-
               ISAF IS IN EARLY DEVELOPMENT PHASE.
            SHOULD NOT USE IN PRODUCTION ENVIRONMENTS.\n""" \
                      + Fore.RESET + Style.BRIGHT + """
        Modules""" + Style.NORMAL + """
           Clients: """ + Fore.GREEN + """{clients_count}""" + Fore.RESET \
                      + """      Exploits: """ + Fore.GREEN + """{exploits_count}""" + Fore.RESET \
                      + """      Discovery: """ + Fore.GREEN + """{discovery_count}""" + Fore.RESET + """ 
           Scanners: """ + Fore.GREEN + """{scanners_count}""" + Fore.RESET \
                      + """     Credentials: """ + Fore.GREEN + """{creds_count}""" + Fore.RESET \
                      + Style.BRIGHT + """\n
        Exploits""" + Style.NORMAL + """
           PLC: """ + Fore.GREEN + """{plc_exploit_count}""" + Fore.RESET \
                      + """          Switch: """ + Fore.GREEN + """{ics_switch_exploits_count}""" + Fore.RESET \
                      + """        Software: """ + Fore.GREEN + """{ics_software_exploits_count}""" + Fore.RESET \
                      + """\n\n"""

        self.banner = self.banner.format(version="0.0.1a", clients_count=self.modules_count['Clients'],
                                         exploits_count=self.modules_count['Exploits'] + self.modules_count[
                                             'extra_exploits'],
                                         discovery_count=self.modules_count['Discovery'] + self.modules_count[
                                             'extra_discovery'],
                                         scanners_count=self.modules_count['Scanners'] + self.modules_count[
                                             'extra_scanners'],
                                         creds_count=self.modules_count['Credentials'] + self.modules_count[
                                             'extra_creds'],
                                         plc_exploit_count=self.modules_count['plcs'],
                                         ics_switch_exploits_count=self.modules_count['ics_switchs'],
                                         ics_software_exploits_count=self.modules_count['ics_software']
                                         )
Example #36
 def command_help(self, *args, **kwargs):
     Utils.print_info(self.global_help)
     if self.current_module:
         Utils.print_info(self.module_help)
Example #37
 def __show_modules(self, root=''):
     for module in [module for module in self.modules if module.startswith(root)]:
         Utils.print_info(module.replace('.', os.sep))
Example #38
 def __init__(self, mesh, mapping=None, **kwargs):
     Utils.setKwargs(self, **kwargs)
     assert isinstance(mesh,
                       Mesh.BaseMesh), "mesh must be a SimPEG.Mesh object."
     self.mesh = mesh
     self.mapping = mapping or Maps.IdentityMap(mesh)
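Utils.setKwargs above presumably assigns recognized keyword arguments onto the instance and rejects unknown names. A sketch of that pattern; the helper below is an assumption, not SimPEG's actual implementation:

def set_kwargs(obj, **kwargs):
    # Assign each keyword onto the instance, refusing undeclared names.
    for key, value in kwargs.items():
        if not hasattr(obj, key):
            raise AttributeError('unknown keyword argument: %s' % key)
        setattr(obj, key, value)

class Problem(object):
    verbose = False  # class-level default makes 'verbose' an accepted keyword

    def __init__(self, mesh, **kwargs):
        set_kwargs(self, **kwargs)
        self.mesh = mesh

p = Problem(mesh=None, verbose=True)
print(p.verbose)  # True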
Example #39
def Delete(obj, mode, frame):
	log_state = Utils.SetScriptingLogState(False)
	
	command_name = 'Cloth_CorrectiveSmooth'
	cluster_name = 'CorrectiveSmoothCls'
	if mode == 'Push':
		command_name = 'Cloth_CorrectivePush'
		cluster_name = 'CorrectivePushCls'
	elif mode == 'Shape':
		command_name = 'Cloth_CorrectiveShape'
		cluster_name = 'CorrectiveShapeCls'
	
	if not obj or not obj.Type == 'polymsh':
		XSI.LogMessage(command_name + " ---> Invalid Object", constants.siError)
		return
	
	prim = obj.ActivePrimitive
	
	# get Cluster
	cluster = Utils.CreateAlwaysCompleteCluster(obj, constants.siVertexCluster, cluster_name)

	# check if a weight map already exists for this frame
	exist = False
	if cluster.Properties('Frame{}'.format(frame)):
		exist = True
		toolkit = Dispatch('XSI.UIToolkit')
		button_pressed = toolkit.MsgBox(
			'Do you really want to delete Frame{}'.format(frame),
			constants.siMsgOkCancel, 'Delete Corrective Push'
		)
	
		if button_pressed == constants.siMsgCancel:
			XSI.LogMessage("Delete Secondary " + mode + " cancelled by the user!!", constants.siInfo)
			return
	
	if not exist:
		XSI.LogMessage("Frame" + str(frame) + " doesn't exist on " + str(obj) + " ---> Delete Corrective " + mode + " aborted...")
		return
	
	else:
		XSI.DeleteObj(cluster.Properties('Frame{}'.format(frame)))

	tree = prim.ICETrees('Corrective{}'.format(mode))
	compound = tree.CompoundNodes('Corrective{}'.format(mode))

	# first delete corresponding GetDataNode
	get_nodes = tree.DataProviderNodes
	for get_node in get_nodes:
		if not get_node.Parameters('Reference').Value.find(str(frame)) == -1:
			XSI.LogMessage('Delete Get Data Node "{}"'.format(get_node.Parameters('Reference').Value))
			XSI.DeleteObj(get_node)

	# delete Corresponding MultiplyByScalarNode
	multiply_nodes = compound.Nodes.Filter('MultiplyByScalarNode')
	for multiply_node in multiply_nodes:
		inputs = multiply_node.InputPorts
	
		if not inputs(0).IsConnected:	
			XSI.LogMessage('Delete MultiplyByScalar Node : {}'.format(multiply_node))
			XSI.DeleteObj(multiply_node)

	Utils.SetScriptingLogState(log_state)
Example #40
def learn_environment(model, params):
    NAME = params['name']
    metrics = {}
    wrHistory = {'network': []}

    memory = CHGExperienceStorage(params['experience storage'])
    ######################################################
    lastBestModels = [forkAgent(model, 0, params)] * 3

    def testModel(EXPLORE_RATE, epoch):
        T = time.time()
        opponents = [
            (Utils.DummyNetwork, Agents.CGreedyAgent),
            (Utils.DummyNetwork, Agents.CGreedyAgent),
            (Utils.DummyNetwork, Agents.CGreedyAgent),
        ] if 0 == (epoch % 2) else lastBestModels

        res = Utils.collectExperience(
            [  # agents
                (CNoisedNetwork(model, EXPLORE_RATE), Agents.CAgent),
                *opponents
            ],
            memory,
            {
                'episodes': params['test episodes'],
                'env': params.get('env', {})
            })
        print('Testing finished in %.1f sec.' % (time.time() - T))
        return res

    ######################################################
    # collect some experience
    for epoch in range(2):
        testModel(EXPLORE_RATE=0.8, epoch=0)

    #######################
    for epoch in range(params['epochs']):
        T = time.time()

        EXPLORE_RATE = params['explore rate'](epoch)
        print('[%s] %d/%d epoch. Explore rate: %.3f.' %
              (NAME, epoch, params['epochs'], EXPLORE_RATE))
        ##################
        # Training
        #     if params.get('target update', lambda _: True)(epoch):
        #       model.updateTargetModel()

        train(model, memory, {'episodes': params['train episodes'](epoch)})

        ##################
        os.makedirs('weights', exist_ok=True)
        model.save('weights/%s-latest.h5' % NAME)
        # test
        if (epoch % params['test interval']) == 0:
            print('Testing...')
            stats, winRates = testModel(EXPLORE_RATE, epoch)
            for k, v in stats.items():
                Utils.trackScores(v, metrics, metricName=k)

            for k, v in winRates.items():
                if k not in wrHistory:
                    wrHistory[k] = [0] * epoch
                wrHistory[k].append(v)
            ##################

            print('Scores sum: %.5f' % sum(stats['Score_network']))

            if (0 < (epoch % 2)) and (params['min win rate'] <=
                                      winRates['network']):
                print('save model (win rate: %.2f%%)' %
                      (100.0 * winRates['network']))
                model.save('weights/%s-epoch-%06d.h5' % (NAME, epoch))
                ########
                lastBestModels.insert(0, forkAgent(model, epoch, params))
                modelsHistory = params.get('models history', 3)
                lastBestModels = lastBestModels[:modelsHistory]

            os.makedirs('charts/%s' % NAME, exist_ok=True)
            for metricName in metrics.keys():
                Utils.plotData2file(metrics,
                                    'charts/%s/%s.jpg' % (NAME, metricName),
                                    metricName)
            Utils.plotSeries2file(wrHistory,
                                  'charts/%s/win_rates.jpg' % (NAME, ),
                                  'Win rates')
        ##################
        print('Epoch %d finished in %.1f sec.' % (epoch, time.time() - T))
        print('------------------')
    return
Example #41
def train(model_config,
          experiment_id,
          load_model=None,
          epoch=0,
          best_loss=10000,
          best_loss_test=10000):
    # Determine input and output shapes
    disc_input_shape = [
        model_config["batch_size"], model_config["num_frames"], 0
    ]  # Shape of input
    if model_config["network"] == "unet":
        separator_class = Models.UnetAudioSeparator_no_att.UnetAudioSeparator_no_att(
            model_config["num_layers"],
            model_config["num_initial_filters"],
            output_type=model_config["output_type"],
            context=model_config["context"],
            mono=model_config["mono_downmix"],
            upsampling=model_config["upsampling"],
            num_sources=model_config["num_sources"],
            filter_size=model_config["filter_size"],
            merge_filter_size=model_config["merge_filter_size"])

    else:
        raise NotImplementedError

    sep_input_shape, sep_output_shape = separator_class.get_padding(
        np.array(disc_input_shape))
    separator_func = separator_class.get_output
    print(sep_input_shape, sep_output_shape)
    # Creating the batch generators
    assert ((sep_input_shape[1] - sep_output_shape[1]) % 2 == 0)
    #     pad_durations = np.array([float((sep_input_shape[1] - sep_output_shape[1])/2), 0, 0])  # Input context that the input audio has to be padded ON EACH SIDE
    #     sup_batch_gen = batchgen.BatchGen_Paired(
    #         model_config,
    #         sup_dataset,
    #         sep_input_shape,
    #         sep_output_shape,
    #         pad_durations[0]
    #     )
    coord = tf.train.Coordinator()
    with tf.name_scope('create_inputs'):
        reader = DataReader(
            model_config["data_dir"],
            coord,
            sample_size=sep_input_shape[1],
            hint_size=0,
            target_size=sep_output_shape[1],
            sample_rate=model_config["sample_rate"],
            queue_size=128,
            random_crop=True,
            data_range=data_reader_Audio_RIRs.CLEAN_DATA_RANGE,
            test_data_range=data_reader_Audio_RIRs.CLEAN_TEST_DATA_RANGE,
            disc_thread_enabled=False,
            spec_generator=None,
            use_label_class=False,
            hint_window=128,
            inject_noise=True,
            augment_reverb=True,
            augment_speech=True,
            norm_volume=False,
            stft_similarity=None)

        train_batches = reader.dequeue(model_config["batch_size"])
        """For test set"""
        test_batches = reader.dequeue_test(model_config["batch_size"])
        test_ext_batches = reader.dequeue_test_ext(model_config["batch_size"])


#     print("Starting worker")
#     sup_batch_gen.start_workers()
#     print("Started worker!")

# Placeholders and input normalisation
#     mix_context, sources = Input.get_multitrack_placeholders(sep_output_shape, model_config["num_sources"], sep_input_shape, "sup")
#tf.summary.audio("mix", mix_context, 16000, collections=["sup"]) #Enable listening to source estimates via Tensorboard

    mix_context, sources = train_batches
    #     mix = Utils.crop(mix_context, sep_output_shape)

    print("Training...")

    # BUILD MODELS
    # Separator
    separator_sources = separator_func(
        mix_context, True, not model_config["raw_audio_loss"], reuse=False
    )  # Sources are output in order [noise, speech] for speech enhancement

    # Supervised objective: MSE in log-normalized magnitude space
    #     separator_loss = 0
    #     for (real_source, sep_source) in zip(sources, separator_sources):
    separator_loss = tf.reduce_mean(tf.abs(sources - separator_sources[0]))
    #     separator_loss = separator_loss / float(len(sources)) # Normalise by number of sources

    # TRAINING CONTROL VARIABLES
    global_step = tf.get_variable('global_step', [],
                                  initializer=tf.constant_initializer(0),
                                  trainable=False,
                                  dtype=tf.int64)
    increment_global_step = tf.assign(global_step, global_step + 1)

    # Create Tests
    test_mix_context, test_sources = test_batches
    test_prediction = separator_func(test_mix_context,
                                     False,
                                     not model_config["raw_audio_loss"],
                                     reuse=True)
    test_ext_mix_context, test_ext_sources = test_ext_batches
    test_ext_prediction = separator_func(test_ext_mix_context,
                                         False,
                                         not model_config["raw_audio_loss"],
                                         reuse=True)

    test_loss = tf.reduce_mean(tf.abs(test_sources - test_prediction[0]))
    test_ext_loss = tf.reduce_mean(
        tf.abs(test_ext_sources - test_ext_prediction[0]))

    # Set up optimizers
    separator_vars = Utils.getTrainableVariables("separator")
    print("Sep_Vars: " + str(Utils.getNumParams(separator_vars)))
    print("Num of variables " + str(len(tf.global_variables())))

    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
        with tf.variable_scope("separator_solver"):
            separator_solver = tf.train.AdamOptimizer(
                learning_rate=model_config["init_sup_sep_lr"]).minimize(
                    separator_loss, var_list=separator_vars)

    # SUMMARIES
    tf.summary.scalar("sep_loss", separator_loss, collections=["sup"])
    sup_summaries = tf.summary.merge_all(key='sup')
    test_loss_summary = tf.summary.scalar("sep_test_loss", test_loss)
    test_ext_loss_summary = tf.summary.scalar("sep_test_ext_loss",
                                              test_ext_loss)

    # Start session and queue input threads
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
    sess.run(tf.global_variables_initializer())
    writer = tf.summary.FileWriter(model_config["log_dir"] + os.path.sep +
                                   str(experiment_id),
                                   graph=sess.graph)

    # CHECKPOINTING
    # Load pretrained model to continue training, if we are supposed to
    if load_model is not None:
        restorer = tf.train.Saver(tf.global_variables(),
                                  write_version=tf.train.SaverDef.V2)
        print("Num of variables" + str(len(tf.global_variables())))
        restorer.restore(sess, load_model)
        print('Pre-trained model restored from file ' + load_model)

    saver = tf.train.Saver(tf.global_variables(),
                           write_version=tf.train.SaverDef.V2)

    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    reader.start_threads(sess, n_threads=model_config["num_workers"])

    # Start training loop
    worse_epochs = 0
    best_model_path = None
    model_path = None
    while worse_epochs < model_config[
            "worse_epochs"]:  # Early stopping on validation set after a few epochs
        try:
            print("EPOCH: " + str(epoch))
            _global_step = sess.run(global_step)
            _init_step = _global_step
            moving_avg_loss_value = 0.0
            run = True
            for i in tqdm(range(model_config["epoch_it"])):
                try:
                    _, _sup_summaries, train_loss_value = sess.run(
                        [separator_solver, sup_summaries, separator_loss])
                    writer.add_summary(_sup_summaries,
                                       global_step=_global_step)

                    # Increment step counter, check if maximum iterations per epoch is achieved and stop in that case
                    _global_step = sess.run(increment_global_step)
                    if _global_step - _init_step > 1:
                        moving_avg_loss_value = 0.8 * moving_avg_loss_value + 0.2 * train_loss_value
                    else:
                        moving_avg_loss_value = train_loss_value

                    if _global_step - _init_step > model_config["epoch_it"]:
                        run = False
                        print(
                            "Finished training phase, stopping batch generators"
                        )
                        break
                except Exception as e:
                    print(e)
                    run = False
                    break
            print("Finished epoch!")
            # Epoch finished - Save model
            model_path = saver.save(sess,
                                    model_config["model_base_dir"] +
                                    os.path.sep + str(experiment_id) +
                                    os.path.sep + str(experiment_id),
                                    global_step=int(_global_step))

            test_loss_list = []
            test_ext_loss_list = []
            for i in tqdm(range(40)):
                _test_loss_summary, _test_ext_loss_summary, _test_loss, _test_ext_loss = sess.run(
                    [
                        test_loss_summary, test_ext_loss_summary, test_loss,
                        test_ext_loss
                    ])
                writer.add_summary(_test_loss_summary,
                                   global_step=_global_step + i)
                writer.add_summary(_test_ext_loss_summary,
                                   global_step=_global_step + i)
                test_loss_list.append(_test_loss)
                test_ext_loss_list.append(_test_ext_loss)
            curr_loss_val = np.mean(test_loss_list)
            curr_loss_test = np.mean(test_ext_loss_list)
            print("End Test (", epoch, ") :", moving_avg_loss_value,
                  curr_loss_val, curr_loss_test)

            epoch += 1
            if curr_loss_val < best_loss:
                worse_epochs = 0
                print("Performance on validation set improved from " +
                      str(best_loss) + " to " + str(curr_loss_val))
                best_model_path = model_path
                best_loss = curr_loss_val
            else:
                worse_epochs += 1
                print("Performance on validation set worsened to " +
                      str(curr_loss_val))

            if curr_loss_test < best_loss_test:
                print("Performance on test set improved from " +
                      str(best_loss_test) + " to " + str(curr_loss_test))
                best_loss_test = curr_loss_test
            else:
                print("Performance on test set worsened to " +
                      str(curr_loss_test))

        except Exception as e:
            print(e)
            break

    writer.flush()
    writer.close()
    coord.request_stop()
    sess.close()
    tf.reset_default_graph()

    print("TRAINING FINISHED - TESTING NOW AVAILABLE WITH BEST MODEL " +
          best_model_path)
    return best_model_path, epoch, best_loss, best_loss_test
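The loop above interleaves three pieces of bookkeeping: an exponential moving average of the training loss, a checkpoint after every epoch, and patience-based early stopping on the validation loss. Stripped of the TensorFlow session plumbing, the pattern reduces to the following sketch; train_epoch and validate are hypothetical callables standing in for the sess.run calls above.

def train_with_early_stopping(train_epoch, validate, patience):
    # Stop once the validation loss has failed to improve `patience`
    # epochs in a row.
    best_loss, worse_epochs, epoch = float("inf"), 0, 0
    while worse_epochs < patience:
        moving_avg = None
        for loss in train_epoch():
            # Same smoothing as above: the first batch seeds the average,
            # later batches blend in with weight 0.2.
            moving_avg = loss if moving_avg is None else 0.8 * moving_avg + 0.2 * loss
        val_loss = validate()
        if val_loss < best_loss:
            best_loss, worse_epochs = val_loss, 0
        else:
            worse_epochs += 1
        epoch += 1
    return best_loss, epoch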
Example #42
0
 def t0(self, value):
     assert Utils.isScalar(value), 't0 must be a scalar'
     del self.timeMesh
     self._t0 = float(value)
Example #43
0
    def process():
        print(INFO + 'Processing Start.')
        # original_data_dir = r'/Users/Yuseng/Downloads/Deep-Learning-For-Computer-Vision-master/datasets/animals'
        # original_data_dir = r'/Users/Yuseng/Downloads/all/train'
        original_data_dir = r'/home/bigdata/Documents/DeepLearningProject/CatVsDog/train'
        # original_data_dir = r'/Users/zzc20160628-14/Downloads/cat_dog_data/train'
        # original_data_dir = r'/home/ubuntu/DeepLearningProject/data/train'
        base_dir = './cat_and_dog_small'
        ut.ifNoneCreateDirs(base_dir)

        train_dir = os.path.join(base_dir, 'train')
        ut.ifNoneCreateDirs(train_dir)

        val_dir = os.path.join(base_dir, 'validation')
        ut.ifNoneCreateDirs(val_dir)

        test_dir = os.path.join(base_dir, 'test')
        ut.ifNoneCreateDirs(test_dir)

        # Create one subdirectory per class and split, then copy a fixed
        # split of images: per class, 1000 train, 500 validation, 500 test.
        splits = [(train_dir, range(0, 1000)),
                  (val_dir, range(1000, 1500)),
                  (test_dir, range(1500, 2000))]
        for split_dir, index_range in splits:
            for species in ('cat', 'dog'):
                class_dir = os.path.join(split_dir, species)
                ut.ifNoneCreateDirs(class_dir)
                for i in index_range:
                    name = '{}.{}.jpg'.format(species, i)
                    src = os.path.join(original_data_dir, name)
                    if not os.path.exists(src):
                        raise FileNotFoundError(src)
                    dst = os.path.join(class_dir, name)
                    if os.path.exists(dst):
                        continue
                    shutil.copy(src=src, dst=dst)

        print(INFO + 'Processing End.')
        return train_dir, val_dir, test_dir
Example #44
0
PythonRequirements.Validate()

if platform.system() == "Windows":
    from SetupVulkan import VulkanConfiguration as VulkanRequirements

    VulkanRequirements.Validate()

from SetupPremake import PremakeConfiguration as PremakeRequirements

os.chdir('./../')  # Change from devtools/scripts directory to root

premakeInstalled = PremakeRequirements.Validate()
print("\nUpdating submodules...")
subprocess.call(["git", "submodule", "update", "--init", "--recursive"])

Utils.CopyContents("scripts/res/glfw-premake5.lua",
                   "Hazard/vendor/glfw/premake5.lua")
Utils.CopyContents("scripts/res/yaml-premake5.lua",
                   "Hazard/vendor/yaml-cpp/premake5.lua")

if platform.system() == "Darwin":
    print("\nDownloading ShaderC")
    Utils.DownloadFile(
        "https://github.com/google/shaderc/archive/refs/heads/main.zip",
        "Hazard/vendor/shaderc.zip")
    Utils.UnzipFile("Hazard/vendor/shaderc.zip", True)
    if not path.exists("Hazard/vendor/shaderc"):
        os.mkdir("Hazard/vendor/shaderc")
        os.rename("Hazard/vendor/shaderc-main/libshaderc/include",
                  "Hazard/vendor/shaderc/include")

if (premakeInstalled):
Example #45
0
 def updatePos(self, pos):
     self.pos = pos
     self.rect.center = Utils.posToTileCenter(self.pos)
Example #46
0
    def __init__(self,
                 layers,
                 decoder,
                 output_size=None,
                 in_channels=3,
                 pretrained=True,
                 prediction_size=None,
                 training_stage=1):
        # See the original paper for details on what each training stage does
        super(MyModel, self).__init__()
        bs = 1
        self.training_stage = training_stage
        if prediction_size is None:
            self.prediction_size = (512, 1024)
            self.cube_size = (256, 256)
        else:
            self.prediction_size = prediction_size
            d_factor = 1024 // prediction_size[1]
            self.cube_size = (256 // d_factor, 256 // d_factor)
        self.equi_model = fusion_ResNet(bs,
                                        layers,
                                        decoder,
                                        self.prediction_size,
                                        3,
                                        pretrained,
                                        padding='ZeroPad')
        self.cube_model = fusion_ResNet(bs * 6,
                                        layers,
                                        decoder,
                                        self.cube_size,
                                        3,
                                        pretrained,
                                        padding='SpherePad')

        if self.training_stage == 3:
            self.refine_model = Refine(prediction_size)

        if layers <= 34:
            num_channels = 512
        elif layers >= 50:
            num_channels = 2048

        self.equi_decoder = choose_decoder(decoder,
                                           num_channels // 2,
                                           padding='ZeroPad')
        self.equi_conv3 = nn.Sequential(
            nn.Conv2d(num_channels // 32,
                      1,
                      kernel_size=3,
                      stride=1,
                      padding=1,
                      bias=False),
            nn.UpsamplingBilinear2d(size=self.prediction_size))
        self.cube_decoder = choose_decoder(decoder,
                                           num_channels // 2,
                                           padding='SpherePad')
        mypad = getattr(Utils.CubePad, 'SpherePad')
        self.cube_conv3 = nn.Sequential(
            mypad(1),
            nn.Conv2d(num_channels // 32,
                      1,
                      kernel_size=3,
                      stride=1,
                      padding=0,
                      bias=False),
            nn.UpsamplingBilinear2d(size=self.cube_size))

        self.equi_decoder.apply(weights_init)
        self.equi_conv3.apply(weights_init)
        self.cube_decoder.apply(weights_init)
        self.cube_conv3.apply(weights_init)

        self.ce = CETransform()

        if layers <= 34:
            ch_lst = [64, 64, 128, 256, 512, 256, 128, 64, 32]
        else:
            ch_lst = [64, 256, 512, 1024, 2048, 1024, 512, 256, 128]

        self.conv_e2c = nn.ModuleList([])
        self.conv_c2e = nn.ModuleList([])
        self.conv_mask = nn.ModuleList([])
        for i in range(9):
            conv_c2e = nn.Sequential(
                nn.Conv2d(ch_lst[i], ch_lst[i], kernel_size=3, padding=1),
                nn.ReLU(inplace=True))
            conv_e2c = nn.Sequential(
                nn.Conv2d(ch_lst[i], ch_lst[i], kernel_size=3, padding=1),
                nn.ReLU(inplace=True))
            conv_mask = nn.Sequential(
                nn.Conv2d(ch_lst[i] * 2, 1, kernel_size=1, padding=0),
                nn.Sigmoid())
            self.conv_e2c.append(conv_e2c)
            self.conv_c2e.append(conv_c2e)
            self.conv_mask.append(conv_mask)

        #self.grid = Utils.Equirec2Cube(None, 512, 1024, 256, 90).GetGrid()
        self.grid = Utils.Equirec2Cube(None, self.prediction_size[0],
                                       self.prediction_size[1],
                                       self.cube_size[0], 90).GetGrid()
        self.d2p = Utils.Depth2Points(self.grid)
Example #47
0
 def test_lookfor_next_bounded_in_db_no_args(self):
     # answer should be a dictionary of info about the bounding box (bb),
     # or an error string if no bb was found
     answer = Utils.lookfor_next_bounded_in_db()
     print('answer from lookfor_next_bounded_in_db_no_args: ' + str(answer))
     self.assertTrue(isinstance(answer, (dict, basestring)))
Example #48
0
    parser.add_argument('--s3_directory',
                        required=True,
                        help='S3 Directory to save the file.')
    parser.add_argument('--publish_path',
                        required=True,
                        help='Publish folder to save final data.')
    parser.add_argument('--aws_region',
                        required=False,
                        help='AWS region to upload.',
                        choices=[Common.AWS_REGION, Common.AWS_KOREA_REGION],
                        default=Common.AWS_REGION)
    parser.add_argument('--log_level',
                        help='Log Output Level',
                        choices=['DEBUG', 'INFO', 'ERROR'],
                        default='DEBUG')
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    args = parse_args()
    log_level = args.log_level

    Utils.log_configuration(log_level)
    logging.info('Input Parameters: {0}'.format(args))

    publish_path = args.publish_path
    s3_directory = args.s3_directory.strip('/')
    aws_region = args.aws_region

    S3Operator.upload_to_s3_by_sync(publish_path, s3_directory, aws_region)
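Utils.log_configuration is not shown in these examples; a minimal stand-in, assuming it simply maps the --log_level choice onto the stdlib logging module, might look like this.

import logging

def log_configuration(log_level):
    # Map the CLI choice ('DEBUG', 'INFO' or 'ERROR') onto the stdlib
    # logger and emit timestamped records to the console.
    logging.basicConfig(
        level=getattr(logging, log_level),
        format='%(asctime)s %(levelname)s %(message)s')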
Example #49
0
 def getReferenceLicense(self, license):
     key = Utils.removeDiacritic(license).upper()
     return self.aliasLicenseLookup.get(key, key)
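Utils.removeDiacritic is external to this snippet; a common implementation of the idea (an assumption, not the library's actual code) strips combining marks after NFKD normalization, which together with .upper() makes the lookup key accent- and case-insensitive.

import unicodedata

def remove_diacritic(text):
    # Decompose accented characters (NFKD), then drop the combining
    # marks, e.g. 'Café' -> 'Cafe'.
    normalized = unicodedata.normalize('NFKD', text)
    return ''.join(ch for ch in normalized if not unicodedata.combining(ch))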
Example #50
0
 def test_lookfor_next_bounded_in_db_bad_args(self):
     # answer should be a dictionary of info about the bounding box (bb),
     # or an error string if no bb was found
     answer = Utils.lookfor_next_bounded_in_db(
         current_item="1", current_image="2", only_get_boxed_images=True)
     print('answer from lookfor_next_bounded_in_db: ' + str(answer))
     self.assertTrue(isinstance(answer, (dict, basestring)))
Example #51
0
def load_dex_file() -> Element:
    return Utils.parse_html(os.path.join(HTML_DIR, DEX_FILE))
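Utils.parse_html is not defined here; a minimal stand-in consistent with the Element return annotation (an assumption) parses the file with the stdlib ElementTree. Real-world HTML usually needs a more lenient parser such as lxml.html or html5lib.

from xml.etree import ElementTree
from xml.etree.ElementTree import Element

def parse_html(path: str) -> Element:
    # Parse the file as XML/XHTML and return its root element.
    return ElementTree.parse(path).getroot()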
Example #52
0
 def getReferenceTeam(self, team):
     key = Utils.removeDiacritic(team).upper()
     return self.aliasTeamLookup.get(key, team)
Example #53
0
    htmlDoc = pdfToHtml.OpenHtmlDoc(doc)
    if htmlDoc is None:
        raise Exception('Unable to open html doc : ' + pdfix.GetError())

    # convert all pages at once
    if not htmlDoc.Save(save_path, html_params, 0, None):
        raise Exception('Unable to save html doc : ' + pdfix.GetError())

    doc.Close()
    pdfix.Destroy()


try:
    # pdfix initialization
    email = Utils.getEmail()  # email address
    licenseKey = Utils.getLicenseKey()  # license key
    cwd = os.getcwd() + "/"  # current working directory
    os.makedirs(cwd + 'output')

    # pdfix initialization
    Pdfix_init(cwd + Utils.getModuleName('pdfix'))
    PdfToHtml_init(cwd + Utils.getModuleName('pdf_to_html'))

    htmlParams = PdfHtmlParams()
    htmlParams.type = kPdfHtmlFixed
    htmlParams.flags |= kHtmlNoExternalCSS | kHtmlNoExternalJS

    ConvertToHtml(email, licenseKey, cwd + 'resources/test.pdf',
                  cwd + 'output/index.html', cwd + 'resources/config.json',
                  htmlParams)
Example #54
0
def SAMBA_LIBRARY(bld,
                  libname,
                  source,
                  deps='',
                  public_deps='',
                  includes='',
                  public_headers=None,
                  public_headers_install=True,
                  header_path=None,
                  pc_files=None,
                  vnum=None,
                  soname=None,
                  cflags='',
                  ldflags='',
                  external_library=False,
                  realname=None,
                  autoproto=None,
                  autoproto_extra_source='',
                  group='main',
                  depends_on='',
                  local_include=True,
                  global_include=True,
                  vars=None,
                  subdir=None,
                  install_path=None,
                  install=True,
                  pyembed=False,
                  pyext=False,
                  target_type='LIBRARY',
                  bundled_extension=True,
                  link_name=None,
                  abi_directory=None,
                  abi_match=None,
                  hide_symbols=False,
                  manpages=None,
                  private_library=False,
                  grouping_library=False,
                  allow_undefined_symbols=False,
                  allow_warnings=True,
                  enabled=True):
    '''define a Samba library'''

    if LIB_MUST_BE_PRIVATE(bld, libname):
        private_library = True

    if not enabled:
        SET_TARGET_TYPE(bld, libname, 'DISABLED')
        return

    source = bld.EXPAND_VARIABLES(source, vars=vars)
    if subdir:
        source = bld.SUBDIR(subdir, source)

    # remember empty libraries, so we can strip the dependencies
    if ((source == '') or (source == [])) and deps == '' and public_deps == '':
        SET_TARGET_TYPE(bld, libname, 'EMPTY')
        return

    if BUILTIN_LIBRARY(bld, libname):
        obj_target = libname
    else:
        obj_target = libname + '.objlist'

    if group == 'libraries':
        subsystem_group = 'main'
    else:
        subsystem_group = group

    # first create a target for building the object files for this library
    # by separating in this way, we avoid recompiling the C files
    # separately for the install library and the build library
    bld.SAMBA_SUBSYSTEM(obj_target,
                        source=source,
                        deps=deps,
                        public_deps=public_deps,
                        includes=includes,
                        public_headers=public_headers,
                        public_headers_install=public_headers_install,
                        header_path=header_path,
                        cflags=cflags,
                        group=subsystem_group,
                        autoproto=autoproto,
                        autoproto_extra_source=autoproto_extra_source,
                        depends_on=depends_on,
                        hide_symbols=hide_symbols,
                        allow_warnings=allow_warnings,
                        pyembed=pyembed,
                        pyext=pyext,
                        local_include=local_include,
                        global_include=global_include)

    if BUILTIN_LIBRARY(bld, libname):
        return

    if not SET_TARGET_TYPE(bld, libname, target_type):
        return

    # the library itself will depend on that object target
    deps += ' ' + public_deps
    deps = TO_LIST(deps)
    deps.append(obj_target)

    realname = bld.map_shlib_extension(realname,
                                       python=(target_type == 'PYTHON'))
    link_name = bld.map_shlib_extension(link_name,
                                        python=(target_type == 'PYTHON'))

    # we don't want any public libraries without version numbers
    if (not private_library and target_type != 'PYTHON' and not realname):
        if vnum is None and soname is None:
            raise Utils.WafError("public library '%s' must have a vnum" %
                                 libname)
        if pc_files is None:
            raise Utils.WafError(
                "public library '%s' must have pkg-config file" % libname)
        if public_headers is None:
            raise Utils.WafError("public library '%s' must have header files" %
                                 libname)

    if target_type == 'PYTHON' or realname or not private_library:
        bundled_name = libname.replace('_', '-')
    else:
        bundled_name = PRIVATE_NAME(bld, libname, bundled_extension,
                                    private_library)

    ldflags = TO_LIST(ldflags)

    features = 'cc cshlib symlink_lib install_lib'
    if pyext:
        features += ' pyext'
    if pyembed:
        features += ' pyembed'

    if abi_directory:
        features += ' abi_check'

    vscript = None
    if bld.env.HAVE_LD_VERSION_SCRIPT:
        if private_library:
            version = "%s_%s" % (Utils.g_module.APPNAME,
                                 Utils.g_module.VERSION)
        elif vnum:
            version = "%s_%s" % (libname, vnum)
        else:
            version = None
        if version:
            vscript = "%s.vscript" % libname
            bld.ABI_VSCRIPT(libname, abi_directory, version, vscript,
                            abi_match)
            fullname = apply_pattern(bundled_name, bld.env.shlib_PATTERN)
            fullpath = bld.path.find_or_declare(fullname)
            vscriptpath = bld.path.find_or_declare(vscript)
            if not fullpath:
                raise Utils.WafError("unable to find fullpath for %s" %
                                     fullname)
            if not vscriptpath:
                raise Utils.WafError("unable to find vscript path for %s" %
                                     vscript)
            bld.add_manual_dependency(fullpath, vscriptpath)
            if Options.is_install:
                # also make the .inst file depend on the vscript
                instname = apply_pattern(bundled_name + '.inst',
                                         bld.env.shlib_PATTERN)
                bld.add_manual_dependency(bld.path.find_or_declare(instname),
                                          bld.path.find_or_declare(vscript))
            vscript = os.path.join(bld.path.abspath(bld.env), vscript)

    bld.SET_BUILD_GROUP(group)
    t = bld(features=features,
            source=[],
            target=bundled_name,
            depends_on=depends_on,
            samba_ldflags=ldflags,
            samba_deps=deps,
            samba_includes=includes,
            version_script=vscript,
            local_include=local_include,
            global_include=global_include,
            vnum=vnum,
            soname=soname,
            install_path=None,
            samba_inst_path=install_path,
            name=libname,
            samba_realname=realname,
            samba_install=install,
            abi_directory="%s/%s" % (bld.path.abspath(), abi_directory),
            abi_match=abi_match,
            private_library=private_library,
            grouping_library=grouping_library,
            allow_undefined_symbols=allow_undefined_symbols)

    if realname and not link_name:
        link_name = 'shared/%s' % realname

    if link_name:
        t.link_name = link_name

    if pc_files is not None and not private_library:
        bld.PKG_CONFIG_FILES(pc_files, vnum=vnum)

    if (manpages is not None and 'XSLTPROC_MANPAGES' in bld.env
            and bld.env['XSLTPROC_MANPAGES']):
        bld.MANPAGES(manpages, install)
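For context, a typical wscript invocation might look like the following; the target, source and dependency names are purely illustrative. Note that the checks above require every public library to carry a vnum, a pkg-config file and public headers.

bld.SAMBA_LIBRARY('examplelib',
                  source='examplelib.c util.c',
                  deps='talloc',
                  public_headers='examplelib.h',
                  pc_files='examplelib.pc',
                  vnum='0.0.1')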
Example #55
0
def manage_getUsersToConvert(self, withpasswords=False):
    """ find all the users in the acl_users folder here, and
    try to find a suitable name and email address. """
    if 'acl_users' not in self.objectIds('User Folder'):
        # just double checking that we have a old user folder here
        return []
    
    old_user_folder = self.acl_users
    old_users = []

    issuetrackers = _find_issuetrackers(self)
    if self.meta_type == ISSUETRACKER_METATYPE:
        if self not in issuetrackers:
            issuetrackers.append(self)
            
    acl_cookienames, acl_cookieemails = {}, {}
    for issuetracker in issuetrackers:
        _cookienames = issuetracker.getACLCookieNames()
        
        if _cookienames:
            acl_cookienames = _merge_dicts_nicely(acl_cookienames, _cookienames)

        _cookieemails = issuetracker.getACLCookieEmails()
        if _cookieemails:
            acl_cookieemails = _merge_dicts_nicely(acl_cookieemails, _cookieemails)
        
    for user in old_user_folder.getUsers():
        fullname = acl_cookienames.get(str(user), [])
        email = acl_cookieemails.get(str(user),[])
        
        if not fullname and email:
            _email1 = email[0].split('@')[0]
            if len(_email1.split('.'))>1:
                fullname = [x.capitalize() \
                             for x in _email1.split('.')]
                fullname = ' '.join(fullname)
                
            elif len(_email1.split('_'))>1:
                fullname = [x.capitalize() \
                            for x in _email1.split('_')]
                fullname = ' '.join(fullname)
                
            else:
                fullname = str(user).capitalize()

        d = {'username':str(user),
             'domains':user.domains,
             'roles':user.roles,
             'fullname':fullname,
             'email':email}
        
        if email and email[0] and Utils.ValidEmailAddress(email[0]):
            d['invalid_email'] = False
        else:
            d['invalid_email'] = True
            
        if withpasswords:
            d['__'] = user.__
        old_users.append(d)
        
    return old_users
Example #56
0
import Utils

INPUT_DATA = Utils.GetChallengeInput(3)

COUNT_DICT = {}
for elt in INPUT_DATA:
    # Pythonic upsert: setdefault returns the existing count, or inserts
    # and returns 0 for a new key; either way the count is then incremented
    COUNT_DICT[elt] = COUNT_DICT.setdefault(elt, 0) + 1

ANSWER = ""
# TODO: clean this up as well. Dictionary comprehensions look promising
# (see the collections.Counter sketch after this example)
for entry in COUNT_DICT:
    print("{0}: {1}".format(entry, COUNT_DICT[entry]))
    if COUNT_DICT[entry] == 1:
        ANSWER += entry

Utils.SubmitChallengeAnswer(ANSWER)
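As the TODO above hints, the count-and-filter can be condensed. Below is a sketch using collections.Counter, assuming INPUT_DATA is an iterable of strings as above; it yields the same answer, since elements with a count of one appear exactly once and iterating the input preserves their order.

from collections import Counter

def unique_elements(data):
    # Keep the elements that occur exactly once, in their original order.
    counts = Counter(data)
    return "".join(elt for elt in data if counts[elt] == 1)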
Example #57
0
import traceback
from datetime import datetime, time

import Utils as util

kite = util.intialize_kite_api()

testing = False

special_target = 6000
special_target_stock_id = 'BHEL'

START_TIME_FOR_BREAKEVEN = time(9, 15, 40, 1)

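# Busy-wait until the market opens (skipped entirely when testing is True).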
while datetime.now().time() < util.MARKET_START_TIME and testing is False:
    pass

parent_orders = {}
while True:
    try:
        orders = kite.orders()
        positions = kite.positions()['day']

        for order in orders:
            if order['parent_order_id'] is None and order[
                    'tradingsymbol'].upper() == special_target_stock_id and (
                        order['status'].upper() == 'COMPLETE'
                        or order['status'].upper() == 'OPEN'):
                parent_orders[order['order_id']] = order

        if len(parent_orders) == 0:
Example #58
0
    def manage_sendReminder(self, name, email_from, email_subject,
                            remindertext):
        """ actually send the password reminder """
        try:
            user = self.getUser(name)
        except:
            return MessageDialog(
                            title  ='Illegal value',
                            message='The specified user does not exist',
                            action ='manage_main')
                            
        issuetrackerroot = self.getIssueTrackerRoot()
        
        if not email_from:
            raise ValueError("NoEmailFromError: you must specify a from email address")
        elif not self.webmaster_email:
            self.webmaster_email = email_from

        email_to = user.getEmail()
        if not email_to or not Utils.ValidEmailAddress(email_to):
            raise ValueError("NoEmailToError: user does not have a valid email address")
            
            
        replacement_key = "<password shown here>"
        if remindertext.find(replacement_key) == -1:
            raise ValueError("NoPasswordReplacementError: "
                             "no place to put the password reminder")
                  
        if self.encrypt_passwords:
            # generate a new password and save it
            password = Utils.getRandomString(length=6, loweronly=1)
            user.__ = password
        
        else:
            password = user.__
        
        if not email_subject:
            email_subject = "Issue Tracker password reminder"
        
        remindertext = remindertext.replace(replacement_key, password)
        
        # send it!
        

        if issuetrackerroot:
            # send via the issuetracker
            issuetrackerroot.sendEmail(remindertext, email_to, email_from, 
               email_subject, swallowerrors=False)
        else:
            body = '\r\n'.join(['From: %s'%email_from, 'To: %s'%email_to,
                                'Subject: %s'%email_subject, "", remindertext])
                            
            # Expect a mailhost object. Note that here we're outside the Issuetracker
            try:
                mailhost = self.MailHost
            except:
                try:
                    mailhost = self.SecureMailHost
                except:
                    try:
                        mailhost = self.superValues('MailHost')[0]
                    except IndexError:
                        raise ValueError("NoMailHostError: no 'MailHost' available to send from")
            if hasattr(mailhost, 'secureSend'):
                mailhost.secureSend(remindertext, email_to, email_from, email_subject)
            else:
                mailhost.send(body, email_to, email_from, email_subject)
            
        m = "Password reminder sent to %s" % email_to
        return self.manage_main(self, self.REQUEST, manage_tabs_message=m)
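Utils.getRandomString is external to this snippet; a minimal equivalent (an assumption) draws random lowercase characters. For real password generation the stdlib secrets module would be the safer choice.

import random
import string

def get_random_string(length=6, loweronly=1):
    # Draw `length` random characters; lowercase-only keeps the generated
    # password easy to read out of a reminder email.
    alphabet = string.ascii_lowercase if loweronly else string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))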
Example #59
0
# coding=utf-8
import Utils as ut
from pandas import ExcelWriter

writer = ExcelWriter('perf.xlsx')
a = ut.getPerf(['2017-01-03', '2018-01-02'])
a.to_excel(writer)
writer.save()  # flush the workbook to disk
Example #60
0
    def __init__(self,
                 image_size,
                 category_size,
                 feature_size=0,
                 predict_ckpt=None,
                 gap_layer=False):
        self.x = tf.placeholder(tf.float32,
                                shape=[None, image_size, image_size, 3])
        self.y_true = tf.placeholder(tf.float32, shape=[None, category_size])
        if feature_size != 0:
            self.x_feat = tf.placeholder(tf.float32,
                                         shape=[None, feature_size])
        self.hold_prob = tf.placeholder(tf.float32)

        self.feature_size = feature_size

        utils.get_model_data(MODEL_DIR, MODEL_URL)

        # transfer learning from MobilenetV1
        self.mobilenet_net = self.get_mobile_net(self.x,
                                                 final_endpoint="Conv2d_11")
        variable_to_restore = [
            v for v in slim.get_variables_to_restore()
            if v.name.split('/')[0] == 'MobilenetV1'
        ]
        #       shape of mobilenet_net: (?, 14, 14, 512)

        #       self.size = (int)(image_size/4)
        #       convo_2_flat = tf.reshape(convo_2_pooling, [-1, self.size*self.size*64])
        self.size = 14 * 14 * 512
        self.gap_layer = gap_layer

        if self.gap_layer:
            # GAP layer
            self.gap_weight = tf.Variable(
                tf.random_normal([512, len(TEETH_PART_LIST)]))
            self.gap_bias = tf.Variable(
                tf.random_normal([len(TEETH_PART_LIST)]))
            self.y_pred = self.gap_out_layer(self.mobilenet_net,
                                             self.gap_weight, self.gap_bias)
        else:
            self.convo_2_flat = tf.reshape(self.mobilenet_net, [-1, self.size])

            if feature_size != 0:
                self.convo_2_flat = tf.concat([self.convo_2_flat, self.x_feat],
                                              1)

            self.full_layer_one = tf.nn.relu(
                self.normal_full_layer(self.convo_2_flat, 1024))
            self.full_one_dropout = tf.nn.dropout(self.full_layer_one,
                                                  keep_prob=self.hold_prob)

            self.y_pred = self.normal_full_layer(self.full_one_dropout,
                                                 category_size)

        self.sess = tf.Session()

        if predict_ckpt:
            print('=====> predict_ckpt = ', predict_ckpt)
            self.saver = tf.train.Saver()
            self.saver.restore(self.sess, predict_ckpt)

            self.position = tf.argmax(self.y_pred, 1)
            if self.gap_layer:
                self.classmaps = self.generate_heatmap(self.mobilenet_net,
                                                       self.position,
                                                       self.gap_weight,
                                                       image_size)

        else:
            self.cross_entropy = tf.reduce_mean(
                tf.nn.softmax_cross_entropy_with_logits(labels=self.y_true,
                                                        logits=self.y_pred))
            self.optimizer = tf.train.AdamOptimizer(
                learning_rate=LEARNING_RATE)
            self.train_op = self.optimizer.minimize(self.cross_entropy)

            self.matches = tf.equal(tf.argmax(self.y_pred, 1),
                                    tf.argmax(self.y_true, 1))
            self.acc = tf.reduce_mean(tf.cast(self.matches, tf.float32))

            self.c_matrix = tf.confusion_matrix(tf.argmax(self.y_pred, 1),
                                                tf.argmax(self.y_true, 1))

            self.sess.run(tf.global_variables_initializer())

            #       restore pre-train mobilenet
            if g_pretrain_ckpt is None:
                self.saver = tf.train.Saver(variable_to_restore)
                self.saver.restore(self.sess,
                                   'Model_zoo/mobilenet_v1_1.0_224.ckpt')
            else:
                self.saver = tf.train.Saver()
                self.saver.restore(
                    self.sess,
                    'pretrain_model/model.ckpt-' + str(g_pretrain_ckpt))

        self.print_params()

        return