def test_scoretools_move_full_measure_tuplet_prolation_to_measure_time_signature_04():
    r'''Subsume tuplet in nonassignable measure.
    '''

    tuplet = scoretools.FixedDurationTuplet(Duration(5, 8), [])
    tuplet.extend("c'8 d'8 e'8 f'8 g'8 a'8")
    measure = Measure((5, 8), [tuplet])
    scoretools.move_full_measure_tuplet_prolation_to_measure_time_signature(
        measure)

    assert format(measure) == stringtools.normalize(
        r'''
        {
            \time 15/24
            \scaleDurations #'(2 . 3) {
                c'8 ~
                c'32
                d'8 ~
                d'32
                e'8 ~
                e'32
                f'8 ~
                f'32
                g'8 ~
                g'32
                a'8 ~
                a'32
            }
        }
        '''
        ), format(measure)

    assert inspect_(measure).is_well_formed()
def edit_plan(request, access_token, planid):
    result = login_auth(access_token)
    if result['err']['code'] != 0:
        return HttpResponse(json.dumps(result))
    userid = result['data']['id']
    try:
        plan = Plan.objects.get(id__exact=planid)
        user = FBUser.objects.get(fbid=userid)
        PrivatePlan.objects.filter(accessible_plan=plan).delete()
        plan.holder = user
        plan.title = request.POST.get('title', plan.title)
        plan.destination = request.POST.get('destination', plan.destination)
        plan.description = request.POST.get('description', plan.description)
        plan.depart_time = request.POST.get('depart_time', plan.depart_time)
        plan.length = request.POST.get('length', plan.length)
        plan.limit = request.POST.get('limit', plan.limit)
        visible_type = request.POST.get('visible_type', plan.visible_type)
        plan.visible_type = int(visible_type)
        friend_list = request.POST.getlist('friendlist', [])
        plan.full_clean()
        plan.save()
        if plan.visible_type == 3:
            for friendid in friend_list:
                friend = FBUser.objects.get(fbid=friendid)
                private = PrivatePlan()
                private.accessible_user = friend
                private.accessible_plan = plan
                private.full_clean()
                private.save()
        # `format` here is evidently this project's own (code, message)
        # response helper, shadowing the builtin.
        result = format(0, 'edit success')
        return HttpResponse(json.dumps(result))
    except Exception as e:
        result = format(400, str(e))
        return HttpResponse(json.dumps(result))
def size(self, irc, msg, args, url):
    """<url>

    Returns the Content-Length header of <url>.  Only HTTP urls are valid,
    of course.
    """
    if not self._checkURLWhitelist(url):
        irc.error("This url is not on the whitelist.")
        return
    fd = utils.web.getUrlFd(url)
    try:
        try:
            size = fd.headers["Content-Length"]
            irc.reply(format(_("%u is %S long."), url, int(size)))
        except KeyError:
            size = conf.supybot.protocols.http.peekSize()
            s = fd.read(size)
            if len(s) != size:
                irc.reply(format(_("%u is %S long."), url, len(s)))
            else:
                irc.reply(format(_("The server didn't tell me how long %u "
                                   "is but it's longer than %S."),
                                 url, size))
    finally:
        fd.close()
def _formatPrivmsg(self, nick, network, msg):
    channel = msg.args[0]
    if self.registryValue('includeNetwork', channel):
        network = '@' + network
    else:
        network = ''
    # colorize nicks
    color = self.registryValue('color', channel)  # Also used further down.
    if color:
        nick = ircutils.IrcString(nick)
        newnick = ircutils.mircColor(nick, *ircutils.canonicalColor(nick))
        colors = ircutils.canonicalColor(nick, shift=4)
        nick = newnick
    if ircmsgs.isAction(msg):
        if color:
            t = ircutils.mircColor('*', *colors)
        else:
            t = '*'
        s = format('%s %s%s %s', t, nick, network, ircmsgs.unAction(msg))
    else:
        if color:
            lt = ircutils.mircColor('<', *colors)
            gt = ircutils.mircColor('>', *colors)
        else:
            lt = '<'
            gt = '>'
        s = format('%s%s%s%s %s', lt, nick, network, gt, msg.args[1])
    return s
def add_plan(request, access_token):
    result = login_auth(access_token)
    if result['err']['code'] != 0:
        return HttpResponse(json.dumps(result))
    userid = result['data']['id']
    try:
        new_plan = Plan()
        user = FBUser.objects.get(fbid=userid)
        new_plan.holder = user
        new_plan.title = request.POST.get('title', "testtitle")
        new_plan.destination = request.POST.get('destination', "testdestination")
        new_plan.description = request.POST.get('description', "testdescription")
        new_plan.depart_time = request.POST.get('depart_time', datetime.today())
        new_plan.length = request.POST.get('length', 2)
        new_plan.limit = request.POST.get('limit', 2)
        visible_type = request.POST.get('visible_type', 1)
        new_plan.visible_type = int(visible_type)
        friend_list = request.POST.getlist('friendlist', [])
        new_plan.full_clean()
        new_plan.save()
        if new_plan.visible_type == 3:
            for friendid in friend_list:
                friend = FBUser.objects.get(fbid=friendid)
                private = PrivatePlan()
                private.accessible_user = friend
                private.accessible_plan = new_plan
                private.full_clean()
                private.save()
        result = format(0, 'create success')
        return HttpResponse(json.dumps(result))
    except Exception as e:
        result = format(400, str(e))
        return HttpResponse(json.dumps(result))
def get_case_number(court, case_count, charge_count):
    case_numbers = []
    prefix = 'CR14'
    if court in courts_without_year_in_prefix:
        prefix = 'CR00'
    case = format(case_count, '06')
    if court in courts_with_continuing_case_numbers:
        case = format(0, '06')
        case_number_spans = courts_with_continuing_case_numbers[court]
        for span in case_number_spans:
            if case_count + span[0] <= span[1]:
                case = format(case_count + span[0], '06')
                break
            else:
                case_count -= span[1] - span[0] + 1
    charge = '-' + format(charge_count, '02')
    if court in courts_with_fm_case_number:
        case_length = courts_with_fm_case_number[court]
        case = format(case_count, '0' + case_length)
        case_f = str('F' + case).zfill(6)
        case_m = str('M' + case).zfill(6)
        case_numbers.append(prefix + case_f + charge)
        case_numbers.append(prefix + case_m + charge)
    else:
        case_numbers.append(prefix + case + charge)
    return case_numbers
def ReadDateTime(self, DateTime):
    # Initiate DS1302 communication.
    self.InitiateDS1302()

    # Write address byte.
    self.WriteByte(int("10111111", 2))

    # Read date and time data. Each register is BCD-encoded:
    # low nibble = ones digit, high nibble = tens digit.
    Data = ""
    Byte = self.ReadByte()
    DateTime["Second"] = operator.mod(Byte, 16) + operator.div(Byte, 16) * 10
    Byte = self.ReadByte()
    DateTime["Minute"] = operator.mod(Byte, 16) + operator.div(Byte, 16) * 10
    Byte = self.ReadByte()
    DateTime["Hour"] = operator.mod(Byte, 16) + operator.div(Byte, 16) * 10
    Byte = self.ReadByte()
    DateTime["Day"] = operator.mod(Byte, 16) + operator.div(Byte, 16) * 10
    Byte = self.ReadByte()
    DateTime["Month"] = operator.mod(Byte, 16) + operator.div(Byte, 16) * 10
    Byte = self.ReadByte()
    DateTime["DayOfWeek"] = (operator.mod(Byte, 16) + operator.div(Byte, 16) * 10) - 1
    Byte = self.ReadByte()
    DateTime["Year"] = operator.mod(Byte, 16) + operator.div(Byte, 16) * 10

    Data = self.DOW[DateTime["DayOfWeek"]] + " " \
        + format(DateTime["Year"] + 2000, "04d") + "-" \
        + format(DateTime["Month"], "02d") + "-" + format(DateTime["Day"], "02d")
    Data += " " + format(DateTime["Hour"], "02d") + ":" \
        + format(DateTime["Minute"], "02d") + ":" + format(DateTime["Second"], "02d")

    # End DS1302 communication.
    self.EndDS1302()

    return Data
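# Worked example of the BCD decode above: the DS1302 returns 0x59 for the
# value 59; 0x59 is 89 decimal, so 89 % 16 + (89 // 16) * 10 == 9 + 50 == 59
# (operator.mod/operator.div are the Python 2 spellings of % and //).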
def _seen(self, irc, channel, name, any=False):
    if any:
        db = self.anydb
    else:
        db = self.db
    try:
        results = []
        if '*' in name:
            results = db.seenWildcard(channel, name)
        else:
            results = [[name, db.seen(channel, name)]]
        if len(results) == 1:
            (nick, info) = results[0]
            (when, said) = info
            irc.reply(format('%s was last seen in %s %s ago: %s',
                             nick, channel,
                             utils.timeElapsed(time.time()-when), said))
        elif len(results) > 1:
            L = []
            for (nick, info) in results:
                (when, said) = info
                L.append(format('%s (%s ago)', nick,
                                utils.timeElapsed(time.time()-when)))
            irc.reply(format('%s could be %L', name, (L, 'or')))
        else:
            irc.reply(format('I haven\'t seen anyone matching %s.', name))
    except KeyError:
        irc.reply(format('I have not seen %s.', name))
def disp_results(fig, ax1, ax2, loss_iterations, losses, accuracy_iterations,
                 accuracies, accuracies_iteration_checkpoints_ind, fileName,
                 color_ind=0):
    modula = len(plt.rcParams['axes.color_cycle'])
    acrIterations = []
    top_acrs = {}
    if accuracies.size:
        if accuracies.size > 4:
            top_n = 4
        else:
            top_n = accuracies.size - 1
        temp = np.argpartition(-accuracies, top_n)
        result_indexces = temp[:top_n]
        temp = np.partition(-accuracies, top_n)
        result = -temp[:top_n]
        for acr in result_indexces:
            acrIterations.append(accuracy_iterations[acr])
            top_acrs[str(accuracy_iterations[acr])] = str(accuracies[acr])
        sorted_top4 = sorted(top_acrs.items(), key=operator.itemgetter(1))
        maxAcc = np.amax(accuracies, axis=0)
        iterIndx = np.argmax(accuracies)
        maxAccIter = accuracy_iterations[iterIndx]
        maxIter = accuracy_iterations[-1]
        consoleInfo = format('\n[%s]:maximum accuracy [from 0 to %s ] = [Iteration %s]: %s '
                             % (fileName, maxIter, maxAccIter, maxAcc))
        plotTitle = format('max accuracy(%s) [Iteration %s]: %s '
                           % (fileName, maxAccIter, maxAcc))
        print(consoleInfo)
        #print(str(result))
        #print(acrIterations)
        # print 'Top 4 accuracies:'
        print('Top 4 accuracies:' + str(sorted_top4))
        plt.title(plotTitle)
    ax1.plot(loss_iterations, losses,
             color=plt.rcParams['axes.color_cycle'][(color_ind * 2 + 0) % modula])
    ax2.plot(accuracy_iterations, accuracies,
             plt.rcParams['axes.color_cycle'][(color_ind * 2 + 1) % modula],
             label=str(fileName))
    ax2.plot(accuracy_iterations[accuracies_iteration_checkpoints_ind],
             accuracies[accuracies_iteration_checkpoints_ind], 'o',
             color=plt.rcParams['axes.color_cycle'][(color_ind * 2 + 1) % modula])
    plt.legend(loc='lower right')
def list_entries(self):
    """
    Tries to list all entries in the table.
    Returns (<Result>, [<Normal entries>], [<Deleted entries>])
    where <Result> is either IBLT.RESULT_LIST_ENTRIES_COMPLETE to indicate
    that the list is complete, or IBLT.RESULT_LIST_ENTRIES_INCOMPLETE to
    indicate that some entries couldn't be recovered.
    """
    dummy = IBLT(self.m, self.k, self.T)
    entries = []
    deleted_entries = []
    while True:
        for i in range(len(dummy.T)):
            entry = dummy.T[i]
            if entry[0] == 1 or entry[0] == -1:
                retrieved_tup = (format(entry[1], SIZE_KEY),
                                 format(entry[2], SIZE_VAL))
                hashed_key = int(md5(retrieved_tup[0]), 16)
                if entry[0] == 1 and entry[3] == hashed_key:
                    entries.append(retrieved_tup)
                    dummy.delete(retrieved_tup)
                    break
                elif entry[0] == -1 and entry[3] == hashed_key:
                    deleted_entries.append(retrieved_tup)
                    dummy.insert(retrieved_tup)
                    break
        else:
            # for-else: no pure cell was found this pass, so stop peeling
            break
    if not dummy.is_empty():
        return (IBLT.RESULT_LIST_ENTRIES_INCOMPLETE, entries, deleted_entries)
    return (IBLT.RESULT_LIST_ENTRIES_COMPLETE, entries, deleted_entries)
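# The loop above is the standard IBLT "peeling" procedure: repeatedly find a
# pure cell (count +1 or -1 whose stored hash matches its key), record the
# pair, remove it from the working copy, and repeat until the table is empty
# (complete listing) or no pure cell remains (incomplete).
# Hedged usage sketch, assuming this module's constructor and insert():
# t = IBLT(m=30, k=4)
# t.insert(("key", "val"))
# result, entries, deleted_entries = t.list_entries()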
def handle(self):
    while True:
        try:
            data = self.request.recv(2*1024*1024)
            if not data:
                break  # end
            data = json.loads(data)
            res = self.execute(data)
            logger.debug('instance count:' + str(len(self.rpc_instances.keys())))
            res = json.dumps(res)
            res = str(len(res)).rjust(8, '0') + res
            self.request.send(res)
        except socket.timeout as err:
            res = ('error in RequestHandler :%s, res:%s'
                   % (traceback.format_exc(), data))
            logger.debug(res)
            res = json.dumps({'err': 'sys.socket.error', 'msg': format(err)})
            res = str(len(res)).rjust(8, '0') + res
            self.request.send(res)
            self.request.close()
            break
        except Exception as err:
            res = ('error in RequestHandler :%s, res:%s'
                   % (traceback.format_exc(), data))
            logger.debug(res)
            res = json.dumps({'err': 'sys.socket.error', 'msg': format(err)})
            res = str(len(res)).rjust(8, '0') + res
            self.request.send(res)
def printError(value, error=0, unit='', relative=False):
    '''
    Prints an error in a nice semiscientific way.
    Input: value and error, or an uncertainties value
    '''
    not_scientific_exponents = [-1, 0, 1, 2]  # 30 looks nicer than 3e1
    valid_digits = 2

    # cast uncertainty into value ± error
    from uncertainties import AffineScalarFunc, Variable
    if type(value) in [Variable, AffineScalarFunc]:
        error = value.std_dev()
        value = value.nominal_value

    exponent = int(format(error, 'e').split('e')[1]) + 1 - valid_digits
    exponent_val = int(format(value, 'e').split('e')[1])

    value = float(round(value / 10**exponent)) * 10**exponent
    error = float(round(error / 10**exponent)) * 10**exponent

    valstring = " {0} ± {1} ".format(value, error)
    if exponent_val in not_scientific_exponents:
        valstring = valstring + ' ' + unit
    else:
        valstring = "( {0} ± {1} ) \cdot 10^{{{2}}} {3}".format(
            value/10**exponent_val, error/10**exponent_val, exponent_val, unit)
    if relative:
        valstring = valstring + ' relative Error: ' + str(1. * error / value)
    print(valstring)
    return valstring
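# Hedged usage sketch for printError above: with valid_digits = 2,
# printError(1234.5, 67.8, 'm') rounds both numbers to the error's two
# leading digits and, because the value's exponent (3) lies outside
# not_scientific_exponents, prints a scientific form such as
# "( 1.234 ± 0.068 ) \cdot 10^{3} m".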
def to_regexps(cls, use, possibles, have_add=False):
    '''
    Convert to regular expressions.

    `have_add` indicates whether the caller can supply an "add".
    None - caller doesn't care what lower code needed.
    True - caller has add, and caller should need that.
    False - caller doesn't have add, and caller should not need it.
    '''
    regexps = []
    for possible in possibles:
        if isinstance(possible, RegexpContainer):
            cls.log.debug(format('unpacking: {0!s}', possible))
            if have_add is None or possible.add_reqd == have_add:
                regexps.append(possible.regexp)
                # this flag indicates that it's "worth" using the regexp
                # so we "inherit"
                use = use or possible.use
            else:
                raise Unsuitable('Add inconsistent.')
        else:
            cls.log.debug(format('cannot unpack: {0!s}', possible.__class__))
            raise Unsuitable('Not a container.')
    return (use, regexps)
def printDetrendedData(self):
    """
    Writes the detrended light-curve data to one file per transit and to
    a single stacked file.
    """
    self.DetrendedDataPath = MainPath+self.name+'/TAP/'
    TTcount = 0
    for TT in self.DetrendedData.keys():
        if TT.startswith('T'):
            TTcount += 1
    FileAllStacked = self.DetrendedDataPath+self.name+'.ALL.lcdtx'
    ALLOutFile = open(FileAllStacked, 'w')
    NCount = None
    for TTnum in xrange(TTcount):
        TT = 'T'+str(TTnum+1)
        FileName = self.DetrendedDataPath+self.name+'.'+TT+'.lcdtx'
        LCOutFile = open(FileName, 'w')
        for i in range(len(self.DetrendedData[TT]['x'])):
            timeStr0 = format(self.DetrendedData[TT]['x'][i], '.7f')
            #timeStr1 = format(self.DetrendedData[TT]['x'][i]-(P*(epoch-TTnum)), '.7f')
            FluxStr = format(self.DetrendedData[TT]['y'][i], '.12f')
            FluxStrErr = format(self.DetrendedData[TT]['yerr'][i], '.12f')
            lineStr0 = timeStr0+' '+FluxStr+' '+FluxStrErr
            lineStr1 = timeStr0+' '+FluxStr+' '+FluxStrErr
            print >> LCOutFile, lineStr0
            print >> ALLOutFile, lineStr1
        if TTnum != TTcount-1:
            print >> ALLOutFile, '-1.0000000 -1.000000 -1.00000'
    ALLOutFile.close()
def fromDate(clazz, date):
    """
    date may be a datetime.datetime instance, a POSIX timestamp (integer
    value, such as returned by time.time()), or an RFC 2068 Full Date
    (eg. "Mon, 23 May 2005 04:52:22 GMT") string.
    """
    def format(date):
        #
        # FIXME: strftime() is subject to localization nonsense; we need to
        # ensure that we're using the correct localization, or don't use
        # strftime().
        #
        return date.strftime("%a, %d %b %Y %H:%M:%S GMT")

    if type(date) is int:
        date = format(datetime.datetime.utcfromtimestamp(date))
    elif type(date) is str:
        pass
    elif type(date) is unicode:
        date = date.encode("utf-8")
    elif isinstance(date, datetime.datetime):
        if date.tzinfo:
            raise NotImplementedError("I need to normalize to UTC")
        date = format(date)
    else:
        raise ValueError("Unknown date type: %r" % (date,))

    return clazz(PCDATAElement(date))
def build_reaction_string(self, use_metabolite_names=False):
    """Generate a human readable reaction string"""
    def format(number):
        return "" if number == 1 else str(number).rstrip(".") + " "

    reactant_dict = {}
    product_dict = {}
    id_type = 'id'
    if use_metabolite_names:
        id_type = 'name'
    reactant_bits = []
    product_bits = []
    for the_metabolite, coefficient in iteritems(self._metabolites):
        name = str(getattr(the_metabolite, id_type))
        if coefficient > 0:
            product_bits.append(format(coefficient) + name)
        else:
            reactant_bits.append(format(abs(coefficient)) + name)

    reaction_string = ' + '.join(reactant_bits)
    if not self.reversibility:
        if self.lower_bound < 0 and self.upper_bound <= 0:
            reaction_string += ' <-- '
        else:
            reaction_string += ' --> '
    else:
        reaction_string += ' <=> '
    reaction_string += ' + '.join(product_bits)
    return reaction_string
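# Hedged usage sketch, assuming the cobra package's Reaction/Metabolite API
# that this method belongs to; an irreversible reaction consuming two A and
# producing one B should render as "2 A --> B".
# from cobra import Metabolite, Reaction
# rxn = Reaction("R1")
# rxn.add_metabolites({Metabolite("A"): -2, Metabolite("B"): 1})
# print(rxn.build_reaction_string())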
def StepSizeFromExplore(self, FileList, StablePerc):
    """
    Rescales each model parameter's MCMC step size once its acceptance
    rate has stabilized near the 0.44 target.
    """
    step = {}
    for fileName in FileList:
        data = tmcmc.iopostmcmc.readMCMC(fileName)
        if data['acr'][-1] > 0.44-StablePerc \
           and data['acr'][-1] < 0.44+StablePerc:
            for par in data.keys():
                if not tmcmc.iopostmcmc.isNonParam(par):
                    medfrac = np.median(data['frac'][-1000:])
                    print par+' has stabilized, acr = '+format(data['acr'][-1], '.2f')+\
                        ' frac = '+str(medfrac)+' | '+self.name+', '+self.case
                    step[par] = {'frac': medfrac}
        else:
            for par in data.keys():
                if not tmcmc.iopostmcmc.isNonParam(par):
                    print par+' has NOT stabilized, acr = '+format(data['acr'][-1], '.2f')+\
                        ' | '+self.name+', '+self.case
    for par in step.keys():
        for key in self.ModelParams.keys():
            if key == par:
                self.ModelParams[par]['step'] = self.ModelParams[par]['step']*step[par]['frac']
                self.ModelParams[par]['open'] = True
def test_extservice_services_stop_start_instance_validate(
        self, super_client, client):
    data = load(self)
    env = client.list_environment(uuid=data[0])[0]
    logger.info("env is: %s", format(env))

    service = client.list_service(uuid=data[1])[0]
    assert len(service) > 0
    logger.info("service is: %s", format(service))

    ext_service = client.list_service(uuid=data[2])[0]
    assert len(ext_service) > 0
    logger.info("ext_service is: %s", format(ext_service))

    con_list = \
        [client.list_container(uuid=con_uuid)[0] for con_uuid in data[3]]
    logger.info("con_list: %s", con_list)

    validate_external_service(super_client, service, [ext_service],
                              self.port, con_list)
    con_list.append(env)
    delete_all(client, con_list)
def grafana():
    import params

    # create pid dir
    Directory([params.grafana_pid_dir],
              owner=params.grafana_user,
              group=params.grafana_group,
              recursive=True
              )

    Directory([params.conf_dir],
              owner=params.grafana_user,
              group=params.grafana_group,
              recursive=True
              )

    # create config file
    File(format("{conf_dir}/grafana.ini"),
         owner=params.grafana_user,
         group=params.grafana_group,
         content=Template("grafana.ini.j2")
         )

    File(format("{conf_dir}/ldap.toml"),
         owner=params.grafana_user,
         group=params.grafana_group,
         content=Template("ldap.toml.j2")
         )

    File(format("/etc/default/grafana-server"),
         owner=params.grafana_user,
         group=params.grafana_group,
         content=Template("grafana-server.j2")
         )
def setup_java_patch():
    import params

    if params.has_ranger_admin:
        setup_java_patch = format('ambari-python-wrap {kms_home}/db_setup.py -javapatch')
        env_dict = {'RANGER_KMS_HOME': params.kms_home,
                    'JAVA_HOME': params.java_home}
        if params.db_flavor.lower() == 'sqla':
            env_dict = {'RANGER_KMS_HOME': params.kms_home,
                        'JAVA_HOME': params.java_home,
                        'LD_LIBRARY_PATH': params.ld_library_path}
        Execute(setup_java_patch, environment=env_dict, logoutput=True,
                user=params.kms_user)

        kms_lib_path = format('{kms_home}/ews/webapp/lib/')
        files = os.listdir(kms_lib_path)
        hadoop_jar_files = []
        for x in files:
            if x.startswith('hadoop-common') and x.endswith('.jar'):
                hadoop_jar_files.append(x)
        if len(hadoop_jar_files) != 0:
            for f in hadoop_jar_files:
                Execute((format('{java_home}/bin/jar'), '-uf',
                         format('{kms_home}/ews/webapp/lib/{f}'),
                         format('{kms_home}/ews/webapp/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory')),
                        user=params.kms_user)
                File(format('{kms_home}/ews/webapp/lib/{f}'),
                     owner=params.kms_user, group=params.kms_group)
def test_float__format__locale(self):
    # test locale support for __format__ code 'n'
    for i in range(-10, 10):
        x = 1234567890.0 * (10.0 ** i)
        self.assertEqual(locale.format('%g', x, grouping=True), format(x, 'n'))
        self.assertEqual(locale.format('%.10g', x, grouping=True), format(x, '.10n'))
def addr_convert(v_addr, mem, disk):
    # 15-bit virtual address: 5-bit PDE index | 5-bit PTE index | 5-bit offset
    bin_v_addr = format(int(v_addr, 16), '015b')
    pde_id = int(bin_v_addr[0:5], 2)
    pte_id = int(bin_v_addr[5:10], 2)
    offset = int(bin_v_addr[10:15], 2)
    base_addr = int("0xd80", 16)
    pde_addr = format(pde_id + base_addr, '012b')
    pfn = mem[int(pde_addr[0:7], 2)][int(pde_addr[7:12], 2)]
    pfn = format(int(pfn, 16), '08b')
    valid_pfn = pfn[0:1]
    print("pde_index: " + str(hex(pde_id)))
    print("pde_content: " + str(pfn) + " valid: " + str(valid_pfn) +
          " pfn: " + str(hex(int(pfn[1:8], 2))))
    if not int(valid_pfn):  # int() replaces the original eval() on '0'/'1'
        print("Fault!")
        return
    pte_base_addr = pfn[1:8]
    #print("pte_addr: " + str(pte_addr))
    #print(str(pte_base_addr))
    ptn = mem[int(pte_base_addr, 2)][pte_id]
    ptn = format(int(ptn, 16), '08b')
    valid_ptn = ptn[0:1]
    print("pte_index: " + str(hex(pte_id)))
    print("pte_content: " + str(ptn) + " valid: " + str(valid_ptn) +
          " ptn: " + str(hex(int(ptn[1:8], 2))))
    if not int(valid_ptn):
        print("Fault! Search in disk!")
        value = disk[int(ptn[1:8], 2)][offset]
        print("--> value: 0x" + str(value))
        return
    value = mem[int(str(ptn[1:8]), 2)][offset]
    print("--> value: 0x" + str(value))
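# Worked example for addr_convert above: v_addr "0x3a6f" is
# 0b011101001101111 as 15 bits, so pde_id = 0b01110 = 14,
# pte_id = 0b10011 = 19 and offset = 0b01111 = 15.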
def channelstats(self, irc, msg, args, channel):
    """[<channel>]

    Returns the statistics for <channel>.  <channel> is only necessary
    if the message isn't sent on the channel itself.
    """
    try:
        stats = self.db.getChannelStats(channel)
        curUsers = len(irc.state.channels[channel].users)
        s = format('On %s there %h been %i messages, containing %i '
                   'characters, %n, %n, and %n; '
                   '%i of those messages %s.  There have been '
                   '%n, %n, %n, %n, %n, and %n.  There %b currently %n '
                   'and the channel has peaked at %n.',
                   channel, stats.msgs, stats.msgs, stats.chars,
                   (stats.words, 'word'),
                   (stats.smileys, 'smiley'),
                   (stats.frowns, 'frown'),
                   stats.actions,
                   stats.actions == 1 and 'was an ACTION' or 'were ACTIONs',
                   (stats.joins, 'join'),
                   (stats.parts, 'part'),
                   (stats.quits, 'quit'),
                   (stats.kicks, 'kick'),
                   (stats.modes, 'mode', 'change'),
                   (stats.topics, 'topic', 'change'),
                   curUsers, (curUsers, 'user'),
                   (stats.users, 'user'))
        irc.reply(s)
    except KeyError:
        irc.error(format('I\'ve never been on %s.', channel))
def test_LilyPondParser__spanners__PhrasingSlur_02():
    """
    Swapped start and stop.
    """

    maker = abjad.NoteMaker()
    target = abjad.Container(maker([0] * 4, [(1, 4)]))
    abjad.phrasing_slur(target[2:])
    abjad.phrasing_slur(target[:3])

    assert format(target) == abjad.String.normalize(
        r"""
        {
            c'4
            \(
            c'4
            c'4
            \)
            \(
            c'4
            \)
        }
        """
    )

    string = r"\relative c' { c \( c c \( \) c \) }"
    parser = abjad.parser.LilyPondParser()
    result = parser(string)
    assert format(target) == format(result) and target is not result
def test_LilyPondParser__spanners__PhrasingSlur_01():
    """
    Successful slurs, showing single leaf overlap.
    """

    maker = abjad.NoteMaker()
    target = abjad.Container(maker([0] * 4, [(1, 4)]))
    abjad.phrasing_slur(target[2:])
    abjad.phrasing_slur(target[:3])

    assert format(target) == abjad.String.normalize(
        r"""
        {
            c'4
            \(
            c'4
            c'4
            \)
            \(
            c'4
            \)
        }
        """
    )

    parser = abjad.parser.LilyPondParser()
    result = parser(format(target))
    assert format(target) == format(result) and target is not result
def test_lilypondparsertools_LilyPondParser__spanners__Hairpin_01():

    target = Staff(scoretools.make_notes([0] * 5, [(1, 4)]))
    hairpin = Hairpin(descriptor='<')
    attach(hairpin, target[:3])
    hairpin = Hairpin(descriptor='>')
    attach(hairpin, target[2:])
    dynamic = Dynamic('ppp')
    attach(dynamic, target[-1])

    assert format(target) == stringtools.normalize(
        r'''
        \new Staff {
            c'4 \<
            c'4
            c'4 \! \>
            c'4
            c'4 \ppp
        }
        '''
        )

    parser = LilyPondParser()
    result = parser(format(target))
    assert format(target) == format(result) and target is not result
def test_lilypondparsertools_LilyPondParser__spanners__Hairpin_03():
    r'''Dynamics can terminate hairpins.
    '''

    target = Staff(scoretools.make_notes([0] * 3, [(1, 4)]))
    hairpin = Hairpin(descriptor='<')
    attach(hairpin, target[0:2])
    hairpin = Hairpin(descriptor='>')
    attach(hairpin, target[1:])
    dynamic = Dynamic('p')
    attach(dynamic, target[1])
    dynamic = Dynamic('f')
    attach(dynamic, target[-1])

    assert format(target) == stringtools.normalize(
        r'''
        \new Staff {
            c'4 \<
            c'4 \p \>
            c'4 \f
        }
        '''
        )

    string = r"\new Staff \relative c' { c \< c \p \> c \f }"
    parser = LilyPondParser()
    result = parser(string)
    assert format(target) == format(result) and target is not result
def pairedt(pairs, numSamples):
    results = dict()
    t, v = pairs.items()
    diffs = [t[1][x] - v[1][x] for x in range(len(t[1]))]
    plotDiffs(diffs)
    sampleSize = int(len(diffs)/numSamples)
    indices = range(len(diffs))  # Python 2: range() returns a shuffleable list
    random.shuffle(indices)
    mean_diffs = []
    i = 0
    for sample in range(numSamples):
        total_diff = 0
        for x in range(sampleSize):
            index = indices[i]
            total_diff += diffs[index]
            i += 1
        sample_avg = total_diff/float(sampleSize)
        mean_diffs.append(sample_avg)
    # normality check
    nt = stats.normaltest(mean_diffs)
    results['normal_p'] = format(round(nt[1], 4))
    # ttest
    t_prob = stats.ttest_1samp(mean_diffs, 0)
    results['ttest_t'] = format(round(t_prob[0], 4))
    results['ttest_p'] = format(round(t_prob[1], 4))
    # other stats
    results['avg_diff'] = format(round(np.mean(diffs), 4))
    results['numSamples'] = numSamples
    results['sampleSize'] = sampleSize
    results['num_pairs'] = len(pairs['tor'])
    return results
def test_lilypondparsertools_LilyPondParser__spanners__Hairpin_02():

    target = Container(scoretools.make_notes([0] * 4, [(1, 4)]))
    hairpin = Hairpin(descriptor='<')
    attach(hairpin, target[0:2])
    hairpin = Hairpin(descriptor='<')
    attach(hairpin, target[1:3])
    hairpin = Hairpin(descriptor='<')
    attach(hairpin, target[2:])

    assert format(target) == stringtools.normalize(
        r'''
        {
            c'4 \<
            c'4 \! \<
            c'4 \! \<
            c'4 \!
        }
        '''
        )

    string = r'''\relative c' { c \< c \< c \< c \! }'''
    parser = LilyPondParser()
    result = parser(string)
    assert format(target) == format(result) and target is not result
def shrinkSnarfer(self, irc, msg, match):
    channel = msg.args[0]
    if not irc.isChannel(channel):
        return
    if self.registryValue('shrinkSnarfer', channel):
        url = match.group(0)
        r = self.registryValue('nonSnarfingRegexp', channel)
        if r and r.search(url) is not None:
            self.log.debug('Matched nonSnarfingRegexp: %u', url)
            return
        minlen = self.registryValue('minimumLength', channel)
        try:
            cmd = self.registryValue('serviceRotation',
                                     channel, value=False)
            cmd = cmd.getService().capitalize()
        except ValueError:
            cmd = self.registryValue('default', channel).capitalize()
        if len(url) >= minlen:
            try:
                shorturl = getattr(self, '_get%sUrl' % cmd)(url)
            except (utils.web.Error, AttributeError, ShrinkError):
                self.log.info('Couldn\'t get shorturl for %u', url)
                return
            if self.registryValue('shrinkSnarfer.showDomain', channel):
                domain = ' (at %s)' % utils.web.getDomain(url)
            else:
                domain = ''
            if self.registryValue('bold'):
                s = format('%u%s', ircutils.bold(shorturl), domain)
            else:
                s = format('%u%s', shorturl, domain)
            m = irc.reply(s, prefixNick=False)
            if m is not None:
                m.tag('shrunken')
from resource_management.libraries.functions import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script

# a map of the Ambari role to the component name
# for use with /usr/hdp/current/<component>
SERVER_ROLE_DIRECTORY_MAP = {
  'ACCUMULO_MASTER': 'accumulo-master',
  'ACCUMULO_MONITOR': 'accumulo-monitor',
  'ACCUMULO_GC': 'accumulo-gc',
  'ACCUMULO_TRACER': 'accumulo-tracer',
  'ACCUMULO_TSERVER': 'accumulo-tablet',
  'ACCUMULO_CLIENT': 'accumulo-client'
}

component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "ACCUMULO_CLIENT")

config = Script.get_config()

conf_dir = format('/usr/hdp/current/{component_directory}/conf')
server_conf_dir = format('{conf_dir}/server')
pid_dir = config['configurations']['accumulo-env']['accumulo_pid_dir']
accumulo_user = config['configurations']['accumulo-env']['accumulo_user']

# Security related/required params
hostname = config['hostname']
security_enabled = config['configurations']['cluster-env']['security_enabled']
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
tmp_dir = Script.get_tmp_dir()
def _read(self):
    """Read incoming data on port"""
    try:
        while True:
            """Try to read data"""
            self._data_flowing.clear()
            packet = self._f.read(64)
            self._data_flowing.set()
            if not packet:
                _LOGGER.warn("PortScanner._read(): No packets")
                self._available = False
                return 'No Signal'
            self._state = True
            """If data can be read, scan for specific incoming packets"""
            if packet[:2] == b'\xd8\x08':
                _LOGGER.debug(
                    'PortScanner._read(): d8 08 packet, part 1: %s',
                    str(binascii.hexlify(packet[0:8]), 'utf-8'))
                _LOGGER.debug(
                    'PortScanner._read(): d8 08 packet, part 2: %s',
                    str(binascii.hexlify(packet[8:16]), 'utf-8'))
                byte3 = packet[2:3]  # 3rd byte unknown, always 00
                byte4 = packet[3:4]  # 4th byte, last part of id
                byte5 = packet[4:5]  # 5th byte, first part of id
                """Decode sensor ID from 4th and 5th byte, create a binary
                string and compare this with the last generated binary
                string. 0 = OFF, 1 = ON"""
                self._new_bin_string = self._hextobin(byte4 + byte5)
                _LOGGER.debug('PortScanner._read(): old_bin_string: %s',
                              self._old_bin_string)
                _LOGGER.debug('PortScanner._read(): new_bin_string: %s',
                              self._new_bin_string)
                for idx, (x, y) in enumerate(
                        zip(self._old_bin_string, self._new_bin_string)):
                    """Continue for devices which have been changed to ON or
                    OFF."""
                    if x != y:
                        dev_id = 'jablotron_' + str(idx)
                        entity_id = 'binary_sensor.' + dev_id
                        if y == '1':
                            _device_state = STATE_ON
                        else:
                            _device_state = STATE_OFF
                        """Only create or update a sensor when this packet is
                        the first d8 08 packet received since startup, or if
                        the d8 08 packet reports about 1 specific device (by
                        containing a 55 packet), or if a specific device is
                        not active anymore (y == '0')"""
                        # if self._available == False or (y == '1' and packet[10:12] == b'\x55\x09') or y == '0':
                        # if self._mode == 'd8' or (self._mode == '55' and (self._available == False or (y == '1' and packet[10:12] == b'\x55\x09') or y == '0')):
                        if self._mode == 'd8' or (
                                self._mode == '55' and
                                (self._available == False or
                                 (y == '1' and packet[10:11] == b'\x55') or
                                 y == '0')):
                            """ Create or update sensor """
                            self._hass.add_job(
                                self.async_see(dev_id, _device_state))
                """Retain last binary string"""
                _LOGGER.debug(
                    'PortScanner._read(): updating bin string to %s',
                    self._new_bin_string)
                self._old_bin_string = self._new_bin_string
                """Set available to True since we know which devices are ON"""
                self._available = True
            elif self._mode == '55' and packet[:2] in (b'\x55\x08', b'\x55\x09'):
                _LOGGER.debug('PortScanner._read(): %s packet, part 1: %s',
                              str(binascii.hexlify(packet[0:2]), 'utf-8'),
                              str(binascii.hexlify(packet[0:8]), 'utf-8'))
                _LOGGER.debug('PortScanner._read(): %s packet, part 2: %s',
                              str(binascii.hexlify(packet[0:2]), 'utf-8'),
                              str(binascii.hexlify(packet[8:16]), 'utf-8'))
                packetpart = packet[0:10]
                byte3 = packetpart[2:3]  # 3rd byte, state of device
                byte4 = packetpart[3:4]  # 4th byte, unknown
                byte5 = packetpart[4:5]  # 5th byte, first part of device ID
                byte6 = packetpart[5:6]  # 6th byte, second part of device ID
                """Only process specific state changes"""
                if byte3 in (b'\x00', b'\x01'):
                    # if byte4 in (b'\x6d', b'\x75', b'\x79', b'\x7d', b'\x88', b'\x80'):
                    # 6d, 75, 79, 7d, 88 and 80 are statusses for wireless sensors
                    # 8c and 84 are ON statusses for wired sensors
                    if byte4 in (b'\x6d', b'\x75', b'\x79', b'\x7d',
                                 b'\x80', b'\x84', b'\x88', b'\x8c'):
                        _device_state = STATE_ON
                    else:
                        _device_state = STATE_OFF
                    """Decode sensor ID from 5th and 6th byte"""
                    dec = int.from_bytes(
                        byte5 + byte6, byteorder=sys.byteorder
                    )  # turn to 'little' if sys.byteorder is wrong
                    i = int(dec / 64)
                    dev_id = 'jablotron_' + str(i)
                    entity_id = 'binary_sensor.' + dev_id
                    """ Create or update sensor """
                    self._hass.add_job(
                        self.async_see(dev_id, _device_state))
                elif byte3 == b'\x0c':
                    # we don't know yet. Must be some keep alive packet from a
                    # sensor who hasn't been triggered in a loooong time
                    _LOGGER.debug(
                        "Unrecognized %s 0c packet: %s %s %s %s",
                        str(binascii.hexlify(packet[0:2]), 'utf-8'),
                        str(binascii.hexlify(byte3), 'utf-8'),
                        str(binascii.hexlify(byte4), 'utf-8'),
                        str(binascii.hexlify(byte5), 'utf-8'),
                        str(binascii.hexlify(byte6), 'utf-8'))
                    _LOGGER.debug("Probably Control Panel OFF?")
                elif byte3 == b'\x2e':
                    # we don't know yet. Must be some keep alive packet from a
                    # sensor who hasn't been triggered in a loooong time
                    _LOGGER.debug(
                        "Unrecognized %s 2e packet: %s %s %s %s",
                        str(binascii.hexlify(packet[0:2]), 'utf-8'),
                        str(binascii.hexlify(byte3), 'utf-8'),
                        str(binascii.hexlify(byte4), 'utf-8'),
                        str(binascii.hexlify(byte5), 'utf-8'),
                        str(binascii.hexlify(byte6), 'utf-8'))
                    _LOGGER.debug("Probably Control Panel ON?")
                elif byte3 == b'\x4f':
                    # we don't know yet. Must be some keep alive packet from a
                    # sensor who hasn't been triggered in a loooong time
                    _LOGGER.debug(
                        "Unrecognized %s 4f packet: %s %s %s %s",
                        str(binascii.hexlify(packet[0:2]), 'utf-8'),
                        str(binascii.hexlify(byte3), 'utf-8'),
                        str(binascii.hexlify(byte4), 'utf-8'),
                        str(binascii.hexlify(byte5), 'utf-8'),
                        str(binascii.hexlify(byte6), 'utf-8'))
                    _LOGGER.debug(
                        "Probably some keep alive packet from a sensor which "
                        "hasn't been triggered recently")
                else:
                    _LOGGER.debug(
                        "New unknown %s packet: %s %s %s %s",
                        str(binascii.hexlify(packet[0:2]), 'utf-8'),
                        str(binascii.hexlify(byte3), 'utf-8'),
                        str(binascii.hexlify(byte4), 'utf-8'),
                        str(binascii.hexlify(byte5), 'utf-8'),
                        str(binascii.hexlify(byte6), 'utf-8'))
            else:
                pass
                # _LOGGER.info("Unknown packet: %s", packet)
                # self._stop.set()
    except (IndexError, FileNotFoundError, IsADirectoryError,
            UnboundLocalError, OSError):
        _LOGGER.warning(
            "PortScanner._read(): File or data not present at the moment: %s",
            self._file_path)
        return 'Failed'
    except Exception as ex:
        _LOGGER.error('PortScanner._read(): Unexpected error 3: %s',
                      format(ex))
        return 'Failed'
    # note: `state` is never assigned above in the original, so reaching
    # this line would raise a NameError
    return state
"User-Agent": "Firefox/47.0", } request = urllib.request.Request(url=url, headers=headers) response = urllib.request.urlopen(request) img = response.read() if(os.path.isfile(dir + "\\" + ) == False): with open(dir+"\\"+,'wb') as file: file.write(img) else: pass return True except OSError as err: print("not exist!") with open(dir+.txt","a",encoding='utf-8') as errfile: errstr = "error:"+ url +" not exist\n" if(DEBUG): errstr += " err " + str(format(err)) + "\n" errfile.write(errstr) pass return False def main(): print("hello") if __name__ == '__main__': main() os.system("pause")
print(format('*', '*<15'), 'ENTRY SECTION', format('*', '*<15'))
m1 = float(input("marks in examination 1 (out of 100) => "))
m2 = float(input("marks in examination 2 (out of 100) => "))
m3 = float(input("mark in sports activity (out of 50) => "))
m4 = float(input("mark in activity 1 (out of 20) => "))
m5 = float(input("mark in activity 2 (out of 20) => "))
m6 = float(input("mark in activity 3 (out of 20) => "))
em = m1 + m2
am = m4 + m5 + m6
#examination percentage is 50%
#activity percentage is 30%
#sports activity is 20%
ep = float(em*0.5/200)
sp = float(m3*0.2/50)
ap = float(am*0.3/60)
print(format('*', '*<15'), 'RESULT', format('*', '*<15'))
print("marks in first examination (out of 100) is =>", m1)
print("marks in second examination (out of 100) is =>", m2)
print(format('*', '*<30'))
print("total marks of examinations is =>", em)
print(format('*', '*<30'))
print("marks in sports activity (out of 50) is =>", m3)
print(format('*', '*<30'))
print("total marks of sports activity is =>", m3)  # was `em` (copy-paste slip)
print(format('*', '*<30'))
print("marks in activity 1 (out of 20) is =>", m4)
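# Worked example of the weighting above: em = 160, m3 = 40 and am = 48 give
# ep = 160*0.5/200 = 0.40, sp = 40*0.2/50 = 0.16 and ap = 48*0.3/60 = 0.24,
# i.e. an overall weighted score of 0.80 (80%).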
def getFormattedCurrency(value):
    return "$" + format((value / 100), ',.2f')
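# Usage sketch for getFormattedCurrency above: amounts are integer cents
# (Python 3 division assumed), so getFormattedCurrency(123456) returns
# "$1,234.56".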
def floatToString(self, f: float):
    ctx = decimal.Context()
    ctx.prec = 12
    d1 = ctx.create_decimal(repr(f))
    return format(d1, 'f')
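# Usage sketch for floatToString above: repr() keeps the shortest
# round-trip form of the float, so converting 0.1 yields "0.1" rather than
# its 17-digit binary expansion, while up to 12 significant digits are kept.
# obj.floatToString(0.1)   # -> "0.1"  (obj is a hypothetical host instance)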
def write_compile_file(shellcode_list, compile_file, shellcode_total_length,
                       key, key_total, number_sections):
    with open(compile_file, 'r+') as f:
        lines = f.readlines()
    for i in range(len(lines)):
        if "NEED TO PLACE KEY TOTAL HERE" in lines[i]:
            lines[i+1] = "unsigned char decrypted_code[{}];\n".format(key_total)
        if "Place key here" in lines[i]:
            lines[i+1] = "unsigned char key[] = \"{}\";\n".format(key)
        if "place encrypted byte pieces here" in lines[i]:
            for index in range(len(shellcode_list)):
                shellcode_list[index] = '\\x'.join(format(x, '02x') for x in shellcode_list[index])
                shellcode_list[index] = "\\x" + shellcode_list[index]  # add \x to beginning of each element, since previous line does not do this
                shellcode_list[index] = shellcode_list[index].replace(" ", "")  # remove any spaces if they exist
                lines[i+1+index] = "unsigned char string_{0}[] = \"{1}\";\n".format(index, shellcode_list[index])
        if "place memcpy operations here" in lines[i]:
            lines[i+1] = "unsigned char string[{}];\n".format(shellcode_total_length)
            mem_length = 0  # how much space we need to move over to continue allocating in memory
            for x in range(len(shellcode_list)):
                length_piece = round(len(shellcode_list[x])/4)
                if x != 0:
                    length_previous_piece = round(len(shellcode_list[x-1])/4)
                    if x == 1:
                        mem_length = length_previous_piece
                        lines[i+3] = "memcpy(string + {0}, string_{1}, {2});\n".format(mem_length, x, length_piece)
                    else:
                        mem_length += length_previous_piece
                        lines[i+2+x] = "memcpy(string + {0}, string_{1}, {2});\n".format(mem_length, x, length_piece)
                else:
                    lines[i+2] = "memcpy(string, string_0, {});\n".format(length_piece)
        if "place byte count here" in lines[i]:
            # manually put in length multiple times to avoid defining a var for it in c++ code
            lines[i+1] = "std::vector<unsigned char> bytes(string, string + {});\n".format(shellcode_total_length)
        if "place number of sections here" in lines[i]:
            lines[i+1] = "int number_sections = {};\n".format(number_sections)
        if "place vector operations here" in lines[i]:
            # TODO START HERE WHEN YOU RESUME
            # first measure how big the final piece of code to be decrypted will be
            final_section_size = shellcode_total_length % key_total
            if final_section_size == 0:
                final_section_size = key_total
            print("There are {} sections".format(number_sections))
            # define values for the next for loop
            previous_section_size = key_total
            section_size = key_total
            for x in range(number_sections):
                key_index = x % (int(len(key)/4))
                if x != 0:
                    if x == 1:
                        section_size += key_total
                        lines[i+3] = "std::vector<unsigned char> section_{0} = std::vector<unsigned char>(bytes.begin() + {1}, bytes.begin() + {2});\n".format(x, key_total, section_size)
                        lines[i+4] = "std::vector<unsigned char> section_1_decrypted = DecryptBytes(section_1, key[1], 1, {}, sizeof(key));\n".format(key_total)
                    elif x == number_sections-1:  # final piece
                        section_size += final_section_size
                        previous_section_size += key_total
                        lines[i+(x*2)+1] = "std::vector<unsigned char> section_{0} = std::vector<unsigned char>(bytes.begin() + {2}, bytes.begin() + {3});\n".format(x, final_section_size, previous_section_size, section_size)
                        lines[i+(x*2)+2] = "std::vector<unsigned char> section_{0}_decrypted = DecryptBytes(section_{0}, key[{2}], {0}, {1}, sizeof(key));\n".format(x, final_section_size, key_index)
                    else:
                        section_size += key_total
                        previous_section_size += key_total
                        lines[i+(x*2)+1] = "std::vector<unsigned char> section_{0} = std::vector<unsigned char>(bytes.begin() + {2}, bytes.begin() + {3});\n".format(x, key_total, previous_section_size, section_size)
                        lines[i+(x*2)+2] = "std::vector<unsigned char> section_{0}_decrypted = DecryptBytes(section_{0}, key[{2}], {0}, {1}, sizeof(key));\n".format(x, key_total, key_index)
                else:
                    lines[i+1] = "std::vector<unsigned char> section_{0} = std::vector<unsigned char>(bytes.begin(), bytes.begin() + {1});\n".format(x, key_total)
                    lines[i+2] = "std::vector<unsigned char> section_0_decrypted = DecryptBytes(section_0, key[0], 0, {}, sizeof(key));\n".format(key_total)
        if "place copy operations here" in lines[i]:
            # vector appending/copying piece
            for x in range(number_sections):
                if x != 0:
                    lines[i+x+1] = "std::copy(section_{0}_decrypted.begin(), section_{0}_decrypted.end(), std::back_inserter(code_decrypted));\n".format(x)
                else:
                    lines[i+1] = "std::vector<unsigned char> code_decrypted(section_0_decrypted);\n"
        if "place size here" in lines[i]:
            lines[i+1] = "\tSIZE_T size = {};\n".format(shellcode_total_length)
    # write lines to file
    with open(compile_file, 'w+') as f:
        f.writelines(lines)
    return
kafka_ranger_plugin_repo = {
    'isEnabled': 'true',
    'configs': ranger_plugin_config,
    'description': 'kafka repo',
    'name': repo_name,
    'repositoryType': 'kafka',
    'type': 'kafka',
    'assetType': '1'
}

# For curl command in ranger plugin to get db connector
jdk_location = config['hostLevelParams']['jdk_location']
java_share_dir = '/usr/share/java'
if xa_audit_db_flavor and xa_audit_db_flavor == 'mysql':
    jdbc_symlink_name = "mysql-jdbc-driver.jar"
    jdbc_jar_name = "mysql-connector-java.jar"
    audit_jdbc_url = format('jdbc:mysql://{xa_db_host}/{xa_audit_db_name}')
    jdbc_driver = "com.mysql.jdbc.Driver"
elif xa_audit_db_flavor and xa_audit_db_flavor == 'oracle':
    jdbc_jar_name = "ojdbc6.jar"
    jdbc_symlink_name = "oracle-jdbc-driver.jar"
    audit_jdbc_url = format('jdbc:oracle:thin:\@//{xa_db_host}')
    jdbc_driver = "oracle.jdbc.OracleDriver"
elif xa_audit_db_flavor and xa_audit_db_flavor == 'postgres':
    jdbc_jar_name = "postgresql.jar"
    jdbc_symlink_name = "postgres-jdbc-driver.jar"
    audit_jdbc_url = format('jdbc:postgresql://{xa_db_host}/{xa_audit_db_name}')
    jdbc_driver = "org.postgresql.Driver"
elif xa_audit_db_flavor and xa_audit_db_flavor == 'mssql':
    jdbc_jar_name = "sqljdbc4.jar"
    jdbc_symlink_name = "mssql-jdbc-driver.jar"
    audit_jdbc_url = format('jdbc:sqlserver://{xa_db_host};databaseName={xa_audit_db_name}')
def get_answer(self):
    r = self.p1 + self.p2 + self.p3 + self.p4 + self.p5 + self.p6 + self.p7 + self.p8
    res = r / 8
    return str(format(round(res, 3), '.2f'))
def __str__(self):
    return "Student number {0}: {1} {2} born the {3}, can be \
reached at {4}".format(self.id, self.firstname, self.surname,
                       self.birthday, self.email)
def change_notification(self, change_code, notifying_object, parameter):
    if change_code == 'building_started':
        self.document.change_build_state('building_in_progress')

    if change_code == 'reset_timer':
        self.document.build_widget.view.reset_timer()
        self.document.build_widget.view.label.set_text('0:00')

    if change_code == 'building_stopped':
        self.document.show_build_state('')
        self.document.change_build_state('idle')

    if change_code == 'building_finished':
        result_blob = parameter
        if result_blob['build'] != None or result_blob['forward_sync'] != None:
            if result_blob['build'] != None:
                try:
                    self.document.preview.set_pdf_filename(result_blob['build']['pdf_filename'])
                except KeyError:
                    pass
                self.document.add_change_code('pdf_updated')

            if result_blob['forward_sync'] != None:
                self.document.preview.set_synctex_rectangles(result_blob['forward_sync'])
                self.document.show_build_state('')

            if result_blob['build'] != None:
                build_blob = result_blob['build']

                if build_blob['error'] == 'interpreter_missing':
                    self.document.show_build_state('')
                    self.document.change_build_state('idle')
                    if DialogLocator.get_dialog('interpreter_missing').run(build_blob['error_arg']):
                        DialogLocator.get_dialog('preferences').run()
                    return

                if build_blob['error'] == 'interpreter_not_working':
                    self.document.show_build_state('')
                    self.document.change_build_state('idle')
                    if DialogLocator.get_dialog('building_failed').run(build_blob['error_arg']):
                        DialogLocator.get_dialog('preferences').run()
                    return

                build_blob['log_messages']['BibTeX'] = build_blob['bibtex_log_messages']
                self.document.set_build_log_items(build_blob['log_messages'])
                self.document.build_time = time.time() - self.document.last_build_start_time

                error_count = self.document.get_error_count()
                if error_count > 0:
                    error_color_rgba = ServiceLocator.get_color_manager().get_theme_color('error_color')
                    # e.g. RGBA(1.0, 0.0, 0.0) becomes '#ff0000'
                    error_color = '#' + format(int(error_color_rgba.red * 255), '02x') \
                        + format(int(error_color_rgba.green * 255), '02x') \
                        + format(int(error_color_rgba.blue * 255), '02x')
                    str_errors = ngettext('<span color="{color}">Failed</span> ({amount} error)!',
                                          '<span color="{color}">Failed</span> ({amount} errors)!',
                                          error_count)
                    message = str_errors.format(color=error_color, amount=str(error_count))
                    self.document.show_build_state(message)
                else:
                    self.document.show_build_state(_('Success!'))

                self.document.set_has_synctex_file(build_blob['has_synctex_file'])
                self.document.has_been_built = True

        elif result_blob['backward_sync'] != None:
            if not self.document.root_is_set:
                if result_blob['backward_sync']['filename'] == self.document.get_filename():
                    self.document.set_synctex_position(result_blob['backward_sync'])
            elif self.document.is_root:
                workspace = ServiceLocator.get_workspace()
                document = workspace.open_document_by_filename(result_blob['backward_sync']['filename'])
                if document != None:
                    document.set_synctex_position(result_blob['backward_sync'])

        self.document.change_build_state('idle')

        if result_blob['build'] != None:
            self.document.invalidate_build_log()
def service(componentName, action='start', serviceName='yarn'):
    import params

    if serviceName == 'mapreduce' and componentName == 'historyserver':
        delete_pid_file = True
        daemon = format("{mapred_bin}/mr-jobhistory-daemon.sh")
        pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-{componentName}.pid")
        usr = params.mapred_user
        log_dir = params.mapred_log_dir
    else:
        # !!! yarn-daemon.sh deletes the PID for us; if we remove it the script
        # may not work correctly when stopping the service
        delete_pid_file = False
        daemon = format("{yarn_bin}/yarn-daemon.sh")
        pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-{componentName}.pid")
        usr = params.yarn_user
        log_dir = params.yarn_log_dir

    cmd = format("export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {daemon} --config {hadoop_conf_dir}")

    if action == 'start':
        daemon_cmd = format("{ulimit_cmd} {cmd} start {componentName}")
        check_process = as_sudo(["test", "-f", pid_file]) + " && " \
            + as_sudo(["pgrep", "-F", pid_file])

        # Remove the pid file if its corresponding process is not running.
        File(pid_file, action="delete", not_if=check_process)

        if componentName == 'timelineserver' and serviceName == 'yarn':
            File(params.ats_leveldb_lock_file,
                 action="delete",
                 only_if=format("ls {params.ats_leveldb_lock_file}"),
                 not_if=check_process,
                 ignore_failures=True)

        try:
            # Attempt to start the process. Internally, this is skipped if the process is already running.
            Execute(daemon_cmd, user=usr, not_if=check_process)

            # Ensure that the process with the expected PID exists.
            Execute(check_process,
                    not_if=check_process,
                    tries=5,
                    try_sleep=1,
                    )
        except:
            show_logs(log_dir, usr)
            raise

    elif action == 'stop':
        daemon_cmd = format("{cmd} stop {componentName}")
        try:
            Execute(daemon_cmd, user=usr)
        except:
            show_logs(log_dir, usr)
            raise

        # !!! yarn-daemon doesn't need us to delete PIDs
        if delete_pid_file is True:
            File(pid_file, action="delete")

    elif action == 'refreshQueues':
        rm_kinit_cmd = params.rm_kinit_cmd
        refresh_cmd = format("{rm_kinit_cmd} export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {yarn_container_bin}/yarn rmadmin -refreshQueues")

        Execute(refresh_cmd,
                user=usr,
                timeout=20,  # when Yarn is not started command hangs forever and should be killed
                tries=5,
                try_sleep=5,
                timeout_kill_strategy=TerminateStrategy.KILL_PROCESS_GROUP,  # the process cannot be simply killed by 'kill -15', so kill the pg group instead.
                )
    # writing the image to the screen
    font = cv2.FONT_HERSHEY_SIMPLEX
    #cv2.putText(img, str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S")), (100,500), font, 4,(255,255,255),2)
    cv2.putText(img, "DateTime - "+str(datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")), (5,25), font, 0.5, (255,255,255))
    cv2.imshow('Mapping Faces within the Image', img)

    # writing the image to a file
    if len(faces) > 0:
        # Older versions of cv2.imwrite
        #cv2.imwrite("temp"+str(time.strftime("%d/%m/%y-%H%M%S"))+".jpg",img)
        #cv2.imwrite("temp"+str(datetime.datetime.now())+".jpg",img)
        #cv2.imwrite("temp"+str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S"))+".jpg",img)
        #cv2.imwrite("FaceCaptureWarrenPi-"+str(datetime.datetime.now())+".jpg",img)
        # current version of cv2.imwrite
        imagename = "FaceCaptureWarrenPi-" + format(str(datetime.datetime.now())) + ".jpg"
        writepath = "/home/pi/Downloads/TW_Experiments/Python_Projects/RaspiPythonProjects/OpenCV/CaptureVideoStream/imagecapture/" + imagename
        cv2.imwrite(writepath, img)
        print "Captured image to file !!!"
        # Uploading files to AWS
        with open(writepath, 'rb') as data:
            s3.upload_fileobj(data, "tw37-opencv", imagename)

    # looking for escape sequence
    key = cv2.waitKey(1) & 0xFF
    if key == ord("q"):
        print "Quitting....hold on"
        break

    # Clearing the buffer before loading the next image
    c.append(cirq.measure(*input_qubit, key='result'))
    return c


def bitstring(bits):
    return ''.join(str(int(b)) for b in bits)


if __name__ == '__main__':
    qubit_count = 5

    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count, input_qubits)
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count = 2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    frequencies = result.histogram(key='result', fold_func=bitstring)
    writefile = open("../data/startCirq1579.csv", "w+")
    print(format(frequencies), file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit, file=writefile)
    writefile.close()
def floatn(x, n=3):
    # format floats to n decimals
    return float(format(x, '.%gf' % n))
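# Usage sketch for floatn above: '.%gf' % 3 builds the format spec '.3f',
# so floatn(3.14159) == 3.142 and floatn(2.5, n=1) == 2.5.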
def today_date(day=0):
    return format(today_times(day)[0], prefs['dates']['today_format'])
    # Compute precision, recall, and F1 score.
    if n_selected == 0 or true_positives == 0:
        precision = 0.0
        recall = 0.0
        f1_score = 0.0
    else:
        precision = 100 * true_positives / n_selected
        recall = 100 * true_positives / n_relevant
        f1_score = 2 * precision * recall / (precision + recall)

    # Build row.
    row = [
        dataset_name,
        aug_kind_str,
        test_unit_str,
        str(trial_id),
        predict_unit_str,
        str(int(np.round(1000 * tolerance))).rjust(4),
        format(threshold, ".10f"),
        str(n_relevant).rjust(5),
        str(n_selected).rjust(6),
        str(true_positives).rjust(5),
        str(false_positives).rjust(6),
        str(false_negatives).rjust(5),
        format(precision, ".6f").rjust(10),
        format(recall, ".6f").rjust(10),
        format(f1_score, ".6f").rjust(10)
    ]

    # Write row.
    csv_file = open(metrics_path, 'a')
    csv_writer = csv.writer(csv_file, delimiter=',')
    csv_writer.writerow(row)
    csv_file.close()
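# Worked example of the metrics above with hypothetical counts
# true_positives=8, n_selected=10, n_relevant=12:
# precision = 100*8/10 = 80.0, recall = 100*8/12 ≈ 66.67 and
# f1_score = 2*80*66.67/(80+66.67) ≈ 72.73.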
def lineBot(op):
    try:
        if op.type == 0:
            return
        if op.type == 5:
            contact = cl.getContact(param2)
            print("[ 5 ] 通知添加好友 名字: " + contact.displayName)
            if settings["autoAdd"] == True:
                cl.sendMessage(op.param1, "你好 {} 謝謝你加本機為好友 :D\n 本機為CoCo製作\n line.me/ti/p/1MRX_Gjbmv".format(str(cl.getContact(op.param1).displayName)))
        if op.type == 11:
            group = cl.getGroup(op.param1)
            contact = cl.getContact(op.param2)
            print("[11]有人打開群組網址 群組名稱: " + str(group.name) + "\n" + op.param1 + "\n名字: " + contact.displayName)
            if op.param1 in settings["qrprotect"]:
                if op.param2 in admin:
                    pass
                else:
                    gs = cl.getGroup(op.param1)
                    cl.kickoutFromGroup(op.param1, [op.param2])
                    gs.preventJoinByTicket = True
                    cl.updateGroup(gs)
        if op.type == 13:
            contact1 = cl.getContact(op.param2)
            contact2 = cl.getContact(op.param3)
            group = cl.getGroup(op.param1)
            print("[ 13 ] 通知邀請群組: " + str(group.name) + "\n邀請者: " + contact1.displayName + "\n被邀請者" + contact2.displayName)
            if op.param1 in settings["inviteprotect"]:
                if op.param2 in admin:
                    pass
                else:
                    cl.cancelGroupInvitation(op.param3)
            if clMID in op.param3:
                if settings["autoJoin"] == True:
                    print("進入群組: " + str(group.name))
                    cl.acceptGroupInvitation(op.param1)
        if op.type == 19:
            contact1 = cl.getContact(op.param2)
            group = cl.getGroup(op.param1)
            contact2 = cl.getContact(op.param3)
            print("[19]有人把人踢出群組 群組名稱: " + str(group.name) + "\n" + op.param1 + "\n踢人者: " + contact1.displayName + "\nMid: " + contact1.mid + "\n被踢者" + contact2.displayName + "\nMid:" + contact2.mid)
            # The original repeated the block below verbatim for mid1, mid2,
            # mid3, mid4 and mid6; collapsed into one loop with identical
            # behaviour.
            bot_mids = [mid1, mid2, mid3, mid4, mid6]
            for bot_mid in bot_mids:
                if bot_mid in op.param3:
                    if op.param2 in admin:
                        pass
                    else:
                        cl.kickoutFromGroup(op.param1, [op.param2])
                        settings["blacklist"][op.param2] = True
                        group = cl.getGroup(op.param1)
                        try:
                            group.preventedJoinByTicket = False
                            cl.updateGroup(group)
                            str1 = cl.reissueGroupTicket(op.param1)
                        except Exception as e:
                            print(e)
                        for target in bot_mids:
                            cl.sendMessage(target, "/jgurlx gid: " + op.param1 + " gid " + "url: http://line.me/R/ti/g/" + str1 + " url")
        if op.type == 24:
            if settings["autoLeave"] == True:
                cl.leaveRoom(op.param1)
        if op.type == 26 or op.type == 25:
            msg = op.message
            text = msg.text
            msg_id = msg.id
            receiver = msg.to
            sender = msg._from
            if msg.toType == 0:
                if sender != cl.profile.mid:
                    to = sender
                else:
                    to = receiver
            else:
                to = receiver
            if msg.contentType == 0:
                if text is None:
                    return
                if sender in admin:
                    if "Fbc:" in msg.text:
                        bctxt = text.replace("Fbc:", "")
                        t = cl.getAllContactIds()
                        for manusia in t:
                            cl.sendMessage(manusia, (bctxt))
                    elif "Gbc:" in msg.text:
                        bctxt = text.replace("Gbc:", "")
                        n = cl.getGroupIdsJoined()
                        for manusia in n:
                            cl.sendMessage(manusia, (bctxt))
                    elif 'invitebot' in text.lower():
                        if msg.toType == 2:
                            group = cl.getGroup(to)
                            try:
                                group.preventedJoinByTicket = False
                                cl.updateGroup(group)
                                str1 = cl.reissueGroupTicket(to)
                            except Exception as e:
                                print(e)
                            # again collapsed from five verbatim sends
                            for bot_mid in [mid1, mid2, mid3, mid4, mid6]:
                                cl.sendMessage(bot_mid, "/jgurlx gid: " + msg.to + " gid " + "url: http://line.me/R/ti/g/" + str1 + " url")
                    elif text.startswith("/jgurlx"):
                        str1 = find_between_r(msg.text, "gid: ", " gid")
                        str2 = find_between_r(msg.text, "url: http://line.me/R/ti/g/", " url")
                        print("1")
                        cl.acceptGroupInvitationByTicket(str1, str2)
                        JoinedGroups.append(str1)
                        group = cl.getGroup(str1)
                        try:
                            time.sleep(0.1)
                            cl.reissueGroupTicket(str1)
                            group.preventedJoinByTicket = True
                            cl.updateGroup(group)
                        except Exception as e:
                            print(e)
                    elif "Ri " in msg.text:
                        Ri0 = text.replace("Ri ", "")
                        Ri1 = Ri0.rstrip()
                        Ri2 = Ri1.replace("@", "")
                        Ri3 = Ri2.rstrip()
                        _name = Ri3
                        gs = cl.getGroup(msg.to)
                        targets = []
                        for s in gs.members:
                            if _name in s.displayName:
                                targets.append(s.mid)
                        if targets == []:
                            pass
                        else:
                            for target in targets:
                                if target in admin:
                                    pass
                                else:
                                    try:
                                        cl.kickoutFromGroup(to, [target])
                                        cl.findAndAddContactsByMid(target)
                                        cl.inviteIntoGroup(to, [target])
                                    except:
                                        pass
                    elif "Tk " in msg.text:
                        key = eval(msg.contentMetadata["MENTION"])
                        key["MENTIONEES"][0]["M"]
                        targets = []
                        for x in key["MENTIONEES"]:
                            targets.append(x["M"])
                        for target in targets:
                            if target in admin:
                                pass
                            else:
                                try:
                                    cl.kickoutFromGroup(to, [target])
                                except:
                                    pass
                    elif "Mk " in msg.text:
                        mk0 = msg.text.replace("Mk ", "")
                        mk1 = mk0.rstrip()
                        mk2 = mk1.replace("@", "")
                        mk3 = mk2.rstrip()
                        _name = mk3
                        gs = cl.getGroup(msg.to)
                        targets = []
                        for s in gs.members:
                            if _name in s.displayName:
                                targets.append(s.mid)
                        if targets == []:
                            pass
                        else:
                            for target in targets:
                                try:
                                    cl.kickoutFromGroup(msg.to, [target])
                                except:
                                    pass
                    elif "Nk " in msg.text:
                        _name = text.replace("Nk ", "")
                        gs = cl.getGroup(to)
                        targets = []
                        for g in gs.members:
                            if _name in g.displayName:
                                targets.append(g.mid)
                        if targets == []:
                            pass
                        else:
                            for target in targets:
                                if target in admin:
                                    pass
                                else:
                                    try:
                                        cl.kickoutFromGroup(to, [target])
                                    except:
                                        pass
                    elif "Vk " in msg.text:
                        vkick0 = msg.text.replace("Vk ", "")
                        vkick1 = vkick0.rstrip()
                        vkick2 = vkick1.replace("@", "")
                        vkick3 = vkick2.rstrip()
                        _name = vkick3
                        gs = cl.getGroup(msg.to)
                        targets = []
                        for s in gs.members:
                            if _name in s.displayName:
                                targets.append(s.mid)
                        if targets == []:
                            pass
                        else:
                            for target in targets:
                                try:
                                    cl.kickoutFromGroup(msg.to, [target])
                                    cl.findAndAddContactsByMid(target)
                                    cl.inviteIntoGroup(msg.to, [target])
                                    cl.cancelGroupInvitation(msg.to, [target])
                                except:
                                    pass
                    elif "Zk" in msg.text:
                        gs = cl.getGroup(to)
                        targets = []
                        for g in gs.members:
                            if g.displayName in "":
                                targets.append(g.mid)
                        if targets == []:
                            pass
                        else:
                            for target in targets:
                                if target in admin:
                                    pass
                                else:
                                    try:
                                        cl.kickoutFromGroup(to, [target])
                                    except:
                                        pass
                    elif msg.text in ["c", "C", "cancel", "Cancel"]:
                        if msg.toType == 2:
                            X = cl.getGroup(msg.to)
                            if X.invitee is not None:
                                gInviMids = (contact.mid for contact in X.invitee)
                                ginfo = cl.getGroup(msg.to)
                                sinvitee = str(len(ginfo.invitee))
                                start = time.time()
                                for cancelmod in gInviMids:
                                    cl.cancelGroupInvitation(msg.to, [cancelmod])
                                elapsed_time = time.time() - start
                                cl.sendMessage(to, "已取消完成\n取消時間: %s秒" % (elapsed_time))
                                cl.sendMessage(to, "取消人數:" + sinvitee)
                            else:
                                cl.sendMessage(to, "沒有任何人在邀請中!!")
                    elif text.lower() == 'gcancel':
                        gid = cl.getGroupIdsInvited()
                        start = time.time()
                        for i in gid:
                            cl.rejectGroupInvitation(i)
                        elapsed_time = time.time() - start
                        cl.sendMessage(to, "全部群組邀請已取消")
                        cl.sendMessage(to, "取消時間: %s秒" % (elapsed_time))
                    elif "Gn " in msg.text:
                        if msg.toType == 2:
                            X = cl.getGroup(msg.to)
                            X.name = msg.text.replace("Gn ", "")
                            cl.updateGroup(X)
                        else:
                            cl.sendMessage(msg.to, "無法使用在群組外")
                    elif "Inv " in msg.text:
                        midd = msg.text.replace("Inv ", "")
                        cl.findAndAddContactsByMid(midd)
                        cl.inviteIntoGroup(msg.to, [midd])
                    elif text.lower() == 'kill ban':
                        if msg.toType == 2:
                            group = cl.getGroup(to)
                            gMembMids = [contact.mid for contact in group.members]
                            matched_list = []
                            for tag in settings["blacklist"]:
                                matched_list += filter(lambda str: str == tag, gMembMids)
                            if matched_list == []:
                                print("1")
                                cl.sendMessage(to, "沒有黑名單")
                                return
                            for jj in matched_list:
                                cl.kickoutFromGroup(to, [jj])
                            cl.sendMessage(to, "黑名單以踢除")
                    elif "/invitemeto:" in msg.text:
                        gid = msg.text.replace("/invitemeto:", "")
                        if gid == "":
                            cl.sendMessage(to, "請輸入群組ID")
                        else:
                            try:
                                cl.findAndAddContactsByMid(msg.from_)
                                cl.inviteIntoGroup(gid, [msg.from_])
                            except:
                                cl.sendMessage(to, "我不在那個群組裡")
                    elif msg.text in ["分機5離開全部群組"]:
                        gid = cl.getGroupIdsJoined()
                        for i in gid:
                            cl.leaveGroup(i)
                        cl.sendText(msg.to, "已離開全部群組")
                    elif msg.text in ["Friendlist"]:
                        anl = cl.getAllContactIds()
                        ap = ""
                        for q in anl:
                            ap += "• " + cl.getContact(q).displayName + "\n"
                        cl.sendMessage(msg.to, "「 朋友列表 」\n" + ap + "人數 : " + str(len(anl)))
                    elif text.lower() == 'speed':
                        time0 = timeit.timeit('"-".join(str(n) for n in range(100))', number=10000)
                        str1 = str(time0)
                        start = time.time()
                        cl.sendMessage(to, '處理速度\n' + str1 + '秒')
                        elapsed_time = time.time() - start
                        cl.sendMessage(to, '指令反應\n' + format(str(elapsed_time)) + '秒')
                    elif text.lower() == 'rebot':
                        cl.sendMessage(to, "重新啟動")
                        restartBot()
                    elif text.lower() == 'runtime':
                        timeNow = time.time()
                        runtime = timeNow - botStart
                        runtime = format_timespan(runtime)
                        cl.sendMessage(to, "機器運行時間 {}".format(str(runtime)))
                    elif text.lower() == 'about':
                        try:
                            arr = []
                            owner = "u28d781fa3ba9783fd5144390352b0c24"
                            creator = cl.getContact(owner)
                            contact = cl.getContact(clMID)
                            grouplist = cl.getGroupIdsJoined()
                            contactlist = cl.getAllContactIds()
                            blockedlist = cl.getBlockedContactIds()
                            ret_ = "╔══[ 關於自己 ]"
                            ret_ += "\n╠ 名稱 : {}".format(contact.displayName)
                            ret_ += "\n╠ 群組 : {}".format(str(len(grouplist)))
                            ret_ += "\n╠ 好友 : {}".format(str(len(contactlist)))
                            ret_ += "\n╠ 黑單 : {}".format(str(len(blockedlist)))
                            ret_ += "\n╠══[ 關於機器 ]"
                            ret_ += "\n╠ 版本 : 淫蕩6主機測試版"
                            ret_ += "\n╠ 作者 : {}".format(creator.displayName)
                            ret_ += "\n╚══[ 未經許可禁止重製 ]"
                            cl.sendMessage(to, str(ret_))
                        except Exception as e:
                            cl.sendMessage(msg.to, str(e))
                    elif text.lower() == 'ourl':
                        if msg.toType == 2:
                            G = cl.getGroup(to)
                            if G.preventedJoinByTicket == False:
                                pass
                            else:
                                G.preventedJoinByTicket = False
                                cl.updateGroup(G)
                    elif text.lower() == 'curl':
                        if msg.toType == 2:
                            G = cl.getGroup(to)
                            if G.preventedJoinByTicket == True:
                                pass
                            else:
                                G.preventedJoinByTicket = True
                                cl.updateGroup(G)
                    elif text.lower() == 'tagall':
                        group = cl.getGroup(msg.to)
                        nama = [contact.mid for contact in group.members]
                        k = len(nama)//100
                        for a in range(k+1):
                            txt = u''
                            s = 0
                            b = []
                            for i in group.members[a*100: (a+1)*100]:
                                b.append({"S": str(s), "E": str(s+6), "M": i.mid})
                                s += 7
                                txt += u'@Alin \n'
                            cl.sendMessage(to, text=txt,
                                           contentMetadata={u'MENTION': json.dumps({'MENTIONEES': b})},
                                           contentType=0)
                        cl.sendMessage(to, "總共 {} 個成員".format(str(len(nama))))
                    elif text.lower() == 'time':
                        tz = pytz.timezone("Asia/Makassar")
                        timeNow = datetime.now(tz=tz)
                        day = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"]
                        hari = ["星期日", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六"]
                        bulan = ["Januari", "Februari", "Maret", "April", "Mei", "Juni", "Juli", "Agustus", "September", "Oktober", "November", "Desember"]
                        hr = timeNow.strftime("%A")
                        bln = timeNow.strftime("%m")
                        for i in range(len(day)):
                            if hr == day[i]:
                                hasil = hari[i]
                        for k in range(0, len(bulan)):
                            if bln == str(k):
                                bln = bulan[k-1]
                        readTime = hasil + ", " + timeNow.strftime('%d') + " - " + bln + " - " + timeNow.strftime('%Y') + "\n時間 : [ "
+ timeNow.strftime('%H:%M:%S') + " ]" cl.sendMessage(msg.to, readTime) if op.type == 26: msg = op.message text = msg.text msg_id = msg.id receiver = msg.to sender = msg._from if msg.toType == 0: if sender != cl.profile.mid: to = sender else: to = receiver else: to = receiver if settings["autoRead"] == True: cl.sendChatChecked(to, msg_id) if to in read["readPoint"]: if sender not in read["ROM"][to]: read["ROM"][to][sender] = True except Exception as error: logError(error)
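# The /jgurlx branch above depends on a find_between_r helper that is not
# defined in this excerpt. A minimal sketch of the conventional definition
# used by similar selfbot scripts (an assumption, not confirmed here):
def find_between_r(s, first, last):
    """Return the substring between the last occurrence of `first` and the
    last occurrence of `last` after it, or "" if either marker is missing."""
    try:
        start = s.rindex(first) + len(first)
        end = s.rindex(last, start)
        return s[start:end]
    except ValueError:
        return ""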
def advanced_generic(yourchamp):
    conn = sqlite3.connect('matchups.db')
    cs = conn.cursor()
    advanced_screen = Tk()
    treeview = ttk.Treeview(advanced_screen)
    # The champion name is interpolated straight into the SQL because it
    # names a table; see the parameterized sketch after this function for
    # the safer pattern with values.
    cs.execute('SELECT * FROM ' + yourchamp.get())
    records = cs.fetchall()
    print(records)
    treeview['columns'] = ('You', 'Enemy', 'Games', 'Difficulty', 'Winrate')
    # Format
    treeview.column('#0', width=0, stretch=NO)
    treeview.column('You', anchor=CENTER, width=80)
    treeview.column('Enemy', anchor=CENTER, width=80)
    treeview.column('Games', anchor=CENTER, width=45)
    treeview.column('Difficulty', anchor=CENTER, width=70)
    treeview.column('Winrate', anchor=CENTER, width=60)
    # Headers
    treeview.heading('#0', text='', anchor=CENTER)
    treeview.heading('You', text='You', anchor=CENTER)
    treeview.heading('Enemy', text='Enemy', anchor=CENTER)
    treeview.heading('Games', text='Games', anchor=CENTER)
    treeview.heading('Difficulty', text='Difficulty', anchor=CENTER)
    treeview.heading('Winrate', text='Winrate', anchor=CENTER)
    # Show on treeview screen
    treeview.grid(row=0, column=0, padx=50)
    # Data setup
    opponents = [record[1] for record in records]
    unique = collections.Counter(opponents)
    unique_opponents = list(unique.keys())
    unique_games = list(unique.values())
    unique_winrates = []
    unique_diff = []
    for opponent in unique_opponents:
        cs.execute("SELECT * FROM " + yourchamp.get() + " WHERE enemy='" + opponent + "'")
        register = cs.fetchall()
        results_counter = []
        diff_counter = []
        # The original reused the name `opponent` for this inner loop,
        # shadowing the outer variable; a distinct name keeps them apart.
        for row in register:
            results_counter.append(row[5])
            diff_counter.append(row[3])
        wins = results_counter.count('W')
        losses = results_counter.count('L')
        winrate = wins / (wins + losses) * 100
        unique_winrates.append(format(winrate, '.2f'))
        difficulty = sum(diff_counter) / len(diff_counter)
        unique_diff.append(format(difficulty, '.2f'))
    print(unique_opponents)
    print(unique_games)
    print(unique_winrates)
    print(unique_diff)
    # Insert into treeview
    counter = 0
    for opponent in unique_opponents:
        treeview.insert(parent='', index='end', iid=counter, text='',
                        values=(yourchamp.get(), unique_opponents[counter],
                                unique_games[counter], unique_diff[counter],
                                unique_winrates[counter]))
        counter += 1
    conn.commit()
    conn.close()
    return
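# A hedged sketch of the same lookup with a placeholder for the enemy name.
# sqlite3 placeholders cannot name tables, so the champion table is checked
# against sqlite_master first; fetch_matchups and its arguments are
# illustrative names, not part of the original program.
import sqlite3

def fetch_matchups(champ_table, enemy_name):
    conn = sqlite3.connect('matchups.db')
    cs = conn.cursor()
    # Whitelist the table name, since identifiers cannot be parameterized.
    cs.execute("SELECT name FROM sqlite_master WHERE type='table'")
    if champ_table not in {row[0] for row in cs.fetchall()}:
        conn.close()
        raise ValueError('unknown champion table: ' + champ_table)
    # The enemy name is a value, so it takes a real placeholder.
    cs.execute('SELECT * FROM "{}" WHERE enemy=?'.format(champ_table), (enemy_name,))
    rows = cs.fetchall()
    conn.close()
    return rows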
def status(self, env):
    import status_params
    env.set_params(status_params)
    # `format` here is Ambari's resource_management helper, not the builtin:
    # it expands {pid_dir} and {hbase_user} from the params set above.
    pid_file = format("{pid_dir}/hbase-{hbase_user}-thrift2.pid")
    check_process_status(pid_file)
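# Roughly what that helper does, sketched with the builtin str.format
# (assuming status_params defines pid_dir and hbase_user, the usual layout
# in these Ambari scripts):
pid_file = "{pid_dir}/hbase-{hbase_user}-thrift2.pid".format(
    pid_dir=status_params.pid_dir, hbase_user=status_params.hbase_user)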
def __repr__(self):
    return ("<trade %d [%s] for %d [%s]>"
            % (self.getPlayer(), format(self.getPlayer(), self.trades()),
               self.getOtherPlayer(), format(self.getOtherPlayer(), self.otherTrades())))
def advanced_specific(yourchamp, enemychamp):
    advanced_screen2 = Tk()
    conn = sqlite3.connect('matchups.db')
    cs = conn.cursor()
    cs.execute("SELECT *, oid FROM " + yourchamp.get() +
               " WHERE enemy= '" + enemychamp.get() + "'")
    records = cs.fetchall()
    treeview = ttk.Treeview(advanced_screen2)
    treeview['columns'] = ('You', 'Enemy', 'Lane', 'Games', 'Difficulty', 'Winrate')
    # Format
    treeview.column('#0', width=0, stretch=NO)
    treeview.column('You', anchor=CENTER, width=80)
    treeview.column('Enemy', anchor=CENTER, width=80)
    treeview.column('Lane', anchor=CENTER, width=70)
    treeview.column('Games', anchor=CENTER, width=45)
    treeview.column('Difficulty', anchor=CENTER, width=70)
    treeview.column('Winrate', anchor=CENTER, width=60)
    # Headers
    treeview.heading('#0', text='', anchor=CENTER)
    treeview.heading('You', text='You', anchor=CENTER)
    treeview.heading('Enemy', text='Enemy', anchor=CENTER)
    treeview.heading('Lane', text='Lane', anchor=CENTER)
    treeview.heading('Games', text='Games', anchor=CENTER)
    treeview.heading('Difficulty', text='Difficulty', anchor=CENTER)
    treeview.heading('Winrate', text='Winrate', anchor=CENTER)
    # Show on treeview screen
    treeview.grid(row=0, column=0, padx=50)
    # Data setup
    lane_counter = [record[2] for record in records]
    lane_games = collections.Counter(lane_counter)
    unique_lanes = list(lane_games.keys())
    unique_games = list(lane_games.values())
    unique_diff = []
    unique_winrates = []
    for lane in unique_lanes:
        print(lane)
        diff_lane = []
        game_count = []
        for record in records:
            if record[2] == lane:
                diff_lane.append(record[3])
                game_count.append(record[5])
        wins_aux = game_count.count('W')
        loss_aux = game_count.count('L')
        winrate_aux = wins_aux / (wins_aux + loss_aux) * 100
        # Format to two decimals, matching advanced_generic.
        unique_winrates.append(format(winrate_aux, '.2f'))
        sum_diff = sum(diff_lane) / len(diff_lane)
        unique_diff.append(format(sum_diff, '.2f'))
    # Insert data into treeview
    counter = 0
    for lane in unique_lanes:
        # The original read the globals your_champ/enemy_champ instead of the
        # function's own arguments, and passed unique_games twice where the
        # lane belonged; one value per declared column.
        treeview.insert(parent='', index='end', iid=counter, text='',
                        values=(yourchamp.get(), enemychamp.get(),
                                unique_lanes[counter], unique_games[counter],
                                unique_diff[counter], unique_winrates[counter]))
        counter += 1
    print(unique_games)
    print(unique_lanes)
    print(unique_diff)
    print(unique_winrates)
    conn.close()
    return
import numpy as np
from mnist import MNIST  # python-mnist loader
import nn  # the project's local neural-network module (assumed)

# Note: this helper shadows the builtin format(); kept as in the source.
def format(images, labels):
    data = []
    for index in range(0, len(labels)):
        input = np.array(images[index]) / 255
        output = np.zeros(10)
        output[labels[index]] = 1.0
        data.append((input, output))
    return data

print("Loading and formatting MNIST set")
mnist_set = MNIST('MNIST_data')
training_inputs, training_outputs = mnist_set.load_training()
test_inputs, test_outputs = mnist_set.load_testing()
training_data = format(training_inputs, training_outputs)
test_data = format(test_inputs, test_outputs)
mnist_nn = nn.NeuralNetwork(4, [784, 100, 50, 10])
print('Training neural network')
# train for 5 epochs with a learning rate of 0.5
mnist_nn.train(training_data, 5, 0.5)
print("Testing neural network")
numCorrect = 0
for i in range(len(test_data)):
    data = test_data[i]
    input = data[0]
    expected = test_outputs[i]
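    # The loop body is cut off in the source. A hedged completion, assuming
    # the project's NeuralNetwork exposes a feed-forward method returning
    # the 10 output activations (the method name is a guess):
    output = mnist_nn.feed_forward(input)
    if np.argmax(output) == expected:
        numCorrect += 1

print("Accuracy: {:.2f}%".format(numCorrect / len(test_data) * 100))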
def test(player, type, value):
    print format(player, makeTrade(type, value))
def add_districts(district_list):
    # (Head reconstructed; the source begins mid-function. Joins district
    # names with "|" separators for the Distance Matrix query string.)
    district_list_string = ""
    for x in district_list:
        district_list_string = district_list_string + x + "|"
    return district_list_string

current_origins = add_districts([oakland, financial_district, mountain_view])
current_destinations = add_districts([russian_hill, north_beach,
                                      pacific_heights, outer_richmond,
                                      outer_sunset, mission_district, noe_valley,
                                      oakland, berkeley])
travel_mode = "transit"

gmaps_query = urllib2.urlopen(
    "https://maps.googleapis.com/maps/api/"
    "distancematrix/json?"
    "origins={}&"
    "destinations={}&"
    "mode={}&"
    "key={}&"
    "departure_time=now".format(current_origins, current_destinations,
                                travel_mode, query_data_file.gmaps_api_key))
query_result = json.loads(gmaps_query.read())
array_of_entries = np.array([])
query_time = datetime.now().isoformat(' ')
query_origins = np.array(query_result['origin_addresses'])
query_destinations = np.array(query_result['destination_addresses'])

# iterate through and print all trips
if query_result['status'] == 'OK':
    query_rows = query_result['rows']
    for i in np.arange(len(query_rows)):
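        # The source cuts off here. A hedged completion based on the
        # Distance Matrix response shape, where rows[i]['elements'][j]
        # carries the trip from origin i to destination j:
        for j, element in enumerate(query_rows[i]['elements']):
            if element['status'] == 'OK':
                print query_origins[i], '->', query_destinations[j], \
                    element['duration']['text']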
def main(argv):
    cmd = ns.core.CommandLine ()
    cmd.simulationTime = 10   # seconds
    cmd.distance = 0.0        # meters
    cmd.frequency = 5.0       # whether 2.4 or 5.0 GHz
    simulationTime = float(cmd.simulationTime)
    distance = float(cmd.distance)
    frequency = float(cmd.frequency)

    # Configuration arguments
    udp = True
    bandwidth = [20, 40]
    mcs = [2, 4, 7]
    gi = [False, True]
    amsduVOBE = 7935  # maximum amount by standard definition
    amsduVIBK = 7935  # maximum amount by standard definition
    # expected throughput (Mbit/s), indexed [mcs][bandwidth]
    expected_val = [[5.5, 8.1], [8.5, 13.5], [11.5, 20]]

    print "MCS's: \t Bandwidth: \t Throughput:\t\t Delay:\t Lost packets:\tTransmitted packets:"
    for count_mcs, a in enumerate(mcs):
        for count_bandwidth, b in enumerate(bandwidth):
            for c in gi:
                channel = ns.wifi.YansWifiChannelHelper.Default ()
                phy = ns.wifi.YansWifiPhyHelper.Default ()
                wifi = ns.wifi.WifiHelper ()
                mac = ns.wifi.NqosWifiMacHelper.Default ()
                phy.SetChannel (channel.Create ())
                if udp == False:
                    payloadSize = 1448  # bytes
                    ns.core.Config.SetDefault ("ns3::TcpSocket::SegmentSize",
                                               ns.core.UintegerValue (payloadSize))
                elif udp == True:
                    payloadSize = 1472

                wifiStaNode = ns.network.NodeContainer ()
                wifiStaNode.Create (5)
                wifiApNode = ns.network.NodeContainer ()
                wifiApNode.Create (1)

                wifi.SetStandard (ns.wifi.WIFI_PHY_STANDARD_80211n_5GHZ)
                # Set guard interval
                phy.Set ("ShortGuardEnabled", ns.core.BooleanValue (c))
                mac = ns.wifi.HtWifiMacHelper.Default ()
                DataRate = ns.wifi.HtWifiMacHelper.DataRateForMcs (a)
                wifi.SetRemoteStationManager ("ns3::ConstantRateWifiManager",
                                              "DataMode", DataRate,
                                              "ControlMode", ns.wifi.HtWifiMacHelper.DataRateForMcs (0))

                ssid = ns.wifi.Ssid ("wifi-80211n")
                mac.SetType ("ns3::StaWifiMac",
                             "Ssid", ns.wifi.SsidValue (ssid),
                             "ActiveProbing", ns.core.BooleanValue (False),
                             "BE_MaxAmsduSize", ns.core.UintegerValue (amsduVOBE),
                             "BK_MaxAmsduSize", ns.core.UintegerValue (amsduVIBK),
                             "VI_MaxAmsduSize", ns.core.UintegerValue (amsduVIBK),
                             "VO_MaxAmsduSize", ns.core.UintegerValue (amsduVOBE),
                             "BE_MaxAmpduSize", ns.core.UintegerValue (0),
                             "BK_MaxAmpduSize", ns.core.UintegerValue (0),
                             "VI_MaxAmpduSize", ns.core.UintegerValue (0),
                             "VO_MaxAmpduSize", ns.core.UintegerValue (0))
                staDevice = wifi.Install (phy, mac, wifiStaNode)

                mac.SetType ("ns3::ApWifiMac",
                             "Ssid", ns.wifi.SsidValue (ssid),
                             "BE_MaxAmsduSize", ns.core.UintegerValue (amsduVOBE),
                             "BK_MaxAmsduSize", ns.core.UintegerValue (amsduVIBK),
                             "VI_MaxAmsduSize", ns.core.UintegerValue (amsduVIBK),
                             "VO_MaxAmsduSize", ns.core.UintegerValue (amsduVOBE),
                             "BE_MaxAmpduSize", ns.core.UintegerValue (0),
                             "BK_MaxAmpduSize", ns.core.UintegerValue (0),
                             "VI_MaxAmpduSize", ns.core.UintegerValue (0),
                             "VO_MaxAmpduSize", ns.core.UintegerValue (0))
                apDevice = wifi.Install (phy, mac, wifiApNode)

                # Set channel width
                ns.core.Config.Set ("/NodeList/*/DeviceList/*/$ns3::WifiNetDevice/Phy/ChannelWidth",
                                    ns.core.UintegerValue (b))

                # mobility
                mobility = ns.mobility.MobilityHelper ()
                positionAlloc = ns.mobility.ListPositionAllocator ()
                positionAlloc.Add (ns.core.Vector3D (0.0, 0.0, 0.0))
                positionAlloc.Add (ns.core.Vector3D (distance, 0.0, 0.0))
                positionAlloc.Add (ns.core.Vector3D (distance, 0.0, 0.0))
                mobility.SetPositionAllocator (positionAlloc)
                mobility.SetMobilityModel ("ns3::ConstantPositionMobilityModel")
                mobility.Install (wifiApNode)
                mobility.Install (wifiStaNode)

                # Internet stack
                stack = ns.internet.InternetStackHelper ()
                stack.Install (wifiApNode)
                stack.Install (wifiStaNode)

                address = ns.internet.Ipv4AddressHelper ()
                address.SetBase (ns.network.Ipv4Address ("192.168.1.0"),
                                 ns.network.Ipv4Mask ("255.255.255.0"))
                staNodeInterface = address.Assign (staDevice)
                apNodeInterface = address.Assign (apDevice)

                # Setting applications
                serverApp = ns.network.ApplicationContainer ()
                sinkApp = ns.network.ApplicationContainer ()
                if udp == False:
                    # TCP flow
                    port = 50000
                    apLocalAddress = ns.network.Address (
                        ns.network.InetSocketAddress (ns.network.Ipv4Address.GetAny (), port))
                    packetSinkHelper = ns.applications.PacketSinkHelper ("ns3::TcpSocketFactory", apLocalAddress)
                    sinkApp = packetSinkHelper.Install (wifiApNode.Get (0))
                    sinkApp.Start (ns.core.Seconds (0.0))
                    sinkApp.Stop (ns.core.Seconds (simulationTime + 1))

                    onoff = ns.applications.OnOffHelper ("ns3::TcpSocketFactory", ns.network.Ipv4Address.GetAny ())
                    onoff.SetAttribute ("OnTime", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=1]"))
                    onoff.SetAttribute ("OffTime", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=0]"))
                    onoff.SetAttribute ("PacketSize", ns.core.UintegerValue (payloadSize))
                    onoff.SetAttribute ("DataRate", ns.network.DataRateValue (ns.network.DataRate (1000000000)))  # bit/s
                    apps = ns.network.ApplicationContainer ()
                    remoteAddress = ns.network.AddressValue (
                        ns.network.InetSocketAddress (staNodeInterface.GetAddress (0), port))
                    onoff.SetAttribute ("Remote", remoteAddress)
                    apps.Add (onoff.Install (wifiStaNode))
                    apps.Start (ns.core.Seconds (1.0))
                    apps.Stop (ns.core.Seconds (simulationTime + 1))
                elif udp == True:
                    # UDP flow
                    myServer = ns.applications.UdpServerHelper (9)
                    serverApp = myServer.Install (ns.network.NodeContainer (wifiApNode))
                    serverApp.Start (ns.core.Seconds (0.0))
                    serverApp.Stop (ns.core.Seconds (simulationTime + 1))

                    # Send at the rate the standard predicts for this MCS/width:
                    # packets/s = expected Mbit/s / payload bits, and the client
                    # interval is its reciprocal.
                    temp = float((expected_val[count_mcs][count_bandwidth] * 1000000) / (payloadSize * 8))
                    inter = float(1 / temp)
                    inter = format(inter, 'f')

                    myClient = ns.applications.UdpClientHelper (apNodeInterface.GetAddress (0), 9)
                    myClient.SetAttribute ("MaxPackets", ns.core.UintegerValue (4294967295))
                    myClient.SetAttribute ("Interval", ns.core.TimeValue (ns.core.Time (inter)))  # packets/s
                    myClient.SetAttribute ("PacketSize", ns.core.UintegerValue (payloadSize))
                    clientApp = myClient.Install (ns.network.NodeContainer (wifiStaNode))
                    clientApp.Start (ns.core.Seconds (1.0))
                    clientApp.Stop (ns.core.Seconds (simulationTime + 1))

                ns.internet.Ipv4GlobalRoutingHelper.PopulateRoutingTables ()

                flowmonitor = ns.flow_monitor.FlowMonitorHelper ()
                monitor = flowmonitor.InstallAll ()
                monitor.SetAttribute ("StartTime", ns.core.TimeValue (ns.core.Seconds (5)))
                monitor.SetAttribute ("DelayBinWidth", ns.core.DoubleValue (0.001))
                monitor.SetAttribute ("JitterBinWidth", ns.core.DoubleValue (0.001))
                monitor.SetAttribute ("PacketSizeBinWidth", ns.core.DoubleValue (20))

                ns.core.Simulator.Stop (ns.core.Seconds (simulationTime + 1))
                ns.core.Simulator.Run ()
                ns.core.Simulator.Destroy ()

                monitor.CheckForLostPackets ()
                classifier = flowmonitor.GetClassifier ()
                stats = monitor.GetFlowStats ()
                for flow_id, flow_stats in stats:
                    t = classifier.FindFlow (flow_id)
                    p_tran = flow_stats.txPackets
                    p_rec = flow_stats.rxPackets
                    p_diff = p_tran - p_rec
                    delay_sum = flow_stats.delaySum
                    delay = delay_sum / p_rec
                    lost_packets = flow_stats.lostPackets

                throughput = 0
                if udp == False:
                    # TCP
                    totalPacketsThrough = sinkApp.Get (0).GetTotalRx ()
                    throughput = totalPacketsThrough * 8 / (simulationTime * 1000000.0)  # Mbit/s
                elif udp == True:
                    # UDP
                    totalPacketsThrough = serverApp.Get (0).GetReceived ()
                    throughput = totalPacketsThrough * payloadSize * 8 / (simulationTime * 1000000.0)  # Mbit/s
                print a, "\t", b, "MHz\t", c, "\t", throughput, "Mbit/s\t", delay, "\t\t", lost_packets, "\t\t ", p_tran
    return 0
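# Worked example of the interval computation above, using values from this
# script's own tables: MCS 7 at 40 MHz expects 20 Mbit/s, and each UDP
# payload carries 1472 * 8 = 11776 bits, so
#   packets/s = 20e6 / 11776 ≈ 1698.4
#   interval  = 1 / 1698.4   ≈ 0.000589 s between packets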
def plot_tour(fig, ax, figtitle, source, target,
              drone_info, used_drones, package_trail,
              xlims=[0, 1], ylims=[0, 1], aspect_ratio=1.0,
              speedfontsize=4, speedmarkersize=10,
              sourcetargetmarkerfontsize=4, sourcetargetmarkersize=10):
    import matplotlib.pyplot as plt
    import matplotlib.ticker as ticker

    ax.set_aspect(aspect_ratio)
    ax.set_xlim(xlims)
    ax.set_ylim(ylims)
    plt.rc('font', family='serif')

    # Draw the package trail
    xs, ys = extract_coordinates(package_trail)
    ax.plot(xs, ys, 'ro', markersize=5)
    for idx in range(len(xs) - 1):
        plt.arrow(xs[idx], ys[idx], xs[idx + 1] - xs[idx], ys[idx + 1] - ys[idx],
                  **{'length_includes_head': True, 'width': 0.007,
                     'head_width': 0.01, 'fc': 'r', 'ec': 'none', 'alpha': 0.8})

    # Draw the source, target, and initial positions of the robots as bold dots
    xs, ys = extract_coordinates([source, target])
    ax.plot(xs, ys, 'o', markersize=sourcetargetmarkersize, alpha=0.8,
            ms=10, mec='k', mfc='#F1AB30')
    # ax.plot(xs, ys, 'k--', alpha=0.6)  # light line connecting source and target
    ax.text(source[0], source[1], 'S', fontsize=sourcetargetmarkerfontsize,
            horizontalalignment='center', verticalalignment='center')
    ax.text(target[0], target[1], 'T', fontsize=sourcetargetmarkerfontsize,
            horizontalalignment='center', verticalalignment='center')

    xs, ys = extract_coordinates([drone_info[idx][0] for idx in range(len(drone_info))])
    ax.plot(xs, ys, 'o', markersize=speedmarkersize, alpha=0.5, mec='None', mfc='#b7e8cc')

    # Draw speed labels
    for idx in range(len(drone_info)):
        ax.text(drone_info[idx][0][0], drone_info[idx][0][1],
                format(drone_info[idx][1], '.3f'), fontsize=speedfontsize,
                horizontalalignment='center', verticalalignment='center')

    # Draw drone path from initial position to interception point
    for pt, idx in zip(package_trail, used_drones):
        initdroneposn = drone_info[idx][0]
        handoffpoint = pt
        xs, ys = extract_coordinates([initdroneposn, handoffpoint])
        plt.arrow(xs[0], ys[0], xs[1] - xs[0], ys[1] - ys[0],
                  **{'length_includes_head': True, 'width': 0.005,
                     'head_width': 0.02, 'fc': 'b', 'ec': 'none'})

    fig.suptitle(figtitle, fontsize=15)
    ax.set_title('\nMakespan: ' + format(makespan(drone_info, used_drones, package_trail), '.5f'),
                 fontsize=8)

    startx, endx = ax.get_xlim()
    starty, endy = ax.get_ylim()
    ax.tick_params(which='both',  # options for both major and minor ticks
                   top='off',     # turn off top ticks
                   left='off',    # turn off left ticks
                   right='off',   # turn off right ticks
                   bottom='off')  # turn off bottom ticks
    # Customize the major grid
    ax.grid(which='major', linestyle='-', linewidth='0.1', color='red')
    # Customize the minor grid
    ax.grid(which='minor', linestyle=':', linewidth='0.1', color='black')
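# A hedged usage sketch: drone_info pairs each drone's initial (x, y)
# position with its speed, while used_drones and package_trail index the
# drones and hand-off points computed elsewhere in this project. The values
# below are illustrative only.
import matplotlib.pyplot as plt

drone_info = [((0.2, 0.3), 0.50), ((0.7, 0.8), 0.25)]
used_drones = [0, 1]
package_trail = [(0.4, 0.4), (0.9, 0.9)]
fig, ax = plt.subplots()
plot_tour(fig, ax, 'Package handoff tour', (0.1, 0.1), (0.9, 0.9),
          drone_info, used_drones, package_trail)
plt.show()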
def run(self):
    # throw away the first frame
    self.readData()
    while True:
        maxHet = 0
        minHet = 500
        tempData = []
        nanCount = 0
        # hetData = self.imageLarger(self.readData(), 32, 24)
        hetData = self.readData()
        if len(hetData) < DataReader.pixel_num:
            continue
        for i in range(0, DataReader.pixel_num):
            curCol = i % 32
            newValueForNanPoint = 0
            curData = None
            if i < len(hetData) and isDigital(hetData[i]):
                curData = float(format(hetData[i], '.2f'))
            else:
                # Interpolate a NaN reading from its valid neighbours.
                # Note: the original compared with `is not "nan"`, which
                # tests identity rather than value; use != instead.
                interpolationPointCount = 0
                sumValue = 0
                print("curCol", curCol, "i", i)
                abovePointIndex = i - 32
                if abovePointIndex >= 0:  # original used > 0, skipping index 0
                    if hetData[abovePointIndex] != "nan":
                        interpolationPointCount += 1
                        sumValue += float(hetData[abovePointIndex])
                belowPointIndex = i + 32
                if belowPointIndex < DataReader.pixel_num:
                    print(" ")
                    if hetData[belowPointIndex] != "nan":
                        interpolationPointCount += 1
                        sumValue += float(hetData[belowPointIndex])
                leftPointIndex = i - 1
                if curCol != 0:  # original checked != 31, reading across row ends
                    if hetData[leftPointIndex] != "nan":
                        interpolationPointCount += 1
                        sumValue += float(hetData[leftPointIndex])
                rightPointIndex = i + 1
                if rightPointIndex < DataReader.pixel_num:
                    if curCol != 31:  # original checked != 0
                        if hetData[rightPointIndex] != "nan":
                            interpolationPointCount += 1
                            sumValue += float(hetData[rightPointIndex])
                curData = sumValue / interpolationPointCount
                # For debug:
                # print(abovePointIndex, belowPointIndex, leftPointIndex, rightPointIndex)
                # print("newValueForNanPoint", newValueForNanPoint, "interpolationPointCount", interpolationPointCount, "sumValue", sumValue)
                nanCount += 1
            tempData.append(curData)
            maxHet = tempData[i] if tempData[i] > maxHet else maxHet
            minHet = tempData[i] if tempData[i] < minHet else minHet
        if maxHet == 0 or minHet == 500:
            continue
        # For debug:
        # if nanCount > 0:
        #     print("____@@@@@@@ nanCount ", nanCount, " @@@@@@@____")
        lock.acquire()
        hetaData.append({"frame": tempData, "maxHet": maxHet, "minHet": minHet})
        lock.release()
        self.drawRequire.emit()
        self.frameCount = self.frameCount + 1
        print("data->" + str(self.frameCount))
    self.com.close()
def trakt_list(self, url, user):
    try:
        dupes = []

        q = dict(urlparse.parse_qsl(urlparse.urlsplit(url).query))
        q.update({'extended': 'full'})
        q = (urllib.urlencode(q)).replace('%2C', ',')
        u = url.replace('?' + urlparse.urlparse(url).query, '') + '?' + q

        result = trakt.getTraktAsJson(u)

        items = []
        for i in result:
            try: items.append(i['show'])
            except: pass
        if len(items) == 0:
            items = result
    except:
        return

    try:
        q = dict(urlparse.parse_qsl(urlparse.urlsplit(url).query))
        if not int(q['limit']) == len(items): raise Exception()
        q.update({'page': str(int(q['page']) + 1)})
        q = (urllib.urlencode(q)).replace('%2C', ',')
        next = url.replace('?' + urlparse.urlparse(url).query, '') + '?' + q
        next = next.encode('utf-8')
    except:
        next = ''

    for item in items:
        try:
            title = item['title']
            title = re.sub('\s(|[(])(UK|US|AU|\d{4})(|[)])$', '', title)
            title = client.replaceHTMLCodes(title)

            year = item['year']
            year = re.sub('[^0-9]', '', str(year))
            if int(year) > int((self.datetime).strftime('%Y')): raise Exception()

            imdb = item['ids']['imdb']
            if imdb == None or imdb == '': imdb = '0'
            else: imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))

            tvdb = item['ids']['tvdb']
            tvdb = re.sub('[^0-9]', '', str(tvdb))
            if tvdb == None or tvdb == '' or tvdb in dupes: raise Exception()
            dupes.append(tvdb)

            try: premiered = item['first_aired']
            except: premiered = '0'
            try: premiered = re.compile('(\d{4}-\d{2}-\d{2})').findall(premiered)[0]
            except: premiered = '0'

            try: studio = item['network']
            except: studio = '0'
            if studio == None: studio = '0'

            try: genre = item['genres']
            except: genre = '0'
            genre = [i.title() for i in genre]
            if genre == []: genre = '0'
            genre = ' / '.join(genre)

            try: duration = str(item['runtime'])
            except: duration = '0'
            if duration == None: duration = '0'

            try: rating = str(item['rating'])
            except: rating = '0'
            if rating == None or rating == '0.0': rating = '0'

            try: votes = str(item['votes'])
            except: votes = '0'
            try: votes = str(format(int(votes), ',d'))
            except: pass
            if votes == None: votes = '0'

            try: mpaa = item['certification']
            except: mpaa = '0'
            if mpaa == None: mpaa = '0'

            try: plot = item['overview']
            except: plot = '0'
            if plot == None: plot = '0'
            plot = client.replaceHTMLCodes(plot)

            self.list.append({'title': title, 'originaltitle': title, 'year': year,
                              'premiered': premiered, 'studio': studio, 'genre': genre,
                              'duration': duration, 'rating': rating, 'votes': votes,
                              'mpaa': mpaa, 'plot': plot, 'imdb': imdb, 'tvdb': tvdb,
                              'poster': '0', 'next': next})
        except:
            pass

    return self.list
import speech_recognition as sr

r = sr.Recognizer()
with sr.Microphone() as source:
    print('diga algo')
    audio = r.listen(source)

try:
    print('Voce disse: ' + r.recognize_google(audio, language='pt-BR'))
except sr.UnknownValueError:
    print('Bia nao pode entender o audio')
except sr.RequestError as e:
    # The original called format(e) after a comma, printing a tuple; use
    # str.format to interpolate the error into the message.
    print('Erro ao chamar Google Speech Recognition service; {0}'.format(e))