def shtml(txt, file='', browser=True):
    """Save txt to a .txt file (deriving a filename when none given) and
    optionally open it with the module-level browser() helper."""
    import T, pprint, F
    # No filename supplied and txt is an object: derive a name from its
    # string forms, truncated to 19 chars; silently fall back on failure.
    if file == '' and type(txt) is not str:
        try:
            file = T.filename(T.max(txt.__str__(), txt.__repr__(), txt.__name__)[:19])
        except:
            pass
    if not file.lower().endswith('.txt'):
        file += '.txt'
    F.write(file, txt)
    # Open via the module-level browser() (globals() avoids the shadowing
    # parameter of the same name).
    if browser == True:
        globals()['browser'](path.abspath(file))
def checksum(session, fid, echo=False):
    """Gets a zoning transaction checksum

    :param session: Session object returned from brcdapi.pyfos_auth.login()
    :type session: dict
    :param fid: Logical FID number for the fabric of interest
    :type fid: int
    :param echo: If True, echoes any error messages to STD_OUT
    :type echo: bool
    :return: checksum
    :rtype: int, None
    :return: brcdapi_rest status object
    :rtype: dict
    """
    # Get the checksum - this is needed to save the configuration.
    obj = brcdapi_rest.get_request(session, 'brocade-zone/effective-configuration', fid)
    if _is_error(obj, 'Failed to get zone data from "brocade-zone/effective-configuration"', echo):
        return None, obj
    try:
        return obj.get('effective-configuration').get('checksum'), obj
    # Narrowed from a bare except: the only expected failure here is
    # .get('effective-configuration') returning None (no attribute 'get').
    except AttributeError:
        brcdapi_log.log('Failed to get checksum', echo)
        brcdapi_log.exception(pprint.pformat(obj, indent=4), echo)
        return None, pyfos_auth.create_error(
            brcdapi_util.HTTP_INT_SERVER_ERROR,
            brcdapi_util.HTTP_REASON_UNEXPECTED_RESP,
            'Missing effective-configuration/checksum')
def _CHANNELS(self):
    """Fetch the full channel list (up to 1000) from the Slack API.

    Raises ValueError when the response carries no 'channels' key.
    """
    self.debug('loading channels')
    reply = self.user_client.api_call("channels.list", count=1000)
    if 'channels' in reply:
        return reply['channels']
    # Unexpected payload: log it verbatim for debugging, then fail loudly.
    self.logger.warning(pprint.pformat(reply))
    raise ValueError('error getting channel list')
def done(context):
    """
    Called once on script shutdown, after any other events.

    Serializes the HAR log: '-' pretty-prints to the context log, a
    '*.zhar' path gets the compressed dump, anything else the plain JSON.
    Finally logs size statistics.
    """
    # Bug fix: the original did 'from pprint import pprint' and then called
    # pprint.pformat(...), which raises AttributeError (functions have no
    # pformat). Import the module instead.
    import pprint
    import json
    json_dump = context.HARLog.json()
    compressed_json_dump = context.HARLog.compress()
    if context.dump_file == '-':
        context.log(pprint.pformat(json.loads(json_dump)))
    elif context.dump_file.endswith('.zhar'):
        # open() works on Py2 and Py3 (file() is Py2-only) and the context
        # manager guarantees the handle is closed.
        with open(context.dump_file, "w") as f:
            f.write(compressed_json_dump)
    else:
        with open(context.dump_file, "w") as f:
            f.write(json_dump)
    context.log(
        "HAR log finished with %s bytes (%s bytes compressed)" % (
            len(json_dump), len(compressed_json_dump)
        )
    )
    context.log(
        "Compression rate is %s%%" % str(
            100. * len(compressed_json_dump) / len(json_dump)
        )
    )
def update(event, board, opts, vars):
    """Convert the event's XML payload to JSON and push it to the board.

    Also dumps the parsed dict to 'xmltojson.dump' for debugging. Any
    failure is reported (best-effort handler, deliberately non-fatal).
    """
    try:
        xml = str(event.data)
        data = xmltodict.parse(xml)
        # Bug fix: the original mixed pprint(data, stream=fp) (function-style
        # import) with pprint.pformat(data) (module-style) — the two cannot
        # both be valid under one import. Standardize on the module.
        with open("xmltojson.dump", "w") as fp:
            pprint.pprint(data, stream=fp)
        if pprint.pformat(data):
            json = dumps(data, indent=2, sort_keys=False)
            json = json.replace('@', '')      # strip xmltodict attribute markers
            json = json.replace('\n', '\r\n')  # CRLF line endings for the board
            if opts['debug']:
                print(json)
            board.update(event.time, event.dur, opts['address'], json)
    except Exception as e:
        print('could not convert xml to json: ', e)
def wrapper(*arg, **kw): arg_arr, kw_arr = fleader._getarg(func, arg, kw, ext) #ext过滤 fhash = fleader.md5(func.__name__, arg_arr, kw_arr) if fc: fname = func.__name__ else: fname = '' state, res = fleader.rwcache(fhash, suffix=suffix, dur=dur, path=path, fname=fname, during=during) if not state: res = func(*arg, **kw) flag, res = fleader._resfin(res, fin=fin) if flag: if booljson: import json res = json.dumps(res) if boolpprint: import pprint res = pprint.pformat(res) fleader.rwcache(fhash, res, suffix=suffix, dur=dur, path=path, fname=fname, aw=aw) return res
def get_error_str(self):
    """Concatenate pretty-printed 'err' payloads from the client and
    server sides of self.m_json, in that order."""
    pieces = []
    for side in ('client', 'server'):
        side_data = self.m_json['data'][side]
        if 'err' in side_data:
            pieces.append(pprint.pformat(side_data['err']))
    return ''.join(pieces)
def send(self, chan_id=None, msg="default message", attachments=None):
    """Post a message to this channel (self.id) via the Slack API.

    :param chan_id: unused; kept for interface compatibility — the channel
        is always taken from self.id
    :param msg: message text to post
    :param attachments: optional list of Slack attachment dicts
    :return: raw Slack API response
    """
    # Mutable-default fix: None sentinel instead of a shared [] default.
    if attachments is None:
        attachments = []
    result = self.bot_client.api_call("chat.postMessage",
                                      channel=self.id,
                                      attachments=attachments,
                                      text=msg)
    self.logger.debug(pprint.pformat(result))
    return result
def cbFun(transportDispatcher, transportDomain, transportAddress, wholeMsg):
    # pysnmp transport callback: decode incoming SNMP trap PDUs and print
    # var-binds whose OID matches GlobalVars.TRAP_OID.
    # Shut the dispatcher down when the global stop flag is set.
    if GlobalVars.END_TRAP_LISTENER == 1:
        transportDispatcher.closeDispatcher()
        transportDispatcher.jobFinished(1)
    # A datagram may carry several messages; decode until it is consumed.
    while wholeMsg:
        msgVer = int(api.decodeMessageVersion(wholeMsg))
        if msgVer in api.protoModules:
            pMod = api.protoModules[msgVer]
        else:
            # Unsupported SNMP version: drop the rest of the datagram.
            return
        reqMsg, wholeMsg = decoder.decode(
            wholeMsg, asn1Spec=pMod.Message(),
        )
        reqPDU = pMod.apiMessage.getPDU(reqMsg)
        if reqPDU.isSameTypeWith(pMod.TrapPDU()):
            # SNMPv1 traps expose var-binds via apiTrapPDU; v2c+ via apiPDU.
            if msgVer == api.protoVersion1:
                varBinds = pMod.apiTrapPDU.getVarBinds(reqPDU)
            else:
                varBinds = pMod.apiPDU.getVarBinds(reqPDU)
            for oid, val in varBinds:
                # pformat() wraps the printed OID in quotes; strip them and
                # prefix a dot so it matches the TRAP_OID format.
                tmpOid = pprint.pformat(oid.prettyPrint())
                tmpOid = '.' + tmpOid.replace("'", "")
                if tmpOid in GlobalVars.TRAP_OID:
                    print("Online Monitoring OutPut:")
                    print('%s=%s' % (oid.prettyPrint(), val.prettyPrint()))
    # Returning the (now empty) buffer keeps the dispatcher job alive.
    return wholeMsg
def __init__(self, bsp):
    """Derive the camera location/orientation from the BSP's view entity
    and the entity it targets, recording both for traceability."""
    self._bsp = bsp
    # Resolve the view entity, then the entity its "target" points at.
    view_ent = self._get_entity('classname', self.VIEW_CLASS)
    target_ent = self._get_entity('targetname', view_ent["target"])
    self.location = view_ent["origin"]
    self.look_at = target_ent["origin"]
    # Human-readable record of which entities produced the camera, so the
    # output makes the provenance clear.
    pieces = [
        "view_ent:\n", pprint.pformat(view_ent, 4), "\n\n",
        "target_ent:\n", pprint.pformat(target_ent, 4), "\n",
    ]
    self.comment = "".join(pieces)
def get_link_history(self):
    """Collect every attachment from the channel's last 1000 messages.

    Raises ValueError when the API response has no 'messages' key.
    """
    response = self.user_client.api_call("channels.history", channel=self.id, count=1000)
    if 'messages' not in response:
        # Log the unexpected payload before failing.
        self.logger.warning(pprint.pformat(response))
        raise ValueError('response does not contain messages')
    attachments = []
    for message in response['messages']:
        attachments.extend(message.get('attachments', []))
    return attachments
def mergeKeys(self, aKeys, bKeys):
    """Merge two metadata-key dicts from matching runs.

    'Source' must be identical; 'Runs'/'Programs' are summed as ints;
    'Data'/'Graph' are comma-joined. Raises ValueError on mismatched key
    sets, differing sources, or an unrecognized key.
    """
    if set(aKeys) != set(bKeys):
        # Bug fix: the original formatted 'a.keys'/'b.keys' — names that do
        # not exist in this scope and would raise NameError instead of the
        # intended ValueError. Also uses the Py2/Py3-compatible raise form.
        raise ValueError('Keys differ: %s, %s' % (pprint.pformat(aKeys.keys()),
                                                  pprint.pformat(bKeys.keys())))
    keys = {}
    for key, aValue in aKeys.items():
        bValue = bKeys[key]
        if key == 'Source':
            if aValue != bValue:
                raise ValueError('Key ("%s") differs: %s %s' % (key, aValue, bValue))
            value = aValue
        elif key in ('Runs', 'Programs'):
            value = str(int(aValue) + int(bValue))
        elif key in ('Data', 'Graph'):
            value = aValue + ',' + bValue
        else:
            raise ValueError('Unrecognized key: "%s"' % (key,))
        keys[key] = value
    return keys
def mergeKeys(self, aKeys, bKeys):
    """Merge two metadata-key dicts from matching runs.

    'Source' must be identical; 'Runs'/'Programs' are summed as ints;
    'Data'/'Graph' are comma-joined. Raises ValueError on mismatched key
    sets, differing sources, or an unrecognized key.
    """
    if set(aKeys) != set(bKeys):
        # Bug fix: the original formatted 'a.keys'/'b.keys' — undefined names
        # here, so the intended ValueError became a NameError. Also switched
        # to the Py2/Py3-compatible raise form.
        raise ValueError('Keys differ: %s, %s' % (pprint.pformat(aKeys.keys()),
                                                  pprint.pformat(bKeys.keys())))
    keys = {}
    for key, aValue in aKeys.items():
        bValue = bKeys[key]
        if key == 'Source':
            if aValue != bValue:
                raise ValueError('Key ("%s") differs: %s %s' % (key, aValue, bValue))
            value = aValue
        elif key in ('Runs', 'Programs'):
            value = str(int(aValue) + int(bValue))
        elif key in ('Data', 'Graph'):
            value = aValue + ',' + bValue
        else:
            raise ValueError('Unrecognized key: "%s"' % (key,))
        keys[key] = value
    return keys
def __str__(self):
    """Human-readable dump of the model hyperparameters, preserving the
    declaration order (sort_dicts=False)."""
    fields = (
        ("patch_shape", self._patch_shape),
        ("n_labels", self._n_labels),
        ("activation", self._activation),
        ("n_convs", self._n_convs),
        ("padding", self._padding),
        ("n_filters", self._n_filters),
        ("depth", self._depth),
        ("batch_norm", self._batch_norm),
        ("l2", self._l2),
        ("filter_size", self._filter_size),
    )
    summary = {name: str(value) for name, value in fields}
    return pprint.pformat(summary, sort_dicts=False)
def buildOne(Mod):
    # Build one module's connection tables and serialize them as Python
    # literals into arcs/<Module>.py, prefixed by the STRING template.
    # NOTE(review): ARCSBK/ARCSFW/CONNS/TERMS/INPUTS/OUTPUTS/SONS appear to
    # be module-level tables populated by builds() — confirm in this file's
    # surrounding code.
    builds(Mod)
    if not os.path.exists('arcs'):
        os.mkdir('arcs')
    out = open('arcs/%s.py' % Mod.Module, 'w')
    # STRING is a source template; NAME is replaced with the module name.
    out.write(STRING.replace('NAME', Mod.Module))
    # pformat() emits valid Python literals, so the file can be imported.
    out.write('hld.ARCSBK = %s\n' % pprint.pformat(ARCSBK))
    out.write('hld.ARCSFW = %s\n' % pprint.pformat(ARCSFW))
    out.write('hld.CONNS = %s\n' % pprint.pformat(CONNS))
    out.write('hld.TERMS = %s\n' % pprint.pformat(TERMS))
    out.write('hld.INPUTS = %s\n' % pprint.pformat(INPUTS))
    out.write('hld.OUTPUTS = %s\n' % pprint.pformat(OUTPUTS))
    out.write('hld.SONS = %s\n' % pprint.pformat(SONS))
    out.close()
def run(self):
    """Thread main loop: process hypervisor event-registration actions from
    self.q_event_register until 'stop_thread' (or self.stop) is seen."""
    # Close connection on exit (to test cleanup paths)
    self.tid = get_tid()
    logs.status.info('starting thread: {} (TID {})'.format(
        self.name, self.tid))
    old_exitfunc = getattr(sys, 'exitfunc', None)

    def exit():
        # Close every hypervisor connection, then chain any prior exitfunc.
        logs.status.info('Closing hypervisors connexions')
        for hyp_id, hostname in self.hyps.items():
            self.hyps_conn[hyp_id].close()
        if (old_exitfunc):
            old_exitfunc()

    # NOTE(review): sys.exitfunc is Python 2 only; atexit.register is the
    # portable replacement — confirm the target interpreter.
    sys.exitfunc = exit
    self.thread_event_loop = virEventLoopNativeStart(self.stop_event_loop)
    # Bug fix: 'action' was unbound if the very first get() raised a
    # non-Empty exception, making the handler's log line raise NameError.
    action = {}
    while self.stop is not True:
        try:
            action = self.q_event_register.get(
                timeout=TIMEOUT_QUEUE_REGISTER_EVENTS)
            if action['type'] in ['add_hyp_to_receive_events']:
                hyp_id = action['hyp_id']
                self.add_hyp_to_receive_events(hyp_id)
            elif action['type'] in ['del_hyp_to_receive_events']:
                hyp_id = action['hyp_id']
                self.del_hyp_to_receive_events(hyp_id)
            elif action['type'] == 'stop_thread':
                self.stop = True
            else:
                logs.status.error('type action {} not supported'.format(
                    action['type']))
        except queue.Empty:
            pass
        except Exception as e:
            log.error(
                'Exception in ThreadHypEvents main loop: {}'.format(e))
            log.error('Action: {}'.format(pprint.pformat(action)))
            log.error('Traceback: {}'.format(traceback.format_exc()))
            return False
    self.stop_event_loop[0] = True
    # Busy-wait until the native event loop thread has terminated.
    while self.thread_event_loop.is_alive():
        pass
def _received_message(self, msg):
    """Handle one gateway message; on the READY event, start the
    keep-alive heartbeat loop."""
    response = json.loads(str(msg))
    pretty = pprint.pformat(response)
    event = response.get('t')
    data = response.get('d')
    if event == 'READY':
        # Persist the READY payload for inspection.
        with open("test.txt", "w+") as f:
            f.write(pretty)
        data = response.get('d')
        interval = data.get('heartbeat_interval') / 1000.0
        self.keep_alive = _keep_alive_handler(interval, self.ws)
    else:
        print(pretty)
def label_detection(file):
    """Run Rekognition label detection on an image file and return the
    labels as a list of pretty-printed dict strings."""
    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
    rekognition_client = boto3.client('rekognition')
    image = RekognitionImage.from_file(file, rekognition_client)
    labels = image.detect_labels(100)
    return [pprint.pformat(label.to_dict()) for label in labels]
def done(context):
    """
    Called once on script shutdown, after any other events.

    Serializes the HAR log: '-' pretty-prints to the context log, a
    '*.zhar' path gets the compressed dump, anything else the plain JSON.
    Finally logs size statistics.
    """
    # Bug fix: the original did 'from pprint import pprint' and then called
    # pprint.pformat(...), which raises AttributeError (functions have no
    # pformat). Import the module instead.
    import pprint
    import json
    json_dump = context.HARLog.json()
    compressed_json_dump = context.HARLog.compress()
    if context.dump_file == '-':
        context.log(pprint.pformat(json.loads(json_dump)))
    elif context.dump_file.endswith('.zhar'):
        # open() works on Py2 and Py3 (file() is Py2-only) and the context
        # manager guarantees the handle is closed.
        with open(context.dump_file, "w") as f:
            f.write(compressed_json_dump)
    else:
        with open(context.dump_file, "w") as f:
            f.write(json_dump)
    context.log("HAR log finished with %s bytes (%s bytes compressed)" %
                (len(json_dump), len(compressed_json_dump)))
    context.log("Compression rate is %s%%" %
                str(100. * len(compressed_json_dump) / len(json_dump)))
def run(self):
    """Thread main loop: process hypervisor event-registration actions from
    self.q_event_register until 'stop_thread' (or self.stop) is seen."""
    # Close connection on exit (to test cleanup paths)
    self.tid = get_tid()
    logs.status.info('starting thread: {} (TID {})'.format(self.name, self.tid))
    old_exitfunc = getattr(sys, 'exitfunc', None)

    def exit():
        # Close every hypervisor connection, then chain any prior exitfunc.
        logs.status.info('Closing hypervisors connexions')
        for hyp_id, hostname in self.hyps.items():
            self.hyps_conn[hyp_id].close()
        if (old_exitfunc):
            old_exitfunc()

    # NOTE(review): sys.exitfunc is Python 2 only; atexit.register is the
    # portable replacement — confirm the target interpreter.
    sys.exitfunc = exit
    self.thread_event_loop = virEventLoopNativeStart(self.stop_event_loop)
    # Bug fix: 'action' was unbound if the very first get() raised a
    # non-Empty exception, making the handler's log line raise NameError.
    action = {}
    while self.stop is not True:
        try:
            action = self.q_event_register.get(timeout=TIMEOUT_QUEUE_REGISTER_EVENTS)
            if action['type'] in ['add_hyp_to_receive_events']:
                hyp_id = action['hyp_id']
                self.add_hyp_to_receive_events(hyp_id)
            elif action['type'] in ['del_hyp_to_receive_events']:
                hyp_id = action['hyp_id']
                self.del_hyp_to_receive_events(hyp_id)
            elif action['type'] == 'stop_thread':
                self.stop = True
            else:
                logs.status.error('type action {} not supported'.format(action['type']))
        except queue.Empty:
            pass
        except Exception as e:
            log.error('Exception in ThreadHypEvents main loop: {}'.format(e))
            log.error('Action: {}'.format(pprint.pformat(action)))
            log.error('Traceback: {}'.format(traceback.format_exc()))
            return False
    self.stop_event_loop[0] = True
    # Busy-wait until the native event loop thread has terminated.
    while self.thread_event_loop.is_alive():
        pass
def printAttr(a, console=False):
    '''Dump an object's attributes. With console=True, print aligned
    index/name/value rows to stdout; otherwise render an HTML table from
    the file/attr.html template and open it in the browser.
    Dunder attributes that are callable (and take no args) are also
    invoked so their return value can be shown.'''
    d = py.dir(a)
    if console:
        # Column-width format strings sized to the longest name / index.
        sk = '%-{0}s'.format(maxLen(d))
        si = '%-{0}s'.format(len(py.len(d)))
        for i, k in py.enumerate(d):
            print si % i, sk % k, py.eval('a.{0}'.format(k))
        return
    # One table row per attribute: index, name, value, value-length.
    sh = '''<tr>
    <td>{0}</td>
    <td id="n">{1}</td>
    <td><textarea>{2}</textarea></td>
    <td>{3}</td>
    </tr>'''
    sp = getModPath() + 'file/attr.html'
    r = ''; v = ''; vi = -1
    for i, k in py.enumerate(d):
        try:
            v = py.eval('a.{0}'.format(k))
            vi = len(v)
            # Zero-arg dunder callables: show "callable == result".
            if py.callable(v) and k.startswith('__'):
                vv = '!ErrGetV()'
                try:
                    vv = v()
                except:
                    pass
                v = '{0} == {1}'.format(v, vv)
            if type(v) is not str:
                import pprint
                v = pprint.pformat(v)
        except Exception as e:
            v = py.repr(e)
        r += sh.format(i, k, v, vi)
    # cdt('QPSU')
    import T
    name = gst + 'QPSU/' + T.filename(getObjName(a)) + '.html'
    # Substitute the rows into the template and open the result.
    write(name, read(sp).replace('{result}', r))
    browser(name)
def assert_orcid_push_task_successful(orcid, recid, timeout):
    """Poll Flower for a recent orcid_push celery task and wait for success.

    :param orcid: ORCID identifier the task was pushed for
    :param recid: record id the task was pushed for
    :param timeout: seconds to wait for the task to reach SUCCESS
    :return: True when the task succeeds within `timeout`; False when no
        recent task is found, the task fails, or the wait times out.
    """
    searcher = FlowerOrcidTasksSearcher(orcid, recid)
    # Search for a task with that orcid, recid and not older than 1 min.
    # Do it for 1 minute.
    start_time = time.time()
    while time.time() < start_time + 60:
        if searcher.search(max_age=60):
            # The task is valid if its ts is not older than 1 minute.
            break
        print('Recent celery orcid_push task not found for orcid={}, recid={}'.format(orcid, recid))
        time.sleep(2)
    if not searcher.result:
        return False
    # Wait for the task to be successful.
    start_time = time.time()
    while time.time() < start_time + timeout:
        # state: FAILURE.
        if searcher.is_updated_result_state_unsuccessful():
            print('Celery task id={} failed:\n{}'.format(
                searcher.result['uuid'], pprint.pformat(searcher.result)
            ))
            return False
        # state: SUCCESS.
        if searcher.is_updated_result_state_successful():
            # Bug fix: the original passed two args to a one-placeholder
            # format string, silently dropping the state; include it.
            print('Celery task id={} successful (state={})'.format(
                searcher.result['uuid'], searcher.result['state']
            ))
            return True
        # state: other.
        print('Celery task id={} not successful yet (state={})'.format(
            searcher.result['uuid'], searcher.result['state']
        ))
        time.sleep(2)
    return False
login=conf['alertSmtpUser'], password=conf['alertSmtpPwd'], smtpserver=conf['alertSmtp'] + ':' + str(conf['alertSmtpPort'])) time.sleep(monitor_wait_after_email) ts_file=open('/tmp/reboot.ts','w') ts_file.write(str(now)); ts_file.close; sys.exit(1) # reboot client = CgminerClient(cgminer_host, cgminer_port) result = client.command('asccount', None) if result: dev = result['ASCS'][0]['Count'] if conf['miningExpDev'] > dev: output = 'ERROR - device count: ' + str(dev) + '\n\n' + pprint.pformat(result) print output if conf['alertEnable']: SendEmail( from_addr='*****@*****.**', to_addr_list=[conf['alertEmailTo']], cc_addr_list=[], subject='Scripta Reboot [' + conf['alertDevice'] + ']', message=output, login=conf['alertSmtpUser'], password=conf['alertSmtpPwd'], smtpserver=conf['alertSmtp'] + ':' + str(conf['alertSmtpPort'])) time.sleep(monitor_wait_after_email) ts_file=open('/tmp/reboot.ts','w') ts_file.write(str(now)); ts_file.close;
def save_glpi_file(file_path, content):
    """Pretty-print `content` to `file_path`, followed by a newline.

    Uses open() with a context manager: the Py2-only file(...) call leaked
    the handle until GC and does not exist on Py3.
    """
    with open(file_path, 'w') as f:
        f.write('%s\n' % pprint.pformat(content))
smtpserver=conf['alertSmtp'] + ':' + str(conf['alertSmtpPort'])) time.sleep(monitor_wait_after_email) ts_file = open('/tmp/reboot.ts', 'w') ts_file.write(str(now)) ts_file.close sys.exit(1) # reboot client = CgminerClient(cgminer_host, cgminer_port) result = client.command('asccount', None) if result: dev = result['ASCS'][0]['Count'] if conf['miningExpDev'] > dev: output = 'ERROR - device count: ' + str( dev) + '\n\n' + pprint.pformat(result) print output if conf['alertEnable']: SendEmail(from_addr='*****@*****.**', to_addr_list=[conf['alertEmailTo']], cc_addr_list=[], subject='Scripta Reboot [' + conf['alertDevice'] + ']', message=output, login=conf['alertSmtpUser'], password=conf['alertSmtpPwd'], smtpserver=conf['alertSmtp'] + ':' + str(conf['alertSmtpPort'])) time.sleep(monitor_wait_after_email) ts_file = open('/tmp/reboot.ts', 'w') ts_file.write(str(now))
newBook['binding'] = None else: newBook['binding'] = desc.find('span',attrs={'class':'book-meta book-binding'}).text.strip('Binding ') if (desc.find('span',attrs={'class':'book-title'}) == None): newBook['title'] = None else: newBook['title'] = desc.find('span',attrs={'class':'book-title'}).text pricestuff = booktable.find('td',attrs={'class':'book-pref'}) if(pricestuff == None or pricestuff.find('span', attrs={'class':'book-price-list'}) == None): newBook['broncoListPrice'] = None else: newBook['broncoListPrice'] = float(pricestuff.find('span', attrs={'class':'book-price-list'}).text.strip('$')) newSection['books'].append(newBook) newCourse['sections'].append(newSection) department['courses'].append(newCourse) newQuarter['departments'] = departmentList quarterList.append(newQuarter) import time import difflib import pprint data = pickle.load(open('courseData.txt')) result = '\n'.join(difflib.unified_diff(pprint.pformat(data).splitlines(), pprint.pformat(quarterList).splitlines())) if result: pickle.dump(quarterList, open('courseData.txt','w'))
def __str__(self):
    """Return the pretty-printed representation of self.data."""
    # Bug fix: the original computed pformat() but never returned it, so
    # __str__ returned None and str(obj) raised TypeError.
    return pprint.pformat(self.data)
def get_whois(url):
    """Look up the WHOIS record for `url` and return it pretty-printed."""
    record = pythonwhois.get_whois(url)
    return pprint.pformat(record)
def __str__(self):
    """Render the status line, one 'Name: value' line per header, then the
    pretty-printed body."""
    parts = [self.statusmessage + '\n']
    for hdr in self.headers:
        parts.append(hdr[0] + ': ' + hdr[1] + '\n')
    parts.append(pprint.pformat(self.body))
    return ''.join(parts)
pricestuff = booktable.find('td', attrs={'class': 'book-pref'}) if (pricestuff == None or pricestuff.find( 'span', attrs={'class': 'book-price-list'}) == None): newBook['broncoListPrice'] = None else: newBook['broncoListPrice'] = float( pricestuff.find('span', attrs={ 'class': 'book-price-list' }).text.strip('$')) newSection['books'].append(newBook) newCourse['sections'].append(newSection) department['courses'].append(newCourse) newQuarter['departments'] = departmentList quarterList.append(newQuarter) import time import difflib import pprint data = pickle.load(open('courseData.txt')) result = '\n'.join( difflib.unified_diff( pprint.pformat(data).splitlines(), pprint.pformat(quarterList).splitlines())) if result: pickle.dump(quarterList, open('courseData.txt', 'w'))
def get_current_values(self):
    """Return a snapshot of the current values container."""
    self.logger.debug('get_current_values called')
    cv = self.__current_values.get()
    # Bug fix: the original passed pformat(cv) as a lazy logging arg to a
    # format string with no placeholder, so the value was never rendered
    # (logging reported a formatting error instead).
    self.logger.debug('current_values = %s', pprint.pformat(cv))
    return cv
##フォルダ作成 folder_name = ma.make_folder(youtube_url, path='') ##出力 print("フォルダ作成:", folder_name) ##動画ダウンロード youtube = YouTube(youtube_url).streams.first().download(folder_name) ###コメント取得 comment_data = [] comment_data = get_comment.get_chat_replay_data(video_id) ##テキストに保存 comment_data_path = folder_name + '/' + 'コメント.txt' with open(comment_data_path, mode='w', encoding="utf-8") as f: f.writelines(pprint.pformat(comment_data)) ###特定の単語の頻度を抽出 word = "w|W|草|笑|すごい|!" ##特定の単語 if word == '': time = ma.get_time(comment_data) else: time = ma.get_word_time(comment_data, word) ###秒数の単位を指定 count = 10 #10秒ごと time = np.array(time) time_10 = time / count for x in range(len(time)): time_10[x] = int(time_10[x]) * count time_10co = collections.Counter(time_10)
def stuff(SLACK_MUSIC_CHANNEL_ID=None, SLACK_TOKEN=None, SPOTIFY_CLIENT_ID=None,
          SPOTIFY_CLIENT_SECRET=None, SPOTIFY_USERNAME=None, SPOTIFY_PLAYLIST_ID=None):
    # Sync music links posted in a Slack channel into a Spotify playlist:
    # scan the channel history for Spotify/YouTube URLs, then append any
    # Spotify tracks not already in the playlist.
    links = set()
    youtube = 0
    spotify = 0
    log("Grabbing conversation history from Slack")
    wc = slack.WebClient(token=SLACK_TOKEN)
    response = wc.conversations_history(channel=SLACK_MUSIC_CHANNEL_ID, oldest=0)
    messages = []
    # Flatten the paged response, oldest-first within each page.
    for data in response:
        sorted_messages = reversed(data.get('messages', []))
        for message in sorted_messages:
            messages.append(message)
    for msg in messages:
        # Slack wraps URLs as <https://...|label>; recover the bare URL.
        urls = re.findall(r'<https://(.*)>', msg.get('text'))
        for url in urls:
            restored_url = "https://{}".format(
                url.split('|')[0]).split(' ')[0].strip()
            if 'spotify.com' in restored_url and '/track/' in restored_url:
                spotify += 1
            elif 'youtube.com' in url or 'youtu.be' in restored_url:
                youtube += 1
            else:
                continue
            links.add(restored_url)
    log("Authenticating with Spotify")
    token = spotipy.util.prompt_for_user_token(
        SPOTIFY_USERNAME,
        client_id=SPOTIFY_CLIENT_ID,
        client_secret=SPOTIFY_CLIENT_SECRET,
        redirect_uri='http://localhost/',
        scope='playlist-modify-private')
    sp = spotipy.Spotify(auth=token)
    log("Authenticated")
    # playlist = sp.playlist(SPOTIFY_PLAYLIST_ID)
    # Track ids are the last path segment of open.spotify.com/track/<id>.
    tracks = []
    for link in links:
        parsed = urlparse(link)
        if parsed.netloc == 'open.spotify.com':
            track_id = parsed.path.split('/')[-1:][0]
            tracks.append(track_id)
    # Page through the existing playlist to avoid duplicate adds.
    offset = 0
    limit = 100
    pl_track_ids = []
    while True:
        pl_tracks = sp.playlist_tracks(SPOTIFY_PLAYLIST_ID, limit=limit, offset=offset)
        for pl_track in pl_tracks['items']:
            pl_track_ids.append(pl_track['track']['id'])
        offset += limit
        if len(pl_tracks['items']) < limit:
            break
    log("Playlist currently has %d tracks" % len(pl_track_ids))
    log("Found %d tracks in Slack history" % len(tracks))
    log("Updating Spotify playlist with new tracks")
    for track in tracks:
        if track in pl_track_ids:
            continue
        try:
            resp = sp.user_playlist_add_tracks(user=SPOTIFY_USERNAME,
                                               playlist_id=SPOTIFY_PLAYLIST_ID,
                                               tracks=[track])
            # Throttle to stay under the API rate limit.
            time.sleep(1)
        except Exception as ex:
            error("Hit an exception on: https://open.spotify.com/track/{}".
                  format(track))
            error(pprint.pformat(ex))
if (not first): continue print first for j in l[2:-1]: freq_match = freq_pattern.search(j) if (freq_match): letter, freq = freq_match.groups([1, 2]) letter = normalize(letter) if (letter): key = first + letter if (len(key) == NGRAM_LENGTH): if (final_dict.has_key(key)): final_dict[key] += int(freq) else: final_dict[key] = int(freq) else: pass # print "ERROR:", j # pprint(final_dict) for i in final_dict: final_dict[i] /= float(564532247) final_arr = zip(final_dict.keys(), final_dict.values()) # pprint(sorted(final_arr, key = lambda x: -x[1])[:10000]) f2 = open("asd", "w") f2.write(pprint.pformat(sorted(final_arr, key = lambda x: -x[1])[:10000])) f2.close() input()
def __repr__(self):
    """Debug view: pretty-printed tables of the underlying excel object."""
    tables = self.excelobj.tables
    return pprint.pformat(tables)
def __repr__(self):
    """Debug view: pretty-printed self.data."""
    snapshot = self.data
    return pprint.pformat(snapshot)