def callback_audio(toxav, friend_number, samples, audio_samples_per_channel, audio_channels_count, rate, user_data):
    """New audio chunk"""
    print audio_samples_per_channel, audio_channels_count, rate
    Profile.get_instance().call.chunk(
        ''.join(chr(x) for x in samples[:audio_samples_per_channel * 2 * audio_channels_count]),
        audio_channels_count,
        rate)
def call_state(toxav, friend_number, mask, user_data):
    """New call state"""
    print friend_number, mask
    if mask == TOXAV_FRIEND_CALL_STATE['FINISHED'] or mask == TOXAV_FRIEND_CALL_STATE['ERROR']:
        invoke_in_main_thread(Profile.get_instance().stop_call, friend_number, True)
    else:
        Profile.get_instance().call.toxav_call_state_cb(friend_number, mask)
def main():
    # Test file path
    #path = "input/MC_SigBkgElectrons_2M.h5"
    path = "output/data_test_00000000.h5"
    dataset_name = 'egamma'

    # Read entire HDF5 file
    """
    with Profile("Read entire file"):
        with h5py.File(path, 'r') as hf:
            data_h5 = hf[dataset_name][:]
            pass
        pass
    #"""

    # Read batched HDF5 file
    with Profile("Read batched file"):
        directory = 'input/'
        paths = sorted(glob.glob('output/data_test_0000000*.h5.gz'))
        batch_size = 1024
        maxsize = 8
        with Generator(paths, dataset_name, batch_size, maxsize) as generator:
            with Profile("Looping"):
                for step, batch in enumerate(generator):
                    # Do something...
                    pass
                pass
            pass
        pass

    return 0
def createGraph(name, canvas):
    canvas.delete('all')
    #time.sleep(1)
    results = find_relations(name)
    profile = Profile(canvas, name=name, score=int(personal_score(name)), size=150)  # 韓國瑜
    canvas_profile = canvas.create_window(base_x, base_y, window=profile)
    for i in range(len(results)):
        result = results[i]
        x, y = profile_offset[i]
        profile = Profile(canvas, name=result['name'], score=int(personal_score(result['name'])))
        # canvas.create_window(base_x + x, base_y + y, window=profile)
        x_0, y_0, x_1, y_1 = line_offset[i]
        canvas.create_line(base_x + x_0, base_y + y_0, base_x + x_1, base_y + y_1, fill="#476042", width=3)
        x, y = docList_offset[i]
        docList = DocList(canvas, related_docs=result['related_docs'])
        canvas_docList1 = canvas.create_window(base_x + x, base_y + y, window=docList)
def __init__(self, prefix=False):
    Profile.__init__(self, prefix)
    self.name = 'unix'

    self.gcc_flags = ['-I%{prefix}/include']
    self.ld_flags = ['-L%{prefix}/lib']

    try:
        self.gcc_flags.extend(self.gcc_extra_flags)
    except:
        pass

    try:
        self.ld_flags.extend(self.ld_extra_flags)
    except:
        pass

    self.env.set('PATH', ':',
                 '%{prefix}/bin',
                 '/usr/bin',
                 '/bin',
                 '/usr/local/git/bin')
    self.env.set('C_INCLUDE_PATH', '%{prefix}/include')
    self.env.set('LD_LIBRARY_PATH', '%{prefix}/lib')
    self.env.set('ACLOCAL_FLAGS', '-I%{prefix}/share/aclocal')
    self.env.set('PKG_CONFIG_PATH', ':',
                 '%{prefix}/lib/pkgconfig',
                 '%{prefix}/share/pkgconfig')
    self.env.set('XDG_CONFIG_DIRS', '%{prefix}/etc/xdg')
    self.env.set('XDG_DATA_DIRS', '%{prefix}/share')
    self.env.set('XDG_CONFIG_HOME', '$HOME/.config')
def fetch_profile(profileId):
    profile = Profile(str(profileId))
    playerPage = get_full_page_stats(profile.hotslogsId)
    profile.heroes = get_heroes_stats(playerPage)
    profile.maps = get_map_stats(playerPage)
    profile.enemies = get_winrates_against_heroes(playerPage)
    return profile.toJSON()
def attach(self, bockbuild):
    Profile.attach(self, bockbuild)
    self.name = 'unix'
    self.env = Environment(self)

    self.staged_prefix = bockbuild.staged_prefix
    self.toolchain_root = bockbuild.toolchain_root

    self.gcc_flags = ['-I%s/include' % self.staged_prefix]
    self.ld_flags = ['-L%s/lib' % self.staged_prefix]

    self.env.set('BUILD_PREFIX', '%{prefix}')

    self.env.set('PATH', ':',
                 '%{toolchain_root}/bin',
                 '%{staged_prefix}/bin',
                 '/usr/bin',
                 '/bin',
                 '/usr/local/git/bin')

    self.env.set('C_INCLUDE_PATH', '%{staged_prefix}/include')
    #self.env.set('LD_LIBRARY_PATH', '%{staged_prefix}/lib')
    self.env.set('ACLOCAL_FLAGS', '-I%{staged_prefix}/share/aclocal')
    self.env.set('PKG_CONFIG_PATH', ':',
                 '%{staged_prefix}/lib/pkgconfig',
                 '%{staged_prefix}/share/pkgconfig',
                 '%{toolchain_root}/lib/pkgconfig',
                 '%{toolchain_root}/share/pkgconfig')
    self.env.set('XDG_CONFIG_DIRS', '%{staged_prefix}/etc/xdg')
    self.env.set('XDG_DATA_DIRS', '%{staged_prefix}/share')
    self.env.set('XDG_CONFIG_HOME', '$HOME/.config')
def index(user):
    user_count = Profile.objects(user_id=user[0]).count()
    role = Role.objects(name="Citizen").first()
    channel = Channel.objects(slug=map_channel(user[4])).first()
    if user_count == 0:
        #channels = []
        coordinates = str(user[11]) + "," + str(user[12])
        profile = Profile(user_id=int(user[0]),
                          full_name=user[1],
                          sign_up_with="mobile_number",
                          sign_up_ip_address=str(ipaddress.IPv4Address(user[2])),
                          avatar=str(user[3]),
                          roles=[role.id],
                          channels=[{"id": channel.id,
                                     "slug": channel.slug,
                                     "mac_address": user[5],
                                     "sign_up": True,
                                     "device_token": user[6],
                                     "last_login_at": user[7],
                                     "settings": {"email_notifications_preferred": True,
                                                  "sms_notifications_preferred": True,
                                                  "push_notifications_preferred": True}}],
                          locations=[{"app_name": channel.app_name,
                                      "location": {"name": user[8],
                                                   "coordinates": coordinates,
                                                   "ward_id": user[9],
                                                   "city_id": user[10]}}])
        profile.save()
        print('User: '******' indexed')
    else:
        print('User: '******' already exists')
    return True
class Login(flask.views.MethodView):

    def __init__(self):
        self.users = Users()
        self.profile = Profile()

    def get(self):
        if 'email' in session:
            return self.profile.get()
        else:
            return flask.render_template('login.html')

    def post(self):
        print request.form['username']
        session['email'] = request.form['username']
        success = self.users.loginCheck(request.form['username'], request.form['password'])
        print success
        if(success):
            #r = requests.get('http://localhost/profile')
            return self.profile.get()
            #Response(
            #    r.text,
            #    status=r.status_code,
            #    content_type=r.headers['content-type'],
            #    )
        else:
            return flask.render_template('login.html')
def search(self):
    Profile.get_instance().update()
    text = self.search_text.text()
    friend = Profile.get_instance().get_curr_friend()
    if text and friend and util.is_re_valid(text):
        index = friend.search_string(text)
        self.load_messages(index)
def closeEvent(self, event):
    settings = Settings.get_instance()
    settings['theme'] = str(self.themeSelect.currentText())
    settings['smileys'] = self.smileys.isChecked()
    if settings['mirror_mode'] != self.mirror_mode.isChecked():
        settings['mirror_mode'] = self.mirror_mode.isChecked()
        msgBox = QtGui.QMessageBox()
        text = QtGui.QApplication.translate("interfaceForm", 'Restart app to apply settings', None,
                                            QtGui.QApplication.UnicodeUTF8)
        msgBox.setWindowTitle(QtGui.QApplication.translate("interfaceForm", 'Restart required', None,
                                                           QtGui.QApplication.UnicodeUTF8))
        msgBox.setText(text)
        msgBox.exec_()
    settings['smiley_pack'] = self.smiley_pack.currentText()
    smileys.SmileyLoader.get_instance().load_pack()
    language = self.lang_choose.currentText()
    if settings['language'] != language:
        settings['language'] = language
        text = self.lang_choose.currentText()
        path = Settings.supported_languages()[text]
        app = QtGui.QApplication.instance()
        app.removeTranslator(app.translator)
        app.translator.load(curr_directory() + '/translations/' + path)
        app.installTranslator(app.translator)
    settings['message_font_size'] = self.messages_font_size.currentIndex() + 10
    Profile.get_instance().update()
    settings.save()
def _exec_main(parser, values):
    sconsflags = os.environ.get('SCONSFLAGS', '')
    all_args = sconsflags.split() + sys.argv[1:]

    options, args = parser.parse_args(all_args, values)

    if isinstance(options.debug, list) and "pdb" in options.debug:
        import pdb
        pdb.Pdb().runcall(_main, parser)
    elif options.profile_file:
        # compat layer imports "cProfile" for us if it's available.
        from profile import Profile

        # Some versions of Python 2.4 shipped a profiler that had the
        # wrong 'c_exception' entry in its dispatch table.  Make sure
        # we have the right one.  (This may put an unnecessary entry
        # in the table in earlier versions of Python, but its presence
        # shouldn't hurt anything).
        try:
            dispatch = Profile.dispatch
        except AttributeError:
            pass
        else:
            dispatch['c_exception'] = Profile.trace_dispatch_return

        prof = Profile()
        try:
            prof.runcall(_main, parser)
        except SConsPrintHelpException, e:
            prof.dump_stats(options.profile_file)
            raise e
        except SystemExit:
            pass
def read_profile(file_name):
    f = open(file_name, "r")
    profile = Profile()
    for line in f:
        profile.process_sample(line)
    return profile
def file_chunk_request(tox, friend_number, file_number, position, size, user_data):
    """
    Outgoing chunk
    """
    Profile.get_instance().outgoing_chunk(friend_number, file_number, position, size)
def _parse_digital_io(self, profile):
    '''
    {
        "gpio_id":{"type":"gpio","path":"AXI4_GPIO_0","ipcore":"Axi4Gpio","gpio_number":"0"},
    }
    '''
    ioes = Profile.get_ioes()
    digital_io_id = profile['id']
    ioes[digital_io_id] = dict()
    ioes[digital_io_id] = copy.deepcopy(profile)
    ioes[digital_io_id].pop("id")

    if 'type' in profile.keys() and profile['type'] == 'gpio':
        initconfig = Profile.get_initconfig()
        initconfig['gpio'] = initconfig.setdefault('gpio', dict())
        gpio_number = int(profile['gpio_number'])
        dir_value = {'input': 1, 'output': 0}
        dire = (gpio_number, dir_value[ioes[digital_io_id].pop("dir")])
        initconfig['gpio'][digital_io_id] = dict(dir=dire)
        if 'default' in profile.keys():
            value = (gpio_number, int(ioes[digital_io_id].pop("default")))
            initconfig['gpio'][digital_io_id]['value'] = value
def __init__(self, conn):
    self.conn = conn
    self.suitecursor = SuiteCursor(self.conn)
    self.aptsrc = AptSourceHandler(self.conn)
    self.main_path = None
    self.profile = Profile(self.conn)
    self.family = Family(self.conn)
def set_avatar(self):
    name = QtGui.QFileDialog.getOpenFileName(self, 'Open file', None, 'Image Files (*.png)')
    print name
    if name[0]:
        with open(name[0], 'rb') as f:
            data = f.read()
        Profile.get_instance().set_avatar(data)
def attach(self, bockbuild):
    Profile.attach(self, bockbuild)
    self.name = 'unix'
    self.env = Environment(self)

    self.staged_prefix = bockbuild.staged_prefix
    self.toolchain_root = bockbuild.toolchain_root

    self.gcc_flags = ['-I%s/include' % self.staged_prefix]
    self.ld_flags = ['-L%s/lib' % self.staged_prefix]
    self.configure_flags = []

    self.env.set('bockbuild version', git_shortid(bockbuild, bockbuild.root))
    self.env.set('BUILD_PREFIX', '%{prefix}')

    self.env.set('PATH', ':',
                 '%{toolchain_root}/bin',
                 '%{staged_prefix}/bin',
                 '/usr/bin',
                 '/bin',
                 '/usr/local/git/bin')

    self.env.set('C_INCLUDE_PATH', '%{staged_prefix}/include')
    #self.env.set('LD_LIBRARY_PATH', '%{staged_prefix}/lib')
    self.env.set('ACLOCAL_FLAGS', '-I%{staged_prefix}/share/aclocal')
    self.env.set('PKG_CONFIG_PATH', ':',
                 '%{staged_prefix}/lib/pkgconfig',
                 '%{staged_prefix}/share/pkgconfig',
                 '%{toolchain_root}/lib/pkgconfig',
                 '%{toolchain_root}/share/pkgconfig')
    self.env.set('XDG_CONFIG_DIRS', '%{staged_prefix}/etc/xdg')
    self.env.set('XDG_DATA_DIRS', '%{staged_prefix}/share')
    self.env.set('XDG_CONFIG_HOME', '$HOME/.config')
def on_profile_clicked(self):
    self.setVisible(False)
    profile_win = self.wm.get_window('profile')
    if profile_win is None:
        profile_win = Profile(self.wm, self.user)
    else:
        profile_win.setVisible(True)
def runProfiler(logger, func, args=tuple(), kw={}, verbose=True, nb_func=25, sort_by=('time',), nb_cal=0):
    """
    Run a function in a profiler and then display the functions sorted by time.
    """
    profile_filename = "/tmp/profiler"
    prof = Profile(bias=calibrate(nb_cal))
    try:
        logger.warning("Run profiler")
        result = prof.runcall(func, *args, **kw)
        logger.error("Profiler: Process data...")
        prof.dump_stats(profile_filename)
        stat = pstats.Stats(prof)
        stat.strip_dirs()
        stat.sort_stats(*sort_by)

        logger.error("Profiler: Result:")
        log = StringIO()
        stat.stream = log
        stat.print_stats(nb_func)
        log.seek(0)
        for line in log:
            logger.error(line.rstrip())
        return result
    finally:
        unlink(profile_filename)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--num_profiles', type=int, default=5,
                        help='There are a maximum of 5 profiles')
    parser.add_argument('--save_history', type=str2bool, default='t',
                        help='t/f: whether or not to save the browsing history from this session')
    args = parser.parse_args()
    num_profiles = args.num_profiles
    save_history = args.save_history

    start = time.time()
    # run this each day for a week
    for i in range(num_profiles):
        profile_num = i + 1
        print('$$$$$$$$$$$$$$$$$$')
        print('PROFILE ' + str(profile_num) + '\n $$$$$$$$$$$$$$$$')
        print('Saving History: ' + str(save_history))
        current_profile = Profile(profile_num)
        current_profile.visit_sites(save_history=save_history)
        time.sleep(15)
        print('\n \n')
    end = time.time()
    print(end - start)  # including time.sleep()'s, one run took about 600s (10 min)
def __init__(self, prefix=False):
    Profile.__init__(self, prefix)
    self.name = 'unix'

    self.gcc_flags = ['-I%s/include' % self.staged_prefix]
    self.ld_flags = ['-L%s/lib' % self.staged_prefix]

    self.env.set('PATH', ':',
                 '%{toolchain_root}/bin',
                 '%{staged_prefix}/bin',
                 '/usr/bin',
                 '/bin',
                 '/usr/local/git/bin')

    self.env.set('C_INCLUDE_PATH', '%{staged_prefix}/include')
    #self.env.set('LD_LIBRARY_PATH', '%{staged_prefix}/lib')
    self.env.set('ACLOCAL_FLAGS', '-I%{staged_prefix}/share/aclocal')
    self.env.set('PKG_CONFIG_PATH', ':',
                 '%{staged_prefix}/lib/pkgconfig',
                 '%{staged_prefix}/share/pkgconfig',
                 '%{toolchain_root}/lib/pkgconfig',
                 '%{toolchain_root}/share/pkgconfig')
    self.env.set('XDG_CONFIG_DIRS', '%{staged_prefix}/etc/xdg')
    self.env.set('XDG_DATA_DIRS', '%{staged_prefix}/share')
    self.env.set('XDG_CONFIG_HOME', '$HOME/.config')
def keyPressEvent(self, event):
    if event.matches(QtGui.QKeySequence.Paste):
        mimeData = QtWidgets.QApplication.clipboard().mimeData()
        if mimeData.hasUrls():
            for url in mimeData.urls():
                self.pasteEvent(url.toString())
        else:
            self.pasteEvent()
    elif event.key() in (QtCore.Qt.Key_Return, QtCore.Qt.Key_Enter):
        modifiers = event.modifiers()
        if modifiers & QtCore.Qt.ControlModifier or modifiers & QtCore.Qt.ShiftModifier:
            self.insertPlainText('\n')
        else:
            if self.timer.isActive():
                self.timer.stop()
            self.parent.profile.send_typing(False)
            self.parent.send_message()
    elif event.key() == QtCore.Qt.Key_Up and not self.toPlainText():
        self.appendPlainText(Profile.get_instance().get_last_message())
    elif event.key() == QtCore.Qt.Key_Tab and not self.parent.profile.is_active_a_friend():
        text = self.toPlainText()
        pos = self.textCursor().position()
        self.insertPlainText(Profile.get_instance().get_gc_peer_name(text[:pos]))
    else:
        self.parent.profile.send_typing(True)
        if self.timer.isActive():
            self.timer.stop()
        self.timer.start(5000)
        super(MessageArea, self).keyPressEvent(event)
def prepare(self):
    """
    prepare new root system suitable to create an initrd from it
    """
    self.__load_boot_xml_description()
    boot_image_name = self.boot_xml_state.xml_data.get_name()

    self.__import_system_description_elements()

    log.info('Preparing boot image')
    system = System(
        xml_state=self.boot_xml_state,
        root_dir=self.boot_root_directory,
        allow_existing=True
    )
    manager = system.setup_repositories()
    system.install_bootstrap(manager)
    system.install_system(manager)

    profile = Profile(self.boot_xml_state)
    profile.add('kiwi_initrdname', boot_image_name)

    defaults = Defaults()
    defaults.to_profile(profile)

    setup = SystemSetup(
        self.boot_xml_state,
        self.__boot_description_directory(),
        self.boot_root_directory
    )
    setup.import_shell_environment(profile)
    setup.import_description()
    setup.import_overlay_files(follow_links=True)
    setup.call_config_script()

    system.pinch_system(manager=manager, force=True)

    setup.call_image_script()
    setup.create_init_link_from_linuxrc()
def profile():
    profile = request.form['username']
    if profile == current_user.username:
        return redirect(url_for('profile_page'))
    post = Profile(profile)
    post.read_from_db()
    return render_template("profile.html", title='Profile', posts=post)
def process_package(self, package):
    failure_count = 0

    def staging_harness(path, func, failure_count=failure_count):
        def relocate_to_profile(token):
            if token.find(package.staged_prefix) == -1 and token.find(package.staged_profile) == -1:
                newtoken = token.replace(package.package_prefix, package.staged_profile)
            else:
                newtoken = token.replace(package.staged_prefix, package.staged_profile)
            if newtoken != token:
                package.trace('%s:\n\t%s\t->\t%s' % (os.path.basename(path), token, newtoken))
            return newtoken

        if (path.endswith('.release')):
            error("Staging backup exists in dir we're trying to stage: %s" % path)

        backup = path + '.release'
        shutil.copy2(path, backup)
        try:
            trace('Staging %s' % path)
            func(path, relocate_to_profile)
            if os.path.exists(path + '.stage'):
                os.remove(path)
                shutil.move(path + '.stage', path)
                shutil.copystat(backup, path)
        except CommandException as e:
            package.rm_if_exists(path)
            shutil.copy2(backup, path)
            package.rm(backup)
            warn('Staging failed for %s' % os.path.basename(path))
            error(str(e))
            failure_count = failure_count + 1
            if failure_count > 10:
                error('Possible staging issue, >10 staging failures')

    extra_files = [
        os.path.join(package.staged_prefix, expand_macros(file, package))
        for file in package.extra_stage_files
    ]

    procs = []
    if package.name in self.debug_info:
        procs.append(self.generate_dsyms())
    procs.append(self.stage_textfiles(harness=staging_harness,
                                      match=match_stageable_text,
                                      extra_files=extra_files))
    procs.append(self.stage_binaries(harness=staging_harness,
                                     match=match_stageable_binary))

    Profile.postprocess(self, procs, package.staged_prefix)
def file_recv_control(tox, friend_number, file_number, file_control, user_data):
    """
    Friend cancelled, paused or resumed file transfer
    """
    if file_control == TOX_FILE_CONTROL['CANCEL']:
        Profile.get_instance().cancel_transfer(friend_number, file_number, True)
def test_loading_simple_profiles(self, fn):
    p = Profile()
    with _open_t27(fn) as fd:
        p.load_simc(fd)
    self.assertNotEqual(p.player_class, '')
    self.assertTrue(p.baseline.valid_weapons(p.player_class, p.spec))
    self.assertTrue(p.baseline.valid_loadout())
def __init__(self, name, CertClass):
    Profile.__init__(self, name, CertClass)
    self._certs_by_subj.set_filename("users/" + str(name) + "/certs.subj")
    self._certs_by_issuer.set_filename("users/" + str(name) + "/certs.issuer")
    # overload meaning of FileCertifications here to store user-profile.
    self.info = FileCertifications()
    self.info.set_filename("users/" + str(name) + "/profile")
def __init__(self):
    self.profile_list = [
        Profile('1', 'Pooja Jain', '23', 'Young', 'pooja', 'pooja'),
        Profile('2', 'Rakesh Shrivastav', '54', 'Old', 'rakesh', 'rakesh')
    ]
    self.startInterface()
def callback_audio(toxav, friend_number, samples, audio_samples_per_channel, audio_channels_count, rate, user_data):
    """
    New audio chunk
    """
    Profile.get_instance().call.audio_chunk(
        bytes(samples[:audio_samples_per_channel * 2 * audio_channels_count]),
        audio_channels_count,
        rate)
def unblock_user(self):
    if not self.comboBox.count():
        return
    title = QtGui.QApplication.translate("privacySettings", "Add to friend list", None,
                                         QtGui.QApplication.UnicodeUTF8)
    info = QtGui.QApplication.translate("privacySettings", "Do you want to add this user to friend list?", None,
                                        QtGui.QApplication.UnicodeUTF8)
    reply = QtGui.QMessageBox.question(None, title, info, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
    Profile.get_instance().unblock_user(self.comboBox.currentText(), reply == QtGui.QMessageBox.Yes)
    self.close()
def get_ep_profile(profiledb, edgepayload):
    combiner = Profile()
    streets = unpack_streets(edgepayload)
    for street in streets:
        combiner.add(get_street_profile(profiledb, street))
    return combiner.concat()  #reduce( lambda x,y:x+y, profile )
def callback_audio(toxav, friend_number, samples, audio_samples_per_channel, audio_channels_count, rate, user_data):
    """
    New audio chunk
    """
    Profile.get_instance().call.chunk(
        bytes(samples[:audio_samples_per_channel * 2 * audio_channels_count]),
        audio_channels_count,
        rate)
def build_profile(host, path):
    print("Profiling {0} with Host: {1}, Path: {2}".format(collection, host, path))
    bm_id = "H{0}P{1}".format(host, path)
    profile_id = "{0}-{1}".format(col_id, bm_id)
    profiling_start = time.time()
    p = Profile(name="{0}Hosts {1} Paths {2}".format(collection, host, path),
                description="{0} collection profile with maximum {1} host and {2} path segment(s).".format(collection, host, path),
                homepage="http://www.webarchive.org.uk/ukwa/",
                accesspoint="http://www.webarchive.org.uk/wayback/",
                memento_compliance="https://oduwsdl.github.io/terms/mementosupport#native",
                timegate="http://www.webarchive.org.uk/wayback/archive/",
                timemap="http://www.webarchive.org.uk/wayback/archive/timemap/link/",
                established="2004",
                profile_updated=time.strftime("%Y-%m-%dT%H:%M:%SZ"),
                mechanism="https://oduwsdl.github.io/terms/mechanism#cdx")
    cp = CDXProfiler(max_host_segments=host, max_path_segments=path, global_stats=True)
    cp.process_cdxes(sys.argv[1:])
    cdx_processing_done = time.time()
    cp.calculate_stats()
    stats_calculation_done = time.time()
    p.stats = cp.stats
    jsonstr = p.to_json()
    opf = "profile-{0}.json".format(profile_id)
    opfpath = os.path.join(bmdir, opf)
    write_json(jsonstr, filepath=opfpath, compress=True)
    profiling_done = time.time()
    bm = {
        "profile": opf,
        "collection": col_id,
        "max_host": host,
        "max_path": path,
        "cdx_size": cdx_size,
        "cdx_lines_total": cp.total_lines,
        "cdx_lines_skipped": cp.skipped_lines,
        "profile_size": os.path.getsize(opfpath),
        "profile_size_compressed": os.path.getsize(opfpath + ".gz"),
        "urir_count": p.stats["urir"],
        "urim_count": p.stats["urim"]["total"],
        "suburi_keys": len(p.stats["suburi"]),
        "time_keys": len(p.stats["time"]),
        "mediatype_keys": len(p.stats["mediatype"]),
        "language_keys": len(p.stats["language"]),
        "cdx_processing_time": cdx_processing_done - profiling_start,
        "stats_calculation_time": stats_calculation_done - cdx_processing_done,
        "profiling_time": profiling_done - profiling_start
    }
    all_bms["bms"][bm_id] = bm
    jsonstr = json.dumps(bm, sort_keys=True, indent=4, separators=(",", ": "))
    opf = "bm-{0}.json".format(profile_id)
    opfpath = os.path.join(bmdir, opf)
    write_json(jsonstr, filepath=opfpath)
def path(self, lat1, lng1, lat2, lng2, transfer_penalty=0, walking_speed=1.0, hill_reluctance=20,
         narrative=True, jsoncallback=None):
    t0 = time.time()
    origin = "osm-%s" % self.osmdb.nearest_node(lat1, lng1)[0]
    dest = "osm-%s" % self.osmdb.nearest_node(lat2, lng2)[0]
    endpoint_find_time = time.time() - t0

    print origin, dest

    t0 = time.time()
    wo = WalkOptions()
    #wo.transfer_penalty=transfer_penalty
    #wo.walking_speed=walking_speed
    wo.walking_speed = 4
    wo.walking_overage = 0
    wo.hill_reluctance = 20
    wo.turn_penalty = 15

    edgepayloads = self.ch.shortest_path(origin, dest, State(1, 0), wo)

    wo.destroy()

    route_find_time = time.time() - t0

    t0 = time.time()
    names = []
    geoms = []

    profile = Profile()
    total_dist = 0
    total_elev = 0

    if narrative:
        names, total_dist = get_full_route_narrative(self.osmdb, edgepayloads)

    for edgepayload in edgepayloads:
        geom, profile_seg = self.shortcut_cache.get(edgepayload.external_id)

        #geom = get_ep_geom( self.osmdb, edgepayload )
        #profile_seg = get_ep_profile( self.profiledb, edgepayload )

        geoms.extend(geom)
        profile.add(profile_seg)

    route_desc_time = time.time() - t0

    ret = json.dumps((names,
                      encode_pairs([(lat, lon) for lon, lat in geoms]),
                      profile.concat(300),
                      {'route_find_time': route_find_time,
                       'route_desc_time': route_desc_time,
                       'endpoint_find_time': endpoint_find_time},
                      {'total_dist': total_dist,
                       'total_elev': total_elev}))

    if jsoncallback:
        return "%s(%s)" % (jsoncallback, ret)
    else:
        return ret
def test_delete_profile(self):
    '''
    Test that a profile object can be deleted.
    '''
    self.new_profile.save_profile()
    test_profile = Profile("Test_Account", "meed")  # new test profile
    test_profile.save_profile()
    self.new_profile.delete_profile()
    self.assertEqual(len(Profile.profile), 1)
def test_t27_dk_unholy(self):
    p = Profile()
    with _open_t27('T27_Death_Knight_Unholy.simc') as fd:
        p.load_simc(fd)
    self.assertEqual(p.player_class, 'deathknight')
    self.assertEqual(p.profile_name, 'T27_Death_Knight_Unholy')
    self.assertEqual(p.spec, 'unholy')
    self.assertEqual(p.level, '60')
    self.assertEqual(p.race, 'troll')
    self.assertEqual(p.role, 'attack')
    self.assertEqual(p.position, 'back')
    self.assertEqual(p.covenant, 'necrolord')
    self.assertEqual(
        p.soulbind,
        'plague_deviser_marileth,'
        'volatile_solvent/eternal_hunger:9:1/convocation_of_the_dead:9:1/'
        'adaptive_armor_fragment:9:1/kevins_oozeling')
    self.assertEqual(p.renown, '80')
    self.assertEqual(p.potion, 'potion_of_spectral_strength')
    self.assertEqual(p.flask, 'spectral_flask_of_power')
    self.assertEqual(p.augmentation, 'veiled')
    self.assertEqual(p.temporary_enchant, 'main_hand:shaded_sharpening_stone')

    self.assertEqual(p.baseline.head.item_id, 186350)
    self.assertEqual(p.baseline.neck.item_id, 186379)
    self.assertEqual(p.baseline.shoulder.item_id, 186349)
    self.assertEqual(p.baseline.back.item_id, 173242)
    self.assertEqual(p.baseline.chest.item_id, 186347)
    self.assertIsNone(p.baseline.shirt)
    self.assertIsNone(p.baseline.tabard)
    self.assertEqual(p.baseline.wrist.item_id, 186351)
    self.assertEqual(p.baseline.hands.item_id, 186311)
    self.assertEqual(p.baseline.waist.item_id, 178734)
    self.assertEqual(p.baseline.legs.item_id, 178701)
    self.assertEqual(p.baseline.feet.item_id, 186353)
    self.assertEqual(p.baseline.main_hand.item_id, 186410)
    self.assertIsNone(p.baseline.off_hand)

    # Our ring/trinket assignment always puts the higher item ID first.
    # This is the reverse of what appears in the source data.
    self.assertEqual(p.baseline.finger1.item_id, 186377)
    self.assertEqual(p.baseline.finger2.item_id, 178869)
    self.assertEqual(p.baseline.trinket1.item_id, 186438)
    self.assertEqual(p.baseline.trinket2.item_id, 179350)

    self.assertTrue(p.baseline.valid_weapons(p.player_class, p.spec))
    self.assertTrue(p.baseline.valid_loadout())

    legendaries = p.baseline.shadowlands_legendaries()
    self.assertEqual(len(legendaries), 1)
    self.assertEqual(legendaries[0].item_id, 173242)

    self.assertEqual(len(p.baseline.weekly_rewards()), 0)

    self.assertEqual(p.class_spec, 'Unholy Death Knight')
    self.assertEqual(p.class_role, 'strattack')
def callback_audio(toxav, friend_number, samples, audio_samples_per_channel, audio_channels_count, rate, user_data):
    """New audio chunk"""
    print audio_samples_per_channel, audio_channels_count, rate
    Profile.get_instance().call.chunk(
        ''.join(chr(x) for x in samples[:audio_samples_per_channel * 2 * audio_channels_count]),
        audio_channels_count,
        rate)
def onEnter(self, e):
    name = str(self.control.GetValue())
    self.qrprofile = Profile(name)
    self.png = wx.Image(self.qrprofile.getFileName(), wx.BITMAP_TYPE_PNG).ConvertToBitmap()
    self.bitmap = wx.StaticBitmap(self, wx.ID_ANY, self.png, (10, 5), (150, 150))
    self.bitmap.SetPosition((0, 50))
    self.Update()
def file_recv_control(tox, friend_number, file_number, file_control, user_data):
    """
    Friend cancelled, paused or resumed file transfer
    """
    if file_control == TOX_FILE_CONTROL['CANCEL']:
        invoke_in_main_thread(Profile.get_instance().cancel_transfer, friend_number, file_number, True)
    elif file_control == TOX_FILE_CONTROL['PAUSE']:
        invoke_in_main_thread(Profile.get_instance().pause_transfer, friend_number, file_number, True)
    elif file_control == TOX_FILE_CONTROL['RESUME']:
        invoke_in_main_thread(Profile.get_instance().resume_transfer, friend_number, file_number, True)
def getFriends(ids, method):
    if method == 'list':
        info = vk.method('friends.get', {"user_id": ids})
        for i in range(len(info['items'])):
            print(Profile.getProfileInfo(info['items'][i]))
            sleep(1)
    elif method == 'online':
        info = vk.method('friends.getOnline', {"user_id": ids})
        for i in range(len(info)):
            print(Profile.getProfileInfo(info[i]))
            sleep(1)
def build_profile(host, path):
    print("Profiling {0} with Host: {1}, Path: {2}".format(collection, host, path))
    bm_id = "H{0}P{1}".format(host, path)
    profile_id = "{0}-{1}".format(col_id, bm_id)
    profiling_start = time.time()
    p = Profile(name="{0}Hosts {1} Paths {2}".format(collection, host, path),
                description="{0} collection profile with maximum {1} host and {2} path segment(s).".format(collection, host, path),
                homepage="http://www.webarchive.org.uk/ukwa/",
                accesspoint="http://www.webarchive.org.uk/wayback/",
                memento_compliance="https://oduwsdl.github.io/terms/mementosupport#native",
                timegate="http://www.webarchive.org.uk/wayback/archive/",
                timemap="http://www.webarchive.org.uk/wayback/archive/timemap/link/",
                established="2004",
                profile_updated=time.strftime("%Y-%m-%dT%H:%M:%SZ"),
                mechanism="https://oduwsdl.github.io/terms/mechanism#cdx")
    cp = CDXProfiler(max_host_segments=host, max_path_segments=path, global_stats=True)
    cp.process_cdxes(sys.argv[1:])
    cdx_processing_done = time.time()
    cp.calculate_stats()
    stats_calculation_done = time.time()
    p.stats = cp.stats
    jsonstr = p.to_json()
    opf = "profile-{0}.json".format(profile_id)
    opfpath = os.path.join(bmdir, opf)
    write_json(jsonstr, filepath=opfpath, compress=True)
    profiling_done = time.time()
    bm = {
        "profile": opf,
        "collection": col_id,
        "max_host": host,
        "max_path": path,
        "cdx_size": cdx_size,
        "cdx_lines_total": cp.total_lines,
        "cdx_lines_skipped": cp.skipped_lines,
        "profile_size": os.path.getsize(opfpath),
        "profile_size_compressed": os.path.getsize(opfpath + ".gz"),
        "urir_count": p.stats["urir"],
        "urim_count": p.stats["urim"]["total"],
        "suburi_keys": len(p.stats["suburi"]),
        "time_keys": len(p.stats["time"]),
        "mediatype_keys": len(p.stats["mediatype"]),
        "language_keys": len(p.stats["language"]),
        "cdx_processing_time": cdx_processing_done - profiling_start,
        "stats_calculation_time": stats_calculation_done - cdx_processing_done,
        "profiling_time": profiling_done - profiling_start
    }
    all_bms["bms"][bm_id] = bm
    jsonstr = json.dumps(bm, sort_keys=True, indent=4, separators=(",", ": "))
    opf = "bm-{0}.json".format(profile_id)
    opfpath = os.path.join(bmdir, opf)
    write_json(jsonstr, filepath=opfpath)
def process_package(self, package):
    failure_count = 0

    def staging_harness(path, func, failure_count=failure_count):
        def relocate_to_profile(token):
            if token.find(package.staged_prefix) == -1 and token.find(package.staged_profile) == -1:
                newtoken = token.replace(package.package_prefix, package.staged_profile)
            else:
                newtoken = token.replace(package.staged_prefix, package.staged_profile)
            if newtoken != token:
                package.trace('%s:\n\t%s\t->\t%s' % (os.path.basename(path), token, newtoken))
            return newtoken

        if (path.endswith('.release')):
            error("Staging backup exists in dir we're trying to stage: %s" % path)

        backup = path + '.release'
        shutil.copy2(path, backup)
        try:
            trace('Staging %s' % path)
            func(path, relocate_to_profile)
            if os.path.exists(path + '.stage'):
                os.remove(path)
                shutil.move(path + '.stage', path)
                shutil.copystat(backup, path)
        except CommandException as e:
            package.rm_if_exists(path)
            shutil.copy2(backup, path)
            package.rm(backup)
            warn('Staging failed for %s' % os.path.basename(path))
            error(str(e))
            failure_count = failure_count + 1
            if failure_count > 10:
                error('Possible staging issue, >10 staging failures')

    extra_files = [os.path.join(package.staged_prefix, expand_macros(file, package))
                   for file in package.extra_stage_files]

    procs = []
    if package.name in self.debug_info:
        procs.append(self.generate_dsyms())
    procs.append(self.stage_textfiles(harness=staging_harness,
                                      match=match_stageable_text,
                                      extra_files=extra_files))
    procs.append(self.stage_binaries(harness=staging_harness,
                                     match=match_stageable_binary))

    Profile.postprocess(self, procs, package.staged_prefix)
def set_avatar(self):
    choose = QtGui.QApplication.translate("ProfileSettingsForm", "Choose avatar", None,
                                          QtGui.QApplication.UnicodeUTF8)
    name = QtGui.QFileDialog.getOpenFileName(self, choose, None, 'Images (*.png)',
                                             options=QtGui.QFileDialog.DontUseNativeDialog)
    if name[0]:
        # QPixmap.scaled returns a scaled copy, so keep the result
        bitmap = QtGui.QPixmap(name[0])
        bitmap = bitmap.scaled(QtCore.QSize(128, 128),
                               aspectMode=QtCore.Qt.KeepAspectRatio,
                               mode=QtCore.Qt.SmoothTransformation)
        byte_array = QtCore.QByteArray()
        buffer = QtCore.QBuffer(byte_array)
        buffer.open(QtCore.QIODevice.WriteOnly)
        bitmap.save(buffer, 'PNG')
        Profile.get_instance().set_avatar(bytes(byte_array.data()))
def restart_core(self):
    try:
        settings = Settings.get_instance()
        settings['ipv6_enabled'] = self.ipv.isChecked()
        settings['udp_enabled'] = self.udp.isChecked()
        settings['proxy_type'] = 2 - int(self.http.isChecked()) if self.proxy.isChecked() else 0
        settings['proxy_host'] = str(self.proxyip.text())
        settings['proxy_port'] = int(self.proxyport.text())
        settings.save()
        # recreate tox instance
        Profile.get_instance().reset(self.reset)
        self.close()
    except Exception as ex:
        log('Exception in restart: ' + str(ex))
def test_addon_perf():
    firefox = FirefoxRunner(binary='firefox/firefox')
    profile = Profile()
    profile.initialize(runner=firefox)
    t = Talos(profile=profile, firefox=firefox, talos_dir='talos')
    results = []
    cycles = 10
    for i in range(0, cycles):
        #time.sleep(10)
        results.append(t.run_ts(cycles=1)[0])
    print results
def testPerformance(self):
    from babel import localedata, core
    # Babel FIX: We need to adjust the dir name for locales since they need to be outside the .egg file
    localedata._dirname = localedata._dirname.replace('.egg', '')
    core._filename = core._filename.replace('.egg', '')

    languageService = LanguageServiceBabelAlchemy()
    ioc.initialize(languageService)
    profile = Profile()
    qlang = QLanguage(name='rom%')
    try:
        profile = profile.runctx("languageService.getAllAvailable(['en'], 0, 10, qlang)", globals(), locals())
    except SystemExit:
        pass
    pstats.Stats(profile).sort_stats('time', 'cum').print_stats()
def getList(self, soup):
    '''Get all of the users on each page'''
    body = soup.body
    userlist = body.find_all('table')
    #print userlist.__len__()
    weiboid_regex = ur'http://weibo.cn/attention/.*?uid=(\d+)'
    for u in userlist:
        p = Profile()
        p.nickname = u.text.strip().replace('\n', '').split(' ')[0]
        if re.search(weiboid_regex, str(u)):
            p.weiboid = re.search(weiboid_regex, str(u)).group(1)
            GetAllUserList.profile_d.insertIntoDB(p)
            print '', p
        else:
            print "can't find"
def closeEvent(self, *args, **kwargs):
    settings = Settings.get_instance()
    old_data = str(settings['ipv6_enabled']) + str(settings['udp_enabled']) + str(bool(settings['proxy_type']))
    new_data = str(self.ipv.isChecked()) + str(self.udp.isChecked()) + str(self.proxy.isChecked())
    changed = old_data != new_data
    if self.proxy.isChecked() and (self.proxyip.text() != settings['proxy_host'] or
                                   self.proxyport.text() != unicode(settings['proxy_port'])):
        changed = True
    if changed or self.reconn:
        settings['ipv6_enabled'] = self.ipv.isChecked()
        settings['udp_enabled'] = self.udp.isChecked()
        settings['proxy_type'] = 2 - int(self.http.isChecked())
        settings['proxy_host'] = self.proxyip.text()
        settings['proxy_port'] = int(self.proxyport.text())
        settings.save()
        # recreate tox instance
        Profile.get_instance().reset(self.reset)
def wrapped(tox, friend_number, file_number, file_type, size, file_name, file_name_size, user_data):
    profile = Profile.get_instance()
    settings = Settings.get_instance()
    if file_type == TOX_FILE_KIND['DATA']:
        print('File')
        try:
            file_name = str(file_name[:file_name_size], 'utf-8')
        except:
            file_name = 'toxygen_file'
        invoke_in_main_thread(profile.incoming_file_transfer, friend_number, file_number, size, file_name)
        if not window.isActiveWindow():
            friend = profile.get_friend_by_number(friend_number)
            if settings['notifications'] and profile.status != TOX_USER_STATUS['BUSY'] and not settings.locked:
                file_from = QtGui.QApplication.translate("Callback", "File from", None,
                                                         QtGui.QApplication.UnicodeUTF8)
                invoke_in_main_thread(tray_notification, file_from + ' ' + friend.name, file_name, tray, window)
            if settings['sound_notifications'] and profile.status != TOX_USER_STATUS['BUSY']:
                sound_notification(SOUND_NOTIFICATION['FILE_TRANSFER'])
            invoke_in_main_thread(tray.setIcon, QtGui.QIcon(curr_directory() + '/images/icon_new_messages.png'))
    else:  # AVATAR
        print('Avatar')
        invoke_in_main_thread(profile.incoming_avatar, friend_number, file_number, size)
def friend_name(tox, friend_num, name, size, user_data):
    """
    Friend changed his name
    """
    profile = Profile.get_instance()
    print('New name friend #' + str(friend_num))
    invoke_in_main_thread(profile.new_name, friend_num, name)
def save_profile(self, profile):
    # above we have user_required=True, so we know user is authorized if
    # we're in the body of this method; otherwise, endpoints_proto_datastore
    # will have thrown a 401 error
    user = get_current_user()
    # if user is None:
    #     raise UnauthorizedException("Authorization required.")

    # see whether profile already exists and then update or insert new one
    query = Profile.query(Profile.userId == user.user_id()).get()
    if query:
        query.update(profile.displayName, profile.teeShirtSize)
        profile = query
    else:
        profile.userId = user.user_id()
        profile.mainEmail = user.email()
        profile.displayName = (profile.displayName or
                               _extractDefaultDisplayNameFromEmail(user.email()))
        profile.teeShirtSize = profile.teeShirtSize or 'NOT_SPECIFIED'
    profile.put()  # save to database
    return profile