def main():
    # Test file path
    #path = "input/MC_SigBkgElectrons_2M.h5"
    path = "output/data_test_00000000.h5"
    dataset_name = 'egamma'

    # Read entire HDF5 file
    """
    with Profile("Read entire file"):
        with h5py.File(path, 'r') as hf:
            data_h5 = hf[dataset_name][:]
            pass
        pass
    #"""

    # Read batched HDF5 file
    with Profile("Read batched file"):
        directory = 'input/'
        paths = sorted(glob.glob('output/data_test_0000000*.h5.gz'))
        batch_size = 1024
        maxsize = 8
        with Generator(paths, dataset_name, batch_size, maxsize) as generator:
            with Profile("Looping"):
                for step, batch in enumerate(generator):
                    # Do something...
                    pass
                pass
            pass
        pass

    return 0
def createGraph(name, canvas):
    canvas.delete('all')
    #time.sleep(1)
    results = find_relations(name)
    profile = Profile(canvas, name=name,
                      score=int(personal_score(name)), size=150)  # 韓國瑜
    canvas_profile = canvas.create_window(base_x, base_y, window=profile)
    for i in range(len(results)):
        result = results[i]
        x, y = profile_offset[i]
        profile = Profile(canvas, name=result['name'],
                          score=int(personal_score(result['name'])))
        # canvas.create_window(base_x + x, base_y + y, window=profile)
        x_0, y_0, x_1, y_1 = line_offset[i]
        canvas.create_line(base_x + x_0, base_y + y_0,
                           base_x + x_1, base_y + y_1,
                           fill="#476042", width=3)
        x, y = docList_offset[i]
        docList = DocList(canvas, related_docs=result['related_docs'])
        canvas_docList1 = canvas.create_window(base_x + x, base_y + y, window=docList)
def __init__(self):
    self.profile_list = [
        Profile('1', 'Pooja Jain', '23', 'Young', 'pooja', 'pooja'),
        Profile('2', 'Rakesh Shrivastav', '54', 'Old', 'rakesh', 'rakesh')
    ]
    self.startInterface()
def __init__(self):
    self.profile = Profile()
    self.option = Option()
    self.kmatrix = Profile()
    self.chanprof = Chanprof()
    self.emissivity = Emissivity()
    self.reflectance = Reflectance()
    self.misc = Misc()
    self.scaled = False
    self.scalecoef = 1.0
    self.scaledfirst = True
def generate_fold_groups(fold_number, candidates):
    testing_group, training_group = [], []
    for candidate in candidates:
        testing_sample, training_sample = candidate.generate_fold_samples(fold_number)
        testing_profile = VEAProfile(Profile(testing_sample))
        testing_group.append(testing_profile)
        training_profile = VEAProfile(Profile(training_sample))
        training_group.append(training_profile)
    return testing_group, training_group
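# Hedged usage sketch for generate_fold_groups above; the candidate objects
# are assumed to expose generate_fold_samples(fold_number) returning a
# (testing_sample, training_sample) pair that Profile/VEAProfile accept:
#
#     testing_group, training_group = generate_fold_groups(0, candidates)
#     assert len(testing_group) == len(candidates) == len(training_group)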
def main():
    # Test file path
    path = "input/MC_SigBkgElectrons_2M{}"

    # Read files
    # --------------------------------------------------------------------------
    with Profile("Read CSV"):
        # Pandas DataFrame
        df_csv = pd.read_csv(path.format('.csv'))
        pass

    # Remove additional index columns
    df_csv = df_csv.drop(['Unnamed: 0', 'Unnamed: 0.1'], axis=1)

    with Profile("Read HDF5"):
        with h5py.File(path.format('.h5'), 'r') as hf:
            # Numpy recarray
            data_h5 = hf['dataset'][:]
            pass
        pass

    with Profile("Read HDF5 (gzipped)"):
        with h5py.File(path.format('.gzh5'), 'r') as hf:
            # Numpy recarray
            data_gzh5 = hf['dataset'][:]
            pass
        pass

    with Profile("Read MsgPack"):
        # Pandas DataFrame
        df_msp = pd.read_msgpack(path.format('.msp'))
        pass

    # Write files
    # --------------------------------------------------------------------------
    """
    # Store test file as HDF5
    with h5py.File(path.format('.h5'), 'w') as hf:
        hf.create_dataset('dataset', data=df_csv.to_records(index=False))
        pass

    # Store test file as gzipped HDF5
    with h5py.File(path.format('.gzh5'), 'w') as hf:
        hf.create_dataset('dataset', data=df_csv.to_records(index=False), compression='gzip')
        pass

    # Store test file as msgpack
    df_csv.to_msgpack(path.format('.msp'))
    #"""

    return 0
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--num_profiles', type=int, default=5,
                        help='There is a maximum of 5 profiles')
    parser.add_argument('--save_history', type=str2bool, default='t',
                        help='t/f: whether or not to save the browsing history from this session')
    args = parser.parse_args()
    num_profiles = args.num_profiles
    save_history = args.save_history

    start = time.time()
    # run this each day for a week
    for i in range(num_profiles):
        profile_num = i + 1
        print('$$$$$$$$$$$$$$$$$$')
        print('PROFILE ' + str(profile_num) + '\n $$$$$$$$$$$$$$$$')
        print('Saving History: ' + str(save_history))
        current_profile = Profile(profile_num)
        current_profile.visit_sites(save_history=save_history)
        time.sleep(15)
        print('\n \n')
    end = time.time()
    print(end - start)  # including time.sleep()'s, one run took about 600 s (10 min)
def extractDeepArgoProfiles(jsonfile):
    profiles = []
    with open(jsonfile) as f:
        data = json.load(f)
        for prof in Bar("deep profiles").iter(data):
            profiledata = {}
            profiledata["lon"] = prof["lon"]
            profiledata["lat"] = prof["lat"]
            if geoFilter(profiledata["lon"], profiledata["lat"]):
                profiledata["cruise"] = prof["platform_number"]
                profiledata["station"] = prof["cycle_number"]
                profiledata["time"] = prof["date"]
                profiledata["sal"] = []
                profiledata["temp"] = []
                profiledata["pres"] = []
                for m in prof["measurements"]:
                    if "psal" in m.keys() and "temp" in m.keys():
                        profiledata["sal"].append(m["psal"])
                        profiledata["temp"].append(m["temp"])
                        profiledata["pres"].append(m["pres"])
                # sort the measurements by increasing pressure
                s = np.argsort(profiledata["pres"])
                profiledata["sal"] = np.asarray(profiledata["sal"])[s]
                profiledata["temp"] = np.asarray(profiledata["temp"])[s]
                profiledata["pres"] = np.asarray(profiledata["pres"])[s]
                #profiledata["cruise"] = ncdf.WOCE_ID
                #profiledata["station"] = ncdf.STATION_NUMBER
                # keep profiles that are deep enough and span > 100 dbar
                if len(profiledata["pres"]) > 4 and max(profiledata["pres"]) > 1500:
                    if {"sal", "temp", "pres", "lat", "lon"}.issubset(profiledata.keys()) \
                            and abs(max(profiledata["pres"]) - min(profiledata["pres"])) > 100:
                        eyed = idgenerator()
                        prof = Profile(eyed, profiledata, "insitu", "practical")
                        profiles.append(prof)
    return profiles
def index(user):
    user_count = Profile.objects(user_id=user[0]).count()
    role = Role.objects(name="Citizen").first()
    channel = Channel.objects(slug=map_channel(user[4])).first()
    if user_count == 0:
        #channels = []
        coordinates = str(user[11]) + "," + str(user[12])
        profile = Profile(
            user_id=int(user[0]),
            full_name=user[1],
            sign_up_with="mobile_number",
            sign_up_ip_address=str(ipaddress.IPv4Address(user[2])),
            avatar=str(user[3]),
            roles=[role.id],
            channels=[{
                "id": channel.id,
                "slug": channel.slug,
                "mac_address": user[5],
                "sign_up": True,
                "device_token": user[6],
                "last_login_at": user[7],
                "settings": {
                    "email_notifications_preferred": True,
                    "sms_notifications_preferred": True,
                    "push_notifications_preferred": True
                }
            }],
            locations=[{
                "app_name": channel.app_name,
                "location": {
                    "name": user[8],
                    "coordinates": coordinates,
                    "ward_id": user[9],
                    "city_id": user[10]
                }
            }])
        profile.save()
        print('User: ' + '******' + ' indexed')
    else:
        print('User: ' + '******' + ' already exists')
    return True
def __init__(self, conn):
    self.conn = conn
    self.suitecursor = SuiteCursor(self.conn)
    self.aptsrc = AptSourceHandler(self.conn)
    self.main_path = None
    self.profile = Profile(self.conn)
    self.family = Family(self.conn)
def runProfiler(logger, func, args=tuple(), kw={}, verbose=True,
                nb_func=25, sort_by=('time',), nb_cal=0):
    """
    Run a function in a profiler and then display the functions sorted by time.
    """
    profile_filename = "/tmp/profiler"
    prof = Profile(bias=calibrate(nb_cal))
    try:
        logger.warning("Run profiler")
        result = prof.runcall(func, *args, **kw)
        logger.error("Profiler: Process data...")
        prof.dump_stats(profile_filename)
        stat = pstats.Stats(prof)
        stat.strip_dirs()
        stat.sort_stats(*sort_by)
        logger.error("Profiler: Result:")
        log = StringIO()
        stat.stream = log
        stat.print_stats(nb_func)
        log.seek(0)
        for line in log:
            logger.error(line.rstrip())
        return result
    finally:
        unlink(profile_filename)
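# Hedged usage sketch for runProfiler above. It assumes the snippet's
# implied imports are in scope (a Profile class accepting bias=, a
# calibrate() helper, pstats, StringIO, os.unlink); slow_sum is made up:
#
#     import logging
#     logging.basicConfig()
#     def slow_sum(n):
#         return sum(i * i for i in range(n))
#     result = runProfiler(logging.getLogger("demo"), slow_sum,
#                          args=(10 ** 6,), nb_func=10)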
def add_profile_CLI():
    # enter name
    name = input("Enter your name: ")
    new_profile = Profile(name)
    print(new_profile)

    # enter address
    # street = input("Enter your street: ")
    # city = input("Enter your city: ")
    # state = input("Enter your state (ex: WA): ")
    # zip = input("Enter your zip: ")
    #
    # new_profile.edit_address(street, city, state, zip)
    # print(new_profile)
    # print(new_profile.address)

    # enter skills
    skills = input("Enter your skills delimited by commas: ")
    skills = skills.split(',')
    skills = [skill.strip() for skill in skills]
    print(skills)
    new_profile.edit_skills(skills)
    # print(new_profile.skills)

    skills = input("Enter more skills: ")
    skills = skills.split(',')
    skills = [skill.strip() for skill in skills]
    print(skills)
    new_profile.edit_skills(skills)
def parse_profiles(self):
    profiles = dict()
    for p in self.get_profiles_section():
        profile = Profile(p)
        profiles[profile.name] = profile
    return profiles
def print_stats(self):
    if self.profiler is None:
        self.stats = pstats.Stats(Profile())
    else:
        self.profiler.close()
        self.stats = hotshot.stats.load(self.logfilename)
    super(HotShotFuncProfile, self).print_stats()
def get_object_connection(obj, connection, partition, parser):
    """
    Get an object-specific connection.

    @param obj - the object type
    @param connection - the f5 connection to use
    @param partition - the partition to work with
    @param parser - our args parser for object-specific items
    """
    if obj == "pool":
        object_connection = Pool(connection, partition, parser)
    elif obj == "node":
        object_connection = Node(connection, partition, parser)
    elif obj == "virtual_server":
        object_connection = Virtual_server(connection, partition, parser)
    elif obj == "ssl_file":
        object_connection = Ssl_file(connection, partition, parser)
    elif obj == "ssl_profile":
        object_connection = Ssl_profile(connection, partition, parser)
    elif obj == "profile":
        object_connection = Profile(connection, partition, parser)
    else:
        raise Exception("Unknown object {}".format(obj))
    return object_connection
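# Usage sketch for the dispatch above; the connection, partition, and
# parser values are placeholders, and Pool etc. are the classes the
# function already references:
#
#     pool_conn = get_object_connection("pool", f5_connection, "Common", args_parser)
#     get_object_connection("route", f5_connection, "Common", args_parser)
#     # -> raises Exception("Unknown object route")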
def add_to_database(name, database, *pics):
    """
    Adds a profile to the database with a name and descriptors for any
    pictures added alongside. If a profile already exists with the same
    name, adds the pictures to that profile.

    Parameters
    ----------
    name: String
        The name of the person to be added.
    database: Dictionary{String name : Profile profile}
        The database mapping names to profiles.
    *pics: 1 or more np.array
        One or more pictures whose descriptors will be added to the
        profile specified by the name.

    Returns
    -------
    None. Profiles are updated in place. If a profile with the specified
    name doesn't exist, a new one is created.
    """
    detections = []
    for pic in pics:
        detections.append(face_detect(pic))
    if name in database:
        database[name].descriptors.extend(detections)
    else:
        new_profile = Profile(name, detections)
        database[name] = new_profile
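# Usage sketch for add_to_database; face_detect and Profile are the
# externals the function already relies on, and the image arrays are
# placeholders:
#
#     database = {}
#     add_to_database("Ada", database, img1, img2)  # creates a new profile
#     add_to_database("Ada", database, img3)        # extends its descriptors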
def handle_api():
    log.info("/api is alive")
    log.info("bottle request")
    log.info(bottle.request.json)

    # run a kiln schedule
    if bottle.request.json['cmd'] == 'run':
        wanted = bottle.request.json['profile']
        log.info('api requested run of profile = %s' % wanted)

        # start at a specific minute in the schedule,
        # for restarting and skipping over early parts of a schedule
        startat = 0
        if 'startat' in bottle.request.json:
            startat = bottle.request.json['startat']

        # get the wanted profile/kiln schedule
        profile = find_profile(wanted)
        if profile is None:
            return {"success": False, "error": "profile %s not found" % wanted}

        # FIXME juggling of json should happen in the Profile class
        profile_json = json.dumps(profile)
        profile = Profile(profile_json)
        oven.run_profile(profile, ovenWatcher, startat=startat)
        ovenWatcher.record(profile)

    if bottle.request.json['cmd'] == 'stop':
        log.info("api stop command received")
        oven.abort_run()

    return {"success": True}
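# Hypothetical request bodies this endpoint accepts; the "run"/"stop"
# commands and the "profile"/"startat" keys come straight from the handler
# above, while the profile name is made up:
#
#     {"cmd": "run", "profile": "cone-6", "startat": 0}
#     {"cmd": "stop"}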
def extractWoceProfiles(ncfolder):
    profiles = []
    for f in Bar("WOCE").iter(glob.glob(ncfolder + "/*.nc")
                              + glob.glob(ncfolder + "/**/*.nc", recursive=True)):
        ncdf = Dataset(f, 'r')  # open the netCDF file
        profiledata = {}
        profiledata["lon"] = ncdf.variables["longitude"][0]
        profiledata["lat"] = ncdf.variables["latitude"][0]
        profiledata["cruise"] = ncdf.WOCE_ID
        profiledata["station"] = ncdf.STATION_NUMBER
        profiledata["time"] = julian.to_jd(
            datetime.datetime(1980, 1, 1, 0)
            + datetime.timedelta(minutes=int(ncdf.variables["time"][0])))
        # variable names differ between files; accept either naming scheme
        if "temperature" in ncdf.variables.keys():
            profiledata["temp"] = np.asarray(ncdf.variables["temperature"][:])
        if "salinity" in ncdf.variables.keys():
            profiledata["sal"] = np.asarray(ncdf.variables["salinity"][:])
        if "pressure" in ncdf.variables.keys():
            profiledata["pres"] = np.asarray(ncdf.variables["pressure"][:])
        if "CTDTMP" in ncdf.variables.keys():
            profiledata["temp"] = np.asarray(ncdf.variables["CTDTMP"][:])
        if "CTDSAL" in ncdf.variables.keys():
            profiledata["sal"] = np.asarray(ncdf.variables["CTDSAL"][:])
        if len(profiledata["pres"]) > 4 and max(profiledata["pres"]) > 1500 \
                and geoFilter(profiledata["lon"], profiledata["lat"]):
            eyed = idgenerator()
            prof = Profile(eyed, profiledata, "insitu", "practical")
            profiles.append(prof)
    return profiles
def viewProfile(self):
    profile_widget = Profile(self)
    self.central_widget.addWidget(profile_widget)
    self.central_widget.setCurrentWidget(profile_widget)
    # return to dashboard
    profile_widget.backButton.clicked.connect(self.login)
def handle_control():
    wsock = get_websocket_from_request()
    log.info("websocket (control) opened")
    while True:
        try:
            message = wsock.receive()
            log.info("Received (control): %s" % message)
            msgdict = json.loads(message)
            if msgdict.get("cmd") == "RUN":
                log.info("RUN command received")
                profile_name = msgdict.get('profile')
                if profile_name:
                    profile = find_profile(profile_name)
                    oven.run_profile(profile, ovenWatcher)
                    ovenWatcher.record(profile)
            elif msgdict.get("cmd") == "SIMULATE":
                log.info("SIMULATE command received")
                profile_obj = msgdict.get('profile')
                if profile_obj:
                    profile_json = json.dumps(profile_obj)
                    profile = Profile(profile_json)
                #simulated_oven = Oven(simulate=True, time_step=0.05)
                #simulation_watcher = OvenWatcher(simulated_oven)
                #simulation_watcher.add_observer(wsock)
                #simulated_oven.run_profile(profile)
                #simulation_watcher.record(profile)
            elif msgdict.get("cmd") == "STOP":
                log.info("Stop command received")
                oven.abort_run()
        except WebSocketError:
            break
    log.info("websocket (control) closed")
def prepare(self):
    """
    Prepare a new root system suitable to create an initrd from it
    """
    self.__load_boot_xml_description()
    boot_image_name = self.boot_xml_state.xml_data.get_name()
    self.__import_system_description_elements()
    log.info('Preparing boot image')
    system = System(xml_state=self.boot_xml_state,
                    root_dir=self.boot_root_directory,
                    allow_existing=True)
    manager = system.setup_repositories()
    system.install_bootstrap(manager)
    system.install_system(manager)
    profile = Profile(self.boot_xml_state)
    profile.add('kiwi_initrdname', boot_image_name)
    defaults = Defaults()
    defaults.to_profile(profile)
    setup = SystemSetup(self.boot_xml_state,
                        self.__boot_description_directory(),
                        self.boot_root_directory)
    setup.import_shell_environment(profile)
    setup.import_description()
    setup.import_overlay_files(follow_links=True)
    setup.call_config_script()
    system.pinch_system(manager=manager, force=True)
    setup.call_image_script()
    setup.create_init_link_from_linuxrc()
def extractArgoProfiles(ncfolder):
    profiles = []
    for f in Bar("file:").iter(glob.glob(ncfolder + "/**/*.nc", recursive=True)):
        ncdf = Dataset(f, 'r')  # open the netCDF file
        #pdb.set_trace()
        for prof in range(ncdf.dimensions["N_PROF"].size):
            profiledata = {}
            profiledata["lon"] = ncdf.variables["LONGITUDE"][prof]
            profiledata["lat"] = ncdf.variables["LATITUDE"][prof]
            # keep every fifth profile that passes the geographic filter
            if geoFilter(profiledata["lon"], profiledata["lat"]) and prof % 5 == 0:
                #profiledata["cruise"] = ncdf.WOCE_ID
                #profiledata["station"] = ncdf.STATION_NUMBER
                profiledata["time"] = ncdf.variables["JULD"][prof]
                if "TEMP_ADJUSTED" in ncdf.variables.keys():
                    profiledata["temp"] = np.asarray(ncdf.variables["TEMP_ADJUSTED"][prof][:])
                if "PSAL_ADJUSTED" in ncdf.variables.keys():
                    profiledata["sal"] = np.asarray(ncdf.variables["PSAL_ADJUSTED"][prof][:])
                if "PRES_ADJUSTED" in ncdf.variables.keys():
                    profiledata["pres"] = np.asarray(ncdf.variables["PRES_ADJUSTED"][prof][:])
                # require depth > 1500 dbar, all fields present, a pressure
                # span > 100 dbar, and no 99999 fill values
                if len(profiledata["pres"]) > 4 and max(profiledata["pres"]) > 1500 \
                        and geoFilter(profiledata["lon"], profiledata["lat"]):
                    if {"sal", "temp", "pres", "lat", "lon"}.issubset(profiledata.keys()) \
                            and abs(max(profiledata["pres"]) - min(profiledata["pres"])) > 100 \
                            and 99999 not in profiledata["pres"] \
                            and 99999 not in profiledata["temp"] \
                            and 99999 not in profiledata["sal"]:
                        eyed = idgenerator()
                        profiles.append(Profile(eyed, profiledata, "insitu", "practical"))
                        #print(profiledata)
        del ncdf
    return profiles
def extractInvitation(obj):
    return Profile(first_name=obj['firstName'],
                   last_name=obj['lastName'],
                   occupation=obj['occupation'],
                   entity_urn=obj['entityUrn'].split(':')[-1],
                   public_identifier=obj['publicIdentifier'])
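# Hypothetical input shape for extractInvitation, illustrating the
# entityUrn split (all values are made up):
#
#     extractInvitation({
#         'firstName': 'Ada',
#         'lastName': 'Lovelace',
#         'occupation': 'Analyst',
#         'entityUrn': 'urn:li:fs_miniProfile:AbC123',  # -> entity_urn='AbC123'
#         'publicIdentifier': 'ada-lovelace',
#     })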
def _exec_main(parser, values):
    sconsflags = os.environ.get('SCONSFLAGS', '')
    all_args = sconsflags.split() + sys.argv[1:]
    options, args = parser.parse_args(all_args, values)
    if isinstance(options.debug, list) and "pdb" in options.debug:
        import pdb
        pdb.Pdb().runcall(_main, parser)
    elif options.profile_file:
        from profile import Profile
        # Some versions of Python 2.4 shipped a profiler that had the
        # wrong 'c_exception' entry in its dispatch table. Make sure
        # we have the right one. (This may put an unnecessary entry
        # in the table in earlier versions of Python, but its presence
        # shouldn't hurt anything.)
        try:
            dispatch = Profile.dispatch
        except AttributeError:
            pass
        else:
            dispatch['c_exception'] = Profile.trace_dispatch_return
        prof = Profile()
        try:
            prof.runcall(_main, parser)
        except SConsPrintHelpException as e:
            prof.dump_stats(options.profile_file)
            raise e
        except SystemExit:
            pass
def profile():
    profile = request.form['username']
    if profile == current_user.username:
        return redirect(url_for('profile_page'))
    post = Profile(profile)
    post.read_from_db()
    return render_template("profile.html", title='Profile', posts=post)
def process(parser, args):
    result = list()
    log_level = logging.ERROR
    if args.logging:
        log_level = getattr(logging, args.logging.upper(), None)
        if not isinstance(log_level, int):
            logger.warning('Invalid log level: %s' % args.logging)
            logger.info('Default log level: ERROR')
            log_level = logging.ERROR
    logging.basicConfig(
        level=log_level,
        format="%(asctime)s (%(name)s) [%(levelname)s] %(message)s")
    if args.profile:
        for profile_path in args.profile:
            if not os.path.exists(profile_path):
                logger.error('The path to profile does not exist, %s' % profile_path)
                continue
            stages = args.stage if isinstance(args.stage, list) else []
            try:
                result.append(Profile(profile_path, stages=stages).process())
            except Exception as err:
                logger.error(err)
                continue
    return result
def on_profile_clicked(self):
    self.setVisible(False)
    profile_win = self.wm.get_window('profile')
    if profile_win is None:
        profile_win = Profile(self.wm, self.user)
    else:
        profile_win.setVisible(True)
def process(self):
    self.manual = Help()
    if self.__help():
        return
    Privileges.check_for_root_permissions()
    self.load_xml_description(self.command_args['--description'])
    if self.command_args['--set-repo']:
        (repo_source, repo_type, repo_alias, repo_prio) = \
            self.quadruple_token(self.command_args['--set-repo'])
        self.xml_state.set_repository(repo_source, repo_type, repo_alias, repo_prio)
    if self.command_args['--add-repo']:
        for add_repo in self.command_args['--add-repo']:
            (repo_source, repo_type, repo_alias, repo_prio) = \
                self.quadruple_token(add_repo)
            self.xml_state.add_repository(repo_source, repo_type, repo_alias, repo_prio)
    if os.path.exists('/.buildenv'):
        # This build runs inside of a buildservice worker. Therefore
        # the repo definitions are adapted accordingly
        self.xml_state.translate_obs_to_suse_repositories()
    elif self.command_args['--obs-repo-internal']:
        # This build should use the internal SUSE buildservice.
        # Be aware that the buildhost has to provide access
        self.xml_state.translate_obs_to_ibs_repositories()
    log.info('Preparing system')
    system = System(self.xml_state,
                    self.command_args['--root'],
                    self.command_args['--allow-existing-root'])
    manager = system.setup_repositories()
    system.install_bootstrap(manager)
    system.install_system(manager)
    profile = Profile(self.xml_state)
    defaults = Defaults()
    defaults.to_profile(profile)
    setup = SystemSetup(self.xml_state,
                        self.command_args['--description'],
                        self.command_args['--root'])
    setup.import_shell_environment(profile)
    setup.import_description()
    setup.import_overlay_files()
    setup.call_config_script()
    setup.import_image_identifier()
    setup.setup_groups()
    setup.setup_users()
    setup.setup_keyboard_map()
    setup.setup_locale()
    setup.setup_timezone()
    system.pinch_system(manager)
def get_ep_profile(profiledb, edgepayload):
    combiner = Profile()
    streets = unpack_streets(edgepayload)
    for street in streets:
        combiner.add(get_street_profile(profiledb, street))
    return combiner.concat()  # reduce(lambda x, y: x + y, profile)
def initial_random_greedy_alignment(seqs):
    rem_seqs = seqs.copy()  # sequences not yet aligned
    i, j, res = weighted_random_choice(align_all_pairs(seqs))
    alignment, score = res
    profile = Profile(*alignment, score)
    for idx, x in enumerate([i, j]):
        rem_seqs.pop(x - idx)  # drop the two sequences already aligned
    return profile, rem_seqs
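# Usage sketch for initial_random_greedy_alignment; it assumes
# align_all_pairs/weighted_random_choice yield an (i, j, (alignment, score))
# candidate with i < j, which is why popping index i first shifts j down by
# one (the `x - idx` correction above):
#
#     profile, remaining = initial_random_greedy_alignment(["ACGT", "ACGA", "TCGT"])
#     # `remaining` holds the sequences not yet merged into the profile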