def generate_quests(num_quest_per_campaign=10, num_campaigns=5, quiet=False):
    """Generate chained quests grouped into campaigns... just for fun.

    Creates ``num_campaigns`` linear chains of ``num_quest_per_campaign``
    quests each. (The old docstring claimed "100 quests and 10 badges";
    in fact no badges are created and the quest count follows the
    parameters.) The first quest of every chain requires the hard-coded
    starting badge; each later quest requires the previous quest.

    Args:
        num_quest_per_campaign: quests per chain (campaign).
        num_campaigns: number of independent chains.
        quiet: suppress progress printing when True.
    """
    # Anchor badge every campaign starts from; raises Badge.DoesNotExist if absent.
    start_badge = Badge.objects.get(
        import_id="fa3b0518-cf9c-443c-8fe4-f4a887b495a7")
    if not quiet:
        print("Generating content...")
    for _ in range(num_campaigns):
        initial_quest = Quest.objects.create(name=namegenerator.gen(),
                                             xp=random.randint(0, 20))
        initial_quest.add_simple_prereqs([start_badge])
        last_quest = initial_quest
        for _ in range(num_quest_per_campaign - 1):
            quest = Quest.objects.create(name=namegenerator.gen(),
                                         xp=random.randint(0, 20))
            quest.add_simple_prereqs([last_quest])
            last_quest = quest
            if not quiet:
                print(quest)
    if not quiet:
        print(
            f"{num_quest_per_campaign*num_campaigns} Quests generated in {num_campaigns} campaigns."
        )
def write_proveedor(wrt, n):
    """Write `n` supplier rows (id, unique name) via the csv writer `wrt`.

    Ids come from the module-global counter `idp`; every issued id is
    recorded in `IDPro_reg` and every name in `prov_used`, so names are
    never repeated across calls.
    """
    global idp
    for _ in range(n):
        supplier_id = idp
        IDPro_reg.append(supplier_id)
        idp += 1
        # Keep drawing candidate names until an unused one appears.
        name = namegenerator.gen().split("-")[0]
        while name in prov_used:
            name = namegenerator.gen().split("-")[0]
        prov_used.append(name)
        wrt.writerow([supplier_id, name])
def write_marca(wrt, n):
    """Write `n` brand rows (id, unique name) via the csv writer `wrt`.

    Ids come from the module-global counter `idm`; every issued id is
    recorded in `IDM_reg` and every name in `marc_used`, so names are
    never repeated across calls.
    """
    global idm
    for _ in range(n):
        brand_id = idm
        IDM_reg.append(brand_id)
        idm += 1
        # Keep drawing candidate names until an unused one appears.
        name = namegenerator.gen().split("-")[0]
        while name in marc_used:
            name = namegenerator.gen().split("-")[0]
        marc_used.append(name)
        wrt.writerow([brand_id, name])
def dapp_store_dashboard(request):
    """Render the elastOS dApp Store dashboard with the app list and top downloads."""
    did = request.session['did']
    track_page_visit(did, 'elastOS dApp Store Dashboard',
                     'elastos_trinity:dapp_store_dashboard', False)

    context = {'recent_services': get_recent_services(did)}

    dapps_list = DAppStore().get_apps_list()
    dapp_store_url = config('ELASTOS_TRINITY_DAPPSTORE_URL')
    for dapp in dapps_list:
        # Unique display name: app name minus spaces and digits, plus a
        # random generated suffix.
        base = dapp["appName"].replace(" ", "")
        base = ''.join(ch for ch in base if not ch.isdigit())
        dapp["uniqueName"] = base + namegenerator.gen()
        dapp["createdAt"] = dapp["createdAt"][:10]  # keep only YYYY-MM-DD
        dapp["id"] = dapp["_id"]
        dapp["icon_url"] = f"{dapp_store_url}/apps/{dapp['id']}/icon"
        dapp[
            "download_url"] = f"https://scheme.elastos.org/app?id={dapp['packageName']}"

    context['dapps_list'] = dapps_list
    context['top_downloads'] = sorted(dapps_list,
                                      key=lambda d: d['downloadsCount'],
                                      reverse=True)[:4]
    return render(request, "elastos_trinity/dapp_store_dashboard.html",
                  context)
def load_dataset():
    '''Return Wines and labels.

    Pulls the `default.wine` table from Hive via a short-lived Spark
    session, cleans it, tags each row with a random identifier name, and
    splits the frame into (features, labels).

    Fix over the previous version: the `col_Names` list was built from
    `cols`/`labelcol` but never used (the Spark query selects all columns
    by name already), so that dead code was removed.
    '''
    from pyspark.sql import SparkSession
    from pyspark.sql.types import Row, StructField, StructType, StringType, IntegerType

    spark = SparkSession.builder\
        .appName("Import Wine Table")\
        .config("spark.yarn.access.hadoopFileSystems", s3_bucket)\
        .config("spark.hadoop.fs.s3a.s3guard.ddb.region", s3_bucket_region)\
        .getOrCreate()
    df = spark.sql("SELECT * FROM `default`.`wine`").toPandas()
    spark.stop()

    df = drop_missing(df).reset_index()
    df.index.name = 'id'
    clean(df)
    # Add a (random) wine label name in order to have an identifier
    df['ranName'] = df.index.to_series().map(lambda x: namegenerator.gen())
    features, labels = utils.splitdf(df, labelcol)
    features = utils.drop_non_features(features, cols)
    features = utils.categorize(features, cols)
    return features, labels
def create_vms(configurations, conn, image, flavor, nVMs):
    """Boot `nVMs` randomly named servers on the internal network.

    Each created server object is pushed onto the `configurations` queue.
    """
    for _ in range(nVMs):
        vm = conn.create_server(name=namegenerator.gen(),
                                image=image,
                                flavor=flavor,
                                network="internal")
        configurations.put(vm)
def __init__(self, data, params):
    """Configure a markov-model run from `params` (with defaults) and start it."""
    # TODO: test different time scales
    # TODO: make class return results for multiple years, check for consistency
    # TODO: strip down to bare minimum
    # TODO: combine trading with other momentum signals
    self.conn = sqlite3.connect('markov_models.db')
    self.data = data

    # Tunable knobs, iterated over by the caller; fall back to defaults.
    self.name = params.get('name', namegenerator.gen())
    self.scoring = params.get('scoring', 'r2')
    self.k_features = params.get('k_features', 6)
    self.max_depth = params.get('max_depth', 6)
    self.with_original = params.get('with_original', True)
    self.period = params.get('period', 'max')
    # Hard-coded off (previously an optional 'without_bad_year' param).
    self.without_bad_year = False

    # Single evaluation window; the per-year windows were disabled.
    self.test_dates = [
        {
            'year': 1,
            'start_date': '2017-01-01',
            'end_date': '2019-12-31'
        },
    ]
    self.run_generator()
def process_config() -> flags.FlagValues:
    """Validate flags, resolve the experiment name, and lay out run directories."""
    config = flags.FLAGS

    # Resolve the experiment name; "auto" is only valid for training runs.
    if config.mode in ("evaluate", "restore") and config.name == "auto":
        logging.fatal(
            "Cannot automatically generate a name for the chosen mode.")
    elif config.mode == "train" and config.name == "auto":
        config.name = os.path.join(time.strftime("%Y-%m-%d"),
                                   namegenerator.gen())
    logging.info("Experiment Name: {}".format(config.name))

    # Shapes arrive as lists; downstream expects tuples.
    config.input_shape = tuple(config.input_shape)
    config.output_shape = tuple(config.output_shape)

    # Every per-experiment directory lives under ./executions/<name>.
    config.execution_dir = os.path.join(os.curdir, "executions", config.name)
    config.checkpoint_dir = os.path.join(config.execution_dir, "checkpoint")
    config.evaluate_dir = os.path.join(config.execution_dir, "result")
    config.log_dir = os.path.join(config.execution_dir, "log")
    config.presentation_dir = os.path.join(config.execution_dir,
                                           "presentation")
    config.summary_dir = os.path.join(config.execution_dir, "summary")
    create_directories(config,
                       directories=(config.checkpoint_dir,
                                    config.evaluate_dir, config.log_dir,
                                    config.presentation_dir,
                                    config.summary_dir))

    # Log out the command for using TensorBoard.
    print('tensorboard --logdir="{}" --host 127.0.0.1 --port 8080'.format(
        os.path.abspath(config.summary_dir)))
    return config
def __init__(self, params_dict=None, model_name=None):
    """Build a new HMM model or reload a stored one.

    With `model_name` unset, a fresh model is created from `params_dict`
    (random name generated) and the full search pipeline runs; with
    `model_name` set, parameters are reloaded from the database and the
    pipeline runs in production mode.

    Fixes over the previous version: constructing with no `params_dict`
    no longer crashes on `.get` (it now defaults to an empty dict), the
    redundant `elif model_name is not None` is a plain `else`, and
    `== False` is replaced by `not`.
    """
    self.conn = sqlite3.connect('hmm.db')
    if model_name is None:
        if params_dict is None:
            params_dict = {}  # allow construction with all defaults
        self.name = namegenerator.gen()
        self.pca_n_components = params_dict.get('pca_n_components', 3)
        self.k_features = params_dict.get('k_features', 3)
    else:
        self.name = model_name
        # Parameters were persisted by a previous run; reload them.
        pca_n_components, k_features, features_found = self.get_model_from_db(
        )
        self.pca_n_components = pca_n_components
        self.k_features = k_features
        self.features_found = features_found

    self.cutoff_date = '2017-01-01'
    self.n_experiments = 35
    self.get_data()

    if model_name is not None:
        # run production
        print('running production')
        self.run_pipeline(production=True)
        print('predicting completed')
    else:
        self.run_decision_tree()
        self.run_pipeline()
        if not self.pipeline_failed:
            print('found model', self.features_found)
            self.get_results()
            self.store_results()
def __init__(self, params):
    """Set up a markov-model run over SPY from `params`, then start the generator."""
    self.conn = sqlite3.connect('markov_models.db')

    # Tunable knobs with defaults when missing from `params`.
    self.name = params.get('name', namegenerator.gen())
    self.scoring = params.get('scoring', 'max_error')
    self.k_features = params.get('k_features', 25)
    self.max_depth = params.get('max_depth', 2)
    self.period = params.get('period', '3y')

    # Per-year test windows used to live here but were disabled.
    self.data = get_data(['SPY'], period=self.period)
    self.run_generator()
def recreate_party(request):
    """
    (not a view) creates a copy of the last party's settings with a new
    party name and the same players
    """
    if not request.POST:
        return redirect('/')

    party_id = request.POST.get('party_id')
    party = Party.objects.get(party_name=party_id)

    # New name = generated words + last 4 hex chars of a timestamp hash.
    hasher = hashlib.sha256()
    hasher.update(str(datetime.now()).encode('utf-8'))
    new_party_id = namegenerator.gen() + '-' + hasher.hexdigest()[-4:]

    new_party = Party(party_name=new_party_id,
                      num_players=party.num_players,
                      admin=party.admin,
                      num_rounds=party.num_rounds,
                      party_type=party.party_type,
                      party_subtype=party.party_subtype)
    new_party.save()

    # Carry every player of the old party over to the new one.
    for player in party.players.all():
        print(player.player_name)
        new_party.players.add(
            Player.objects.get(player_name=player.player_name))

    add_party_content(new_party)
    Party.objects.filter(party_name=new_party_id).update(status=1)
    add_extra_party_content(new_party)

    # Notify everyone in the old party's channel about the new party id.
    channel_layer = channels.layers.get_channel_layer()
    async_to_sync(channel_layer.group_send)('chat_%s' % party_id, {
        'type': 'chat_message',
        'submission_score': new_party_id,
        'message': "party_recreated",
        'player_name': '',
    })
    return redirect(f'/start/{new_party_id}')
def devices(identifier=None):
    """GET: return the device with `identifier` (404 when absent).
    POST: create a device from the optional JSON body and return its uuid.

    Fix over the previous version: a POST body that was valid JSON but had
    no "params" key left `data` as None and crashed on the following
    `data.get(...)`; it now falls back to an empty dict in every case.
    """
    if request.method == 'GET':
        # query db
        doc = db.collection('devices').document(identifier)
        # load query response
        device = doc.get().to_dict()
        if device is None:
            return Response(status=404)
        else:
            return jsonify(device)
    elif request.method == 'POST':
        data = request.get_json()
        # Tolerate a missing body and a body without "params".
        data = (data or {}).get('params') or {}
        # create new device
        device = {
            'agent': request.headers.get('User-Agent', 'unknown'),
            'name': data.get('name', namegenerator.gen())
        }
        # add device to db collection
        (_, doc) = db.collection('devices').add(device)
        return jsonify({'uuid': doc.id}), 200
def __init__(self):
    """Spawn an entity at (1, 10) with 4 health and a random generated name."""
    self.x = 1
    self.y = 10
    # Previous position (presumably for movement tracking — confirm usage).
    self.oldx = 1
    self.oldy = 10
    self.move_dir = ""
    self.health = 4
    self.name = namegenerator.gen()
def __init__(self, run_type, params_dict=None):
    """Dispatch a rolling HMM run.

    `run_type` selects the workflow:
      * 'find_features'  — run the decision tree / feature-finder search.
      * 'rolling_test'   — re-test stored features on new cutoff dates.
      * 'production'     — reload model params from the DB and predict.

    Fixes over the previous version: `params_dict=None` no longer crashes
    on the unconditional `.get` calls (defaults to an empty dict), and
    `== False` comparisons are replaced with idiomatic `not`.
    """
    self.conn = sqlite3.connect('hmm_rolling.db')
    self.run_type = run_type
    if params_dict is None:
        params_dict = {}
    self.pca_n_components = params_dict.get('pca_n_components', 3)
    self.k_features = params_dict.get('k_features', 3)
    self.train_length = params_dict.get('train_length', 3)

    if self.run_type == 'find_features':
        # run decision tree and features finder algorithms
        self.name = params_dict.get('name', namegenerator.gen())
        self.cutoff_date = params_dict.get('cutoff_date', '2017-01-01')
    elif self.run_type == 'rolling_test':
        # run test on new test data cutoffs using features already defined
        self.name = params_dict.get('name')
        self.features_found = params_dict.get('features')
        self.cutoff_date = params_dict.get('cutoff_date')
        print('testing using\n', self.features_found, '\nand date of',
              self.cutoff_date)
    elif self.run_type == 'production':
        # read params from database
        self.production = True
        self.name = params_dict.get('name')
        pca_n_components, k_features, features_found = self.get_model_from_db()
        self.pca_n_components = pca_n_components
        self.k_features = k_features
        self.features_found = features_found
        print('using features', features_found)

    self.n_experiments = 30
    self.get_data()

    if self.run_type == 'production':
        # run production
        print('running production')
        self.run_pipeline()
        print('predicting completed')
    elif self.run_type == 'rolling_test':
        print('running test on other dates', self.cutoff_date)
        self.run_pipeline()
        if not self.pipeline_failed:
            self.get_results()
            self.store_results()
    elif self.run_type == 'find_features':
        # run feature selection and test data prediction
        self.run_decision_tree()
        self.run_pipeline()
        if not self.pipeline_failed:
            print('found model', self.features_found)
            self.get_results()
            self.store_results()
def show_index():
    """Create a room owned by a freshly named creator and redirect to its page."""
    pseudo = namegenerator.gen()
    creator = User(pseudo, request.remote_addr, Roles.CREATOR)
    room_id = room_manager.add_new_room__(creator)
    print("Création de la room %d." % room_id, end="\n\n")
    return redirect(url_for('show_room_page', room_id=room_id))
def rename_files_for_download(self):
    """Download NSE equity bhavcopy zips for Feb/Mar 2020, unzip them, and re-save as CSVs.

    Side effects: creates ./NSE_Downloads and ./NSE_Downloads/temp, changes
    the process working directory twice (the process ends up inside the temp
    directory), and writes one randomly named CSV per parsed bhavcopy file.
    """
    # Reference URL showing the file-name pattern templated below.
    original = "https://archives.nseindia.com/content/historical/EQUITIES/2020/FEB/cm27FEB2020bhav.csv.zip"
    print(f"Original file is: {original}")
    # NOTE(review): presumably the number of weekday dates to iterate —
    # confirm against weekdays_date().
    a = (self.weekdays_date())
    self.save_path()
    try:
        os.mkdir(
            'NSE_Downloads'
        )  # makes a separate folder "NSE_Downloads" and saves the downloaded files
    except FileExistsError:
        print('File exists already')
    os.chdir('NSE_Downloads')
    current = os.getcwd()
    # print(a)
    predefined = [
        "FEB", "MAR"
    ]  # I'm defining the months only to download for Feb & March
    for i in range(a):
        for x in predefined:
            # print(i+1)
            # Build the archive URL for day `i` of month `x` (zero-padded day).
            modified_file = f"https://archives.nseindia.com/content/historical/EQUITIES/2020/{x}/cm{str(i).zfill(2)}{x}2020bhav.csv.zip"
            print(
                f"Modified file {i + 1} is: {modified_file}"
            )  # New file generated dynamically based on original file pattern
            try:
                wget.download(modified_file, current)
            except ConnectionResetError:
                print('Error Handled!')
            except TimeoutError:
                print(
                    "Oops this was a weekend, we don't have file for this date"
                )
            # time.sleep(4)
    for i in os.listdir(current):  # Unzip the downloaded files
        with ZipFile(i, 'r') as zip:
            zip.printdir()
            print('Extracting files now..')
            zip.extractall('temp')
            print('Done!')
    print("Finished in rename_files_for_download")
    print('Path after extraction:', os.getcwd())
    # NOTE: os.chdir returns None, so `uff` is always None.
    uff = os.chdir('temp')
    print('Temp Directory ?', uff)
    temp_dir = os.getcwd()
    print(temp_dir)
    # Columns kept when parsing each bhavcopy CSV.
    col_list = 'SYMBOL', 'SERIES', 'OPEN', 'HIGH', 'LOW', 'CLOSE', 'LAST', 'PREVCLOSE', 'TOTTRDQTY', 'TOTTRDVAL', 'TIMESTAMP'
    for i in os.listdir(
            temp_dir
    ):  # Parses the unzipped files and saves the files in .CSV format
        print("Currently opening: ", i, "file")
        a = (pd.read_csv(i, usecols=col_list))
        df = pd.DataFrame(a)
        print('Saving file:')
        # Output file gets a random generated name.
        df.to_csv(namegenerator.gen() + '.csv')
        # print(i)
    print("Files Parsed Successfully")
def lambda_handler(event, context):
    """Seed the DynamoDB table named by DYNAMODB with NUM_DEVICES fake devices.

    Fixes over the previous version: the locals `id` and `type` no longer
    shadow builtins, and the unused `location` variable (computed but never
    written to the table) was removed. The written Item is unchanged.
    """
    try:
        # this will create dynamodb resource object and
        # here dynamodb is resource name
        client = boto3.resource('dynamodb')
        # this will search for dynamoDB table
        # your table name may be different
        if DYNAMODB == '':
            print(
                'You need to set the DYNAMODB_TABLE environment variable to continue.'
            )
            exit(0)
        table = client.Table(DYNAMODB)
        print(table.table_status)

        fake = Faker()
        for _ in range(NUM_DEVICES):
            created_at = datetime.datetime.utcnow().strftime(
                '%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
            response = table.put_item(
                Item={
                    'id': str(uuid.uuid4()),
                    'name': namegenerator.gen(),
                    'group': fake.country(),
                    'deviceId': randint(10000, 100000),
                    'serialNumber': randint(10000, 100000),
                    'createdAt': created_at,
                    'updatedAt': created_at,  # identical at creation time
                    'activated': 'On',
                    'type': 'Default'
                })
            print(json.dumps(response, indent=4, cls=DecimalEncoder))
    except Exception as e:
        print("Unexpected error: %s" % e)
def train_index():
    """Render the training page, or redirect to an engine already training/launching."""
    if user_utils.is_normal():
        return redirect(url_for('index'))

    # An engine mid-training takes priority: jump straight to its console.
    currently_training = Engine.query.filter_by(uploader_id = user_utils.get_uid()) \
        .filter(Engine.status.like("training")).all()
    if len(currently_training) > 0:
        return redirect(
            url_for('train.train_console', id=currently_training[0].id))

    currently_launching = Engine.query.filter_by(uploader_id = user_utils.get_uid()) \
        .filter(Engine.status.like("launching")).all()
    if len(currently_launching) > 0:
        return redirect(
            url_for('train.train_launching',
                    task_id=currently_launching[0].bg_task_id))

    # Propose an unused random engine name; give up after 5 collisions.
    random_name = namegenerator.gen()
    attempts = 0
    while len(Engine.query.filter_by(name=random_name).all()):
        random_name = namegenerator.gen()
        attempts += 1
        if attempts >= 5:
            random_name = ""
            break
    # Keep only the first two hyphen-separated words, space-joined.
    random_name = " ".join(random_name.split("-")[:2])

    library_corpora = user_utils.get_user_corpora().filter(
        LibraryCorpora.corpus.has(Corpus.type == "bilingual")).all()
    corpora = [c.corpus for c in library_corpora]
    languages = UserLanguage.query.filter_by(
        user_id=current_user.id).order_by(UserLanguage.name).all()

    return render_template('train.html.jinja2',
                           page_name='train',
                           page_title='Train',
                           corpora=corpora,
                           random_name=random_name,
                           languages=languages)
def newRandomResultSheet(self):
    """Add a randomly named worksheet seeded with the result header row.

    Returns the new sheet's title, or None when error.handle swallows an
    exception raised along the way.
    """
    try:
        title = namegenerator.gen()
        # Drop any stale sheet with the same name before recreating it.
        self.deletesheet(title)
        sheet = self.wb.add_worksheet(title=title, rows="100", cols="20")
        sheet.append_row(sb.result_sheet_head)
        return sheet.title
    except Exception as ex:
        error.handle(ex, traceback.format_exc())
def new_party(request):
    """
    Shows form to create a new party and generates the party name
    """
    # Party name = random generated words + last 4 hex chars of a
    # timestamp hash, to keep names unique.
    hasher = hashlib.sha256()
    hasher.update(str(datetime.now()).encode('utf-8'))
    suffix = hasher.hexdigest()[-4:]
    cah_decks = load_decks()
    return render(
        request, 'new.html', {
            "hex_digest": namegenerator.gen() + '-' + suffix,
            "cah_decks": cah_decks,
        })
def make_machine():
    """Build a dict describing one random laundry machine.

    Fix over the previous version: the local variable was named `dict`,
    shadowing the builtin; it is now `machine` and built as one literal.
    """
    machine = {
        "id": str(uuid.uuid1()),
        "name": namegenerator.gen(),
        "type": random.choice(["washer", "dryer"]),
        "running": False,
        "status": "normal",
        "room_id": random.choice([
            "gay", "independence", "green_prairie", "the_apartments",
            "blakely", "pine", "spooner"
        ]),
    }
    return machine
def __init__(self, name=None, age=None, list_note=None):
    """Fill each field from its argument when given, otherwise generate a random value."""
    # Conditional expressions are lazy, so the generators only run when needed.
    self.name = namegenerator.gen() if name is None else name
    self.age = generate_random_age() if age is None else age
    self.list_note = (generate_list_random_note()
                      if list_note is None else list_note)
def __init__(self):
    """Start an account holder with a random username and balance; the rest is blank."""
    self.profile = dict(
        Username=namegenerator.gen(),
        Address="",
        Email="",
        Password="",
        PhoneNumber=0,
        AccountBalance=randint(100, 99999),
    )
    self.user_id = None
    # Caches for records pulled from the API, populated elsewhere.
    self.transactions_from_api = []
    self.transactions_to_api = []
    self.transfers_from_api = []
    self.transfers_to_api = []
def __init__(self, n_objects=10, n_users=10):
    """Build a toy dataset: random object names, fixed whimsical fields, random users."""
    # list of objects to consider
    object_names = [namegenerator.gen() for _ in range(n_objects)]
    # fields to consider (features)
    feature_names = [
        "unicorniness",
        "magic_skills",
        "number_of_trump_appearances",
    ]
    # some users
    user_names = [names.get_full_name() for _ in range(n_users)]
    super(ToyRandomDataset, self).__init__(objects=object_names,
                                           fields=feature_names,
                                           users=user_names)
def connect_to_existing_room(room_id, ip_address):
    """Attach a user to room `room_id` and return the User.

    If the room's creator has not reconnected yet, this connection is
    treated as the creator coming back; otherwise a new user with a
    generated pseudo joins with the LAMBDA role.

    Improvement over the previous version: the room object is looked up
    once instead of re-fetching `room_manager.get_room_list()[room_id]`
    on every access.
    """
    room = room_manager.get_room_list()[room_id]
    if not room.get_creator_connected_state():
        # The creator slot is free: this connection reclaims it.
        user = room.get_creator()
        room.set_creator_connected_state(True)
    else:
        user = User(namegenerator.gen(), ip_address, Roles.LAMBDA)
        room.add_new_user(user)
    print(
        "Nouvelle connection à la room {%s} : login[%s] - ip[%s] - role[%s]"
        % (str(room_id), user.get_pseudo(), user.get_ip_address(),
           user.get_role().name))
    return user
def __init__(self, name=""): super().__init__() scf(self) # Just for fun if name: self.window_name = name else: try: import namegenerator self.window_name = namegenerator.gen().replace("-", " ").title() except ImportError: self.window_name = "VTK figure" self.plots = set()
def __init__(self, name=""): scf(self) self.renderer = vtk.vtkRenderer() # Just for fun if name: self.window_name = name else: try: import namegenerator self.window_name = namegenerator.gen().replace("-", " ").title() except ImportError: self.window_name = "VTK figure" self.plots = set() self.background_color = "light grey"
def load_dataset():
    """Read the wine CSV, clean it, tag rows with random names, and split into features/labels."""
    # Column names come from the module-level `cols` spec plus the label column.
    header = [spec[0] for spec in cols]
    header.append(labelcol)
    df = pd.read_csv(csvpath,
                     sep=";",
                     header=None,
                     names=header,
                     index_col=None)
    df = drop_missing(df).reset_index()
    df.index.name = 'id'
    clean(df)
    # Add a (random) wine label name in order to have an identifier
    df['ranName'] = df.index.to_series().map(lambda _: namegenerator.gen())
    features, labels = utils.splitdf(df, labelcol)
    features = utils.drop_non_features(features, cols)
    features = utils.categorize(features, cols)
    return features, labels
def __init__(self, params):
    """Configure a rolling markov-model run over SPY and launch the pipeline."""
    self.conn = sqlite3.connect('markov_models.db')
    self.name = params.get('name', namegenerator.gen())
    self.period = params.get('period', 'Max')

    # Window lengths arrive in years; convert to days immediately.
    self.train_length = 365 * params.get('train_length', 3)
    self.test_length = 365 * params.get('test_length', 3)

    self.cutoff_date = '2017-01-01'
    self.target_variable = 'return'
    self.k_features = params.get('k_features', 25)
    self.k_neighbors = params.get('k_neighbors', 3)

    self.data = get_data(['SPY'], period=self.period, pattern=False)
    self.run_generator_pipeline()
def __init__(self, x=None, y=None):
    """Place the point at (x, y) and give it a translated, de-hyphenated random name."""
    self.x = x
    self.y = y
    # Generate an English name, translate it to German, and replace hyphens
    # with spaces for display.
    raw_name = namegenerator.gen()
    self.name = trans.translate(raw_name, dest='de').text.replace("-", ' ')
    print(self.name)