def get_conditions(characteristics_list):
    """Build deduplicated Condition objects from a list of Characteristics.

    Args:
        characteristics_list: Iterable of Characteristic objects, each
            exposing a ``conditions`` iterable of condition names.

    Returns:
        list: One Condition per unique condition name, each assigned a
        fresh UUID string. Order follows set iteration order, as before.
    """
    # Accumulate directly into a set instead of growing a list and
    # converting afterwards; also drops the redundant list() wrapper.
    unique_conditions = set()
    for char in characteristics_list:
        logging.debug(f"Working on {char}")
        unique_conditions.update(char.conditions)
    logging.debug(f"Conditions: {unique_conditions}")
    return [Condition(uuid=str(get_uuid()), name=condition)
            for condition in unique_conditions]
def ajax_schedule_last_changed_cached(self, prog):
    """Serve a JSON nonce for schedule-change detection.

    The 'val' field is a fresh UUID string, regenerated at least as often
    as the schedule data changes, so clients can poll it to detect updates.
    """
    payload = {
        'val': str(get_uuid()),
        'msg': 'UUID that changes every time the schedule is updated',
    }
    response = HttpResponse(content_type="application/json")
    simplejson.dump(payload, response)
    # Expose the original dict so other views can call this one and read
    # the value back without re-parsing the JSON body.
    response.raw_value = payload
    return response
def get_characteristics(data):
    """Generate a list of characteristics from data in a dictionary."""
    characteristics = []
    for char, info in data.items():
        logging.debug(f"Char: {char}, Info: {info}")
        # NOTE(review): the uuid here is not str()-wrapped, unlike the
        # sibling factory functions — confirm Characteristic accepts a
        # raw UUID object.
        characteristic = Characteristic(
            uuid=get_uuid(),
            text=info['text'],
            category=info['category'],
            condition_weightings=info['condition_weightings'],
        )
        characteristics.append(characteristic)
    return characteristics
def get_basket_items(session, transactions: list) -> list:
    """Create one BasketItem per item in every transaction's basket.

    Each BasketItem gets a fresh UUID string id, the owning transaction's
    id, and the id of its (already persisted) product.
    """
    items = []
    for txn in transactions:
        for entry in txn["basket"]:
            items.append(
                BasketItem(
                    id=str(get_uuid()),
                    transaction_id=txn["id"],
                    product_id=_get_existing_product_id(session, entry),
                )
            )
    return items
def get_locations(transactions: list) -> list:
    """
    Extract a list of unique locations from the transactions list.

    Each location is assigned a UUID string.

    Returns
    -------
    list
        A list containing the unique locations as dictionaries
    """
    unique_names = {txn["location"] for txn in transactions}
    return [Location(id=str(get_uuid()), name=name) for name in unique_names]
def assign_new_ID(request: Request, temp_id) -> UUID:
    """Assign (or look up) a stable session UUID for a cookieless client.

    Stores the UUID on ``request.session`` as a string and returns the
    UUID object itself.
    """
    # If there isn't yet a session cookie, we have to be careful: there
    # might be concurrent requests from this cookieless session and they all
    # need to receive the same cookie otherwise things get duplicated. To
    # achieve this, we'll maintain a dict of temp_id -> uuid which is only
    # accessed if there's no session id. If a user joins without a cookie,
    # we store their uuid in the dict. Any other requests from their temp_id with
    # no cookie are given the same uuid. Once a request arrives from this temp_id
    # which has a cookie, delete them from the dict.
    # NOTE(review): the deletion described above does not happen in this
    # function — presumably some caller that sees a cookie removes the
    # entry; verify, otherwise no_cookie_clients grows without bound.
    with no_cookie_lock:
        if temp_id in no_cookie_clients:
            session_UUID = no_cookie_clients[temp_id]
        else:
            session_UUID = get_uuid()
            no_cookie_clients[temp_id] = session_UUID
    request.session["UUID"] = str(session_UUID)
    return session_UUID
def get_raw_transactions(data) -> list:
    """
    Transform and clean the raw data from the CSV file into a list of
    transactions in which we are able to find unique products and
    locations. Each transaction is assigned a UUID string.

    Returns
    -------
    list
        A list of transactions along with the basket of items purchased
        in the transaction.
    """
    transactions = []
    for row in data:
        # Split the comma delimited order section and hand it to `_basket()`.
        basket = _basket(row["Orders"].split(","))
        card = row["Card Details"].split(",")[0]
        # Timestamp column parsed to epoch seconds (local time, as before).
        epoch = int(
            time.mktime(
                time.strptime(row["Timestamp"], "%Y-%m-%d %H:%M:%S")))
        transactions.append({
            "id": str(get_uuid()),
            "basket": basket,
            "datetime": epoch,
            "location": row["Location"],
            "payment_type": row["Payment Type"],
            "transaction_total": row["Cost"],
            "card_details": None if card == "None" else card,
        })
    return transactions
def get_unique_products(transactions: list) -> list:
    """
    Extract a list of unique products from the transactions list.

    Each product is assigned a UUID string.

    Returns
    -------
    list
        A list containing the unique products as dictionaries
    """
    # Feed chain.from_iterable a generator directly instead of building a
    # throwaway list of baskets first.
    all_items = chain.from_iterable(
        transaction["basket"] for transaction in transactions
    )
    # _deduplicate_products appears to expect a sequence, so materialize once.
    return [
        # {**product, "id": ...} replaces the dated dict(product, **{...}) merge.
        Product(**{**product, "id": str(get_uuid())})
        for product in _deduplicate_products(list(all_items))
    ]
def add_entity(self, *components, parent=None, uuid=None):
    """Add entity to store.

    :param *components: Components to add to the entity.
    :param parent: Entity to register as a parent entity.
        (Default value = None)
    :param uuid: Override UUID; when given, the automatic UUID generation
        is skipped. (Default value = None)
    """
    # Refuse to reuse an explicitly supplied uuid that already exists.
    if uuid and uuid in self.entities:
        raise KeyError("Entity uuid collision.")
    if not uuid:
        uuid = get_uuid()
    self.entity_hirarchy[uuid] = set()
    if parent is None:
        self.entity_hirarchy_rev[uuid] = None
    else:
        # Register under the parent and remember the reverse link.
        self.entity_hirarchy[parent.uuid].add(uuid)
        self.entity_hirarchy_rev[uuid] = parent.uuid
    self.entities[uuid] = set()
    self.add_components(uuid, *components)
    return Entity(self, uuid)
def get_new_uuid():
    """Return a new random UUID as its 32-character hex string.

    The Python 2 ``unicode`` builtin used previously does not exist on
    Python 3 (which this file requires — it uses f-strings elsewhere) and
    would raise NameError; ``str`` is the correct spelling here.
    """
    return str(get_uuid().hex)
def random_uuid():
    """Generate a random UUID and return it rendered as a string."""
    new_id = get_uuid()
    return str(new_id)
def ajax_schedule_get_uuid(self, prog):
    """Hand back a freshly generated UUID (``prog`` is accepted but unused)."""
    new_uuid = get_uuid()
    return new_uuid
def get_new_uuid():
    """Return a new random UUID as its 32-character hex string."""
    # UUID.hex is already a str, so the previous str() wrapper was redundant.
    return get_uuid().hex
def _create_new_interpolator(self, *, track_grid, nodes, smoothing,
                             db_session, vs_log_age, log_quantity,
                             num_threads, name=None):
    """
    Generate the specified interpolation and add it to the archive.

    Args:
        track_grid: See result of _track_grid_from_files()
        nodes: see get_interpolator().
        smoothing: see get_interpolator().
        vs_log_age: see get_interpolator().
        log_quantity: see get_interpolator().
        num_threads: The number of simultaneous threads to use when
            constructing the interpolation.
        db_session: The database query session to use.
        name: The name to assign to the new interpolator. If None, the
            UUID used to form the filename is used.

    Returns:
        VarChangingInterpolator: Created from scratch based on the given
            arguments.
    """
    # A fresh UUID doubles as the on-disk filename and the fallback name.
    interp_str = str(get_uuid())
    interp_fname = os.path.join(self._serialization_path, interp_str)
    db_interpolator = SerializedInterpolator(
        id=(db_session.query(SerializedInterpolator).count() + 1),
        name=(name or interp_str),
        filename=interp_str)
    # NOTE(review): this uniqueness check queries name=name (possibly
    # None) rather than the name actually stored above — confirm intended.
    if (db_session.query(SerializedInterpolator).filter_by(
            name=name).count()):
        raise ValueError('Interpolator named %s already exists, with a '
                         'different configuration than the one being '
                         'constructed!'
                         % repr(name))
    with tempdir_scope() as track_dir:
        # Copy each track into the temporary directory under the
        # canonical library filename, registering it with the DB record.
        for mass, mass_row in track_grid.items():
            for feh, (track_fname, track_id) in mass_row.items():
                track = db_session.query(Track).filter_by(
                    id=track_id).one()
                db_interpolator.tracks.append(track)
                shutil.copy(
                    track_fname,
                    os.path.join(track_dir,
                                 library_track_fname(mass, feh)))
        # Pack per-quantity configuration into C-typed arrays, indexed
        # by the interpolator's quantity IDs.
        interp_smoothing = numpy.empty(len(
            VarChangingInterpolator.quantity_list), dtype=ctypes.c_double)
        interp_nodes = numpy.empty(len(
            VarChangingInterpolator.quantity_list), dtype=ctypes.c_int)
        interp_vs_log_age = numpy.empty(len(
            VarChangingInterpolator.quantity_list), dtype=ctypes.c_bool)
        interp_log_quantity = numpy.empty(len(
            VarChangingInterpolator.quantity_list), dtype=ctypes.c_bool)
        for q_name, q_index in \
                VarChangingInterpolator.quantity_ids.items():
            interp_smoothing[q_index] = smoothing[q_name]
            interp_nodes[q_index] = nodes[q_name]
            interp_vs_log_age[q_index] = vs_log_age[q_name]
            interp_log_quantity[q_index] = log_quantity[q_name]
        # Mirror the same configuration into DB parameter rows.
        db_interpolator.parameters = [
            InterpolationParameters(quantity_id=q.id,
                                    nodes=nodes[q.name],
                                    smoothing=smoothing[q.name],
                                    vs_log_age=vs_log_age[q.name],
                                    log_quantity=log_quantity[q.name],
                                    interpolator=db_interpolator)
            for q in self._quantities
        ]
        actual_interpolator = ManagedInterpolator(
            db_interpolator=db_interpolator,
            serialization_path=self._serialization_path,
            db_session=db_session,
            mesa_dir=track_dir,
            smoothing=interp_smoothing,
            nodes=interp_nodes,
            vs_log_age=interp_vs_log_age,
            log_quantity=interp_log_quantity,
            num_threads=num_threads)
        # Serialize while the temporary track directory still exists,
        # then record a checksum of the serialized file.
        actual_interpolator.save(interp_fname)
        db_interpolator.checksum = checksum_filename(interp_fname)
        db_session.add(db_interpolator)
        db_session.add_all(db_interpolator.parameters)
        return actual_interpolator
def __init__(self, cons):
    """Hold the factory's connections and tag this instance with a UUID."""
    # Connections handed over by the factory.
    self.cons = cons
    # Unique identifier string for this connection.
    self.uuid = str(get_uuid())
def id(self):
    """Return this object's UUID, lazily generating and caching it on first use."""
    try:
        return self._id
    except AttributeError:
        # First access: generate and cache the UUID.
        self._id = get_uuid()
        return self._id