def dictionary_data():
    sys_logging.info(f'------dictionary data-------')
    create_dictionary()
    access_dictionary()
    reassign_dictionary()
    delete_dictionary()
    dict_built_in_method()

def array_from_numerical_ranges():
    # np.arange(start, stop, step, dtype)
    # start: the start of the interval. If omitted, defaults to 0
    # stop: the end of the interval (this value is not included)
    # step: spacing between values, default is 1
    # dtype: data type of the resulting ndarray. If not given, the data type of the input is used
    x = np.arange(5)
    sys_logging.info(x)
    y = np.arange(10, 20, 2)
    sys_logging.info(y)

    # np.linspace(start, stop, num, endpoint, retstep, dtype)
    # start: the starting value of the sequence
    # stop: the end value of the sequence, included in the sequence if endpoint is True
    # num: the number of evenly spaced samples to be generated. Default is 50
    # endpoint: True by default, hence the stop value is included in the sequence. If False, it is not included
    # retstep: if True, returns the samples and the step between consecutive numbers
    # dtype: data type of the output ndarray
    z = np.linspace(10, 20, 5)
    sys_logging.info(z)
    a = np.linspace(10, 20, 5, endpoint=False)
    sys_logging.info(a)

    # np.logspace(start, stop, num, endpoint, base, dtype)
    # start: the sequence starts at base ** start
    # base: base of the log space, default is 10
    b = np.logspace(1, 2, num=10)
    sys_logging.info(b)

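# The comments above mention retstep and base, but neither option is exercised.
# The helper below is a hypothetical sketch (not part of the original module)
# that only illustrates those two parameters.
def ranges_extra_options_sketch():
    samples, step = np.linspace(10, 20, 5, retstep=True)  # step between samples is 2.5
    sys_logging.info(f'{samples}, step = {step}')
    c = np.logspace(1, 10, num=10, base=2)                # 2**1 ... 2**10
    sys_logging.info(c)
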
def broadcast():
    x = np.array([[1], [2], [3]])
    y = np.array([4, 5, 6])
    b = np.broadcast(x, y)
    for i in b:
        sys_logging.info(i)
    sys_logging.info(x + y)

def thread_method():
    thread_one = Thread(target=function_one, args=(10, 'ThreadOne'))
    thread_one.start()
    thread_two = Thread(target=function_two, args=(5, 'thread two'))
    thread_two.start()
    thread_two.join()
    sys_logging.info('print after child threads')

def delete():
    sys_logging.info('np.delete')
    d = np.arange(12).reshape((3, 4))
    sys_logging.info(d)
    # without an axis, the array is flattened before the element at index 5 is removed
    sys_logging.info(np.delete(d, 5))
    # with axis=0, the second row is removed
    sys_logging.info(np.delete(d, 1, axis=0))
    e = np.arange(10)
    # np.s_[::2] builds a slice object, so every other element is removed
    sys_logging.info(np.delete(e, np.s_[::2]))

def lock_method():
    s_counter = SharedCounter()
    t1 = threading.Thread(target=task, args=(s_counter, 't1'))
    t1.start()
    t2 = threading.Thread(target=task, args=(s_counter, 't2'))
    t2.start()
    t1.join()
    t2.join()
    sys_logging.info(f'Counter: {s_counter.counter}')

def delete_set():
    sys_logging.info(f'------delete set-------')
    numbers = {3, 2, 1, 4, 5, 6, 7}
    sys_logging.info(f'numbers set {numbers}')
    numbers.discard(3)  # does not raise a KeyError if the value doesn't exist
    sys_logging.info(f'numbers.discard(3) {numbers}')
    numbers.remove(2)  # raises a KeyError if the value doesn't exist
    sys_logging.info(f'numbers.remove(2) {numbers}')
    a = numbers.pop()
    sys_logging.info(f'numbers.pop() a = {a}, numbers = {numbers}')

def transpose_operation():
    # Transpose operations
    # np.transpose(ndarray): permutes the dimensions of an array
    # ndarray.T: same as self.transpose()
    # np.rollaxis: rolls the specified axis backwards
    # np.swapaxes: interchanges two axes of an array
    sys_logging.info(f'Transpose operation')
    a = np.arange(0, 60, 5)
    a = a.reshape(3, 4)
    sys_logging.info(a.T)

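# The comments above list rollaxis and swapaxes, but only ndarray.T is
# exercised. The helper below is a hypothetical sketch (not part of the
# original module) showing the remaining routines on a small 3-D array.
def transpose_routines_sketch():
    a = np.arange(24).reshape(2, 3, 4)
    sys_logging.info(np.transpose(a).shape)       # (4, 3, 2): axes fully reversed
    sys_logging.info(np.rollaxis(a, 2).shape)     # (4, 2, 3): axis 2 rolled to the front
    sys_logging.info(np.swapaxes(a, 0, 2).shape)  # (4, 3, 2): axes 0 and 2 exchanged
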
def change_dimensions():
    # Changing dimensions
    # broadcast: produces an object that mimics broadcasting
    # broadcast_to: broadcasts an array to a new shape
    # expand_dims: expands the shape of an array
    # squeeze: removes single-dimensional entries from the shape of an array
    sys_logging.info(f'Changing dimensions')
    broadcast()
    broadcast_to()
    expand_dims()
    squeeze()

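# broadcast_to() and squeeze() are defined elsewhere in this module; the
# helper below is only a hypothetical sketch of the two underlying NumPy
# calls, not the original implementations.
def broadcast_to_and_squeeze_sketch():
    a = np.arange(4).reshape(1, 4)
    sys_logging.info(np.broadcast_to(a, (4, 4)))  # the single row is repeated 4 times
    b = np.arange(9).reshape(1, 3, 3)
    sys_logging.info(np.squeeze(b).shape)         # (3, 3): the length-1 axis is dropped
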
def rename_columns():
    header("10.1 df.rename(columns = {'avg_precipitation': 'avg_rain'}, inplace = True)")
    df.rename(columns={'avg_precipitation': 'avg_rain'}, inplace=True)
    sys_logging.info(df.head())
    header("10.2 df.columns = ['month', 'av_hi', 'av_lo', 'rec_hi', 'rec_lo', 'av_rain']")
    df.columns = ['month', 'av_hi', 'av_lo', 'rec_hi', 'rec_lo', 'av_rain']
    sys_logging.info(df.head())

def thread_basic_print():
    # Returns the number of currently alive Thread objects.
    # This is equal to the length of the list that enumerate() returns.
    sys_logging.info(f'threading.active_count() = {threading.active_count()}')
    sys_logging.info(f'current thread = {threading.current_thread()}')
    sys_logging.info(f'current thread id = {threading.get_ident()}')
    # Returns a list of all currently alive Thread objects.
    # This includes the main thread, daemonic threads, and dummy thread objects created by current_thread().
    # It does not include terminated threads or threads that have not been started yet.
    sys_logging.info(f'thread enumerate = {threading.enumerate()}')
    sys_logging.info(f'main thread = {threading.main_thread()}')

def array_from_existing_data():
    x = [(1, 2, 3), (2, 3, 4)]
    a = np.asarray(x, dtype=float)
    sys_logging.info(a)
    # np.frombuffer needs a bytes-like object in Python 3, hence the b'' literal
    s = b'Hello World'
    b = np.frombuffer(s, dtype='S1')
    sys_logging.info(b)
    list_a = range(5)
    it = iter(list_a)
    # use an iterator to create an ndarray
    c = np.fromiter(it, dtype=float)
    sys_logging.info(c)

def stack():
    a = np.array([[1, 2], [3, 4]])
    b = np.array([[5, 6], [7, 8]])
    sys_logging.info(f'hstack \n {np.hstack((a, b))}')
    sys_logging.info(f'vstack \n {np.vstack((a, b))}')
    sys_logging.info(f'stack axis = 0 \n {np.stack((a, b))}')
    sys_logging.info(f'stack axis = 1 \n {np.stack((a, b), axis=1)}')

def reassign_dictionary():
    sys_logging.info(f'------reassign dictionary------')
    numbers = {1: 1, 2: 2, 3: 3}
    sys_logging.info(f'the original numbers = {numbers}')
    numbers[2] = 4
    sys_logging.info(f'update numbers[2] = 4, numbers = {numbers}')
    numbers[4] = 4
    sys_logging.info(f'add a new element numbers[4] = 4, numbers = {numbers}')

def nd_array_object():
    a = np.array([1, 2, 3])
    sys_logging.info(a)
    b = np.array([[1, 2], [3, 4]])
    sys_logging.info(b)
    # ndmin=2 forces a minimum of two dimensions
    c = np.array([1, 2, 3, 4, 5], ndmin=2)
    sys_logging.info(c)
    # dtype=complex stores the elements as complex numbers
    d = np.array([1, 2, 3, 4], dtype=complex)
    sys_logging.info(d)

def deep_copy():
    sys_logging.info(f'------ deep copy ------')
    a = [1, 23, [4, 5], 'Stephen']
    sys_logging.info(f'a = {a}, id(a) = {id(a)}')
    b = copy.deepcopy(a)
    sys_logging.info(f'b = {b}, id(b) = {id(b)}')
    # the nested list is copied too, so mutating b does not change a
    b[2][0] = 23
    b[3] = 'Young'
    sys_logging.info(f'a = {a}, b = {b}, id(a) = {id(a)}, id(b) = {id(b)}')

def shallow_copy():
    sys_logging.info(f'------- shallow copy ------')
    a = [1, 23, [4, 5], 'Stephen']
    sys_logging.info(f'a = {a}, id(a) = {id(a)}')
    b = copy.copy(a)
    sys_logging.info(f'b = {b}, id(b) = {id(b)}')
    # the nested list is shared, so this change shows up in a as well
    b[2][0] = 45
    b[3] = 'Young'
    sys_logging.info(f'a = {a}, b = {b}, id(a) = {id(a)}, id(b) = {id(b)}')

def condition_task():
    main_cond = threading.Condition()
    main_si = SomeItem()
    c_thread = Thread(name='Consumer-Thread', target=consumer, args=(main_si, main_cond))
    c_thread.start()
    p_thread = Thread(name='Producer-Thread', target=producer, args=(main_si, main_cond))
    p_thread.start()
    c_thread.join()
    p_thread.join()
    sys_logging.info(f'Main Done')

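# SomeItem, producer and consumer are defined elsewhere in this module; the
# helpers below are a hypothetical minimal version of the same
# Condition-based handshake, not the original implementations.
class SomeItemSketch:
    def __init__(self):
        self.value = None

def producer_sketch(item, cond):
    with cond:
        item.value = 42            # publish the item
        cond.notify()              # wake up the waiting consumer

def consumer_sketch(item, cond):
    with cond:
        while item.value is None:  # wait until the producer has published
            cond.wait()
        sys_logging.info(f'consumed {item.value}')
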
def load_hard_coded_data():
    # 1. load hard-coded data into a dataframe
    header(f'1. Load hard-coded data into df')
    global df
    df = pd.DataFrame(
        [['Jan', 58, 42, 74, 22, 2.95],
         ['Feb', 61, 45, 78, 26, 3.02],
         ['Mar', 65, 48, 84, 25, 2.34],
         ['Apr', 67, 50, 92, 28, 1.02],
         ['May', 71, 53, 98, 35, 0.48],
         ['Jun', 75, 56, 107, 41, 0.11],
         ['Jul', 77, 58, 105, 44, 0],
         ['Aug', 77, 59, 102, 43, 0.03],
         ['Sep', 77, 57, 103, 40, 0.17],
         ['Oct', 73, 54, 96, 34, 0.81],
         ['Nov', 64, 48, 84, 30, 1.7],
         ['Dec', 58, 42, 73, 21, 2.56]],
        index=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
        columns=['month', 'avg_high', 'avg_low', 'record_high', 'record_low', 'avg_precipitation'])
    sys_logging.info(df)

def array_creation_routines():
    # np.empty gives an uninitialized array, so the contents are arbitrary
    x = np.empty([3, 2], dtype=int)
    sys_logging.info(x)
    y = np.zeros(5)
    sys_logging.info(y)
    z = np.zeros([5], dtype=int)
    sys_logging.info(z)
    # a structured dtype: each element holds two int32 fields, 'x' and 'y'
    a = np.zeros([2, 2], dtype=[('x', 'i4'), ('y', 'i4')])
    sys_logging.info(a)

def iterating_over_array():
    a = np.arange(0, 60, 5)
    a = a.reshape(3, 4)
    sys_logging.info(a)
    for x in np.nditer(a, flags=['c_index']):
        sys_logging.info(x)
    # broadcasting iteration: b is broadcast against each row of a
    sys_logging.info(f'start broadcasting iterating')
    b = np.array([1, 2, 3, 4])
    for x, y in np.nditer([a, b]):
        sys_logging.info(f'{x}:{y}')

def data_assignment():
    header(f"9.1 df.loc[9, ['avg_precipitation']] = 101.3")
    df.loc[9, ['avg_precipitation']] = 101.3
    sys_logging.info(df.iloc[9:11])
    header(f"9.2 df.loc[9, ['avg_precipitation']] = np.nan")
    df.loc[9, ['avg_precipitation']] = np.nan
    sys_logging.info(df.iloc[9:11])
    header(f"9.3 df.loc[:,['avg_low']] = np.array([5]*len(df))")
    df.loc[:, 'avg_low'] = np.array([5] * len(df))
    sys_logging.info(df.head())
    header(f"9.4 df['avg_day'] = (df.avg_low + df.avg_high)/2")
    df['avg_day'] = (df.avg_low + df.avg_high) / 2
    sys_logging.info(df.head())

def event_task(event):
    # wait() with no timeout blocks until the event is set and returns True;
    # only a wait(timeout) call can return False and reach the else branch
    event_set = event.wait()
    sys_logging.info(f'{threading.current_thread()}')
    if event_set:
        sys_logging.info(f'Event received, releasing thread...')
    else:
        sys_logging.info(f'Time out, moving ahead without event')

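# Hypothetical driver, not part of the original module: shows how event_task
# could be exercised. Both worker threads block in event.wait() until the
# main thread calls set().
def event_task_driver_sketch():
    event = threading.Event()
    workers = [Thread(target=event_task, args=(event,)) for _ in range(2)]
    for w in workers:
        w.start()
    time.sleep(1)  # simulate some work before signalling
    event.set()    # release every thread waiting on the event
    for w in workers:
        w.join()
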
def increment(self, name):
    sys_logging.info(f'Waiting for a lock at {name}')
    self.lock.acquire()
    sys_logging.info(f'{name} Acquired a lock, counter value = {self.counter}')
    self.counter += 1
    self.lock.release()
    sys_logging.info(f'{name} Released a lock, counter value = {self.counter}')

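# increment() belongs to the SharedCounter class used by lock_method() above.
# The skeleton below is a hypothetical sketch of how that class and the
# task() helper might be laid out, assuming a plain threading.Lock guards
# the counter; it is not the original implementation.
class SharedCounterSketch:
    def __init__(self):
        self.counter = 0
        self.lock = threading.Lock()

    def increment(self, name):
        with self.lock:  # same effect as acquire()/release()
            self.counter += 1
            sys_logging.info(f'{name} incremented counter to {self.counter}')

def task_sketch(shared_counter, name):
    for _ in range(5):   # each thread bumps the counter a few times
        shared_counter.increment(name)
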
def update_set():
    sys_logging.info(f'------update set------')
    numbers = {1, 3, 4, 5, 8}
    numbers.add(3.5)
    sys_logging.info(f'numbers.add(3.5). numbers = {numbers}')
    # update() accepts any number of iterables and adds all their elements
    numbers.update({1, 3, 4, 9}, {'a', 'b'})
    sys_logging.info(f'numbers.update() method. numbers = {numbers}')

def barrier_thread(barrier, timeout):
    thread_name = threading.current_thread().name
    sys_logging.info(f'{thread_name} started')
    time.sleep(timeout)
    if thread_name == 'Thread-Three':
        # abort() puts the barrier into a broken state, so every waiting
        # thread gets a BrokenBarrierError
        barrier.abort()
        return
    sys_logging.info(f'{thread_name} is ready')
    try:
        barrier.wait()
    except threading.BrokenBarrierError:
        sys_logging.info(f'{thread_name} caught BrokenBarrierError')
    finally:
        sys_logging.info(f'{thread_name} is finished')

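# Hypothetical driver, not part of the original module: shows how
# barrier_thread could be exercised. With parties=3 and Thread-Three
# aborting, the other two threads see BrokenBarrierError.
def barrier_driver_sketch():
    barrier = threading.Barrier(parties=3)
    names = ['Thread-One', 'Thread-Two', 'Thread-Three']
    threads = [Thread(name=n, target=barrier_thread, args=(barrier, i + 1))
               for i, n in enumerate(names)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
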
def expand_dims():
    d = np.array([[1, 2], [3, 4]])
    e = np.expand_dims(d, axis=0)
    f = np.expand_dims(d, axis=1)
    g = np.expand_dims(d, axis=2)
    sys_logging.info(f'shape = {e.shape} \n {e}')
    sys_logging.info(f'shape = {f.shape} \n {f}')
    sys_logging.info(f'shape = {g.shape} \n {g}')

def create_list():
    # a python list can hold different types of values
    sys_logging.info(f'------start create list------')
    colors = ['red', 'green', 'blue']
    days = ['Monday', 'Tuesday', 'Wednesday', 4, 5, 6, 7]
    sys_logging.info(colors)
    sys_logging.info(f'create days {days} id = {id(days)}')
    return days

def get_second_line():
    sys_logging.info(f'start second line')
    # rlock is re-entrant, so this acquire succeeds even if the calling
    # thread already holds it
    second_acquire = rlock.acquire()
    sys_logging.info(f'second acquire = {second_acquire}')
    try:
        with open('write.txt', 'r') as write:
            second_line = write.readlines()
            sys_logging.info(f'second: {second_line}')
    finally:
        rlock.release()

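# get_second_line() relies on a module-level rlock; the helper below is a
# hypothetical caller (not the original code) showing why an RLock is
# needed: the same thread acquires the lock again inside get_second_line().
# rlock = threading.RLock()  # assumed module-level definition
def get_first_line_sketch():
    first_acquire = rlock.acquire()
    sys_logging.info(f'first acquire = {first_acquire}')
    try:
        get_second_line()  # re-acquires the same RLock in this thread
    finally:
        rlock.release()
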
def load_file_data():
    header(f'2. load file data')
    global df
    df = pd.read_csv('colors.csv')
    sys_logging.info(df)
    header(f'3.1 df.head()')
    sys_logging.info(df.head())
    header(f'3.2 df.tail(3)')
    sys_logging.info(df.tail(3))