def add_product():
    name = extract('name')
    description = extract('description')
    price = extract('price')
    qty = extract('qty')
    new_product = Product(name, description, price, qty)
    db.session.add(new_product)
    db.session.commit()
    return product_schema.jsonify(new_product)
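# A minimal, self-contained sketch of the scaffolding that add_product (and the
# similar update_product handler later in this file) appear to assume: a Flask
# app, a SQLAlchemy Product model, a Marshmallow schema, and an extract() helper
# that pulls one field out of the JSON request body. Everything below is an
# assumption for illustration, not the original project's code.
from flask import Flask, request
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///products.db'  # assumed backing store
db = SQLAlchemy(app)
ma = Marshmallow(app)

class Product(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100))
    description = db.Column(db.String(200))
    price = db.Column(db.Float)
    qty = db.Column(db.Integer)

    def __init__(self, name, description, price, qty):
        self.name = name
        self.description = description
        self.price = price
        self.qty = qty

class ProductSchema(ma.Schema):
    class Meta:
        fields = ('id', 'name', 'description', 'price', 'qty')

product_schema = ProductSchema()

def extract(field):
    # Assumed behaviour: read one field from the incoming JSON payload.
    return request.json.get(field)

# The handlers would then typically be registered with something like
# @app.route('/product', methods=['POST']) for add_product and
# @app.route('/product/<id>', methods=['PUT']) for update_product.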
def setup_weights(self, weights):
    """ Setup weights """

    if type(weights) in [int, float]:
        weights = np.ones((1,) * len(self.source.shape)) * weights
    dtype = weights.dtype

    # Is the kernel already a sparse array?
    if sparse.issparse(weights):
        if weights.shape != (self.target.size, self.source.size):
            raise ConnectionError(
                'weights matrix shape is wrong relative to source and target')
        else:
            W = weights.tocoo()
            data, row, col = W.data, W.row, W.col
            # Keep only the non-NaN entries
            i = (1 - np.isnan(data)).nonzero()
            data, row, col = data[i], row[i], col[i]
            data = np.where(data, data, np.nan)
            weights = sparse.coo_matrix((data, (row, col)), shape=W.shape)
            weights.data = np.nan_to_num(data)
    # Else, we need to build it
    elif weights.shape != (self.target.size, self.source.size):
        if len(weights.shape) == len(self.source.shape):
            # If we have a toric connection, weights cannot be greater than
            # source in any dimension
            if self._toric:
                s = np.array(self.source.shape)
                w = np.array(weights.shape)
                weights = extract(weights, np.minimum(s, w), w // 2)
            weights = convolution_matrix(self.source, self.target,
                                         weights, self._toric)
        else:
            raise ConnectionError(
                'weights matrix shape is wrong relative to source and target')

    self._weights = csr_array(weights, dtype=dtype)
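# The setup_weights variants in this file all rely on an extract() helper to
# crop a kernel so it is never larger than the source in any dimension (note
# that this array-cropping extract is unrelated to the request-field extract
# used by the Flask handlers). A minimal sketch is given below, assuming
# extract(Z, shape, position, fill) returns a sub-array of Z with the given
# shape, centered on position, padded with fill where it overruns Z. This is
# an illustration of the assumed semantics, not the library's implementation.
import numpy as np

def extract(Z, shape, position, fill=0):
    """Return a sub-array of Z with `shape`, centered on `position`."""
    R = np.ones(shape, dtype=Z.dtype) * fill
    shape = np.array(shape, dtype=int)
    position = np.array(position, dtype=int)
    # Bounds of the requested window in Z coordinates
    lo = position - shape // 2
    hi = lo + shape
    # Clip the window to Z and work out where the clipped part lands in R
    z_lo = np.maximum(lo, 0)
    z_hi = np.minimum(hi, np.array(Z.shape))
    r_lo = z_lo - lo
    r_hi = r_lo + (z_hi - z_lo)
    src = tuple(slice(a, b) for a, b in zip(z_lo, z_hi))
    dst = tuple(slice(a, b) for a, b in zip(r_lo, r_hi))
    R[dst] = Z[src]
    return R

# Example (hypothetical): crop a 5x5 kernel to fit a 3x3 source.
# K = np.arange(25.).reshape(5, 5)
# extract(K, (3, 3), (2, 2))   # central 3x3 block of K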
def update_product(id):
    product = Product.query.get(id)
    name = extract('name')
    description = extract('description')
    price = extract('price')
    qty = extract('qty')
    product.name = name
    product.description = description
    product.price = price
    product.qty = qty
    db.session.commit()
    return product_schema.jsonify(product)
def setup_weights(self, weights):
    """ Setup weights """

    if type(weights) in [int, float]:
        weights = np.ones((1,) * len(self.source.shape)) * weights

    if weights.shape == (self.target.size, self.source.size):
        self._weights = weights
        self._mask = 1 - np.isnan(self._weights).astype(np.int32)
        if self._mask.all():
            self._mask = 1
        return

    if len(weights.shape) != len(self.source.shape):
        raise ConnectionError(
            'Weights matrix shape is wrong relative to source and target')

    # If we have a toric connection, weights cannot be greater than source
    # in any dimension
    if self._toric:
        s = np.array(self.source.shape)
        w = np.array(weights.shape)
        weights = extract(weights, np.minimum(s, w), w // 2)

    K = convolution_matrix(self.source, self.target, weights, self._toric)
    nz_rows = K.row
    nz_cols = K.col
    self._weights = K.todense()
    self._mask = np.zeros(K.shape)
    self._mask[nz_rows, nz_cols] = 1
    if self._mask.all():
        self._mask = 1
    self._weights = np.array(K.todense())
def main(args=None):
    # Check the options and arguments parsed
    parser = functions.defineParser()
    (options, args) = parser.parse_args(args)
    if len(args) == 0:
        parser.error("You have to put a file name")
    elif len(args) == 1:
        parser.error("You didn't put any name for the database")
    elif len(args) != 2:
        parser.error("Incorrect number of arguments")

    logging.basicConfig(format='%(asctime)s -> %(levelname)s : %(message)s',
                        level=options.level,
                        datefmt='%m/%d/%Y %I:%M:%S %p')
    for arg in args:
        logging.info("Argument passed %s", arg)

    if os.path.exists(args[0]):
        print("Started ...")
        logging.debug('Started')
        tarball = tarfile.open(args[0])
        db = DataBase(args[1])
        # Go through the tarball and extract each inner tar file
        # k -> name of the job
        # v -> dictionary with the data ({'connections': [], 'inputs': [], 'site': ''})
        for k, v in functions.extract(tarball, condition=myCondition_1,
                                      extractor=myExtractor_1):
            print("Add data to DB... => ", k, v)
            try:
                db.add_Data(k, v)
            except KeyError:
                logging.warning("Wrong tarfile")
            logging.debug("************************************************************************************")
        print("Extraction done")
        logging.debug('Done')
        return SUCCESS
    else:
        print('The file does not exist')
        return FAILURE
def myExtractor_1(tarFile, tarInfo):
    """Define how the data is extracted from a tar file."""
    logging.debug("***********************************************************************************")
    logging.debug("Found an archive in tarFile: %s", tarInfo.name)
    exFileObject = tarFile.extractfile(tarInfo)
    logging.debug("Extraction...")
    tarFileObject = tarfile.open(fileobj=exFileObject)
    logging.debug("Opening: %s", tarFileObject.name)
    key = re.split(r"(\d{8}_\d{8})", tarInfo.name)
    result = key[1], dict(functions.extract(tarFileObject,
                                            extractor=myExtractor_2,
                                            condition=myCondition_2))
    logging.debug("******************** End of the search for this tar file ************************")
    return result
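# main() and myExtractor_1 above both rely on a functions.extract(tar, condition,
# extractor) helper that walks a tar archive and yields (key, value) pairs for the
# members that pass the condition. A minimal sketch of such a generator is given
# below, under the assumption that condition(member) filters members and
# extractor(tarFile, member) turns one member into a (key, value) pair; it
# illustrates the assumed contract, not the project's own helper.
def extract_members(tarFile, condition, extractor):
    """Yield extractor(tarFile, member) for every member accepted by condition."""
    for member in tarFile.getmembers():
        if condition(member):
            yield extractor(tarFile, member)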
def main():
    print('\nFractions\n')
    print(functions.fractions("OTTACACTTAT"))  # ⇒ (0.2,0.0,0.5,0.3)
    print(functions.fractions("CC"))           # ⇒ (1.0,0.0,0.0,0.0)
    print(functions.fractions("F"))            # ⇒ (0.0,0.0,0.0,0.0)
    print(functions.fractions("FT"))           # ⇒ (0.0,0.0,1.0,0.0)
    print(functions.fractions("1"))            # ⇒ (0.0,0.0,0.0,0.0)
    print(functions.fractions(1))              # ⇒ "input must be a string character"
    print(functions.fractions('OTTACACTTATTTTTTC'))
    print(functions.fractions(['O', 'T', 'T', 'A', 'C', 'A', 'C', 'T', 'T', 'A',
                               'T', 'T', 'T', 'T', 'T', 'T', 'C']))
    print(functions.fractions(''))

    print('\nF\n')
    S1 = [105, 101, 105, 100, 105]
    S2 = [100, 102, 101, 102, 101]
    print(functions.F(S1, S2))
    print(functions.F_while(S1, S2))
    print(functions.F_list_comp(S1, S2))
    print(functions.F_lambda(S1, S2))
    print(functions.F_error(S1, S2))
    print()
    S3 = [254, 255, 255, 260, 256, 256, 255, 253, 259, 250]
    S4 = [256, 259, 260, 256, 256, 250]
    print(functions.F(S3, S4))
    print(functions.F_while(S3, S4))
    print(functions.F_list_comp(S3, S4))
    print(functions.F_lambda(S3, S4))
    print(functions.F_error(S3, S4))

    print('\nFrequencies\n')
    # ⇒ {"A":3, "B":2, "C":4, "D":1}
    print(functions.frequencies("CCABBADCAC"))
    print(functions.frequencies([4, 7, 4, 7, 4]))  # ⇒ {4:3, 7:2}
    print(functions.frequencies(1))                # ⇒ "input is incorrect"

    print('\nFirsts\n')
    print(functions.firsts("mississippi"))  # ⇒ "misp"
    print(functions.firsts("abcdefg"))      # ⇒ " abcdefg "
    print(functions.firsts("aaaaaaaa"))     # ⇒ "a"
    print(functions.firsts(""))             # ⇒ ""
    print(functions.firsts(123454321))      # ⇒ "12345"
    print(functions.firsts([1, 2, 3, 4, 5, 4, 3, 2, 1]))
    print(functions.firsts((0, 2, 6, 9, 5, 4, 3, 2, 1)))

    print('\nExtract\n')
    print(functions.extract("Yesterday I saw an aardvark while walking my pet tortoise, Frank. What a sight this was! Aardvarks are nocturnal animals appearing in daylight with caution. Make sure to bring kippers when you visit", 1, 5))
def main(args):
    parser = argparse.ArgumentParser(
        description='Stego tool to hide secrets in messages using homographic chars from '
                    'cyrillic dictionaries. Texts with hidden messages are uploaded to '
                    'pastebin directly but downloaded through google translator')
    parser.add_argument('mode',
                        choices=["upload", "download", "hide", "extract"],
                        nargs='?',
                        help='''upload/download use pastebin to store the message and the google
                        translate API to download it, which might bypass some security boundaries
                        at download time. hide/extract modes use local files to store the secret''')
    parser.add_argument("-dk", "--dev-key", dest="dev_key",
                        help="Dev key used when uploading to pastebin")
    # parser.add_argument("-uk", "--user-key", dest="user_key",
    #                     help="User key used when uploading to pastebin")
    parser.add_argument('-m', '--message_file', dest='message_file',
                        help='Path to a text file containing a message to use as medium. '
                             'If not provided, Lorem ipsum will be used')
    parser.add_argument('-s', '--secret', dest='secret',
                        help='The secret text that will be embedded')
    parser.add_argument('-d', '--destination_file', dest='destination_file',
                        help='Destination file where the stego message will be stored')
    parser.add_argument('-o', '--stego_message_file', dest='stego_file',
                        help='Path to a file containing a stego message')
    parser.add_argument('-u', '--pastebin_url', dest='pastebin_url',
                        help='Pastebin url used in download mode to retrieve the secret from')
    parser.add_argument('-k', '--key', dest='key',
                        help='The key used to encrypt and decrypt the secret')
    args = parser.parse_args()

    if args.mode == "upload":
        if args.secret is None or args.dev_key is None:
            print("[-] In upload mode the following arguments are required")
            print("[-] -s <secret>")
            print("[-] -dk <dev_pastebin_key>")
            print("[-] -uk <user_pastebin_key>")
            print("[-] -k <key>")
            sys.exit(1)
        message = check_message_file_and_secret(args.secret, args.message_file)
        max_len = check_max_secret_length(message)
        print("[+] Message maximum possible secret len:", max_len)
        print("[+] Secret length:", len(args.secret))
        message_stego = hide(message, args.secret, args.key)
        if message_stego is None:
            print("[-] The secret cannot be embedded within the message. Exiting...")
            exit(1)
        else:
            url = upload(message_stego, args.dev_key)
            print("[+] The message with the secret has been uploaded to:", url)
            exit(0)
    elif args.mode == "download":
        if args.pastebin_url is None or args.key is None:
            print("[-] Some of the options have not been provided")
            print("[-] Use the -u argument and provide the url returned in the upload stage")
            print("[-] Use: -k <key>")
            exit(1)
        print("[+] Downloading the text from", args.pastebin_url)
        message_stego = download(args.pastebin_url)
        secret = extract(message_stego, args.key)
        print("[+] The secret is shown below:")
        print("----------------")
        print(secret)
        print("----------------")
        exit(0)
    elif args.mode == "hide":
        if args.secret is None or args.destination_file is None or args.key is None:
            print("[-] In hide mode the following arguments are required")
            print("[-] -s <secret>")
            print("[-] -d <path_to_destination_file>")
            print("[-] -k <key>")
            exit(1)
        message = check_message_file_and_secret(args.secret, args.message_file)
        max_len = check_max_secret_length(message)
        print("[+] Message maximum possible secret len:", max_len)
        print("[+] Secret length:", len(args.secret))
        message_stego = hide(message, args.secret, args.key)
        try:
            with open(args.destination_file, 'w', encoding='utf-8') as f:
                f.write(message_stego)
            print("[+] Message correctly written to", args.destination_file)
            exit(0)
        except PermissionError as e:
            print("[-] The file couldn't be written due to", e)
            exit(1)
    elif args.mode == "extract":
        if args.stego_file is None or args.key is None:
            print("[-] A path to a file containing a stego message and the key is needed")
            print("[-] Use: -o <path>")
            print("[-] Use: -k <key>")
            exit(1)
        try:
            with open(args.stego_file, 'r', encoding='utf-8') as f:
                lines = f.readlines()
            stego_text = ''.join(lines)
            secret = extract(stego_text, args.key)
            print("[+] The secret is shown below:")
            print("----------------")
            print(secret)
            print("----------------")
            exit(0)
        except Exception as e:
            print("[-] The file couldn't be read due to", e)
            exit(1)
    else:
        parser.print_help()
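# The tool's hide()/extract()/upload()/download() helpers are not shown in this
# file. As an illustration of the general homoglyph technique the description
# refers to (not the tool's actual implementation, which also encrypts the
# secret with a key), the sketch below hides a bit string by swapping Latin
# letters for visually identical Cyrillic ones and recovers it by checking
# which alphabet each carrier letter belongs to.
HOMOGLYPHS = {'a': 'а', 'e': 'е', 'o': 'о', 'c': 'с', 'p': 'р', 'x': 'х'}
REVERSE = {v: k for k, v in HOMOGLYPHS.items()}

def hide_bits(cover: str, bits: str) -> str:
    """Encode bits in cover: Cyrillic form = 1, Latin form = 0."""
    out, i = [], 0
    for ch in cover:
        if i < len(bits) and ch in HOMOGLYPHS:
            out.append(HOMOGLYPHS[ch] if bits[i] == '1' else ch)
            i += 1
        else:
            out.append(ch)
    if i < len(bits):
        raise ValueError("cover text has too few carrier characters")
    return ''.join(out)

def extract_bits(stego: str, n_bits: int) -> str:
    """Read back the first n_bits encoded by hide_bits."""
    bits = []
    for ch in stego:
        if len(bits) == n_bits:
            break
        if ch in REVERSE:
            bits.append('1')
        elif ch in HOMOGLYPHS:
            bits.append('0')
    return ''.join(bits)

# Example: s = hide_bits("a cat ate a pear", "1010"); extract_bits(s, 4) == "1010"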
def setup_weights(self, weights):
    """ Setup weights """

    # If we have a toric connection, the kernel cannot be greater than source
    # in any dimension
    if self._toric:
        s = np.array(self.source.shape)
        w = np.array(weights.shape)
        weights = extract(weights, np.minimum(s, w), w // 2)

    # 1d convolution case
    # -------------------
    if len(self.source.shape) == len(self.target.shape) == 1:
        if len(weights.shape) != 1:
            raise ConnectionError(
                'Shared connection requested but weights matrix shape does not match.')
        if self.source.shape != self.target.shape:
            rows = np.rint((np.linspace(0, 1, self.target.shape[0])
                            * (self.source.shape[0] - 1))).astype(int)
            self._src_rows = rows
        if self._fft:
            src_shape = np.array(self.source.shape)
            wgt_shape = np.array(weights.shape)
            K = np.nan_to_num(weights)[::-1]
            if self._toric:
                K_ = extract(K, src_shape, wgt_shape // 2)
                self._fft_weights = rfft(ifftshift(K_))
            else:
                size = src_shape + wgt_shape // 2
                shape = best_fft_shape(size)
                self._fft_weights = rfft(K, shape[0])
                i0 = wgt_shape[0] // 2
                i1 = i0 + src_shape[0]
                self._fft_indices = slice(i0, i1)
                self._fft_shape = shape
            # m = self.source.shape[0]
            # p = weights.shape[0]
            # if self._toric:
            #     _weights = extract(weights[::-1], (m,), (np.floor(p/2.0),))
            # else:
            #     self._src_holder = np.zeros(2*m+1)
            #     _weights = extract(weights[::-1], (2*m+1,), (np.floor(p/2.0),))
            # self._fft_weights = fft(ifftshift(np.nan_to_num(_weights)))
        self._mask = np.ones(weights.shape)
        self._mask[np.isnan(weights).nonzero()] = 0
        self._weights = np.nan_to_num(weights)

    # 2d convolution case
    # -------------------
    elif len(self.source.shape) == len(self.target.shape) == 2:
        if len(weights.shape) != 2:
            raise ConnectionError(
                'Shared connection requested but weights matrix shape does not match.')
        if self.source.shape != self.target.shape:
            rows = np.rint((np.linspace(0, 1, self.target.shape[0])
                            * (self.source.shape[0] - 1))).astype(int)
            cols = np.rint((np.linspace(0, 1, self.target.shape[1])
                            * (self.source.shape[1] - 1))).astype(int)
            self._src_rows = rows.reshape((len(rows), 1))
            self._src_cols = cols.reshape((1, len(cols)))
        if self._fft:
            src_shape = np.array(self.source.shape)
            wgt_shape = np.array(weights.shape)
            K = np.nan_to_num(weights)[::-1, ::-1]
            if self._toric:
                K_ = extract(K, src_shape, wgt_shape // 2)
                self._fft_weights = rfft2(ifftshift(K_))
            else:
                size = src_shape + wgt_shape // 2
                shape = best_fft_shape(size)
                self._fft_weights = rfft2(K, shape)
                i0 = wgt_shape[0] // 2
                i1 = i0 + src_shape[0]
                j0 = wgt_shape[1] // 2
                j1 = j0 + src_shape[1]
                self._fft_indices = slice(i0, i1), slice(j0, j1)
                self._fft_shape = shape
        self._mask = np.ones(weights.shape)
        self._mask[np.isnan(weights).nonzero()] = 0
        self._weights = np.nan_to_num(weights)
        dtype = weights.dtype
        self._USV = scipy.linalg.svd(np.nan_to_num(weights))
        U, S, V = self._USV
        self._USV = U.astype(dtype), S.astype(dtype), V.astype(dtype)

    # Higher dimensional case
    # -----------------------
    else:
        raise ConnectionError(
            'Shared connection requested but dimensions are too high (> 2).')
# The def line for this helper is missing from the original snippet; the body
# computes a sample standard deviation, so a wrapper like the following is assumed:
def sample_std(x, mu, N):
    sumsq = 0
    i = 0
    while i < N:
        sumsq = sumsq + (x[i] - mu)**2
        i += 1
    return np.sqrt((1 / (N - 1)) * sumsq)

## Test against H0
lowZdeviation = []
midZdeviation = []
highZdeviation = []

i = 0
while i < len(lowZ):
    lowZdeviation.append((fun.D(fun.HubbleIntegrate(fun.extract(lowZ, 4)[i]))
                          - fun.extract(lowZ, 5)[i])
                         / fun.D(fun.HubbleIntegrate(fun.extract(lowZ, 4)[i])))
    i += 1

i = 0
while i < len(midZ):
    midZdeviation.append((fun.D(fun.HubbleIntegrate(fun.extract(midZ, 4)[i]))
                          - fun.extract(midZ, 5)[i])
                         / fun.D(fun.HubbleIntegrate(fun.extract(midZ, 4)[i])))
    i += 1

## (D_L(z) - D_L) / D_L(z)
i = 0
while i < len(highZ):
    highZdeviation.append((fun.D(fun.HubbleIntegrate(fun.extract(highZ, 4)[i]))
                           - fun.extract(highZ, 5)[i])
                          / fun.D(fun.HubbleIntegrate(fun.extract(highZ, 4)[i])))
    i += 1
import os
from datetime import datetime

import numpy as np
import pandas as pd
import matplotlib.pylab as plt
from functions import WriteListToCSV, extract
from sklearn.model_selection import train_test_split, RandomizedSearchCV
from sklearn.ensemble import RandomForestRegressor
from sklearn.decomposition import PCA

# URL
URL = "https://www.quandl.com/api/v3/datasets/EURONEXT/NOKIA.json?api_key=F9xUFfqyGZdfFeh9stsv"

# DATA COLLECTION
quandlData = extract(URL)
columns = quandlData['dataset']['column_names']
raw_data = quandlData['dataset']['data']

currentPath = os.getcwd()
csv_file = currentPath + "/allData.csv"
WriteListToCSV(csv_file, columns, raw_data)

pd_data = pd.read_csv('allData.csv')
dateparser = lambda date: datetime.strptime(date, '%Y-%m-%d')
ts_data = pd.read_csv('allData.csv',
                      parse_dates=['Date'],
                      index_col='Date',
                      date_parser=dateparser)

# DATA CLEANING
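# The extract() imported from functions above is not shown in this file. From its
# use (extract(URL) returning a dict with a 'dataset' key, matching the Quandl
# JSON API), it presumably fetches the URL and parses the JSON body. A minimal
# sketch of a helper with that assumed contract, using the requests library:
import requests

def fetch_json(url, timeout=10):
    """Download url and return the parsed JSON payload as a dict."""
    response = requests.get(url, timeout=timeout)
    response.raise_for_status()  # fail loudly on HTTP errors
    return response.json()

# Example (hypothetical): fetch_json(URL)['dataset']['column_names']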