def temp_product_storage():
    """Rebuild the `temps` staging table from freshly scraped product listings.

    For every scraper group (eBay, Jumia, Walmart variants) this scrapes
    `page_number` pages, cleans each product's name/price/image fields, and
    inserts one row per product into <database>.temps, keyed by the SHA-256
    hex digest of the product link.

    Depends on module-level globals: cur, db, database, group_name,
    page_number, attributes, get_url, get_grp, dollar_to_mad and the
    *_Scrapping helper modules.

    NOTE(review): the indentation below was reconstructed from a collapsed
    source line -- confirm nesting against the original file.
    """
    id_product = 0
    # Start from an empty staging table.
    cur.execute("DELETE FROM "+database+".temps")
    for s in range(len(group_name)):
        for i in range (page_number):
            # Fetch/parse page i+1 of the current group's listing URL.
            initdata = initialize.init(i+1,get_url(s),attributes[s])
            # A Jumia laptops page with fewer than 47 items marks the last page.
            if(group_name[s] == "JLaptops" and initdata.count < 47):
                break
            # Per-page result grid; re-created on every page iteration.
            data = [[0 for j in range(initdata.count)] for i in range(page_number)]
            for j in range(initdata.count):
                # Dispatch to the scraper matching this group's marketplace.
                if(group_name[s] == "Ephonesfr" or group_name[s] == "ELaptopsfr" or group_name[s] == "Ephones" or group_name[s] == "ELaptops" or group_name[s] == "EMacbooks"):
                    data[i][j] = Ebay_Scrapping.phone_scrap(initdata.section, j)
                if(group_name[s] == "Jphones" or group_name[s] == "JLaptops"):
                    data[i][j] = Jumia_Scrapping.phone_scrap(initdata, j)
                if(group_name[s] == "WMphones" or group_name[s] == "WMLaptops"):
                    data[i][j] = WallMart_Scrapping.phone_scrap(initdata.section, j)
                # Skip entries the scraper could not parse.
                if data[i][j] is None:
                    continue
                name = data[i][j].name
                prices = data[i][j].price
                img_link = data[i][j].img
                link = data[i][j].link
                # Strip spaces and currency markers from the raw price string.
                prices = prices.replace(' ', '')
                prices = prices.replace('Dhs', '')
                prices = prices.replace('$', '')
                # NOTE(review): presumably French eBay listings use ',' as the
                # decimal separator, so everything after it is dropped -- confirm.
                if(group_name[s] == "Ephonesfr" or group_name[s] == "ELaptopsfr"):
                    prices = re.sub("\,.*", '' , prices)
                # Remove thousands separators and any trailing text / decimals.
                prices = prices.replace(',', '')
                prices = re.sub(' .*', '' , prices)
                prices = re.sub("\..*", '' , prices)
                # Character-level cleanup: drop quotes/dashes that would break
                # the values (and NBSP, ord 160, inside the price string).
                name = list(name)
                prices = list(prices)
                img_link = list(img_link)
                for k in range(len(name)):
                    if name[k] == "'" or name[k] == '"' or name[k] == '-' :
                        name[k] = ""
                name = "".join(name)
                for k in range(len(prices)):
                    if prices[k] == "'" or prices[k] == '"' or prices[k] == '-' or ord(prices[k]) == 160:
                        prices[k] = ""
                prices = "".join(prices)
                for k in range(len(img_link)):
                    if img_link[k] == "'" or img_link[k] == '"':
                        img_link[k] = " "
                img_link = "".join(img_link)
                data[i][j].name = name
                prices = int(prices)
                # NOTE(review): Jumia prices appear to be divided by a
                # dollar-to-MAD rate (currency conversion) -- confirm units.
                if(group_name[s] == "Jphones" or group_name[s] == "JLaptops"):
                    prices = prices / dollar_to_mad
                    prices = int(prices)
                data[i][j].price = prices
                data[i][j].img = img_link
                # Product id = SHA-256 of the product URL (stable across runs).
                id_product = hashlib.sha256(data[i][j].link.encode('utf-8')).hexdigest()
                sql_statement = "INSERT INTO "+database+".temps VALUES (%s, %s, %s, %s, %s, %s)"
                # Parameterized insert: values are escaped by the driver.
                values = (id_product, get_grp(s),data[i][j].name, data[i][j].img, data[i][j].price, data[i][j].link)
                cur.execute(sql_statement, values)
    db.commit()
def main():
    """CLI entry point: dispatch to init / train / eval based on parsed flags.

    `--do_init` is exclusive (returns immediately afterwards); `--do_train`
    and `--do_eval` may both run in the same invocation.
    """
    args = get_args()
    norm = os.path.normpath
    # Prefer GPU unless unavailable or explicitly disabled via --no_cuda.
    use_cuda = torch.cuda.is_available() and not args.no_cuda
    device = torch.device("cuda" if use_cuda else "cpu")

    if args.do_init:
        init(norm(args.train_file), norm(args.dev_file), norm(args.test_file),
             norm(args.word_dict_path), norm(args.tag_dict_path))
        return

    if args.do_train:
        do_train(norm(args.train_file), norm(args.output_dir),
                 norm(args.word_dict_path), norm(args.tag_dict_path),
                 args.max_seq_len, args.embed_dim, args.hidden_dim,
                 args.lr, args.batch_size, args.epochs, args.print_step,
                 device)

    if args.do_eval:
        do_eval(norm(args.test_file), norm(args.word_dict_path),
                norm(args.tag_dict_path), args.max_seq_len, args.embed_dim,
                args.hidden_dim, norm(args.output_dir),
                norm(args.eval_log_dir), device)
def start():
    """Block until a Cassandra connection succeeds, launching it once if needed.

    Retries cassandraHelper.makeConnection() in a loop; on the first failure
    it spawns casstart.sh in a background terminal, then keeps polling every
    5 seconds.  (Python 2 source.)
    """
    started = False
    initialize.init()
    # x is a "not yet connected" flag: stay in the loop while it is nonzero.
    x=1
    while x>0:
        try:
            bCluster, bSpace = cassandraHelper.makeConnection()
            x=0
        except Exception as er:
            print er
            # Launch Cassandra only on the first failure, then just poll.
            if started == False:
                print "Starting cassandra",x
                os.system("nohup x-terminal-emulator -e ~/workspace/bemoss_os/bemoss_lib/databases/cassandraAPI/casstart.sh &")
                # Give the server time to boot before retrying.
                time.sleep(15)
                started = True
            print 'Waiting for cassandra ...'
            # Defensive re-set of the retry flag before the next attempt.
            x=1
            time.sleep(5)
    print "Cassandra connected"
def run_optimizer(method, gp, opts, Y, X_r, Icv, cv_idx, X_o=None): if 'min_iter' in opts: min_iter = opts['min_iter'] else: min_iter = 10 if 'max_iter' in opts: max_iter = opts['max_iter'] else: max_iter = 100 # initialize LG.info('Optimize %s' % method) converged = False lmltest_global = SP.inf hyperparams_global = None Ypred_global = None r2_global = -SP.inf # hold nfolds of the data out Itrain = Icv != cv_idx Itest = Icv == cv_idx i = 1 while True: LG.info('Iteration: %d' % i) converged = False # stop, if maximum number of iterations is reached if i > max_iter: break # set data if X_o == None: gp.setData(Y=Y[Itrain], X_r=X_r[Itrain]) else: gp.setData(Y=Y[Itrain], X_r=X_r[Itrain], X_o=X_o[Itrain]) hyperparams, Ifilter, bounds = initialize.init(method, Y[Itrain].T, X_r[Itrain], opts) try: [hyperparams_opt, lmltrain] = opt.opt_hyper(gp, hyperparams, opts=opts, Ifilter=Ifilter, bounds=bounds) # gradient need not to be 0, because we have bounds on the hyperparameters... gradient = SP.array([ LA.norm(x) for x in gp.LMLgrad(hyperparams_opt).values() ]).mean() LG.info('LMLtrain: %.3f' % gp.LML(hyperparams_opt)) LG.info('Gradient: %.3f' % (gradient)) converged = True except AssertionError, error: print 'Assertion Error: %s' % error continue except:
def start():
    """Block until a Cassandra connection succeeds, launching it once if needed.

    Retries cassandraHelper.makeConnection() in a loop; on the first failure
    it spawns casstart.sh in a background terminal, then keeps polling every
    5 seconds.  (Python 2 source.)
    """
    started = False
    initialize.init()
    # x is a "not yet connected" flag: stay in the loop while it is nonzero.
    x = 1
    while x > 0:
        try:
            bCluster, bSpace = cassandraHelper.makeConnection()
            x = 0
        except Exception as er:
            print er
            # Launch Cassandra only on the first failure, then just poll.
            if started == False:
                print "Starting cassandra", x
                os.system(
                    "nohup x-terminal-emulator -e ~/workspace/bemoss_os/bemoss_lib/databases/cassandraAPI/casstart.sh &"
                )
                # Give the server time to boot before retrying.
                time.sleep(15)
                started = True
            print 'Waiting for cassandra ...'
            # Defensive re-set of the retry flag before the next attempt.
            x = 1
            time.sleep(5)
    print "Cassandra connected"
def run_optimizer(method,gp,opts,Y,X_r,Icv,cv_idx,X_o=None): if 'min_iter' in opts: min_iter = opts['min_iter'] else: min_iter = 10 if 'max_iter' in opts: max_iter = opts['max_iter'] else: max_iter = 100 # initialize LG.info('Optimize %s'%method) converged = False lmltest_global = SP.inf hyperparams_global = None Ypred_global = None r2_global = -SP.inf # hold nfolds of the data out Itrain = Icv!=cv_idx Itest = Icv==cv_idx i=1 while True: LG.info('Iteration: %d'%i) converged = False # stop, if maximum number of iterations is reached if i>max_iter: break # set data if X_o==None: gp.setData(Y=Y[Itrain],X_r=X_r[Itrain]) else: gp.setData(Y=Y[Itrain],X_r=X_r[Itrain],X_o=X_o[Itrain]) hyperparams,Ifilter,bounds = initialize.init(method,Y[Itrain].T,X_r[Itrain],opts) try: [hyperparams_opt,lmltrain] = opt.opt_hyper(gp,hyperparams,opts=opts,Ifilter=Ifilter,bounds=bounds) # gradient need not to be 0, because we have bounds on the hyperparameters... gradient = SP.array([LA.norm(x) for x in gp.LMLgrad(hyperparams_opt).values()]).mean() LG.info('LMLtrain: %.3f'%gp.LML(hyperparams_opt)) LG.info('Gradient: %.3f'%(gradient)) converged = True except AssertionError, error: print 'Assertion Error: %s'%error continue except:
def measure_runtime(env,N,D,n_reps=10,time_out=10000): opts = {'messages':False} out_dir = os.path.join(env['out_dir'],'simulations_runtime') if not os.path.exists(out_dir): os.makedirs(out_dir) t_fast = SP.zeros(n_reps) t_slow = SP.zeros(n_reps) lml_fast = SP.zeros(n_reps) lml_slow = SP.zeros(n_reps) for i in range(n_reps): # load data var_signal = 0.5 data,RV = load_simulations(env,var_signal,N,D,i) # initialize covar_c = lowrank.LowRankCF(n_dimensions=RV['n_c']) covar_r = linear.LinearCF(n_dimensions=RV['n_r']) covar_s = lowrank.LowRankCF(n_dimensions=RV['n_sigma']) covar_o = fixed.FixedCF(n_dimensions=RV['n_r']) X = data.getX(standardized=False) Y = data.getY(standardized=False).T hyperparams,Ifilter,bounds = initialize.init('GPkronsum_LIN',Y.T,X,RV) covar_r.X = X covar_o.X = X covar_o._K = SP.eye(RV['N']) covar_s.X = hyperparams['X_s'] covar_c.X = hyperparams['X_c'] kgp_fast = gp_kronsum.KronSumGP(covar_r=covar_r,covar_c=covar_c,covar_s=covar_s,covar_o=covar_o) kgp_fast.setData(Y=Y) # measure time signal.signal(signal.SIGALRM,handler) signal.alarm(time_out) try: t_start = time.clock() hyperparams_opt,lmltrain = opt.opt_hyper(kgp_fast,hyperparams,Ifilter=Ifilter,bounds=bounds,opts=opts) t_stop = time.clock() signal.alarm(0) t_fast[i] = t_stop - t_start lml_fast[i] = lmltrain except Exception, e: print e t_slow += time_out break
def run():
    """Initialize the "recipes" target, then scrape it."""
    target = "recipes"
    init(target)
    scrape(target)
def setup_function(self):
    """Run init() against the current application's name."""
    app_name = app.name()
    init(app_name)
from fa.database.models import IncomeStatement
import initialize

"""
Enrich financial data with extra data columns calculated from existing columns
"""

# Prepare the database connection / environment before running the update.
initialize.init()

# Back-fill operating_income only where it is still missing.
# peewee: `field >> None` is the IS NULL test, so existing values are kept.
IncomeStatement \
    .update(operating_income=(
        IncomeStatement.gross_income
        - IncomeStatement.sg_a_expense
        - IncomeStatement.other_operating_expense
    )) \
    .where(IncomeStatement.operating_income >> None) \
    .execute()
from mysql.connector import connect import actions import initialize import management import medicine_info import sale import stock import traceback conn = connect(user='******', passwd='1234') cur = conn.cursor() medicine_info.conn = sale.conn = stock.conn = management.conn = initialize.conn = actions.conn = conn medicine_info.cur = sale.cur = stock.cur = management.cur = initialize.cur = actions.cur = cur initialize.init() cur.execute("use MedicalStore") msg = """ ========== MEDICAL STORE ========== 0: quit 1: Sales 2: Stock 3: Medicine Information 4: Management Use ctrl+C in the program to go one step back """ while True:
f['t_fast'] = t_fast f['t_slow'] = t_slow f['lml_fast'] = lml_fast f['lml_slow'] = lml_slow f.close() for i in range(n_reps): # initialize data,RV = load_simulations(env,var_signal,N,D,i) covar_c = lowrank.LowRankCF(n_dimensions=RV['n_c']) covar_r = linear.LinearCF(n_dimensions=RV['n_r']) covar_s = lowrank.LowRankCF(n_dimensions=RV['n_sigma']) covar_o = fixed.FixedCF(n_dimensions=RV['n_r']) X = data.getX(standardized=False) Y = data.getY(standardized=False).T hyperparams,Ifilter,bounds = initialize.init('GPkronsum_LIN',Y.T,X,RV) covar_r.X = X covar_o.X = X covar_o._K = SP.eye(RV['N']) covar_s.X = hyperparams['X_s'] covar_c.X = hyperparams['X_c'] kgp_slow = gp_kronsum_naive.KronSumGP(covar_r=covar_r,covar_c=covar_c,covar_s=covar_s,covar_o=covar_o) kgp_slow.setData(Y=Y) # measure time signal.signal(signal.SIGALRM,handler) signal.alarm(time_out) try: t_start = time.clock() hyperparams_opt,lmltrain = opt.opt_hyper(kgp_slow,hyperparams,Ifilter=Ifilter,bounds=bounds,opts=opts) t_stop = time.clock()
def index():
    """Render the landing page with weather data for the initial URL."""
    initial = url.initial_url()
    print(initial)
    weather_data = initialize.init(initial)
    return render_template("index.html", params=params, weather=weather_data)
# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ================================================================================================== import logging from initialize import init try: from twitter.common import app app.on_initialization(lambda: init(app.name()), description="Logging subsystem.") except ImportError: # Do not require twitter.common.app pass debug = logging.debug info = logging.info warning = logging.warning error = logging.error fatal = logging.fatal __all__ = [ 'debug', 'info', 'warning',
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ================================================================================================== import logging from initialize import init try: from twitter.common import app app.on_initialization( lambda: init(app.name()), description="Logging subsystem.") except ImportError: # Do not require twitter.common.app pass debug = logging.debug info = logging.info warning = logging.warning error = logging.error fatal = logging.fatal __all__ = [ 'debug', 'info', 'warning',
num_trainset = X.shape[0] alpha = 0.0001 # UPDATE THESE num_class = 10 num_layers = 3 hidden = [784, 30, 10] # No. of nodes in each layer # UPDATE THESE num_epochs = 10000 print num_trainset num_batches = num_trainset / 10 out, net_in, net_in_bias, theta, error, dtheta, loss = initialize.init( X, num_layers, hidden, num_epochs) #print theta[1] # h = 0.02 # x_min, x_max = X[:, 0].min()-1, X[:, 0].max()+1 # y_min, y_max = X[:, 1].min()-1, X[:, 1].max()+1 # xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h)) # Z = np.c_[xx.ravel(), yy.ravel()] for epoch in xrange(num_epochs): print epoch, for i in xrange(0, num_batches): if i == num_batches - 2:
fl = open('initFile.txt', 'w') fl.write('title: My Project\ndescription: My Project discription\n exclude: Html arxiv CMakeFiles') fl.close() sys.exit() #----------------------------------------------------------- arg = '' args = sys.argv if(len(args) == 2): arg = args[1] elif(len(args) == 1): arg = '.' else: print 'wrong argument: exiting' sys.exit() style.style() dirList, title, description = initialize.init(arg) writeTOC(dirList, arg) for dirr in dirList: writeHeader(dirr) writeSource(dirr) writeFiles(dirr) writeIndex(dirList) writeMain(title, description) writeBlank() # table of content for each alphabet directory al = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z'] for a in al: