Example #1
0
def resample_data(symbol, freqs, resample_method='default',
                  new_col_names=('Ticker', 'Open', 'High', 'Low', 'Close', 'Volume', 'TotalTrades')):
    """Resample raw AlgoSeek trade data for one symbol into per-frequency bar tables.

    For each frequency in ``freqs``, fetches trades newer than the latest date
    already stored in the ``<symbol>_<freq>`` table, resamples them into OHLCV
    bars and replaces that table.

    :param symbol: ticker symbol to update.
    :param freqs: iterable of pandas resample rules (e.g. ``['1T', '5T']``).
    :param resample_method: ``'default'`` for the built-in aggregation, or a
        column->aggregation dict passed straight to ``.apply``.
    :param new_col_names: column names assigned to the resampled frame
        (tuple default avoids the shared-mutable-default pitfall).
    """
    if resample_method == 'default':
        resample_rule = {'Ticker': lambda x: x.head(1),
                         'FirstTradePrice': lambda x: x.head(1),
                         'HighTradePrice': np.max,
                         'LowTradePrice': np.min,
                         'LastTradePrice': lambda x: x.tail(1),
                         'Volume': np.sum,
                         'TotalTrades': np.sum}
    else:
        # BUGFIX: the original left resample_rule undefined (NameError) for any
        # non-default method; treat the argument itself as the aggregation rule.
        resample_rule = resample_method

    # NOTE(review): values are persisted as strings — presumably to avoid float
    # round-tripping in the DB; confirm before changing.
    astype_dict = {'FirstTradePrice': 'str',
                   'HighTradePrice': 'str',
                   'LowTradePrice': 'str',
                   'LastTradePrice': 'str',
                   'Volume': 'str',
                   'TotalTrades': 'str'}

    start_time = time.time()
    print('Updating ' + symbol + '...')
    for freq in freqs:
        print(symbol + ' ' + freq)

        # Latest date already present in the per-frequency table.
        sm_f = StockMethodFreq(symbol, freq)
        latest_date_freq = Utility().as_str(sm_f.get_latest_date())
        print('latest_date_freq: ' + latest_date_freq)

        # Latest date available in the raw algoseek table.
        sm_as = StockMethodAS(symbol)
        latest_date_as = Utility().as_str(sm_as.get_latest_date())
        print('latest_date_as: ' + latest_date_as)

        if not latest_date_freq or latest_date_freq != latest_date_as:
            sm = StockMethodAS(symbol)
            if latest_date_freq:  # table exists: fetch only the newer rows
                stock_data = sm.query_by_symbol(start_date=latest_date_freq)
            else:
                stock_data = sm.query_by_symbol()

            stock_data = Utility().DateTimeAsIndex(stock_data)
            res = stock_data.resample(freq).apply(resample_rule).dropna()
            res = res.astype(astype_dict)
            res.columns = new_col_names

            db_con = None  # BUGFIX: guard so finally cannot hit an unbound name
            try:
                db_con = DBConnection().db_sqlalchemy()
                res.to_sql(name=(symbol + '_' + freq).lower(), con=db_con,
                           if_exists='replace', index=True, index_label='DateTime')
            except Exception as e:
                print(e)
            finally:
                if db_con is not None:
                    db_con.close()
        else:
            print(symbol + ' ' + freq + ' is skipped.')

    end_time = time.time()
    print('End of updating ' + symbol + '...')
    print(end_time - start_time)
Example #2
0
def generate_token():
    """Issue an access token for a named user joining a chat room.

    Expects a JSON body containing ``identifier`` and ``roomName``; responds
    400 if either is missing.
    """
    # BUGFIX: the original only validated 'identifier'; a missing 'roomName'
    # raised KeyError (HTTP 500) instead of a clean 400.
    if not request.json or 'identifier' not in request.json or 'roomName' not in request.json:
        abort(400)
    name = request.json['identifier']
    room = request.json['roomName']
    uti = Utility(name, room)
    return jsonify({'accessToken': uti.generate_token().decode('utf-8')})
def insert_table(row):
    """
        This function will create a row against a conversion, footfall, queue_wait_time.

        Inserts ``row`` as one record into the BIG_DATA table via the
        module-level ``engine``; returns True on success, False when the
        insert fails (failures are logged, not raised).
    """
    utility = Utility(log_file="big_data_process_log_file.txt", debug=1)

    # Wrap the row in a one-row DataFrame matching the module-level schema.
    df_ = pd.DataFrame(columns=columns)
    df_.loc[0] = row
    try:
        # NOTE: the deprecated/removed ``flavor`` kwarg was dropped — passing it
        # raises TypeError on pandas >= 0.23.
        df_.to_sql("BIG_DATA",
                   con=engine,
                   schema=None,
                   if_exists='append',
                   index=False,
                   index_label=None,
                   chunksize=None,
                   dtype=None)
        utility.loginfofile("row inserted in big_data")
        return True  # BUGFIX: success previously fell through returning None (falsy)
    except Exception as e:  # BUGFIX: Python-2 ``except Exception, e`` syntax
        utility.loginfofile("issue occurred while updating big data table " +
                            str(e))
        return False
Example #4
0
def generate_report(unprocessed_data_list):
    """Count, per shop location, footfall matches (similarity >= 85) for each
    unprocessed passenger image, then persist via insert_data().

    :param unprocessed_data_list: rows whose first element is an image_key.
    """
    utility = Utility(log_file="big_data_process_log_file.txt", debug=1, db='Analytics')

    for data in unprocessed_data_list:
        image_key = data[0]

        sql = "SELECT location, similarity, count(*) FROM BIG_DATA WHERE image_key = '{}' and device_type = '{}' GROUP BY location;".format(image_key,'footfall')
        matched_location_list, matched_location_list_count = utility.query_database(sql)

        if not matched_location_list_count:
            print("passenger not found at any of the shops")
            continue

        # NOTE(review): the original iterated the second return value of
        # query_database; preserved as-is — confirm it yields result rows.
        for matched_location in matched_location_list_count:
            # BUGFIX: the SELECT returns (location, similarity, count) in that
            # order; the original unpacked the first two fields swapped.
            location, similarity = matched_location[0], matched_location[1]

            if int(similarity) < 85:
                continue

            # Strip spaces so the location is a single token key.
            location = "".join(location.split(" "))

            try:
                location_list_footfall[location] += 1
            except KeyError:  # BUGFIX: was a bare except hiding real errors
                print("appending new location in list")
                location_list_footfall[location] = 1

        print(location_list_footfall)
    insert_data()
Example #5
0
def get_psngr_flight_info(airline, airport):
    """Resolve a flight number for an airline/airport pair.

    :param airline: airline description as stored in Airlines_Info.
    :param airport: destination city description as stored in Airports_Info.
    :return: passenger flight number, or None when the airport, airline or
        route is not found (a diagnostic is printed in each case).
    """
    # SECURITY: these queries are built with str.format — parameterized queries
    # would be safer if airline/airport can come from untrusted input.
    sql_get_airport_code = "SELECT Airport_3LC FROM Airports_Info where City_Desc = '{}'".format(
        airport)
    utility = Utility(log_file="data_gen_process_log_file.txt", debug=1)
    airport_code, airport_count = utility.query_database(sql_get_airport_code)
    if airport_count == 0:
        print("'{}': airport not served".format(airport))
        return None
    sql_get_airline_code = "SELECT Airline2LC FROM Airlines_Info where Description = '{}'".format(
        airline)
    airline_code, airline_count = utility.query_database(sql_get_airline_code)
    if airline_count == 0:
        # BUGFIX: message literal was garbled ("'{};:" instead of "'{}':").
        print("'{}': airline service not available".format(airline))
        return None

    sql_get_flight_code = "SELECT FLNO3 FROM Flights_Info where Destination='{}' and FLNO3 LIKE '{}%'".format(
        airport_code[0][0], airline_code[0][0])
    psngr_flight, flight_count = utility.query_database(sql_get_flight_code)
    if flight_count == 0:
        print("{} not served to {}".format(airline, airport))
        return None
    return psngr_flight[0][0]
Example #6
0
    def check(indice, attrnames, attrtypes, attrvalues, conjunctions, operations):
        """Validate a stock-screen query definition.

        Checks conjunctions, index code, attribute types/names/values and the
        per-type operations, accumulating one error dict per failed check.
        Returns the (possibly empty) list of error dicts.
        """
        utility = Utility()
        error = []

        # Conjunction check is independent of everything below.
        if not set(conjunctions).issubset({'AND', 'OR'}):
            error.append({'error': 'Conjuntion must be AND/OR'})

        # Remaining checks cascade: each is attempted only if the previous
        # one passed, mirroring the original nested structure.
        if utility.CheckStockCode(indice) is False:
            error.append({'error': 'Wrong index code'})
        elif not set(attrtypes).issubset({'string', 'date', 'number'}):
            error.append({'error': 'Type not in string/date/number'})
        elif not utility.CheckAttrNamesOfStock(indice, attrnames):
            error.append({'error': 'Attribute name doesnot exist'})
        elif not utility.CheckAttrTypesOfStock(indice, attrnames, attrtypes, attrvalues):
            error.append({'error': 'Attribute type mismatch or value is null'})
        elif not (set(operations).issubset({'equals', 'greater', 'lesser'})
                  and all(utility.CheckOperation(attrtypes[i], operations[i])
                          for i in range(len(attrtypes)))):
            error.append({'error': 'Operation not in equals/greater/lesser.Note :String can have equals operation only'})

        return error
Example #7
0
 def setUpClass(self):
     """Suite-level fixture: open the suite log, load config helpers, yield
     control to the tests, then log total duration and close the log.

     NOTE(review): this is written as a generator (it contains ``yield``) —
     presumably driven by a pytest fixture wrapper; a plain unittest
     ``setUpClass`` would never resume past the yield. Confirm against the
     test runner.
     """
     self.utility = Utility()
     # CHANGE THE LOG FILE NAME IN THE NEXT LINE******************************************************************************************
     self.log = open(self.utility.logpath + "/WV-00.txt", "a+")
     self.suite_start_time = time.time()
     self.log.write("Suite started at {}\n".format(
         str(time.ctime(int(self.suite_start_time)))))
     self.url = URL()
     self.loginPageStaticTexts = LoginPageStaticText()
     self.loginPageTestData = LoginPageTestData()
     # Per-testcase configuration properties.
     self.configTestCase = configparser.RawConfigParser()
     # CHANGE THE CONFIG PROPERTY FILE NAME IN THE NEXT LINE******************************************************************************************
     self.configTestCase.read(
         os.path.dirname(os.getcwd()) +
         '/TestCases/WV_00_Config.properties')
     # ECG-related test data.
     self.configECG = configparser.RawConfigParser()
     self.configECG.read(
         os.path.dirname(os.getcwd()) +
         '/Scripts/ECGRelatedData.properties')
     # Device-related test data.
     self.configDevice = configparser.RawConfigParser()
     self.configDevice.read(
         os.path.dirname(os.getcwd()) +
         '/Scripts/DeviceRelatedData.properties')
     self.sendECG = SendECG()
     # Hand control to the test suite; teardown resumes below the yield.
     yield
     self.suite_end_time = time.time()
     self.total_time_taken_suite = self.suite_end_time - self.suite_start_time
     self.log.write("Suite ended at {}\n".format(
         str(time.ctime(int(self.suite_end_time)))))
     self.log.write(
         "Total time taken by Test Suite to finish: {} seconds\n".format(
             self.total_time_taken_suite))
     self.log.close()
Example #8
0
def process_queue_wait_time(log_id, date, time, image_key, face_id, image_type, gender, age, emotion, psngr_pnr, psngr_flight, flight_name, destination, time_grp, age_grp, processed_images):
    """For every 'queue_wait' match of this passenger image, compute the queue
    wait time (individual capture minus group capture) and insert a BIG_DATA
    row; finally mark the source image processed in AP_ImageData.

    :param processed_images: matched rows; index 1=group key, 2=similarity,
        3=device id, 5=device type, 6=individual logdate, 7=group logdate.
    :return: None.
    """
    utility = Utility(log_file="big_data_process_log_file.txt", debug=1)

    utility.loginfofile("Initializing thread- " + threading.currentThread().getName() + ", type-queue_wait")

    for processed_image in processed_images:
        if processed_image[5] != "queue_wait":
            print("checking queue: {} found !!!!!!!!!".format(processed_image[5]))
            continue
        # Individual logdate must be greater than the group logdate.
        print("checking queue: {} found !!!!!!!!!".format(processed_image[5]))
        utility.loginfofile("passenger found at queue")
        device_type, ind_logdate, grp_logdate, similarity = processed_image[5], processed_image[6], processed_image[7], processed_image[2]

        if (ind_logdate < grp_logdate):
            print("Alert!!!!!!!!!!...............passenger found at queue after sha (sha:{}, queue:{})".format(str(ind_logdate), str(grp_logdate)))
            continue

        matched_grp_key, matched_devid, valid_status = processed_image[1], processed_image[3], "NA"
        queue_wait_time = str((ind_logdate - grp_logdate).total_seconds())
        # BUGFIX: replaced a bare try/except (which hid real errors) with a
        # defaulting dict lookup — unknown devices map to an empty location.
        location = locations.get(matched_devid, "")
        # Assemble and insert the BIG_DATA row for this queue sighting.
        row = [str(date), str(time), image_key, face_id, image_type, gender, str(age), matched_grp_key,
                           str(similarity)[0:9], location, device_type, emotion, psngr_pnr, psngr_flight, flight_name, destination,
                           time_grp, str(age_grp), queue_wait_time, valid_status]

        insert_table(row)
    # Mark as processed (status 5) in AP_ImageData.
    check_analytic_true(log_id)
    return None
def calc_happiness(H):
    """Function to calculate happiness.

    Loads the gifts catalogue from ./gifts_list.csv, sorts it by cost, lets
    each couple's boy pick gifts according to his personality type, then
    celebrates one randomly chosen couple.

    :param H: iterable of couples; each has a ``boy`` with a ``type_`` field.
    """
    with open('./gifts_list.csv', 'r') as csvfile:
        ffp = csv.reader(csvfile, delimiter=',')
        gifts = []
        # CSV columns: name, price, value, category.
        for row in ffp:
            if (row[3] == 'Luxury'):
                gifts += [Luxury(row[0], int(row[1]), int(row[2]), row[3])]
            elif (row[3] == 'Essential'):
                gifts += [Essential(row[0], int(row[1]), int(row[2]), row[3])]
            elif (row[3] == 'Utility'):
                gifts += [Utility(row[0], int(row[1]), int(row[2]), row[3])]
        # BUGFIX: removed redundant csvfile.close() — the with-block closes it.

    gifts = sorted(gifts, key=lambda item: item.cost)
    logging.warning('\n\nDetails of Gifts:\n')
    for i in H:
        if (i.boy.type_ == 'Miser'):
            miser(gifts, i)

        if (i.boy.type_ == 'Generous'):
            generous(gifts, i)

        if (i.boy.type_ == 'Geek'):
            geek(gifts, i)

    # Pick one couple (1-indexed) at random to celebrate.
    k = random.randint(1, len(H))
    happy_couple(H, k)
def pax_flow_analyzer(img_date, img_time, image_key, matched_image_list):
    """Reconstruct a passenger's movement between locations and persist each
    continuous stay as a PaxFlow row.

    The passenger always gets an initial "SHA" flow record; subsequent matched
    sightings extend the current flow's exit time while the location is
    unchanged, and flush + restart the flow when the location changes.

    NOTE(review): the first loop iteration deliberately triggers a NameError
    (``pax_flow`` is not yet bound) which the except-branch uses to create the
    initial PaxFlow object — fragile control flow; any other NameError inside
    the try-body would be silently treated the same way.

    :param matched_image_list: matched sightings; index 2=similarity,
        3=location, 5=location type, 7=datetime of capture.
    """
    utility = Utility(log_file="big_data_process_log_file.txt", debug=1)
    utility.loginfo("Initializing ......... PaxFlowAnalyzer")
    pax_location, pax_location_type, pax_key, pax_date, pax_sha_enter_time, pax_sha_exit_time = "SHA", "SHA", \
                                                                                                image_key, img_date, img_time, img_time

    # passenger must be at sha
    pax_sha = PaxFlow(pax_key, pax_date, str(pax_sha_enter_time),
                      str(pax_sha_exit_time), pax_location, pax_location_type)
    # PaxFlow("F1XXCdf0000", "2018-01-23", "13:00:00", "21:00:00", "sha1", "sha")
    pax_sha.add_pax_flow(utility)
    del pax_sha

    if len(matched_image_list) == 0:
        return None

    for pax in matched_image_list:
        pax_similarity, pax_location, pax_location_type, pax_location_time = pax[
            2], pax[3], pax[5], pax[7].time()
        # NOTE(review): format string has 3 placeholders but 4 args — the last
        # (capture time) is silently dropped from the log line.
        utility.loginfo("passenger({}) found at {}, type: {}".format(
            pax_key, pax_location, pax_location_type, str(pax_location_time)))

        # if pax_location_time == "footfall":
        try:
            print(
                "passenger({}) found at {}, type: {}, capture time: {}".format(
                    pax_key, pax_location, pax_location_type,
                    str(pax_location_time)))
            # passenger location changes , break passenger flow.
            if pax_location != pax_flow.get("location"):
                # add passenger flow in the database
                pax_flow.add_pax_flow(utility)
                # update passenger flow for new location
                print(
                    "updating passenger flow for ({}) new location({})........., type: {}"
                    .format(pax_key, pax_location, pax_location_type))
                pax_flow.set("location", pax_location)
                pax_flow.set("location_type", pax_location_type)
                pax_flow.set("enter_time", str(pax_location_time))
                pax_flow.set("exit_time", str(pax_location_time))
                continue

            # Same location: extend the stay if this sighting is later.
            pax_exit_time = datetime.strptime(pax_flow.get("exit_time"),
                                              '%H:%M:%S').time()

            if pax_location_time > pax_exit_time:
                pax_flow.set("exit_time", str(pax_location_time))

        except NameError:
            # First sighting: create the flow object (see NOTE above).
            utility.loginfo(
                "creating object for passenger({}) found at {}, type:{}".
                format(pax_key, pax_location, pax_location_type))
            pax_flow = PaxFlow(pax_key, str(pax_date), str(pax_location_time),
                               str(pax_location_time), pax_location,
                               pax_location_type)
    # add current passenger flow in the db
    pax_flow.add_pax_flow(utility)
    del pax_flow
    return None
Example #11
0
 def __init__(self, gridRes):
     """Initialize beam/grating bookkeeping for a hologram grid.

     :param gridRes: grid resolution used as the shape of the complex working
         hologram array (presumably a (rows, cols) tuple — confirm).
     """
     self.gridRes = gridRes
     self.beamList = list()       # beams to be combined into the hologram
     self.gratingList = list()    # gratings to apply
     self.result = list()
     self.UTIL = Utility()
     # Complex-valued working buffer for the hologram under construction.
     self.tempHologram = np.zeros((self.gridRes), dtype = complex)
     self.numCols = 1
     self.multiCols = False
Example #12
0
def main():
    """CLI entry point: classify [TEST FILE] using the Naive Bayes model in
    [MODEL FILE]; prints usage to stderr when arguments are missing."""
    if len(sys.argv) != 3:
        print('python3 ' + sys.argv[0] + ' [MODEL FILE] [TEST FILE]', file=sys.stderr)
        return
    model_path, data_path = sys.argv[1], sys.argv[2]
    helper = Utility('', model_path, data_path)
    classifier = NBClassifier(False, True, False, helper)
    classifier.classify()
def Bundle_Adjustment_motion_refine_optimization(params):
    """Run motion-refinement bundle adjustment.

    Loads low-resolution (LR) and high-resolution (HR) SMPL pose sequences
    from pickle files, periodically decomposes the motion, and feeds the
    result into refine_optimization together with camera and texture data.

    :param params: parameter object consumed by Utility.read_utility_parameters.
    """
    Util = Utility()
    Util.read_utility_parameters(params)
    hmr_dict, data_dict = Util.load_hmr_data()
    LR_cams = Util.load_camera_pkl()
    LR_cams = LR_cams[0]
    texture_img = cv2.imread(Util.texture_path +
                             "../../output_nonrigid/texture.png")
    texture_vt = np.load(Util.texture_path + "vt.npy")

    def _frame_index(filename):
        # Filenames look like '<a>_<b>_<c>_<idx>.pkl'; sort numerically by idx.
        return int(filename.split('_')[3].split('.')[0])

    LR_path = Util.hmr_path + Util.refine_reference_path
    LR_pkl_files = sorted(
        [f for f in os.listdir(LR_path) if f.endswith(".pkl")],
        key=_frame_index)
    LR_length = len(LR_pkl_files)
    LR_array = np.zeros((LR_length, 24 * 3))
    LR_betas = np.zeros([LR_length, 10])
    LR_trans = np.zeros([LR_length, 3])

    HR_path = Util.HR_pose_path
    HR_pkl_files = sorted(
        [f for f in os.listdir(HR_path) if f.endswith(".pkl")],
        key=_frame_index)
    HR_length = len(HR_pkl_files)
    HR_array = np.zeros((HR_length, 24 * 3))

    for ind, LR_pkl_file in enumerate(LR_pkl_files):
        LR_pkl_path = os.path.join(LR_path, LR_pkl_file)
        # BUGFIX: pickles must be opened in binary mode on Python 3.
        with open(LR_pkl_path, 'rb') as f:
            param = pickle.load(f)
        # Vectorized copies replace the original element-by-element loops.
        LR_array[ind, :] = param['pose'].squeeze()[:24 * 3]
        LR_betas[ind, :] = param['betas'].squeeze()[:10]
        LR_trans[ind, :] = param['trans'].squeeze()[:3]

    for ind, HR_pkl_file in enumerate(HR_pkl_files):
        HR_pkl_path = os.path.join(HR_path, HR_pkl_file)
        with open(HR_pkl_path, 'rb') as f:
            param = pickle.load(f)
        HR_array[ind, :] = param['pose'].squeeze()[:24 * 3]

    hr_points = algorithms.get_hr_points(HR_array)
    lr_points = algorithms.get_lr_points(LR_array, HR_array, Util.match_idx)

    output = algorithms.periodicDecomp(LR_array, HR_array, lr_points,
                                       hr_points)
    refine_optimization(output, LR_betas, LR_trans, data_dict, hmr_dict,
                        LR_cams, texture_img, texture_vt, Util)
Example #14
0
 def __init__(self):
     """Connect to MongoDB and prime database state (collections, indexes,
     manufacturer/model caches).

     NOTE(review): MongoClient connects lazily, so the success print below
     does not actually prove the server is reachable — confirm intent.
     """
     self._utility = Utility()
     self._mongo_client = MongoClient(Configurations.MONGO_DB_HOST, int(Configurations.MONGO_DB_PORT))
     print("Successfully connected to Mongo DB host: {0} and port: {1}".format(Configurations.MONGO_DB_HOST,
                                                                               str(Configurations.MONGO_DB_PORT)))
     self._create_db_and_collections_if_not_exist()
     self._create_indexes()
     self._get_all_manufacturers()
     self._get_all_models()
     # Date format used when (de)serializing date strings.
     self._time_format = "%Y-%m-%d"
Example #15
0
    def __init__(self, p, l, beamWidth):
        """Store beam parameters and helpers for beam generation.

        :param p: presumably the radial mode index (Laguerre) — confirm.
        :param l: presumably the azimuthal mode index — confirm.
        :param beamWidth: beam width used in later computations.
        """
        self.p = p
        self.l = l
        self.beamWidth = beamWidth
        # Frequently used constants, precomputed once.
        self.SquareRoot2 = math.sqrt(2)
        self.PI = math.pi

        self.LAG = Laguerre()
        self.UTIL = Utility()
        self.w = 0.12077  ############################## still need to change this
 def __init__(self):
     """Create a 1920x1080 hologram workspace and print usage instructions
     for the interactive beam-building workflow."""
     self.newHolo = Hologram((1920, 1080))
     self.UTIL = Utility()
     print(
         "Input the number of required columns\n input l, p and/or other variables in the following format"
     )
     print("pass the matrix via obj.passBeams()")
     print("pass in the gratings with obj.passfGratings()")
     print("generate beams with obj.generateHologram()")
     #self.blaze = Blazed(0, 0)
     # Frames queued for display.
     self.display = list()
Example #17
0
    def __init__(self):
        """Read the testcase path from argv, run Solution_test on it, and
        print the produced output vector."""
        if len(sys.argv) < 2:
            print(
                "Please provide the testcase filepath as a command line argument"
            )
            return
        # BUGFIX: renamed local that shadowed the `input` builtin.
        testcase = Utility().read_file(sys.argv[1])

        s = Solution_test(testcase)
        # NOTE(review): the original also called output_vector() here and
        # discarded the result; kept in case it has side effects — confirm.
        s.output_vector()
        print("Your solution")
        print("==========================================")
        print(s.output_vector())
Example #18
0
def create_device_mapper():
    """
    Print every registered device (name, type, location) from DeviceMapper.

    :return: None (also when no devices are registered — a message is printed).
    """
    utility = Utility(log_file="data_gen_process_log_file.txt", debug=1)
    sql = "SELECT DeviceName, DeviceType, DeviceLocation FROM DeviceMapper"
    mapped_device_list, device_count = utility.query_database(sql)
    if device_count == 0:
        print("no device registered in the database")
        return None
    for mapped_device in mapped_device_list:
        print(mapped_device)  # BUGFIX: was a Python-2 print statement
    return None
Example #19
0
    def __init__(self):
        """Read preferences from the argv testcase file and print the count
        and contents of all stable matchings found by Solution."""
        if len(sys.argv) < 2:
            print(
                "Please provide the testcase filepath as a command line argument"
            )
            return
        # BUGFIX: renamed local that shadowed the `input` builtin.
        testcase = Utility().read_file(sys.argv[1])

        s = Solution(len(testcase.women_preferences), testcase.women_preferences,
                     testcase.men_preferences)
        sol = s.output_stable_matchings()
        print(len(sol))
        for m in sol:
            print(m)
Example #20
0
 def __init__(self):
     """Read preferences from the argv testcase file, compute all stable
     matchings, and canonically sort them (within and across matchings)."""
     if len(sys.argv) < 2:
         print(
             "Please provide the testcase filepath as a command line argument"
         )
         return
     # BUGFIX: renamed local that shadowed the `input` builtin.
     testcase = Utility().read_file(sys.argv[1])
     s = Solution(len(testcase.women_preferences), testcase.women_preferences,
                  testcase.men_preferences)
     sol = s.output_stable_matchings()
     for match in sol:  # sort each matching by women
         match.sort()
     sol.sort()  # sorting across different matchings
Example #21
0
 def __init__(self):
     """Read hospital/student preferences from the argv testcase file and
     print the hospital-student matching produced by Solution."""
     if len(sys.argv) < 2:
         print(
             "Please provide the testcase filepath as a command line argument"
         )
         return
     # BUGFIX: renamed local that shadowed the `input` builtin.
     testcase = Utility().read_file(sys.argv[1])
     s = Solution(len(testcase.hospital_preferences),
                  len(testcase.student_preferences),
                  testcase.hospital_preferences, testcase.student_preferences,
                  testcase.hospital_open_slots)
     solution = s.get_matches()
     print(solution)
def main():
    """Read the SPLA/LAHSA input file, replay the pre-selected applicants into
    each agency's utility, partition remaining applicants by eligibility, run
    the game search, and write/print the best applicant set."""
    global applicants, cache
    bed, parking_spot, spla_list, lahsa_list, applicants = ReadWriteFile.read_input_file(
        "../grading_case/input22.txt")
    SPLA = Utility(bed)
    # Modify SPLA utility for already selected applicants by SPLA
    for i in spla_list:
        applicants[int(i) - 1].selected_by_spla = True
        SPLA.modify(applicants[int(i) - 1])
    LAHSA = Utility(parking_spot)
    # Modify LAHSA utility for already selected applicants by LAHSA
    for i in lahsa_list:
        applicants[int(i) - 1].selected_by_lahsa = True
        LAHSA.modify(applicants[int(i) - 1])
    cache = {}
    leftover_applicant = []
    # Partition the remaining applicants by which agency can take them.
    LAHSA_applicant_list = []
    SPLA_applicant_list = []
    common_applicant_list = []
    for a in applicants:
        if a.selected_by_lahsa or a.selected_by_spla:
            continue
        if a.valid_lahsa and a.valid_spla:
            common_applicant_list.append(a.id)
        else:
            if a.valid_spla:
                SPLA_applicant_list.append(a.id)
            elif a.valid_lahsa:
                LAHSA_applicant_list.append(a.id)
    SPLA_best_score, LAHSA_best_score, best_applicant = game_play_spla(
        common_applicant_list, SPLA_applicant_list, LAHSA_applicant_list, SPLA,
        LAHSA)
    ReadWriteFile.create_output_file(best_applicant)
    # BUGFIX: converted Python-2 print statements to print() calls.
    print(best_applicant)
    print("Final Value %s %s" % (SPLA_best_score, LAHSA_best_score))
Example #23
0
 def k_means(self, df, n_clusters):
     '''
     Cluster the (normalized) examples of ``df`` into ``n_clusters`` groups
     with k-means; prints the inertia, displays and plots the cluster
     centers, and returns the per-example cluster labels.
     '''
     # Normalize so distance-based clustering is not scale-dominated.
     df = Utility().normalize(df)
     model = KMeans(n_clusters=n_clusters)
     model.fit(df)
     print("Reduced inertia:", model.inertia_)
     print("Clusters centers:")
     cluster_index = ["cluster %i" % i for i in np.arange(n_clusters)]
     display(pd.DataFrame(model.cluster_centers_, columns=df.columns,
                          index=cluster_index))

     centers = pd.DataFrame(model.cluster_centers_, columns=df.columns,
                            index=cluster_index)
     self.plot_cluster_centers(centers)
     return model.labels_
Example #24
0
    def check(indice, subscribeFieldlist, triggermessage):
        """Validate a subscription request (message, index code, field names);
        return a list of error dicts, empty when the request is valid."""
        utility = Utility()
        error = []

        # The message check is independent of the index/field checks.
        if not triggermessage:
            error.append({'error': 'Message cannot be empty'})

        # Field names are only checked when the index code is valid.
        if utility.CheckStockCode(indice) is False:
            error.append({'error': 'Wrong index code'})
        elif not utility.CheckAttrNamesOfStock(indice, subscribeFieldlist):
            error.append({'error': 'Field name doesnot exist'})

        return error
    def __init__(self):
        """Set up the pygame/tkinter UI state and the image-quality API client."""
        self.util = Utility()
        self.screen = self.util.screen

        # Hidden tkinter root so file dialogs can be shown without a window.
        self.tkinterRoot = tkinter.Tk()
        self.tkinterRoot.withdraw()  #Hides the tkinter window that pops up

        # NOTE(review): clientId / clientSecret are names not defined in this
        # block — presumably module-level credentials; confirm where they live.
        self.qA = QualityAppraiser(clientId, clientSecret)
        self.qualityAppraiser = self.qA.get_token()
        self.url = 'https://api.everypixel.com/v1/quality'
        self.filedialog = filedialog

        self.eventDict = self.get_event_dict()

        # Last folder/file chosen by the user; set later via the file dialog.
        self.userPath = None

        # Cursor shapes used to signal busy vs. ready states.
        self.waitCursor = pygame.cursors.broken_x
        self.arrowCursor = pygame.cursors.arrow
Example #26
0
    def get_resample_rt_data(self, resample_rule="default"):
        """Resample real-time tick data to this object's frequency.

        With the default rule, builds OHLCV bars (volume is summed and scaled
        by 100); a custom aggregation dict may be supplied instead. Returns
        the resampled frame with the DateTime index moved back to a column.
        """
        data = Utility().DateTimeAsIndex(self.get_real_time_data())

        if resample_rule == "default":
            resample_rule = {
                'Ticker': lambda x: x.head(1),
                'Open': lambda x: x.head(1),
                'High': np.max,
                'Low': np.min,
                'Close': lambda x: x.tail(1),
                'Volume': lambda x: np.sum(x) * 100,
            }

        resampled = data.resample(self.freq).apply(resample_rule).dropna()

        # Move the DateTime index back into a regular column.
        return resampled.reset_index(drop=False)
Example #27
0
def check_analytic_true(img_id):
    """Mark an AP_ImageData row as fully processed (status = 5).

    Retries the DB connection until it succeeds, then issues the update.

    :param img_id: primary key of the AP_ImageData row.
    :return: True when the update succeeded, False otherwise.
    """
    while True:
        try:
            utility = Utility(log_file="big_data_process_log_file.txt", debug=1)
            print("connected to mysql db")
            break
        except Exception:  # BUGFIX: was a bare except
            print("mysql server gone....... trying again")

    sql = "Update AP_ImageData SET status = {} where ID = '{}';".format(5, img_id)

    flag = utility.update_database(sql)

    if flag:
        utility.loginfofile("Quitting thread after updating status- " + str(threading.currentThread().getName()))
        return True
    else:
        # BUGFIX: the original appended str(e) here, but ``e`` was never
        # defined on this path and raised a NameError.
        utility.loginfofile("Database query Failed while updating status for thread " + str(threading.currentThread().getName()))
        return False
Example #28
0
def setUpClass(request):
    """Suite-level pytest fixture: open the suite log and load config/static
    helpers, attach them to the test class via ``request.cls``, yield to the
    tests, then log total duration and close the log.

    :param request: pytest fixture request object (presumably class-scoped —
        confirm against the fixture declaration).
    """
    print("setUpClass started")
    utility = Utility()
    utility.createLogFolder()
    log = open(utility.logpath + "/WV-00.txt", "a+")
    suite_start_time = time.time()
    log.write("Suite started at {}\n".format(
        str(time.ctime(int(suite_start_time)))))
    loginPageStaticTexts = LoginPageStaticText()
    loginPageTestData = LoginPageTestData()
    # Per-testcase configuration properties.
    configTestCase = configparser.RawConfigParser()
    configTestCase.read(
        os.path.dirname(os.getcwd()) + '/TestCases/WV_00_Config.properties')
    # ECG-related test data.
    configECG = configparser.RawConfigParser()
    configECG.read(
        os.path.dirname(os.getcwd()) + '/Scripts/ECGRelatedData.properties')
    # Device-related test data.
    configDevice = configparser.RawConfigParser()
    configDevice.read(
        os.path.dirname(os.getcwd()) + '/Scripts/DeviceRelatedData.properties')
    sendECG = SendECG()

    # Expose everything to the test class instance attributes.
    request.cls.log = log
    request.cls.utility = utility
    request.cls.suite_start_time = suite_start_time
    request.cls.loginPageStaticTexts = loginPageStaticTexts
    request.cls.loginPageTestData = loginPageTestData
    request.cls.configTestCase = configTestCase
    request.cls.configECG = configECG
    request.cls.configDevice = configDevice
    request.cls.sendECG = sendECG

    print("setUpClass END")
    # Hand control to the test suite; teardown resumes below the yield.
    yield
    print("setUpClass_yield")
    suite_end_time = time.time()
    total_time_taken_suite = suite_end_time - suite_start_time
    log.write("Suite ended at {}\n".format(str(time.ctime(
        int(suite_end_time)))))
    log.write("Total time taken by Test Suite to finish: {} seconds\n".format(
        total_time_taken_suite))
    log.close()
Example #29
0
    def DB_Execute_Sql_Parames(self,
                               Sql,
                               Params,
                               host=_Host,
                               user=_User,
                               password=_Password):
        u"""
        Execute *Sql* against MySQL with *Params*, a Python-literal string
        (e.g. a tuple repr); any parameter prefixed with 'gzip' is
        gzip-compressed before binding.

        Default Value:
        user = '******'  passwrod = 'derbysoft'


        Examples:
        When gizp is a File
        | @{aaa}= | Test Data | maplist.xml |
        | DB_Execute_Sql_Parames  | INSERT INTO marriott_ari.`process` (check_in, create_time, hotel, los_rs,process_key,rate_plan_candidate,rate_plan_candidate_value) VALUES (%s, %s, %s, %s, %s, %s, %s) | '2017-08-05', '2017-07-07 06:23:56', 'HNLOW',r'gzip@{aaa}[0]', 'HNLOW:2017-08-05:CATEGORY:DN1', 'CATEGORY', 'DN1' |


        When gizp is a String
        | @{aaa}= | Test Data | maplist.xml |
        | ${f}  | get file | @{aaa}[0]  |
        | DB_Execute_Sql_Parames  | INSERT INTO marriott_ari.`process` (check_in, create_time, hotel, los_rs,process_key,rate_plan_candidate,rate_plan_candidate_value) VALUES (%s, %s, %s, %s, %s, %s, %s) | '2017-08-05', '2017-07-07 06:23:56', 'HNLOW','''gzip${f}''', 'HNLOW:2017-08-05:CATEGORY:DN1', 'CATEGORY', 'DN1' |
        """
        conn = None
        cur = None
        try:
            # BUGFIX: the original ignored the ``host`` parameter and always
            # connected to _Host.
            conn = mysql.connector.connect(user=user,
                                           password=password,
                                           host=host)
            cur = conn.cursor()
            # Params arrives as a literal string; evaluate it safely.
            params = ast.literal_eval(Params)
            res = []
            for p in params:
                if "gzip" in p:
                    p = p.replace('gzip', '')
                    u = Utility()
                    p = u.Compress_Gzip(p)
                res.append(p)
            cur.execute(Sql, res)
            conn.commit()
            dlog.write("DB_Execute_Sql_Parames is successfully")
        except Exception as e:  # BUGFIX: Python-2 ``except Exception, e`` syntax
            dlog.fail(e)
        finally:
            # BUGFIX: the original leaked the cursor and connection.
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()
Example #30
0
def main():
    """Generate analytics reports for image keys not yet present in CR_Reports.

    Skips generation if matching reports already exist; otherwise collects the
    unprocessed image keys from BIG_DATA and hands them to generate_report().
    """
    utility = Utility(log_file='big_data_process_log_file.txt', debug=1, db='Analytics')

    sql = "SELECT ReportID FROM CR_Reports WHERE {}".format(filter_cr_reports)
    print(sql)  # BUGFIX: Python-2 print statements converted throughout

    report_list, report_list_count = utility.query_database(sql)

    print(report_list, report_list_count)

    if report_list_count:
        print("report already processed")
        return None

    sql_1 = "SELECT distinct(image_key) FROM BIG_DATA WHERE {}".format(filter_big_data)
    print(sql_1)
    unprocessed_data_list, unprocessed_data_list_count = utility.query_database(sql_1)
    print(unprocessed_data_list_count)

    if unprocessed_data_list:
        generate_report(unprocessed_data_list)