Example 1
def SAP_process_table(table, fields, options, columns_dict, model):
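    # Pull `table` from SAP, rename the columns to the ORM model's fields, and
    # bulk-insert the rows into the model's table before returning the DataFrame.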
    session = Session()
    df = SAP_get_table(table, fields, options)
    df.rename(columns=columns_dict, inplace=True)
    session.execute(model.__table__.insert(), df.to_dict(orient="records"))
    session.commit()
    session.close()
    return df
Example 2
    def __init__(self):
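        # Load light modules and strips from the database, then start the
        # light engine and the Spotify client; if SIM is set, also connect a
        # simulator client.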
        print('Initializing Driver')
        session = Session()
        self.modules = session.query(LightModule).all()
        self.strips = {s.name: s for s in session.query(Strip).all()}
        session.close()
        print('starting engine')

        self.engine = LightEngine(self.modules)
        print('engine running')
        self.sp = Spotify()
        print('spotify loaded')
        if SIM:
            self.sim = Client('127.0.0.1:7890')
Example 3
def process_input(header=True):
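    # Read the downloaded CSV and insert each review, skipping rows that are
    # already in the database or duplicated within the file.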
    try:
        with open(tmp_file, 'r', encoding='utf-8') as csv_file:
            csv_reader = list(csv.reader(csv_file, delimiter=','))
    except (OSError, UnicodeDecodeError, csv.Error):
        print("Error in opening csv file. Please check the format/encoding!!")
        quit()
    line_no = 0
    if header:
        line_no += 1
        csv_reader = csv_reader[1:]
    session = Session()

    all_reviews = session.query(Review).all()
    reviews_set = {(review.product_id, str(review.review_text).lower())
                   for review in all_reviews}
    reviews_in_file = set()
    print("Processing input file..")
    for line in csv_reader:
        line_no += 1
        review_text = line[0]
        product_id = line[4]
        search_key = (product_id, str(review_text).lower())
        if search_key in reviews_set:
            print("Review at line: {} is already in db!!".format(line_no))
        elif search_key in reviews_in_file:
            print("Review at line: {} is duplicate in file!!".format(line_no))
        else:
            review_ob = Review(product_id, review_text)
            session.add(review_ob)
            reviews_in_file.add(search_key)

    print("Committing data...")
    session.commit()
    session.close()
    try:
        # Remove the downloaded file; ignore the error if it is already gone.
        os.remove(tmp_file)
    except OSError:
        pass
Example 4
def process_reviews():
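    # Analyze every review not yet marked as analyzed, throttling to at most
    # `throttle_limit` requests per minute.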
    session = Session()
    client = language.LanguageServiceClient()
    reviews_to_analyze = session.query(Review).filter_by(
        review_analyzed=False).all()
    total = len(reviews_to_analyze)
    processed = 0
    step = 1
    throttle_limit = 500
    print("Processing Reviews...")
    print("Processed {}/{} ".format(processed, total), end='\r')
    start_time = time.time()
    one_minute = 60
    for review in reviews_to_analyze:
        try:
            analyze_review(client, review, session)
        except Exception:
            # Analysis failures are skipped; the review is still marked as analyzed.
            pass
        review.review_analyzed = True
        session.add(review)
        processed += 1

        # if processed % step == 0:
        print("Processed {}/{} ".format(processed, total), end='\r')

        if processed % throttle_limit == 0:
            end_time = time.time()
            time_taken = end_time - start_time
            if time_taken < one_minute:
                time.sleep(one_minute - time_taken)
            start_time = time.time()

    print("Processed {}/{} ".format(processed, total))
    print("Committing data...")
    session.commit()
    session.close()

    print("Processed data stored successfully!!")
Example 5
def SAP_notification_process(date, end_date):
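    # Assemble the notification-related tables. Most lookups are read back from
    # previously extracted SQL tables; the status change log (JCDS) is fetched
    # from SAP and inserted at the end.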
    # Get all notifications from the VIQMEL
    print('Processing notifications')
    #df_notification = SAP_process_table('VIQMEL',['QMNUM','IWERK','QMART','AUFNR','EQUNR','QMTXT','STRMN','LTRMN','ERDAT','PRIOK','ERNAM','TPLNR','ARBPL', 'OBJNR'],\
    #    "IWERK EQ 'TR01' AND ERDAT >= '" + date + "' and ERDAT <= '"+end_date+"'",notification_columns, Notification)
    df_notification = pd.read_sql_table("SAP_notifications", con=engine)

    # Get all notification types from TQ80
    print('Processing notifications type')
    #SAP_process_table('TQ80', ['QMART','STSMA'], "MANDT EQ '020'",notification_type_columns, Notification_Type)
    df_type = pd.read_sql_table("SAP_notification_type", con=engine)

    # Get all notifications' additional texts or comments from QMFE
    print('Processing notifications text')
    #SAP_process_table('QMFE', ['QMNUM','FETXT'], "FETXT NE '' AND ERDAT >= '" + date + "'",notification_text_columns, Notification_Text)

    # Get all notifications' activities from the QMMA
    print('Processing notifications activities')
    #SAP_process_table('QMMA', ['MANDT','QMNUM','MATXT','MNGRP','MNCOD','MNKAT'], "MANDT EQ '020' AND ERDAT >= '" + date + "'", notification_activity_columns, Notification_Activity)

    # Get all notifications' activity headers from QPCT
    print('Processing notifications activities header')
    #SAP_process_table('QPCT', ['MANDT','CODEGRUPPE','CODE','KURZTEXT','KATALOGART'], "MANDT EQ '020' AND INAKTIV EQ ''", notification_activity_header_columns, Notification_Activity_Header)

    # Get all notifications' catalog types from TQ15T
    print('Processing notifications catalog')
    #SAP_process_table('TQ15T', ['MANDT','KATALOGART','KATALOGTXT'], "MANDT EQ '020' AND SPRACHE EQ 'S'", notification_catalog_columns, Notification_Catalog)

    # Get all notifications' causes from the QMUR
    print('Processing notifications causes')
    #SAP_process_table('QMUR', ['MANDT','QMNUM','URTXT'], "MANDT EQ '020' AND ERDAT >= '" + date + "'", notification_cause_columns, Notification_Cause)

    # Get all TRANSELCA's equipment (K: Transelca equipment - L: Transelca MAF)
    print('Processing equipments')
    #SAP_process_table('EQUI', ['MANDT','EQUNR','EQART','HERST','TYPBZ'], "EQTYP EQ 'K' OR EQTYP EQ 'L'", equipment_columns, Equipment)

    # Get all equipment descriptions
    print('Processing equipments description')
    #SAP_process_table('EQKT', ['MANDT','EQUNR','EQKTX'], "MANDT EQ '020'", equipment_text_columns, Equipment_Text)

    # Get all TRANSELCA's workplaces
    print('Processing workplaces')
    #df_workplace_id = SAP_get_table('CRHD',['MANDT','OBJID','ARBPL'],"MANDT EQ '020' AND WERKS EQ 'TR01'")
    #df_workplace_text = SAP_get_table('CRTX',['MANDT','OBJID','KTEXT'],"MANDT EQ '020' AND SPRAS EQ 'S'")
    #df_workplace = pd.merge(df_workplace_id, df_workplace_text, how='inner', on=['OBJID']) # filter per values in df_system
    #df_workplace.rename(columns=work_center_columns,inplace=True)
    #session = Session()
    #session.execute(Work_Center.__table__.insert(),df_workplace.to_dict(orient="records"))
    #session.commit()
    #session.close()

    # Get all TRANSELCA's functional location
    print('Processing functional location')
    #session = Session()
    #print(df_notification['func_location'].unique().shape)
    #df_func = SAP_get_table_by_data('IFLOS',['TPLNR','STRNO', 'ACTVS','TPLKZ','ERDAT','VERSN','ERNAM'],"ACTVS EQ 'X' AND TPLNR EQ '#field#'",df_notification['func_location'].unique())
    #df_func = df_func.drop_duplicates()
    #df_func.rename(columns=func_location_columns,inplace=True)
    #session.execute(Functional_Location.__table__.insert(),df_func.to_dict(orient="records"))
    #session.commit()
    #session.close()

    # Get System Status
    print('Processing System Status Header')
    #df_system = SAP_process_table('TJ02T', ['ISTAT','TXT04','TXT30'], "SPRAS EQ 'S'", system_columns, System_Status)
    df_system = pd.read_sql_table("SAP_system_status", con=engine)

    # Get User Status Header
    print('Processing User Status Header')
    #df_user = SAP_process_table('TJ30T', ['STSMA','ESTAT','TXT04','TXT30'], "MANDT EQ '020' AND SPRAS EQ 'S'", user_columns, User_Status)
    df_user = pd.read_sql_table("SAP_user_status", con=engine)

    # Get all statuses
    print(df_notification.shape)
    print('GET STATUSES START')
    #session = Session()
    #df_status = SAP_get_table_by_data('JEST',['MANDT','OBJNR','STAT','INACT'],"MANDT EQ '020' AND OBJNR EQ '#field#'",df_notification['obj_nr'])
    #df_status = df_status.drop_duplicates()
    #df_status.rename(columns=status_columns,inplace=True)
    #df_status_system = pd.merge(df_status, df_system, how='inner', on=['status_id']) # filter per values in df_system

    #df_status_user =  pd.merge(df_status, df_user['status_id'].drop_duplicates(), how='inner', on=['status_id']) # filter per values in df_user
    #df_notification_2 = pd.merge(df_notification, df_type, how='inner', on=['type_n']) # filter per values in df_user
    #df_status_user_x = pd.merge(df_status_user, df_notification_2[['obj_nr','status_schema']], how='inner', on=['obj_nr'])

    #session.execute(Notification_System_Status.__table__.insert(),df_status_system.to_dict(orient="records"))
    #session.execute(Notification_User_Status.__table__.insert(),df_status_user_x.to_dict(orient="records"))
    #session.commit()
    #session.close()

    # Get log statuses
    print('GET LOG STATUSES START')
    session = Session()
    df_status_system = pd.read_sql_table('SAP_notification_system_status',
                                         con=engine)
    df_status_user_x = pd.read_sql_table('SAP_notification_user_status',
                                         con=engine)
    df_log_status = SAP_get_table_by_data(
        'JCDS', [
            'MANDT', 'OBJNR', 'STAT', 'USNAM', 'UDATE', 'UTIME', 'TCODE',
            'CDTCODE', 'INACT', 'CHIND'
        ], "MANDT EQ '020' AND UDATE >='" + date + "' AND OBJNR EQ '#field#'",
        df_notification['obj_nr'])
    df_log_status = df_log_status.drop_duplicates()
    df_log_status.rename(columns=log_columns, inplace=True)
    df_log_status_system = pd.merge(df_log_status,
                                    df_status_system[['obj_nr', 'status_id']],
                                    how='inner',
                                    on=['obj_nr', 'status_id'])
    df_log_status_user = pd.merge(
        df_log_status,
        df_status_user_x[['obj_nr', 'status_id', 'status_schema']],
        how='inner',
        on=['obj_nr', 'status_id'])  # filter

    session.execute(Notification_Log_User.__table__.insert(),
                    df_log_status_user.to_dict(orient="records"))
    session.execute(Notification_Log_System.__table__.insert(),
                    df_log_status_system.to_dict(orient="records"))
    session.commit()
    session.close()
Example 6
from lighting.basic import Intensity, Color
from lighting.strobe import Strobe, SmoothStrobe
from lighting.kf_intensities import KFIntensities
from lighting.ripple import Ripple
from config import Session
import time
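# NOTE: LightModule, Strip and LightEngine are assumed to be provided by the
# surrounding project; they are not imported in this snippet.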




if __name__ == '__main__':
    session = Session()

    modules = session.query(LightModule).all()
    strips = session.query(Strip).all()
    session.close()

    # time.sleep(2)
    # engine = LightEngine(modules)
    # print 'beginning test'
    # for i in range(5):
    #     for strip in strips:
    #         strip.effects.append(Wave(length=strip.length))
    #     time.sleep(3)

    time.sleep(1)
    engine = LightEngine(modules, refresh_rate=60)
    print('beginning test')
    r = Ripple(speed=2)
    for strip in strips:
        strip.add_intensity_control(Ripple(speed=1, center=60))
Example 7
class ScriptTestCase(unittest.TestCase):
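    # Tests for the %{...} script-literal helpers (replace_literals,
    # find_literals, compile_script), which need a database Session.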
    def setUp(self):
        self.session = Session()

    def tearDown(self):
        self.session.close()

    def test_replace_literals(self):
        script_in = '%{사람}'
        script = replace_literals(script_in, self.session)

        self.assertNotEqual(script, script_in)

    def test_replace_literals_unknown(self):
        script_in = '%{XXXX}'
        script = replace_literals(script_in, self.session)

        self.assertEqual(script_in, script)

    def test_find_literals(self):
        script = '%{사람 %{사람} } %{랜덤} literals'
        literals = list(find_literals(script))
        self.assertEqual(literals, ['%{사람}', '%{랜덤}'])

        script = '%{1000-2030}'
        literals = list(find_literals(script))
        self.assertEqual(literals, ['%{1000-2030}'])

    def test_find_literals_matchall(self):
        script = '%{{사람}} literals'
        literals = list(find_literals(script))

        self.assertEqual(literals, ['%{{사람}}'])

    def test_particles(self):
        script = '%{{사람}}(이)가 어쨌다고요'
        script = compile_script(script, self.session)
        self.assertFalse('(' in script)

        script = '서울 대표(%{1900-1901})'
        script = compile_script(script, self.session)
        self.assertEqual(script, '서울 대표(1900)')

        script = '서울 대표(%{1900-1901}-%{1949-1950})'
        script = compile_script(script, self.session)
        self.assertEqual(script, '서울 대표(1900-1949)')

        script = '서울 대표(%{장소})'
        script = compile_script(script, self.session)
        self.assertTrue(script.startswith('서울 대표('))
        self.assertTrue(script.endswith(')'))

    def test_numbered_literal(self):
        script = '%{사람}%{1}'
        script = compile_script(script, self.session)
        split = int(len(script) / 2)
        self.assertEqual(script[:split], script[split:])

    def test_numbered_literal_particle(self):
        script = '%{{사람}} %{1}(이)라고'
        script = compile_script(script, self.session)
        self.assertFalse('(' in script)
Example 8
def get_db():
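    # Yield a database session and make sure it is closed even if the caller raises.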
    try:
        db = Session()
        yield db
    finally:
        db.close()
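# Usage sketch (an assumption, not shown in the example above): FastAPI can
# drive this generator as a dependency, injecting the yielded session into the
# endpoint and running the finally block after the response is sent. The
# FastAPI app and the Item model below are hypothetical.
from fastapi import Depends, FastAPI

app = FastAPI()


@app.get("/items/count")
def count_items(db=Depends(get_db)):
    # db is the Session yielded by get_db(); it is closed once the request ends.
    return {"count": db.query(Item).count()}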