def grab_zips(sftp):
    # dirlist on remote host
    print "%s: Reading directory..." % timestamp()
    dirlist = sftp.listdir('.')

    dirs_count=len(dirlist)
    completed_count=0
    skipped_count=0
    grabbed_count=0
    print "%s: Directory contains %d items" % (timestamp(),dirs_count)
    sys.stdout.write("\rGrabbed: %d    Skipped: %d    Processed: %d (%d%%)" % (grabbed_count,skipped_count,completed_count,0))
    sys.stdout.flush()
    for remotefile in dirlist:
        # processing
        completed_count += 1
        try:
            if is_for_grab(remotefile):
                sftp.get(remotefile, remotefile)
                unzip(remotefile)
                # count a grab only after the download and unzip succeed
                grabbed_count += 1
            else:
                skipped_count += 1
            #sftp.remove(remotefile)
        except Exception, e:
            print "\n%s: Error processing %s: %s" % (timestamp(), remotefile, e)
        # statistics
        percent=(completed_count*100)/dirs_count
        sys.stdout.write("\rGrabbed: %d    Skipped: %d    Processed: %d (%d%%)" % (grabbed_count,skipped_count,completed_count,percent))
        sys.stdout.flush()
    print "\n%s: Completed!" % timestamp()
def parse_configurations_csv(csv_lines):
    insertions = []
    track_ids = {}
    for line in csv_lines[1:]:  # skip header row
        parts = line.split(',')
        if len(parts) >= 22:
            # keep only the first row seen per tracking number (column 0)
            if parts[0] not in track_ids:
                track_ids[parts[0]]=0
                insertions.append((parts[0], parts[1], parts[21], timestamp()))
    return insertions
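# Example (hypothetical field values; a real row has 22+ comma-separated
# columns, elided here): only the first row per tracking number is kept,
# and columns 0, 1 and 21 plus a timestamp are collected:
#
#   parse_configurations_csv([header_row, "T100,2014-01-02,...,build42"])
#   # -> [("T100", "2014-01-02", "build42", <timestamp>)]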
def parse_resources_csv(csv_lines):
    # default layout: tracking number in col 0, start time in col 1
    track_num_id=0
    start_time_id=1
    items_limit=2
    # fileVersion-14 exports carry the file version as an extra leading
    # column, which shifts the layout right by one
    if len(csv_lines) > 1 and csv_lines[0].split(',')[0]=='fileVersion' and csv_lines[1].split(',')[0]=='14':
        track_num_id=1
        start_time_id=2
        items_limit=3

    insertions = []
    time_map = {}
    for line in csv_lines[1:]:  # skip header row
        parts = line.split(',')
        if len(parts) >= items_limit:
            # later rows overwrite earlier ones, so the last start time
            # per tracking number wins
            time_map[parts[track_num_id]]=parts[start_time_id]
    for (tracking_num, start_time) in time_map.iteritems():
        insertions.append((tracking_num, start_time, timestamp()))
    return insertions
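# The two row layouts this parser accepts (hypothetical values):
#
#   plain file:        "T100,2014-01-02 10:00"          -> tracking num in col 0
#   fileVersion file:  header "fileVersion,..." and
#                      data   "14,T100,2014-01-02 10:00" -> shifted to col 1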
def insert_site_detailes(site,domain,installationId,startTime,desktopBuild,userId,userFirstName,userLastName):
    # cursor, table_name, build_num_label and the SQL templates are
    # module-level globals in this script
    try:
        params=(site,domain,installationId,startTime,desktopBuild,userId,userFirstName,userLastName,timestamp(),installationId)
        db_connection.cursor().execute(insert_site_detail_if_not_exists % params)
        db_connection.commit()

        # register the site in the report table, then update its build column
        cursor.execute(insert_into_report % (table_name,site,domain,table_name,site,domain))
        db_connection.commit()

        print_progress("%s table: %s build update" % (table_name,build_num_label))
        cursor.execute(update_report_sql % (table_name,build_num_label,build_num_label,site,domain))
        db_connection.commit()
    except mdb.Error, e:
        print "\nError %d: %s" % (e.args[0], e.args[1])

# Main
db_connection = None
try:
    launched=False
    quit_if_launched('reports.index')
    launched=True
    print "Start time: %s" % timestamp()

    # MySQL connection
    if debug:
        db_connection = mdb.connect('127.0.0.1', 'root', 'root', 'amazon')
    else:
        db_connection = mdb.connect('did-db2.cx50jcbnfwsq.us-east-1.rds.amazonaws.com', 'did_db2_master', 'D75QHSq4iz2WTuF', 'did_db2')

    query_result=None
    if prepare_report_tables() is True:
        cursor = db_connection.cursor()
        cursor.execute(select_join_tracking_number % 90)
        print "%s: Select finished" % timestamp()
        query_result = cursor.fetchall()

    if query_result:
        pass  # processing of query_result elided here

except mdb.Error, e:
    print "Error %d: %s" % (e.args[0], e.args[1])
    sys.exit(1)

finally:
    if db_connection:
        db_connection.close()
    if launched:
        os.remove('reports.index')
    print "\nEnd time: %s" % timestamp()
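# quit_if_launched() is defined elsewhere in this codebase; a plausible
# lock-file sketch of it (an assumption, not the actual implementation):
#
#   def quit_if_launched(index_file):
#       if os.path.exists(index_file):
#           print "%s: already running, exiting" % timestamp()
#           sys.exit(0)
#       open(index_file, 'w').close()   # lock created here, removed in finally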
def parse_sessions_csv(csv_lines):
    insertions = []
    track_ids = {}
    for line in csv_lines[1:]:  # skip header row
        parts = line.split(',')
        if len(parts) >= 6:
            # first row per tracking number wins
            if parts[0] not in track_ids:
                track_ids[parts[0]]=0
                # side effect: registers the site/domain pair
                insert_site(parts[1], parts[2])
                insertions.append((parts[0], parts[1], parts[2], parts[3], parts[4], parts[5], timestamp()))
    return insertions
def insert_site_detailes(site,domain,installationId,startTime,desktopBuild,userId,userFirstName,userLastName):
    # derive a MySQL-safe per-site table name: "site_domain" with dots
    # replaced by underscores (at most five)
    table_name = "%s_%s" % (site, domain)
    table_name=table_name.replace('.', '_', 5)
    db_connection.cursor().execute(create_site_table % table_name)
    db_connection.commit()
    params=(table_name,installationId,startTime,desktopBuild,userId,userFirstName,userLastName,timestamp(),table_name,installationId,desktopBuild,userId)
    db_connection.cursor().execute(insert_site_detail_if_not_exists % params)
    db_connection.commit()
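# Example of the derived table name (hypothetical values):
#
#   insert_site_detailes('shop.example.com', 'us', ...) creates/uses the
#   table "shop_example_com_us"; replace(..., 5) caps the substitution at
#   five dots, enough for typical hostnames.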
# main():
db_connection = None
try:
    launched=False
    quit_if_launched('csv_to_db.index')
    launched=True

    # MySQL connection
    if debug:
        db_connection = mdb.connect('127.0.0.1', 'root', 'root', 'amazon')
    else:
        db_connection = mdb.connect('did-db2.cx50jcbnfwsq.us-east-1.rds.amazonaws.com', 'did_db2_master', 'D75QHSq4iz2WTuF', 'did_db2')

    # process data
    read_bucket()
    
    cursor = db_connection.cursor()
    cursor.execute(select_site_detailes)
    fetch = None  # cursor.fetchall() disabled, so the re-insert loop below is a no-op
    if fetch:
        for record in fetch:
            insert_site_detailes(record[0], record[1], record[2], record[3], record[4], record[5], record[6], record[7])

except mdb.Error, e:
    print "Error %d: %s" % (e.args[0], e.args[1])
    sys.exit(1)

finally:
    if db_connection:
        db_connection.close()
    if launched:
        os.remove('csv_to_db.index')
    print "\nEnd time: %s" % timestamp()
def insert_site_detailes(site,domain,installationId,startTime,desktopBuild,userId,userFirstName,userLastName):
    params=(site,domain,installationId,startTime,desktopBuild,userId,userFirstName,userLastName,timestamp(),installationId,desktopBuild,userId)
    db_connection.cursor().execute(insert_site_detail_if_not_exists % params)
    db_connection.commit()