Code Example #1
@classmethod
def setUpClass(cls):
    # Load the CSV file
    pgreaper.copy_csv(path.join(FAKE_CSV_DATA, 'ints_skipline.csv'),
                      dbname=TEST_DB,
                      name='ints2',
                      null_values='NA',
                      header=0,
                      skiplines=1)
Code Example #2
def load():
    for f in csvs:
        groups = re.match(r'StormEvents_(.*)-ftp_v1\.0_d([0-9]*)', f)

        # Merge related datasets into one table
        name = 'storm_events_' + groups.group(1)

        if groups.group(1) == 'details':
            pgreaper.copy_csv(os.path.join('data', f),
                              name=name,
                              compression='gzip',
                              p_key='event_id',
                              dbname='us_wth')
            print("Loaded", name)
Code Example #3
def copy_delim(file, zip, zipped_file, delim, header, skiplines, dbname, user,
               host, password):
    # Use 'is not None' because zipped_file=0 is a valid (non-null) option
    if zip and (zipped_file is None):
        raise ValueError(
            'Please specify which file in the ZIP archive to upload.')

    # Load the file
    pgreaper.copy_csv(file,
                      delimiter=delim,
                      header=header,
                      skiplines=skiplines,
                      dbname=dbname,
                      user=user,
                      host=host,
                      password=password)

    # Print report
    print('Finished loading file')
Code Example #4
def load():
    for f in csvs:
        groups = re.match(r'StormEvents_(.*)-ftp_v1\.0_d([0-9]*)', f)

        # Merge related datasets into one table
        name = 'storm_events_' + groups.group(1)

        try:
            if groups.group(1) == 'locations':
                pgreaper.copy_csv(os.path.join('data', f),
                                  name=name,
                                  p_key=('episode_id', 'event_id',
                                         'location_index'),
                                  compression='gzip',
                                  dbname='us_wth')
                print("Loaded", name)
        except KeyError:
            # Upon manual inspection, KeyErrors appear to be caused by
            # empty CSV files
            pass
Code Example #5
def pgreaper_load():
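    # Load 2015_StateDepartment.csv into the 'ca_employees' table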
    pgreaper.copy_csv(os.path.join(REAL_CSV_DATA, '2015_StateDepartment.csv'),
                      delimiter=',',
                      name='ca_employees',
                      dbname='pgreaper_test',
                      header=0)
Code Example #6
@classmethod
def setUpClass(cls):
    # Load an LZMA-compressed CSV into the 'ca_employees' table
    pgreaper.copy_csv(os.path.join(REAL_CSV_DATA, 'compressed',
                                   '2015_StateDepartment.csv.lzma'),
                      name='ca_employees',
                      compression='lzma',
                      dbname=TEST_DB)
Code Example #7
@classmethod
def setUpClass(cls):
    # Load the uncompressed CSV into the 'ca_employees' table
    pgreaper.copy_csv(os.path.join(REAL_CSV_DATA,
                                   '2015_StateDepartment.csv'),
                      name='ca_employees',
                      dbname=TEST_DB)
Code Example #8
@classmethod
def setUpClass(cls):
    # Load the CSV file
    pgreaper.copy_csv(path.join(FAKE_CSV_DATA, 'ints.csv'),
                      dbname=TEST_DB,
                      name='ints',
                      header=0)
Code Example #9
@classmethod
def setUpClass(cls):
    # Load only the selected columns from persons.csv
    data = path.join(MIMESIS_CSV_DATA, 'persons.csv')
    pgreaper.copy_csv(data,
                      name='persons',
                      subset=['Full Name', 'Age', 'Email'],
                      dbname=TEST_DB)
Code Example #10
File: to_sql.py  Project: vincentlaucsb/US-Hurricanes
import pgreaper

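# Load the gzipped IBTrACS all-storms CSV into the 'ibtracs_allstorms' table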
pgreaper.copy_csv('Allstorms.ibtracs_all.v03r09.csv.gz',
                  compression='gzip',
                  name='ibtracs_allstorms',
                  dbname='us_wth',
                  header=1,
                  skiplines=1)