Example #1
def load_dump_file(instance_dir, savedir, tempdir, datadump, db_engine, db_password):
    if not os.path.exists(datadump):
        # download from URL
        dump_archive = os.path.join(savedir, os.path.basename(datadump))
        log('Downloading %r to %r...', datadump, dump_archive)
        req = urllib2.urlopen(datadump)
        with open(dump_archive, 'wb') as fp:
            shutil.copyfileobj(req, fp)
        req.close()
    else:
        dump_archive = datadump
    
    extension = os.path.splitext(dump_archive)[-1]
    if extension in ('.bz2', '.gz', '.zip'):
        # decompress archive
        log('Expanding %r to %r...', dump_archive, tempdir)
        dump_file = expand(dump_archive, tempdir)
        extension = os.path.splitext(dump_file)[-1]
    else:
        dump_file = dump_archive
        
    log('Importing %r... (this may take a while!)', dump_file)

    if extension in ('.json',):
        load_json_dump(instance_dir, dump_file, tempdir)
    elif 'sqlite' in db_engine:
        import sqlite3
        connection = sqlite3.connect(os.path.join(instance_dir, 'db/ecm.sqlite'))
        cursor = connection.cursor()
        cursor.execute('ATTACH DATABASE \'%s\' AS "eve";' % dump_file)
        cursor.execute('DETACH DATABASE "eve";')
        cursor.close()
        connection.commit()
    else:
        pipe_to_dbshell(dump_file, instance_dir, password=db_password)
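All of these examples call an expand() helper that is not shown on this page. Below is a minimal sketch of what such a helper could look like, assuming each archive holds a single file and that the function returns the path of the extracted file; the bz2/gzip/zip handling is an illustration, not the project's actual implementation.

import bz2
import gzip
import os
import shutil
import zipfile

def expand(archive, destdir):
    # Decompress 'archive' into 'destdir' and return the path of the extracted file.
    extension = os.path.splitext(archive)[-1]
    target = os.path.join(destdir, os.path.basename(archive)[:-len(extension)])
    if extension == '.zip':
        with zipfile.ZipFile(archive) as zf:
            names = zf.namelist()
            zf.extractall(destdir)
        # Assumption: the zip archive contains a single member.
        return os.path.join(destdir, names[0])
    elif extension == '.bz2':
        src = bz2.BZ2File(archive, 'rb')
    elif extension == '.gz':
        src = gzip.open(archive, 'rb')
    else:
        raise ValueError('Unsupported archive type: %r' % extension)
    try:
        with open(target, 'wb') as dst:
            shutil.copyfileobj(src, dst)
    finally:
        src.close()
    return target
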
Example #2
def run(command, global_options, optionsd, args):
    
    if not args:
        command.parser.error('Missing instance directory.')
    instance_dir = args.pop(0)
    if not args:
        command.parser.error('Missing datadump.')
    datadump = args.pop(0)
    
    config = SafeConfigParser()
    if config.read([os.path.join(instance_dir, 'settings.ini')]):
        db_engine = config.get('database', 'ecm_engine')
        db_password = config.get('database', 'ecm_password')
    else:
        command.parser.error('Could not read "settings.ini" in instance dir.')
    try:
        sql = CCP_DATA_DUMPS[db_engine]
    except KeyError:
        command.parser.error('Cannot load datadump with database engine %r. '
                             'Supported engines: %r' % (db_engine, CCP_DATA_DUMPS.keys()))

    try:
        tempdir = tempfile.mkdtemp()
        
        if not os.path.exists(datadump):
            # download from URL
            dump_archive = os.path.join(tempdir, os.path.basename(datadump))
            log('Downloading EVE original dump from %r to %r...', datadump, dump_archive)
            req = urllib2.urlopen(datadump)
            with open(dump_archive, 'wb') as fp:
                shutil.copyfileobj(req, fp)
            req.close()
            log('Download complete.')
        else:
            dump_archive = datadump
        
        extension = os.path.splitext(dump_archive)[-1]
        if extension in ('.bz2', '.gz', '.zip'):
            # decompress archive
            log('Expanding %r to %r...', dump_archive, tempdir)
            dump_file = expand(dump_archive, tempdir)
            log('Expansion complete to %r.' % dump_file)
        else:
            dump_file = dump_archive
        
        log('Patching and importing data (this can be long)...')
        if 'sqlite' in db_engine:
            import sqlite3
            with open(os.path.join(SQL_ROOT, sql['PATCH'])) as f:
                sql_script = f.read() 
            connection = sqlite3.connect(os.path.join(instance_dir, 'db/ecm.sqlite'))
            cursor = connection.cursor()
            cursor.execute('ATTACH DATABASE \'%s\' AS "eve";' % dump_file)
            cursor.executescript(sql_script)
            cursor.execute('DETACH DATABASE "eve";')
            cursor.close()
            connection.commit()
        else:
            pipe_to_dbshell(dump_file, instance_dir, password=db_password)
            pipe_to_dbshell(os.path.join(SQL_ROOT, sql['PATCH']), instance_dir, password=db_password)
            pipe_to_dbshell(os.path.join(SQL_ROOT, sql['DROP']), instance_dir, password=db_password)
        
        log('EVE data successfully imported.')
    finally:
        log('Removing temp files...')
        shutil.rmtree(tempdir)
        log('done')
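Example #2 also relies on SQL_ROOT and CCP_DATA_DUMPS, neither of which is shown. The following is a hedged illustration of how that mapping might be laid out; the engine keys and script file names are assumptions made for the sketch, not the project's real values.

import os

# Hypothetical layout: the SQL scripts live next to this module, one set per engine.
SQL_ROOT = os.path.join(os.path.dirname(__file__), 'sql')

# Maps a Django database engine string to the patch/drop scripts applied after the
# raw CCP dump has been imported. Keys and file names are illustrative only.
CCP_DATA_DUMPS = {
    'django.db.backends.sqlite3': {'PATCH': 'patch-sqlite.sql', 'DROP': 'drop-sqlite.sql'},
    'django.db.backends.mysql': {'PATCH': 'patch-mysql.sql', 'DROP': 'drop-mysql.sql'},
    'django.db.backends.postgresql_psycopg2': {'PATCH': 'patch-postgres.sql',
                                               'DROP': 'drop-postgres.sql'},
}
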
Example #3
def load_dump_file(instance_dir, savedir, tempdir, datadump, db_engine, db_password):
    if not os.path.exists(datadump):
        # download from URL
        dump_archive = os.path.join(savedir, os.path.basename(datadump))
        log('Downloading %r to %r...', datadump, dump_archive)
        req = urllib2.urlopen(datadump)
        with open(dump_archive, 'wb') as fp:
            shutil.copyfileobj(req, fp)
        req.close()
    else:
        dump_archive = datadump
    
    extension = os.path.splitext(dump_archive)[-1]
    if extension in ('.bz2', '.gz', '.zip'):
        # decompress archive
        log('Expanding %r to %r...', dump_archive, tempdir)
        dump_file = expand(dump_archive, tempdir)
        extension = os.path.splitext(dump_file)[-1]
    else:
        dump_file = dump_archive
        
    log('Importing %r... (this may take a while!)', dump_file)

    if extension in ('.json',):
        load_json_dump(instance_dir, dump_file, tempdir)
    elif 'sqlite' in db_engine:
        import sqlite3
        config = SafeConfigParser()
        db_dir = ''
        if config.read([os.path.join(instance_dir, 'settings.ini')]):
            db_dir = config.get('database', 'sqlite_db_dir')
        if not db_dir:
            db_dir = os.path.join(instance_dir, 'db')
            
        db_file = os.path.join(db_dir, 'ecm.sqlite')
        
        # Connect to the instance DB and attach the SDE
        connection = sqlite3.connect(db_file)
        cursor = connection.cursor()
        cursor.execute('ATTACH DATABASE \'%s\' AS "eve";' % dump_file)
        
        # Get the tables from the SDE (import them all)
        cursor.execute('SELECT "name","sql" FROM "eve"."sqlite_master" WHERE "type"="table" AND "sql" IS NOT NULL;')
        tables = cursor.fetchall()
        
        # Recreate each table exactly as it is defined in the dump file, then copy the
        # data across. This avoids hard-to-explain import errors, possibly because
        # Django/South does not create the tables the same way as the dump conversion scripts.
        for table in tables:
            tablename = table[0]
            tablesql  = table[1]
            
            # Drop and recreate the table
            cursor.execute('DROP TABLE "%s";' % tablename)
            cursor.execute(tablesql)
            
            # Insert the data
            cursor.execute('INSERT INTO "%s" SELECT * FROM "eve"."%s";' % (tablename, tablename))

        # Get the indices of the attached DB and recreate them
        cursor.execute('SELECT "sql" FROM "eve"."sqlite_master" WHERE "type"="index" AND "sql" IS NOT NULL;')
        indices = cursor.fetchall()
        for index in indices:
            cursor.execute(index[0])
            
        cursor.execute('DETACH DATABASE "eve";')
        cursor.close()
        connection.commit()
    else:
        pipe_to_dbshell(dump_file, instance_dir, password=db_password)
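For non-SQLite engines every example hands the work to pipe_to_dbshell(), which is not shown either. Below is a minimal sketch, assuming it streams a SQL file into the instance's manage.py dbshell; the exact command line and the PGPASSWORD convention are assumptions.

import os
import subprocess

def pipe_to_dbshell(sql_file, instance_dir, password=None):
    # Hypothetical sketch: feed 'sql_file' to the instance's 'manage.py dbshell'.
    env = dict(os.environ)
    if password:
        # How the password reaches the client is engine specific; the PostgreSQL
        # convention is used here purely as an illustration.
        env['PGPASSWORD'] = password
    manage_py = os.path.join(instance_dir, 'manage.py')
    with open(sql_file, 'rb') as sql:
        proc = subprocess.Popen(['python', manage_py, 'dbshell'],
                                stdin=sql, cwd=instance_dir, env=env)
        returncode = proc.wait()
    if returncode != 0:
        raise RuntimeError('dbshell exited with status %d' % returncode)
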
Example #4
def load_dump_file(instance_dir, savedir, tempdir, datadump, db_engine,
                   db_password):
    if not os.path.exists(datadump):
        # download from URL
        dump_archive = os.path.join(savedir, os.path.basename(datadump))
        log('Downloading %r to %r...', datadump, dump_archive)
        req = urllib2.urlopen(datadump)
        with open(dump_archive, 'wb') as fp:
            shutil.copyfileobj(req, fp)
        req.close()
    else:
        dump_archive = datadump

    extension = os.path.splitext(dump_archive)[-1]
    if extension in ('.bz2', '.gz', '.zip'):
        # decompress archive
        log('Expanding %r to %r...', dump_archive, tempdir)
        dump_file = expand(dump_archive, tempdir)
        extension = os.path.splitext(dump_file)[-1]
    else:
        dump_file = dump_archive

    log('Importing %r... (this may take a while!)', dump_file)

    if extension in ('.json',):
        load_json_dump(instance_dir, dump_file, tempdir)
    elif 'sqlite' in db_engine:
        import sqlite3
        config = SafeConfigParser()
        db_dir = ''
        if config.read([os.path.join(instance_dir, 'settings.ini')]):
            db_dir = config.get('database', 'sqlite_db_dir')
        if not db_dir:
            db_dir = os.path.join(instance_dir, 'db')

        db_file = os.path.join(db_dir, 'ecm.sqlite')

        # Connect to the instance DB and attach the SDE
        connection = sqlite3.connect(db_file)
        cursor = connection.cursor()
        cursor.execute('ATTACH DATABASE \'%s\' AS "eve";' % dump_file)

        # Get the tables from the SDE (import them all)
        cursor.execute(
            'SELECT "name","sql" FROM "eve"."sqlite_master" WHERE "type"="table" AND "sql" IS NOT NULL;'
        )
        tables = cursor.fetchall()

        # Recreate each table exactly as it is defined in the dump file, then copy the
        # data across. This avoids hard-to-explain import errors, possibly because
        # Django/South does not create the tables the same way as the dump conversion scripts.
        for table in tables:
            tablename = table[0]
            tablesql = table[1]

            # Drop and recreate the table
            cursor.execute('DROP TABLE "%s";' % tablename)
            cursor.execute(tablesql)

            # Insert the data
            cursor.execute('INSERT INTO "%s" SELECT * FROM "eve"."%s";' %
                           (tablename, tablename))

        # Get the indices of the attached DB and recreate them
        cursor.execute(
            'SELECT "sql" FROM "eve"."sqlite_master" WHERE "type"="index" AND "sql" IS NOT NULL;'
        )
        indices = cursor.fetchall()
        for index in indices:
            cursor.execute(index[0])

        cursor.execute('DETACH DATABASE "eve";')
        cursor.close()
        connection.commit()
    else:
        pipe_to_dbshell(dump_file, instance_dir, password=db_password)
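For context, a call to load_dump_file() as defined above might look like this; the paths and the dump URL are placeholders.

import shutil
import tempfile

instance_dir = '/srv/ecm/instance'      # placeholder instance directory
savedir = '/srv/ecm/downloads'          # where downloaded archives are kept
tempdir = tempfile.mkdtemp()
try:
    load_dump_file(instance_dir, savedir, tempdir,
                   'http://example.com/eve-sde-sqlite.sql.bz2',  # placeholder dump URL
                   db_engine='django.db.backends.sqlite3',
                   db_password='')
finally:
    shutil.rmtree(tempdir)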