def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # the table as created by the upgrade, loaded here so it can be dropped
    new_agent_heartbeats = Table('agent_heartbeats', meta, autoload=True)

    try:
        drop_tables([new_agent_heartbeats])
    except OperationalError as e:
        logger.warning("This table may have been dropped by some other means.")
        logger.warning(e)

    # reset the migrate_engine
    meta = MetaData()
    meta.bind = migrate_engine

    # original table from migration 005_heartbeat.py
    previous_agent_heartbeats = Table(
        'agent_heartbeats', meta,
        Column('id', String(36), primary_key=True, nullable=False),
        Column('instance_id', String(36), nullable=False),
        Column('updated_at', DateTime()))

    create_tables([previous_agent_heartbeats])
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    backups = Table('backups', meta, autoload=True)
    backups_instance_id_idx = Index("backups_instance_id",
                                    backups.c.instance_id)
    backups_deleted_idx = Index("backups_deleted", backups.c.deleted)
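    # an Index constructed from just its name and column carries enough
    # information to emit DROP INDEX against the bound engine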

    backups_instance_id_idx.drop()
    backups_deleted_idx.drop()
def downgrade(migrate_engine):
    print("033 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine

    tables = [define_neutron_backend_table(meta)]
    drop_tables(tables)
def upgrade(migrate_engine):
    print("003 upgrade")
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_template_table(meta),
              define_host_template_table(meta)]
    create_tables(tables)
def downgrade(migrate_engine):
    print("042 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine

    tables = [define_template_config_roles_table(meta)]
    drop_tables(tables)

    configs = Table('configs', meta, autoload=True)
    configs.c.template_config_id.alter(type=String(36))

    template_config = Table('template_config', meta, autoload=True)
    template_config.c.id.alter(type=String(36))
    template_config.c.name.alter(type=String(50))

    template_func = Table('template_func', meta, autoload=True)
    template_func.c.id.alter(type=String(36))
    template_func.c.name.alter(type=String(36))

    template_func_configs = Table('template_func_configs', meta,
                                  autoload=True)
    template_func_configs.c.func_id.alter(type=String(36))
    template_func_configs.c.config_id.alter(type=String(36))

    config_service = Table('config_service', meta, autoload=True)
    config_service.c.config_id.alter(type=String(36))
Example #6
def reflect(engine, models, schema=None):
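    """Reflect all tables from engine and map each onto a class in models
    (snake_case table names are matched to CamelCase class names). Return
    the mappers, the reflected tables, and a Session factory bound to the
    engine."""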
    metadata = MetaData()
    metadata.bind = engine

    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=SAWarning)
        metadata.reflect(schema=schema, views=False)

    if schema is not None:
        tables = {table_name.replace(str(schema) + ".", ""): table
                  for table_name, table in metadata.tables.items()}
    else:
        tables = metadata.tables

    clear_mappers()

    mappers = {}
    for table_name, table in tables.items():
        modelname = "".join([word.capitalize() for word in table_name.split("_")])

        try:
            model = getattr(models, modelname)
        except AttributeError:
            stderr.write("Missing model for table %s\n" % table_name)
        else:
            mappers[modelname] = mapper(model, table)

    Session = sessionmaker(bind=engine, autocommit=False, autoflush=True)

    return mappers, tables, Session
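# A minimal usage sketch for reflect() (hypothetical names): assuming a
# `models` module whose classes are named after the tables, e.g. a class
# AgentHeartbeats for table agent_heartbeats.
#
#     engine = create_engine('sqlite:///app.db')
#     mappers, tables, Session = reflect(engine, models)
#     session = Session()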
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    modules = Table('modules', meta, autoload=True)
    modules.drop_column(COLUMN_NAME_1)
    modules.drop_column(COLUMN_NAME_2)
    modules.drop_column(COLUMN_NAME_3)
def upgrade(migrate_engine):
    print("001 upgrade")
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_hosts_table(meta),
              define_discover_hosts_table(meta),
              define_clusters_table(meta),
              define_cluster_hosts_table(meta),
              define_networks_table(meta),
              define_ip_ranges_table(meta),
              define_host_interfaces_table(meta),
              define_config_sets_table(meta),
              define_components_table(meta),
              define_services_table(meta),
              define_roles_table(meta),
              define_host_roles_table(meta),
              define_service_roles_table(meta),
              define_config_files_table(meta),
              define_configs_table(meta),
              define_config_set_items_table(meta),
              define_config_historys_table(meta),
              define_tasks_table(meta),
              define_task_infos_table(meta),
              define_repositorys_table(meta),
              define_users_table(meta),
              define_versions_table(meta),
              define_assigned_networks_table(meta),
              define_logic_networks_table(meta),
              define_routers_table(meta),
              define_subnets_table(meta),
              define_float_ip_ranges_table(meta),
              define_dns_nameservers_table(meta),
              define_service_disks_table(meta),
              define_cinder_volumes_table(meta)]
    create_tables(tables)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # add column:
    backups = Table('backups', meta, autoload=True)
    backups.create_column(Column('parent_id', String(36), nullable=True))
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    tasks_table = Table('tasks', meta, autoload=True)
    task_info_table = Table('task_info', meta, autoload=True)

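    # re-create the migrated columns on tasks, then copy each row's
    # input/result/message back from task_info before dropping it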
    for col_name in TASKS_MIGRATE_COLUMNS:
        column = Column(col_name, Text())
        column.create(tasks_table)

    task_info_records = task_info_table.select().execute().fetchall()

    for task_info in task_info_records:
        values = {
            'input': task_info.input,
            'result': task_info.result,
            'message': task_info.message
        }

        tasks_table\
            .update(values=values)\
            .where(tasks_table.c.id == task_info.task_id)\
            .execute()

    drop_tables([task_info_table])
Example #11
    def test_clone_table_adds_or_deletes_columns(self):
        meta = MetaData()
        meta.bind = self.engine

        table = Table('dummy',
                      meta,
                      Column('id', String(36), primary_key=True,
                             nullable=False),
                      Column('A', Boolean, default=False)
                      )
        table.create()

        newcols = [
            Column('B', Boolean, default=False),
            Column('C', String(255), default='foobar')
        ]
        ignorecols = [
            table.c.A.name
        ]
        new_table = migrate_utils.clone_table('new_dummy', table, meta,
                                              newcols=newcols,
                                              ignorecols=ignorecols)

        col_names = [c.name for c in new_table.columns]

        self.assertEqual(3, len(col_names))
        self.assertIsNotNone(new_table.c.B)
        self.assertIsNotNone(new_table.c.C)
        self.assertNotIn('A', col_names)
Example #12
def migrate(engine, connection, revmap):
    """Given engine, connection and revision map, go through the
    ticket descriptions and comments and migrate the svn revisions to
    git hashes.

    """
    metadata = MetaData()
    metadata.bind = engine

    tickets = Table('ticket', metadata, autoload=True)
    changes = Table('ticket_change', metadata, autoload=True)

    trans = connection.begin()
    try:

        count = migrate_table(connection, revmap,
            tickets, [tickets.c.id], 
            [tickets.c.description]
        )
        count += migrate_table(connection, revmap,
            changes, [changes.c.ticket, changes.c.time, changes.c.field], 
            [changes.c.newvalue]
        )

        trans.commit()
        
        print("Migrated %i records" % count)

    except Exception as e:
        trans.rollback()
        die("Migration error: %s" % repr(e), "Changes were rolled back")
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    instances = Table('instances', meta, autoload=True)
    service_type = Column('service_type', String(36))
    instances.create_column(service_type)
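    # backfill existing rows so the new column is populated for them as well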
    instances.update().values({'service_type': 'mysql'}).execute()
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # drop column:
    backups = Table('backups', meta, autoload=True)
    backups.drop_column('parent_id')
Example #15
    @classmethod
    def copy_star_schema(cls, bind=None):
        m = MetaData()
        for t in cls.data_tables:
            i2b2_star.metadata.tables[t].tometadata(m)
        if bind:
            m.bind = bind
        return m
Example #16
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    instances = Table('instances', meta, autoload=True)
    instances.create_column(
        Column(COLUMN_NAME, String(36), ForeignKey('instances.id'),
               nullable=True))
def upgrade(migrate_engine):
    """Create shares and share_access_map tables."""
    meta = MetaData()
    meta.bind = migrate_engine

    shares = Table('shares', meta, autoload=True)
    share_snapshots = Table(
        'share_snapshots', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('deleted_at', DateTime),
        Column('deleted', Boolean),
        Column('id', String(length=36), primary_key=True, nullable=False),
        Column('user_id', String(length=255)),
        Column('project_id', String(length=255)),
        Column('share_id', String(36), ForeignKey('shares.id'),
               nullable=False),
        Column('size', Integer),
        Column('status', String(length=255)),
        Column('progress', String(length=255)),
        Column('display_name', String(length=255)),
        Column('display_description', String(length=255)),
        Column('share_size', Integer),
        Column('share_proto', String(length=255)),
        Column('export_location', String(255)),
        mysql_engine='InnoDB')

    try:
        share_snapshots.create()
    except Exception:
        LOG.error(_("Table %r not created!"), share_snapshots)
        raise
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    ip_blocks = Table('ip_blocks', meta, autoload=True)
    network_name = Column('network_name', String(255))
    ip_blocks.create_column(network_name)
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    service_statuses = Table('service_statuses', meta, autoload=True)
    idx = Index("service_statuses_instance_id", service_statuses.c.instance_id)
    idx.drop()
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    ip_blocks = Table('ip_blocks', meta, autoload=True)
    max_allocation = Column('max_allocation', Integer())
    ip_blocks.create_column(max_allocation)
def downgrade(migrate_engine):
    print("036 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine

    tables = [define_component_config_table(meta)]
    drop_tables(tables)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # add column:
    instances = Table('instances', meta, autoload=True)
    instances.create_column(Column('tenant_id', String(36), nullable=True))
    instances.create_column(Column('server_status', String(64)))
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # add column:
    instances = Table('instances', meta, autoload=True)
    instances.create_column(Column('deleted', Boolean()))
    instances.create_column(Column('deleted_at', DateTime()))
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    # int->char casts in pgsql still work fine without any USING clause,
    #  so downgrade is not affected.
    # modify column:
    instances = Table('instances', meta, autoload=True)
    instances.c.flavor_id.alter(type=String(36))
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_workflow_pattern_table(meta),
              define_ticket_template_table(meta),
              define_ticket_table(meta),
              define_workflow_table(meta)]
    drop_tables(tables)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    ip_addresses = Table('ip_addresses', meta, autoload=True)
    allocated = Column('allocated', Boolean(), default=False)
    ip_addresses.create_column(allocated)
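    # mark every pre-existing address as allocated; only rows created from
    # now on get the False default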
    migrate_engine.execute(ip_addresses.update().values(allocated=True))
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    backups = Table('backups', meta, autoload=True)
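    # datastore_versions must be present in this MetaData so the ForeignKey
    # below can resolve; the Table object itself is not otherwise used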
    Table('datastore_versions', meta, autoload=True)
    datastore_version_id = Column('datastore_version_id', String(36),
                                  ForeignKey('datastore_versions.id'))
    backups.create_column(datastore_version_id)
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # drop column:
    instances = Table('instances', meta, autoload=True)
    instances.drop_column('deleted')
    instances.drop_column('deleted_at')
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_metadef_objects_table(meta),
              define_metadef_properties_table(meta),
              define_metadef_namespace_resource_types_table(meta),
              define_metadef_resource_types_table(meta),
              define_metadef_namespaces_table(meta)]
    drop_tables(tables)
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # drop column:
    instances = Table("instances", meta, autoload=True)

    instances.drop_column("flavor_id")
    instances.drop_column("volume_size")
Example #31

# connection info
user = readProperties("user")
pswd = readProperties("pswd")
host = readProperties("host")
dbname = readProperties("dbname")

connectinfo = 'mysql+mysqlconnector://' + user + ':' + pswd + '@' + host + '/' + dbname
connectinfo2 = 'mysql+mysqlconnector://' + user + ':' + pswd + '@' + host + '/' + dbname + '_log'
ENGINE = create_engine(connectinfo,
                       encoding='utf-8',
                       pool_size=20,
                       max_overflow=0)
METADATA = MetaData()
METADATA.bind = ENGINE

Session = sessionmaker(bind=ENGINE)


class MysqlConnector:
    session = None

    def __init__(self):
        self.session = Session()

    def getTable(self, tableName):
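        # autoload=True reflects the table from the live database; METADATA
        # caches it, so later lookups of the same name reuse the Table object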
        table_object = Table(tableName, METADATA, autoload=True)
        return table_object

    def getMax(self, tableColumn):
Example #32
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_job_metadata_table(meta)]
    create_tables(tables)
Example #33
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    instances = Table('instances', meta, autoload=True)
    # modify column:
    instances.c.flavor_id.alter(type=Integer())
Example #34
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    instances = Table('instances', meta, autoload=True)
    instances.drop_column(COLUMN_NAME)
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_namespace_table(meta)]
    drop_tables(tables)
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_project_item_record_table(meta)]
    drop_tables(tables)
Example #37
def db_export_metadefs(engine, metadata_path=None):
    meta = MetaData()
    meta.bind = engine

    _export_data_to_file(meta, metadata_path)
Example #38
# connection info
user = readIni("MYSQL", "USER")
pswd = readIni("MYSQL", "PASS")
url = readIni("MYSQL", "URL")
dbname = readIni("MYSQL", "DBNAME")

connectinfo = 'mysql+mysqlconnector://' + user + ':' + pswd + '@' + url + '/' + dbname
connectinfo2 = 'mysql+mysqlconnector://' + user + ':' + pswd + '@' + url + '/' + dbname + '_log'
#engine = create_engine('mysql://*****:*****@***.***.***.***/dbname', encoding='utf-8')
ENGINE = create_engine(connectinfo,
                       encoding='utf-8',
                       pool_size=20,
                       max_overflow=0)
METADATA = MetaData()
METADATA.bind = ENGINE
Session = sessionmaker(bind=ENGINE)

ENGINE2 = create_engine(connectinfo2,
                        encoding='utf-8',
                        pool_size=20,
                        max_overflow=0)
METADATA2 = MetaData()
METADATA2.bind = ENGINE2


class MysqlConnector(object):
    logger = IaasLogger()
    session = None
    userNo = None
Example #39
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    instances = Table('instances', meta, autoload=True)
    instances.c.task_description.alter(type=String(255))
Example #40
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_schedules_table(meta)]
    drop_tables(tables)
Example #41
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_schedules_table(meta)]
    create_tables(tables)
Example #42
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_job_metadata_table(meta)]
    drop_tables(tables)
Example #43
def db_load_metadefs(engine, metadata_path=None):
    meta = MetaData()
    meta.bind = engine

    _populate_metadata(meta, metadata_path)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_project_item_record_table(meta)]
    create_tables(tables)
Example #45
def db_unload_metadefs(engine):
    meta = MetaData()
    meta.bind = engine

    _clear_metadata(meta)
Example #46
from os import remove
from flask import jsonify, request
from sqlalchemy import create_engine
from sqlalchemy.schema import MetaData
from sqlite3 import dbapi2 as sqlite

from cv2 import imread, COLOR_RGB2GRAY, cvtColor
from cv2.face import LBPHFaceRecognizer_create
from faces_recognizer import Recognizer

engine = create_engine('sqlite+pysqlite:///../sqlite/cafeDB.db', module=sqlite)
meta = MetaData()
meta.reflect(bind=engine)
meta.bind = engine
users_table = meta.tables['User']
recognizer = Recognizer(LBPHFaceRecognizer_create)
recognizer.set_recognizer_xml('faces.xml')


def add_route(app):
    ''' Add routes to a flask app Class. See API swagger doc'''
    @app.route('/users/<id>', methods=['GET'])
    def get_user(id):
        try:
            id = int(id)
        except ValueError:
            return "Invalid id", 400
        print(id)
        result = users_table.select().where(
            users_table.c.id == id).execute().first()
        if result:
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    # modify column:
    datastore_versions = Table('datastore_versions', meta, autoload=True)
    datastore_versions.c.name.alter(unique=True)
Example #49
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    # drop column:
    instances = Table('instances', meta, autoload=True)
    instances.drop_column('service_type')
Example #50
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_image_properties_table(meta)]
    create_tables(tables)
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_namespace_table(meta)]
    create_tables(tables)
Example #52
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_image_properties_table(meta)]
    drop_tables(tables)
Example #53
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    # modify column:
    instances = Table('instances', meta, autoload=True)
    instances.c.flavor_id.alter(type=String(36))
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    instances = Table('instances', meta, autoload=True)
    instances.create_column(Column('access', Text(), nullable=True))
Example #55
def upgrade(migrate_engine):
    print("035 upgrade")
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_host_patch_history_table(meta)]
    create_tables(tables)
Example #56
def create_metadata(settings: DatabaseSettings) -> Tuple[MetaData, ModelMap]:
    engine = create_async_engine(settings.connection_url,
                                 echo=settings.echo_statements)
    metadata = MetaData()
    metadata.bind = engine
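    # NOTE: MetaData.bind is deprecated in SQLAlchemy 1.4 and removed in 2.0;
    # newer code should pass the engine to operations explicitly instead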
    return metadata, register_tables(metadata=metadata)
Example #57
def downgrade(migrate_engine):
    print("035 downgrade")
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_host_patch_history_table(meta)]
    drop_tables(tables)
Example #58
def sample_data(nrand=1000,
                date=datetime.datetime(2015, 1, 1, 0, 0),
                id_=None,
                cat=None):
    """
    Extract the target, cited and random patents from the DB

    Input:
        - nrand: Number of random patents to sample (default: 1000)
        - date: Threshold date to separate target and rest patents (default: 01/01/2015)
        - id_: To be set if the scores should be evaluated only for one target patent
               e.g. for the ones scored by patent attorney (default None)
        - cat: To be set if the scores should be evaluated only for a certain category
               e.g. 'A61M' (default None)

    Returns:
        - random_pats:
        - cited_pats:
        - target_pats:
        - dupl_pats:
        - cited_ids:
        - dupl_ids:
    """
    session = load_session()
    # all patents published after given date are considered as target patents
    target_pats = session.query(Patent).filter(Patent.pub_date >= date)
    rest_set = session.query(Patent).filter(Patent.pub_date < date)
    # if the scores are to be calculated for only one target patent
    if id_:
        print("evaluating simscores for patent %s" % id_)
        target_pats = session.query(Patent).filter(Patent.id == id_)
    # if the scores should be evaluated only for a certain category
    elif cat:
        print "evaluating simscores for category %s" % cat
        cat_pats = session.query(Patent).filter(Patent.category.contains(cat))
        cat_pats_ids = [pat.id for pat in cat_pats]
        target_pats = cat_pats.filter(
            Patent.pub_date >= datetime.datetime(2015, 1, 1, 0, 0))
        # the random patents are sampled from the patents published before 2015
        rest_set = cat_pats.filter(
            Patent.pub_date < datetime.datetime(2015, 1, 1, 0, 0))
    else:
        print "evaluating for all target patents"
    # the random patents are sampled from the patents published before given date
    engine = session.get_bind()
    metadata = MetaData()
    metadata.bind = engine
    # create tables for cited and duplicate patents
    cited = Table(
        "cited", metadata,
        Column('id', String, ForeignKey(Patent.id), primary_key=True))
    duplicates = Table(
        "duplicates", metadata,
        Column('id', String, ForeignKey(Patent.id), primary_key=True))
    try:
        cited.drop()
        duplicates.drop()
    except Exception:
        # the tables may not exist yet on the first run
        pass
    cited.create()
    duplicates.create()
    conn = engine.connect()
    # collect IDs for each target patent
    cited_ids = {}
    dupl_ids = {}
    # get the duplicates and cited patents for all target patents
    print("getting duplicates and cited patents")
    for patent in target_pats:
        # get duplicate ids(read CSVs)
        with open(
                '/home/lea/Documents/master_thesis/patent_search/pats_2015_apa_lists/apa_list_%s.csv'
                % str(patent.id)) as apa_file:
            apa_list_reader = csv.reader(apa_file, delimiter='\t')
            duplicates_list = next(apa_list_reader)
            dupl_ids[patent.id] = [
                re.sub(' ', '', pat) for pat in duplicates_list
            ]
        # get cited ids
        citations = session.query(Citation).filter(
            Citation.citing_pat == patent.id)
        cited_patents = []
        # check, if cited id is a duplicate
        for pat in citations:
            # if the simcoefs are to be evaluated only for a certain category
            if cat:
                # check if the cited pat is in the given category
                if pat.cited_pat not in cat_pats_ids:
                    continue
            if pat.cited_pat not in dupl_ids[patent.id]:
                cited_patents.append(pat.cited_pat)
        cited_ids[patent.id] = cited_patents
    # fill tables with cited and duplicate patents
    print("filling tables cited and duplicates")
    # unite all cited and duplicate ids in one list
    all_cited_ids = []
    all_dupl_ids = []
    for pid in cited_ids.keys():
        # fill table with citations
        # iterate over a copy so removing empty ids does not skip elements
        for cited_id in list(cited_ids[pid]):
            # check if id equals empty string, if so remove
            if cited_id == '':
                cited_ids[pid].remove(cited_id)
            # insert patent into table
            else:
                try:
                    ins = cited.insert().values(id=cited_id)
                    conn.execute(ins)
                    all_cited_ids.append(cited_id)
                # error is thrown if patent is already in the DB
                except sqlalchemy.exc.IntegrityError:
                    continue
        # fill table with duplicates
        # get duplicate patents for the current target patent
        duplicate_pats = dupl_ids[pid]
        dupls_temp = []
        for dupl_id in duplicate_pats:
            # if the simcoefs are to be evaluated only for a certain category
            if cat:
                # check if the duplicate is in the category
                if dupl_id not in cat_pats_ids:
                    continue
            # check if id equals empty string
            if dupl_id == '':
                continue
            # check if the duplicate is already in the DB
            elif session.query(Patent).filter(
                    Patent.id == dupl_id).count() == 0:
                continue
            # insert duplicate patent into duplicates table
            else:
                try:
                    ins = duplicates.insert().values(id=dupl_id)
                    conn.execute(ins)
                    all_dupl_ids.append(dupl_id)
                    dupls_temp.append(dupl_id)
                # error is thrown if patent is already in the DB
                except sqlalchemy.exc.IntegrityError:
                    continue