def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()

    Table("form_definition", metadata, autoload=True)
    Table("form_values", metadata, autoload=True)

    def get_value(lst, index):
        try:
            return str(lst[index]).replace("'", "''")
        except IndexError:
            return ''

    # Go through the entire table and add a 'name' attribute for each field
    # in the list of fields for each form definition
    cmd = "SELECT f.id, f.fields FROM form_definition AS f"
    result = migrate_engine.execute(cmd)
    for row in result:
        form_definition_id = row[0]
        fields = str(row[1])
        if not fields.strip():
            continue
        fields_list = loads(_sniffnfix_pg9_hex(fields))
        if len(fields_list):
            for index, field in enumerate(fields_list):
                field['name'] = 'field_%i' % index
                field['helptext'] = field['helptext'].replace("'", "''").replace('"', "")
                field['label'] = field['label'].replace("'", "''")
            fields_json = dumps(fields_list)
            if migrate_engine.name == 'mysql':
                cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" % (fields_json, form_definition_id)
            else:
                cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" % (fields_json, form_definition_id)
            migrate_engine.execute(cmd)
    # replace the values list in the content field of the form_values table with a name:value dict
    cmd = "SELECT form_values.id, form_values.content, form_definition.fields" \
          " FROM form_values, form_definition" \
          " WHERE form_values.form_definition_id=form_definition.id" \
          " ORDER BY form_values.id ASC"
    result = migrate_engine.execute(cmd)
    for row in result:
        form_values_id = int(row[0])
        if not str(row[1]).strip():
            continue
        row1 = str(row[1]).replace('\n', '').replace('\r', '')
        values_list = loads(str(row1).strip())
        if not str(row[2]).strip():
            continue
        fields_list = loads(str(row[2]).strip())
        if fields_list and isinstance(values_list, list):
            values_dict = {}
            for field_index, field in enumerate(fields_list):
                field_name = field['name']
                values_dict[field_name] = get_value(values_list, field_index)
            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % (dumps(values_dict), form_values_id)
            migrate_engine.execute(cmd)
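
# --- Illustrative sketch, not part of the migration above: the UPDATE
# statements interpolate JSON into SQL and escape quotes by hand.  Assuming
# the SQLAlchemy Engine.execute(text(...), **params) API that
# sqlalchemy-migrate era scripts run against, the same update could use bound
# parameters instead; the helper name below is hypothetical.
from sqlalchemy import text


def _update_form_definition_fields(migrate_engine, form_definition_id, fields_json):
    # Bound parameters let the DB driver handle quoting, so no manual
    # "'" -> "''" escaping of the serialized JSON is required.
    stmt = text("UPDATE form_definition SET fields=:fields WHERE id=:id")
    migrate_engine.execute(stmt, fields=fields_json, id=form_definition_id)
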
def downgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()
    try:
        Table("form_definition", metadata, autoload=True)
    except Exception as e:
        log.debug("Loading 'form_definition' table failed: %s" % str(e))
    try:
        Table("form_values", metadata, autoload=True)
    except Exception as e:
        log.debug("Loading 'form_values' table failed: %s" % str(e))
    # remove the name attribute in the content column JSON dict in the form_values table
    # and restore it to a list of values
    cmd = (
        "SELECT form_values.id, form_values.content, form_definition.fields"
        " FROM form_values, form_definition"
        " WHERE form_values.form_definition_id=form_definition.id"
        " ORDER BY form_values.id ASC"
    )
    result = migrate_engine.execute(cmd)
    for row in result:
        form_values_id = int(row[0])
        if not str(row[1]).strip():
            continue
        values_dict = loads(str(row[1]))
        if not str(row[2]).strip():
            continue
        fields_list = loads(str(row[2]))
        if fields_list:
            values_list = []
            for field_index, field in enumerate(fields_list):
                field_name = field["name"]
                field_value = values_dict[field_name]
                values_list.append(field_value)
            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % (dumps(values_list), form_values_id)
            migrate_engine.execute(cmd)
    # remove name attribute from the field column of the form_definition table
    cmd = "SELECT f.id, f.fields FROM form_definition AS f"
    result = migrate_engine.execute(cmd)
    for row in result:
        form_definition_id = row[0]
        fields = str(row[1])
        if not fields.strip():
            continue
        fields_list = loads(_sniffnfix_pg9_hex(fields))
        if len(fields_list):
            for index, field in enumerate(fields_list):
                if "name" in field:
                    del field["name"]
            if migrate_engine.name == "mysql":
                cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" % (
                    dumps(fields_list),
                    form_definition_id,
                )
            else:
                cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" % (dumps(fields_list), form_definition_id)
            migrate_engine.execute(cmd)
def downgrade(migrate_engine):
    metadata.bind = migrate_engine
    metadata.reflect()
    try:
        Table("form_definition", metadata, autoload=True)
    except Exception:
        log.exception("Loading 'form_definition' table failed.")
    try:
        Table("form_values", metadata, autoload=True)
    except Exception:
        log.exception("Loading 'form_values' table failed.")
    # remove the name attribute in the content column JSON dict in the form_values table
    # and restore it to a list of values
    cmd = "SELECT form_values.id, form_values.content, form_definition.fields" \
          " FROM form_values, form_definition" \
          " WHERE form_values.form_definition_id=form_definition.id" \
          " ORDER BY form_values.id ASC"
    result = migrate_engine.execute(cmd)
    for row in result:
        form_values_id = int(row[0])
        if not str(row[1]).strip():
            continue
        values_dict = loads(str(row[1]))
        if not str(row[2]).strip():
            continue
        fields_list = loads(str(row[2]))
        if fields_list:
            values_list = []
            for field_index, field in enumerate(fields_list):
                field_name = field['name']
                field_value = values_dict[field_name]
                values_list.append(field_value)
            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % (
                dumps(values_list), form_values_id)
            migrate_engine.execute(cmd)
    # remove name attribute from the field column of the form_definition table
    cmd = "SELECT f.id, f.fields FROM form_definition AS f"
    result = migrate_engine.execute(cmd)
    for row in result:
        form_definition_id = row[0]
        fields = str(row[1])
        if not fields.strip():
            continue
        fields_list = loads(_sniffnfix_pg9_hex(fields))
        if len(fields_list):
            for index, field in enumerate(fields_list):
                if 'name' in field:
                    del field['name']
            if migrate_engine.name == 'mysql':
                cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" % (
                    dumps(fields_list), form_definition_id)
            else:
                cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" % (
                    dumps(fields_list), form_definition_id)
            migrate_engine.execute(cmd)
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)

    ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, autoload=True )

    metadata.reflect()
    # Create the tables.
    try:
        ToolVersion_table.create()
    except Exception as e:
        log.debug( "Creating tool_version table failed: %s" % str( e ) )
    try:
        ToolVersionAssociation_table.create()
    except Exception as e:
        log.debug( "Creating tool_version_association table failed: %s" % str( e ) )
    # Populate the new tool_version table with the tools included in installed tool shed repositories.
    cmd = "SELECT id, metadata FROM tool_shed_repository"
    result = migrate_engine.execute( cmd )
    count = 0
    for row in result:
        if row[1]:
            tool_shed_repository_id = row[0]
            repository_metadata = loads( _sniffnfix_pg9_hex( str( row[1] ) ) )
            # Create a new row in the tool_version table for each tool included in the repository.  We will NOT
            # handle tool_version_associations because we do not have the information we need to do so.
            tools = repository_metadata.get( 'tools', [] )
            for tool_dict in tools:
                cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
                    ( nextval( migrate_engine, 'tool_version' ), localtimestamp( migrate_engine ), localtimestamp( migrate_engine ), tool_dict[ 'guid' ], tool_shed_repository_id )
                migrate_engine.execute( cmd )
                count += 1
    print("Added %d rows to the new tool_version table." % count)
    # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
    try:
        ToolIdGuidMap_table.drop()
    except Exception as e:
        log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) )
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)

    ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, autoload=True )

    metadata.reflect()
    # Create the tables.
    try:
        ToolVersion_table.create()
    except Exception:
        log.exception("Creating tool_version table failed.")
    try:
        ToolVersionAssociation_table.create()
    except Exception:
        log.exception("Creating tool_version_association table failed.")
    # Populate the new tool_version table with the tools included in installed tool shed repositories.
    cmd = "SELECT id, metadata FROM tool_shed_repository"
    result = migrate_engine.execute( cmd )
    count = 0
    for row in result:
        if row[1]:
            tool_shed_repository_id = row[0]
            repository_metadata = loads( _sniffnfix_pg9_hex( str( row[1] ) ) )
            # Create a new row in the tool_version table for each tool included in the repository.  We will NOT
            # handle tool_version_associations because we do not have the information we need to do so.
            tools = repository_metadata.get( 'tools', [] )
            for tool_dict in tools:
                cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
                    ( nextval( migrate_engine, 'tool_version' ), localtimestamp( migrate_engine ), localtimestamp( migrate_engine ), tool_dict[ 'guid' ], tool_shed_repository_id )
                migrate_engine.execute( cmd )
                count += 1
    print("Added %d rows to the new tool_version table." % count)
    # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
    try:
        ToolIdGuidMap_table.drop()
    except Exception:
        log.exception("Dropping tool_id_guid_map table failed.")
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    cmd = "SELECT form_values.id as id, form_values.content as field_values, form_definition.fields as fdfields " \
          + " FROM form_definition, form_values " \
          + " WHERE form_values.form_definition_id=form_definition.id " \
          + " ORDER BY form_values.id"
    result = migrate_engine.execute(cmd)
    corrupted_rows = 0
    for row in result:
        # first check if loading the dict from the json succeeds
        # if that fails, it means that the content field is corrupted.
        try:
            field_values_dict = loads(
                _sniffnfix_pg9_hex(str(row['field_values'])))
        except Exception:
            corrupted_rows = corrupted_rows + 1
            # content field is corrupted
            fields_list = loads(_sniffnfix_pg9_hex(str(row['fdfields'])))
            field_values_str = _sniffnfix_pg9_hex(str(row['field_values']))
            try:
                # Encoding errors?  Just to be safe.
                print("Attempting to fix row %s" % row['id'])
                print("Prior to replacement: %s" % field_values_str)
            except Exception:
                pass
            field_values_dict = {}
            # look for each field name in the values and extract its value (string)
            for index in range(len(fields_list)):
                field = fields_list[index]
                field_name_key = '"%s": "' % field['name']
                field_index = field_values_str.find(field_name_key)
                if field_index == -1:
                    # if the field name is not present in the field values dict,
                    # inform the admin that these form values cannot be fixed
                    print(
                        "The 'content' field of row 'id' %i does not have the field '%s' in the 'form_values' table and could not be fixed by this migration script."
                        % (int(row['id']), field['name']))
                else:
                    # check if this is the last field
                    if index == len(fields_list) - 1:
                        # since this is the last field, the value string lies between the
                        # field name and the '"}' string at the end, hence len(field_values_str) - 2
                        value = field_values_str[field_index + len(field_name_key):len(field_values_str) - 2]
                    else:
                        # if this is not the last field then the value string lies between
                        # this field name and the next field name
                        next_field = fields_list[index + 1]
                        next_field_index = field_values_str.find('", "%s": "' % next_field['name'])
                        value = field_values_str[field_index + len(field_name_key):next_field_index]
                    # clean up the value string, escape the required quotes and newline characters
                    value = value.replace("'", "\''")\
                                 .replace('"', '\\\\"')\
                                 .replace('\r', "\\\\r")\
                                 .replace('\n', "\\\\n")\
                                 .replace('\t', "\\\\t")
                    # add to the new values dict
                    field_values_dict[field['name']] = value
            # update the db
            json_values = dumps(field_values_dict)
            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % (
                json_values, int(row['id']))
            migrate_engine.execute(cmd)
            try:
                print("Post replacement: %s" % json_values)
            except Exception:
                pass
    if corrupted_rows:
        print('Fixed %i corrupted rows.' % corrupted_rows)
    else:
        print('No corrupted rows found.')
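
# --- Illustrative, standalone walk-through of the recovery logic above, run
# on a hypothetical corrupted 'content' value (the sample data is made up).
# Each field name from form_definition.fields is located inside the broken
# JSON string, and the text between consecutive keys is taken as that field's
# value.
corrupted = '{"field_0": "Joe "Bloggs"", "field_1": "lab 7"}'  # inner quotes break loads()
fields = [{'name': 'field_0'}, {'name': 'field_1'}]
recovered = {}
for i, f in enumerate(fields):
    key = '"%s": "' % f['name']
    start = corrupted.find(key) + len(key)
    if i == len(fields) - 1:
        value = corrupted[start:len(corrupted) - 2]  # strip the trailing '"}'
    else:
        end = corrupted.find('", "%s": "' % fields[i + 1]['name'])
        value = corrupted[start:end]
    recovered[f['name']] = value
# recovered == {'field_0': 'Joe "Bloggs"', 'field_1': 'lab 7'}
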
    try:
        ToolVersion_table.create()
    except Exception, e:
        log.debug("Creating tool_version table failed: %s" % str(e))
    try:
        ToolVersionAssociation_table.create()
    except Exception, e:
        log.debug("Creating tool_version_association table failed: %s" %
                  str(e))
    # Populate the new tool_version table with the tools included in installed tool shed repositories.
    cmd = "SELECT id, metadata FROM tool_shed_repository"
    result = db_session.execute(cmd)
    count = 0
    for row in result:
        if row[1]:
            tool_shed_repository_id = row[0]
            repository_metadata = from_json_string(
                _sniffnfix_pg9_hex(str(row[1])))
            # Create a new row in the tool_version table for each tool included in the repository.  We will NOT
            # handle tool_version_associations because we do not have the information we need to do so.
            tools = repository_metadata.get('tools', [])
            for tool_dict in tools:
                cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
                    ( nextval( 'tool_version' ), localtimestamp(), localtimestamp(), tool_dict[ 'guid' ], tool_shed_repository_id )
                db_session.execute(cmd)
                count += 1
    print "Added %d rows to the new tool_version table." % count
    # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
    try:
        ToolIdGuidMap_table.drop()
    except Exception, e:
        log.debug("Dropping tool_id_guid_map table failed: %s" % str(e))
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print __doc__
    metadata.reflect()
    cmd = "SELECT form_values.id as id, form_values.content as field_values, form_definition.fields as fdfields " \
          + " FROM form_definition, form_values " \
          + " WHERE form_values.form_definition_id=form_definition.id " \
          + " ORDER BY form_values.id"
    result = migrate_engine.execute( cmd )
    corrupted_rows = 0
    for row in result:
        # first check if loading the dict from the json succeeds
        # if that fails, it means that the content field is corrupted.
        try:
            field_values_dict = loads( _sniffnfix_pg9_hex( str( row['field_values'] ) ) )
        except Exception:
            corrupted_rows = corrupted_rows + 1
            # content field is corrupted
            fields_list = loads( _sniffnfix_pg9_hex( str( row['fdfields'] ) ) )
            field_values_str = _sniffnfix_pg9_hex( str( row['field_values'] ) )
            try:
                # Encoding errors?  Just to be safe.
                print "Attempting to fix row %s" % row['id']
                print "Prior to replacement: %s" % field_values_str
            except:
                pass
            field_values_dict = {}
            # look for each field name in the values and extract its value (string)
            for index in range( len(fields_list) ):
                field = fields_list[index]
                field_name_key = '"%s": "' % field['name']
                field_index = field_values_str.find( field_name_key )
                if field_index == -1:
                    # if the field name is not present in the field values dict,
                    # inform the admin that these form values cannot be fixed
                    print "The 'content' field of row 'id' %i does not have the field '%s' in the 'form_values' table and could not be fixed by this migration script." % ( int( row['id'] ), field['name'] )
                else:
                    # check if this is the last field
                    if index == len( fields_list ) - 1:
                        # since this is the last field, the value string lies between the
                        # field name and the '"}' string at the end, hence len(field_values_str) - 2
                        value = field_values_str[ field_index + len( field_name_key ):len( field_values_str ) - 2 ]
                    else:
                        # if this is not the last field then the value string lies between
                        # this field name and the next field name
                        next_field = fields_list[index + 1]
                        next_field_index = field_values_str.find( '", "%s": "' % next_field['name'] )
                        value = field_values_str[ field_index + len( field_name_key ):next_field_index ]
                    # clean up the value string, escape the required quotes and newline characters
                    value = value.replace( "'", "\''" )\
                                 .replace( '"', '\\\\"' )\
                                 .replace( '\r', "\\\\r" )\
                                 .replace( '\n', "\\\\n" )\
                                 .replace( '\t', "\\\\t" )
                    # add to the new values dict
                    field_values_dict[ field['name'] ] = value
            # update the db
            json_values = dumps(field_values_dict)
            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % ( json_values, int( row['id'] ) )
            migrate_engine.execute( cmd )
            try:
                print "Post replacement: %s" % json_values
            except:
                pass
    if corrupted_rows:
        print 'Fixed %i corrupted rows.' % corrupted_rows
    else:
        print 'No corrupted rows found.'
def upgrade(migrate_engine):
    metadata.bind = migrate_engine
    print(__doc__)
    metadata.reflect()
    try:
        Table( "form_definition", metadata, autoload=True )
    except Exception:
        log.exception("Loading 'form_definition' table failed.")
    try:
        Table( "form_values", metadata, autoload=True )
    except Exception:
        log.exception("Loading 'form_values' table failed.")

    def get_value(lst, index):
        try:
            return str(lst[index]).replace("'", "''")
        except IndexError:
            return ''
    # Go through the entire table and add a 'name' attribute for each field
    # in the list of fields for each form definition
    cmd = "SELECT f.id, f.fields FROM form_definition AS f"
    result = migrate_engine.execute( cmd )
    for row in result:
        form_definition_id = row[0]
        fields = str( row[1] )
        if not fields.strip():
            continue
        fields_list = loads( _sniffnfix_pg9_hex( fields ) )
        if len( fields_list ):
            for index, field in enumerate( fields_list ):
                field[ 'name' ] = 'field_%i' % index
                field[ 'helptext' ] = field[ 'helptext' ].replace("'", "''").replace('"', "")
                field[ 'label' ] = field[ 'label' ].replace("'", "''")
            fields_json = dumps( fields_list )
            if migrate_engine.name == 'mysql':
                cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" % ( fields_json, form_definition_id )
            else:
                cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" % ( fields_json, form_definition_id )
            migrate_engine.execute( cmd )
    # replace the values list in the content field of the form_values table with a name:value dict
    cmd = "SELECT form_values.id, form_values.content, form_definition.fields" \
          " FROM form_values, form_definition" \
          " WHERE form_values.form_definition_id=form_definition.id" \
          " ORDER BY form_values.id ASC"
    result = migrate_engine.execute( cmd )
    for row in result:
        form_values_id = int( row[0] )
        if not str( row[1] ).strip():
            continue
        row1 = str(row[1]).replace('\n', '').replace('\r', '')
        values_list = loads( str( row1 ).strip() )
        if not str( row[2] ).strip():
            continue
        fields_list = loads( str( row[2] ).strip() )
        if fields_list and isinstance(values_list, list):
            values_dict = {}
            for field_index, field in enumerate( fields_list ):
                field_name = field[ 'name' ]
                values_dict[ field_name ] = get_value(values_list, field_index )
            cmd = "UPDATE form_values SET content='%s' WHERE id=%i" % ( dumps( values_dict ), form_values_id )
            migrate_engine.execute( cmd )
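
# --- Illustrative sketch of what a helper like _sniffnfix_pg9_hex is for:
# PostgreSQL 9 can hand back bytea-style hex strings (e.g. '\x7b2261...')
# for legacy columns, so the JSON text has to be un-hexed before loads() can
# parse it.  The body below is an assumption for illustration, not the
# project's actual implementation.
import binascii


def _sniffnfix_pg9_hex(value):
    # Return decoded text if the value looks hex-escaped; otherwise return it
    # unchanged.  Any decoding problem falls back to the original value.
    try:
        if value.startswith('\\x'):
            return binascii.unhexlify(value[2:]).decode('utf-8')
        return value
    except Exception:
        return value
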
    try:
        ToolVersion_table.create()
    except Exception, e:
        log.debug( "Creating tool_version table failed: %s" % str( e ) )
    try:
        ToolVersionAssociation_table.create()
    except Exception, e:
        log.debug( "Creating tool_version_association table failed: %s" % str( e ) )
    # Populate the new tool_version table with the tools included in installed tool shed repositories.
    cmd = "SELECT id, metadata FROM tool_shed_repository"
    result = migrate_engine.execute( cmd )
    count = 0
    for row in result:
        if row[1]:
            tool_shed_repository_id = row[0]
            repository_metadata = loads( _sniffnfix_pg9_hex( str( row[1] ) ) )
            # Create a new row in the tool_version table for each tool included in the repository.  We will NOT
            # handle tool_version_associations because we do not have the information we need to do so.
            tools = repository_metadata.get( 'tools', [] )
            for tool_dict in tools:
                cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
                    ( nextval( migrate_engine, 'tool_version' ), localtimestamp( migrate_engine ), localtimestamp( migrate_engine ), tool_dict[ 'guid' ], tool_shed_repository_id )
                migrate_engine.execute( cmd )
                count += 1
    print "Added %d rows to the new tool_version table." % count
    # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
    try:
        ToolIdGuidMap_table.drop()
    except Exception, e:
        log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) )
     log.debug( "Loading 'form_values' table failed: %s" % str( e ) )
 def get_value(lst, index):
     try:
         return str(lst[index]).replace("'", "''")
     except IndexError,e:
         return ''
 # Go through the entire table and add a 'name' attribute for each field
 # in the list of fields for each form definition
 cmd = "SELECT f.id, f.fields FROM form_definition AS f"
 result = migrate_engine.execute( cmd )
 for row in result:
     form_definition_id = row[0]
     fields = str( row[1] )
     if not fields.strip():
         continue
     fields_list = loads( _sniffnfix_pg9_hex( fields ) )
     if len( fields_list ):
         for index, field in enumerate( fields_list ):
             field[ 'name' ] = 'field_%i' % index
             field[ 'helptext' ] = field[ 'helptext' ].replace("'", "''").replace('"', "")
             field[ 'label' ] = field[ 'label' ].replace("'", "''")
         fields_json = dumps( fields_list )
         if migrate_engine.name == 'mysql':
             cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" %( fields_json, form_definition_id )
         else:
             cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" %( fields_json, form_definition_id )
         migrate_engine.execute( cmd )
 # replace the values list in the content field of the form_values table with a name:value dict
 cmd = "SELECT form_values.id, form_values.content, form_definition.fields" \
       " FROM form_values, form_definition" \
       " WHERE form_values.form_definition_id=form_definition.id" \
    def get_value(lst, index):
        try:
            return str(lst[index]).replace("'", "''")
        except IndexError:
            return ''

    # Go through the entire table and add a 'name' attribute for each field
    # in the list of fields for each form definition
    cmd = "SELECT f.id, f.fields FROM form_definition AS f"
    result = migrate_engine.execute(cmd)
    for row in result:
        form_definition_id = row[0]
        fields = str(row[1])
        if not fields.strip():
            continue
        fields_list = loads(_sniffnfix_pg9_hex(fields))
        if len(fields_list):
            for index, field in enumerate(fields_list):
                field['name'] = 'field_%i' % index
                field['helptext'] = field['helptext'].replace("'", "''").replace('"', "")
                field['label'] = field['label'].replace("'", "''")
            fields_json = dumps(fields_list)
            if migrate_engine.name == 'mysql':
                cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" % (
                    fields_json, form_definition_id)
            else:
                cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" % (
                    fields_json, form_definition_id)
            migrate_engine.execute(cmd)
     log.debug( "Loading 'form_values' table failed: %s" % str( e ) )
 def get_value(lst, index):
     try:
         return str(lst[index]).replace("'", "''")
     except IndexError,e:
         return ''
 # Go through the entire table and add a 'name' attribute for each field
 # in the list of fields for each form definition
 cmd = "SELECT f.id, f.fields FROM form_definition AS f"
 result = migrate_engine.execute( cmd )
 for row in result:
     form_definition_id = row[0]
     fields = str( row[1] )
     if not fields.strip():
         continue
     fields_list = from_json_string( _sniffnfix_pg9_hex( fields ) )
     if len( fields_list ):
         for index, field in enumerate( fields_list ):
             field[ 'name' ] = 'field_%i' % index
             field[ 'helptext' ] = field[ 'helptext' ].replace("'", "''").replace('"', "")
             field[ 'label' ] = field[ 'label' ].replace("'", "''")
         fields_json = to_json_string( fields_list )
         if migrate_engine.name == 'mysql':
             cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" %( fields_json, form_definition_id )
         else:
             cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" %( fields_json, form_definition_id )
         migrate_engine.execute( cmd )
 # replace the values list in the content field of the form_values table with a name:value dict
 cmd = "SELECT form_values.id, form_values.content, form_definition.fields" \
       " FROM form_values, form_definition" \
       " WHERE form_values.form_definition_id=form_definition.id" \
    try:
        ToolVersion_table.create()
    except Exception, e:
        log.debug( "Creating tool_version table failed: %s" % str( e ) )
    try:
        ToolVersionAssociation_table.create()
    except Exception, e:
        log.debug( "Creating tool_version_association table failed: %s" % str( e ) )
    # Populate the new tool_version table with the tools included in installed tool shed repositories.
    cmd = "SELECT id, metadata FROM tool_shed_repository"
    result = migrate_engine.execute( cmd )
    count = 0
    for row in result:
        if row[1]:
            tool_shed_repository_id = row[0]
            repository_metadata = from_json_string( _sniffnfix_pg9_hex( str( row[1] ) ) )
            # Create a new row in the tool_version table for each tool included in the repository.  We will NOT
            # handle tool_version_associations because we do not have the information we need to do so.
            tools = repository_metadata.get( 'tools', [] )
            for tool_dict in tools:
                cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
                    ( nextval( 'tool_version' ), localtimestamp(), localtimestamp(), tool_dict[ 'guid' ], tool_shed_repository_id )
                migrate_engine.execute( cmd )
                count += 1
    print "Added %d rows to the new tool_version table." % count
    # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
    try:
        ToolIdGuidMap_table.drop()
    except Exception, e:
        log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) )