Exemplo n.º 1
0
  def _comp (self, warnings=False):
    """ A tuple view for hashing, comparing, etc.
    Optional "warnings" whether to warn about underspecification.

    Selects the narrowest set of "discriminator" columns that uniquely
    identifies this row (preferring a primary key, then the narrowest
    fully-specified unique key), then returns the row's (column, value)
    pairs restricted to those columns, sorted for a stable hash/compare.
    """
    # Default: every column of the row discriminates. The identity of this
    # object is tested below (`discriminators is self.columns`) to detect
    # that no unique key matched — do not replace with an equality check.
    discriminators = self.columns
    # All unique constraints reachable from this row's columns, sorted
    # widest-first so the last matching assignment in the loop below wins
    # with the narrowest applicable key.
    uks = sorted(
      {uk for col in self.columns for uk in col._find_unique_constraints()},
      key=lambda o: len(o.columns), reverse=True)

    for uk in uks:
      col_set = set(uk.columns)
      if col_set.issubset(self.columns):
        # This key is fully specified by the row; narrower keys seen later
        # overwrite wider ones.
        discriminators = col_set

      # If there's a PK we prefer it over anything else (Including warning
      # about underspecified rows.)
      if uk.is_pk:
        break

    # Warn about not enough columns to uniquely specify this row of data.
    # (`discriminators is self.columns` means no unique key was a subset.)
    if warnings and discriminators is self.columns and uks:
      try:
        uk = [k for k in uks if k.is_pk][0]
      except IndexError:
        # If no PK, then use the shortest UK
        uk = uks[-1]

      # Column names the chosen key requires but the row does not provide.
      unspec_cols = sorted({col.name.part for col in uk.columns} -
                           {col.name.part for col in self.columns})
      # Render as "A, B and C" for the warning message.
      col_list = ''
      if len(unspec_cols) > 1:
        col_list = ', '.join(unspec_cols[:-1]) + ' and '
      col_list += unspec_cols[-1]

      self.log.warning_once('INSERT {} underspecifies the {} key "{}". '
                            'Consider adding {} {} to the INSERT statement.'
                            .format(
                              self.get_location(True),
                              "primary" if uk.is_pk else "unique",
                              uk,
                              pluralize(len(unspec_cols), "column", False),
                              col_list))

    # Compare case-insensitively by column name part.
    discriminators = {col.name.part.lower() for col in discriminators}

    return tuple(sorted(tup for tup in self.values().items()
                        if tup[0] in discriminators))
Exemplo n.º 2
0
  def read_cache (class_, file_name, default_schema=None):
    """ Try to load a cached Database definition for file_name.

    Returns the cached Database (after refreshing any out of date files),
    or None when the cache is missing, unreadable, stale, or does not
    correspond to file_name.
    """
    cache_log = logging.getLogger('Cache Loader')
    try:
      cache_file_name = _file_cache_file_name(file_name)
      cached_files, database = _read_cache(cache_file_name)

      # Compare each cached file's recorded mtime against the filesystem.
      stale_files = []
      top_file_cached = False
      for cached_file, recorded_mtime in cached_files:
        if cached_file == file_name:
          top_file_cached = True
        try:
          mtime_now = os.stat(cached_file).st_mtime
        except OSError:
          # Missing/unreadable file counts as changed.
          mtime_now = None
        if mtime_now != recorded_mtime:
          stale_files.append(cached_file)

      if not top_file_cached:
        # The cache was built from some other top-level file.
        cache_log.info('Cache in file "{}" does not correspond with {}. '
                       'Ignoring cache.'
                       .format(cache_file_name, file_name))
        return

      if not database._files.keys() - stale_files:
        # Everything is out of date, so there's no reason to use the cache
        cache_log.info('Entire cache is out of date.')
        return

      database._top_file = file_name
      # reestablish Schema links back to Database
      for schema in database.schemas.values():
        schema.database = database

      database.default_schema = default_schema

      if not stale_files:
        database.log.info('Using cached definition.')
      else:
        database.log.info("Refreshing cache with {}..."
                          .format(pluralize(len(stale_files),
                                            'out of date file')))
        database.refresh_files(stale_files)

      return database
    except ValueError:
      # Unreadable/incompatible cache payload: behave as if no cache exists.
      pass
Exemplo n.º 3
0
 def drop_invalid_objects (self, invalid_objs):
   """ Purge invalidated objects belonging to this schema.

   Objects still referenced from outside the invalidated set are made
   deferred instead of dropped; the rest are dropped outright.
   """
   self.log.info("Invalidating {}...".format(pluralize(len(invalid_objs),
                                                       'out of date object')))
   for obj in progress_log(invalid_objs, self.log,
                           "Purged {} of invalidated objects."):
     # Only touch objects in this schema.
     if obj.name.schema != self.name.schema:
       continue
     holders = {ref.from_ for ref in obj._referenced_by}
     if holders.difference(invalid_objs):
       # Some referrer survives the purge: defer instead of dropping.
       if self.log.isEnabledFor(logging.DEBUG):
         self.log.debug("{} will revert to deferred. Referenced by [{}]"
                        .format(obj.pretty_name,
                                ", ".join(ref_obj.pretty_name
                                          for ref_obj in holders)))
       self.make_deferred(obj)
     else:
       self.log.debug("{} can be discarded.".format(obj.pretty_name))
       # no references go outside the invalidated set
       obj._clear_dependencies()
       self.drop(obj)
Exemplo n.º 4
0
 def __str__ (self):
   """ Append an error count summary to the parent's message. """
   error_summary = pluralize(self.num_errors, 'error')
   return "{}{} in schema definition".format(super().__str__(), error_summary)
Exemplo n.º 5
0
  def from_db (self):
    """ Populate this schema's objects from the live database.

    Queries dba_objects/dba_constraints/dba_tab_cols for the schema's
    contents and modification times, consults the on-disk cache via
    read_cache() to decide what actually needs fetching, then fetches and
    adds only the out of date objects and refreshes the cache.
    """
    owner = self.name.schema

    self.log.info("Fetching schema {}...".format(owner))

    # Inventory of the schema: object names/types with last-modified times,
    # per-table column counts, and a grant count (currently disabled — see
    # the commented query below; 'grants' is hard-coded to 0).
    schema = {
      'objects': db.query_all(
        """ SELECT object_name
                 , object_type
                 , last_ddl_time
            FROM dba_objects
            WHERE owner = :o
              AND subobject_name IS NULL
              AND object_type IN ( 'FUNCTION'
                                 , 'INDEX'
                                 , 'PACKAGE'
                                 , 'PACKAGE BODY'
                                 , 'PROCEDURE'
                                 , 'SEQUENCE'
                                 , 'SYNONYM'
                                 , 'TABLE'
                                 , 'TRIGGER'
                                 , 'TYPE'
                                 , 'TYPE BODY'
                              -- , 'VIEW'
                                 )
            UNION ALL
            SELECT constraint_name
                 , 'CONSTRAINT'
                 , last_change
            FROM dba_constraints
            WHERE owner = :o
              -- Ignore constraints on tables in the recyclebin
              AND NOT (LENGTH(table_name) = 30
                   AND table_name LIKE 'BIN$%')
        """, o=owner, oracle_names=['object_name']),
      'columns': db.query_all(
        """ SELECT table_name
                 , COUNT(*) AS num_columns
            FROM dba_tab_cols
            WHERE owner = :o
              -- Ignore columns on tables in the recyclebin
              AND NOT (LENGTH(table_name) = 30
                   AND table_name LIKE 'BIN$%')
            GROUP BY table_name
        """, o=owner, oracle_names=['table_name']),
      'grants': 0,
      # db.query_one(
      # """ SELECT COUNT(*)
      #     FROM (SELECT DISTINCT owner, table_name
      #           FROM dba_tab_privs
      #           WHERE grantee = :o)
      # """, o=owner),
    }

    self.log.debug("Query complete.")
    total_objects = (len(schema['objects']) + sum(table['num_columns'] for table in schema['columns']) +
                     schema['grants'])

    # Build {object_type: {name: last_ddl_time}} for cache comparison.
    # NOTE: `object` shadows the builtin; kept as-is for byte-compatibility.
    modified_times = {}
    for object in schema['objects']:
      object_name = OracleFQN(owner, object['object_name'])
      object_type = _to_type(object['object_type'], object_name)
      if issubclass(object_type, PlsqlCode):
        # PL/SQL units are tracked under a single mangled PlsqlCode bucket.
        object_name = _mangle_plsql_name(object_type, object_name)
        object_type = PlsqlCode
      if object_type not in modified_times:
        modified_times[object_type] = {}
      modified_times[object_type][object_name] = object['last_ddl_time']

    self.log.info("Schema {} has {}.".format(owner, pluralize(total_objects,
                                                                'object')))
    # {type: names-to-refresh or None for "all of this type"}
    to_refresh = self.read_cache(modified_times)

    if schema['grants']:
      # Refresh all grants, but only if there are actually any grants out there
      to_refresh[Grant] = None


    # Estimate how many objects will be fetched, for progress reporting.
    change_count = 0
    for obj_type, names in to_refresh.items():
      if obj_type is Column:
        # Columns are counted per owning table.
        for table in schema['columns']:
          if names is None or table['table_name'] in names:
            change_count += table['num_columns']
      elif names is None:
        # "All of this type": count from the inventory (or the grant count).
        if obj_type in modified_times:
          change_count += len(modified_times[obj_type])
        elif obj_type is Grant:
          change_count += schema['grants']
      else:
        change_count += len(names)

    if to_refresh:
      def progress_message (o):
        # o is the object currently being fetched (or falsy when idle).
        return "Fetched {{}} of schema {}.{}".format(owner,
          " Currently fetching {}...".format(_plural_type(o))
          if o else '')

      actual = 0
      for obj in progress_log((obj for obj_type, names in to_refresh.items()
                               for obj in obj_type.from_db(
                                 self.name.schema, self.database, names)),
                              self.log, progress_message, count=change_count):
        actual += 1
        self.add(obj)
      self.log.info("Fetching schema {} complete.".format(owner))
      # Persist the fresh modification times for the next run.
      self.cache(modified_times)
    else:
      self.log.info('Using cached schema.')
Exemplo n.º 6
0
def _plural_type (obj):
  """ Return the plural pretty-type name for obj (an instance or a class). """
  cls = obj if isinstance(obj, type) else type(obj)
  # Namespaced types report their namespace's type name instead.
  if hasattr(cls, 'namespace'):
    cls = cls.namespace
  return pluralize(2, cls.pretty_type, False)
Exemplo n.º 7
0
def main ():
  """ Command-line entry point: parse arguments, diff a SQL*Plus definition
  file against a live Oracle database (or dump a schema), and interactively
  apply the resulting changes, retrying failed ones.
  """
  parser = HelpyArgparser(description='Generate Oracle migration script.')


  # Positional arguments
  parser.add_argument('connect_string',
      metavar=ConnectionStringAction.metavar, action=ConnectionStringAction,
      help='Oracle connection string')
  parser.add_argument('file', nargs='?', type=argparse.FileType('r'),
      help='SQL*Plus script to parse')

  # Output control options
  output_group = parser.add_mutually_exclusive_group()
  output_group.add_argument('-v', '--verbose', action='append_const', const=True,
      help='Verbose logging. Specify twice for more output.')
  output_group.add_argument('-q', '--quiet', action='store_true',
      help='Suppress output')

  # Configuration options
  parser.add_argument('--list-plsql-only', action='store_true',
                      help='List only the names of the changed PL/SQL objects, '
                      'rather than the full source code.')
  parser.add_argument('--schema',
      help='Schema name for unqualified object names. Defaults to <username>.')
  parser.add_argument('--dump', action='store_true',
      help='Dump specified schema.')
  parser.add_argument('-t', '--table',
                      metavar='TABLE[:COL1,COL2,...]', action='append',
                      help='Specify a table name to dump its data. Repeat as '
                      'needed. Columns can be specified like TABLE:COL1,COL2,...')
  parser.add_argument('-a', '--alias',
                      metavar='FROM_SCHEMA:TO_SCHEMA', action='append',
                      help='Specify a schema name in the definition file(s) that '
                      'you wish to replace with a different schema name for this '
                      'run. Repeat for each alias.')

  # User-input options
  prompt_group = parser.add_mutually_exclusive_group()
  prompt_group.add_argument('-y', '--apply', action='store_true',
    help='Apply all changes without asking.')
  prompt_group.add_argument('-n', '--no-apply', action='store_true',
    help='Do not ask or apply any changes.')

  # Now GO!
  args = parser.parse_args()

  if not args.file and not args.dump:
    parser.error('Provide a file, or --dump')

  # Configure logger
  log_config = {'style': '{', 'format': '{levelname}: {message}'}
  if args.quiet:
    # Redirect stdout (and the log stream) to the null device.
    sys.stdout = open(os.devnull, 'w')
    log_config['stream'] = sys.stdout
  elif args.verbose:
    # append_const collects one True per -v; two or more means DEBUG.
    if len(args.verbose) > 1:
      log_config['format'] = '{levelname} {name}: {message}'
      log_config['level'] = logging.DEBUG
    else:
      log_config['level'] = logging.INFO
  else:
    log_config['level'] = logging.WARN
  logging.basicConfig(**log_config)

  # Disable progress_log output when stderr is not interactive.
  precog.util.progress_output_enabled = os.isatty(sys.stderr.fileno())


  # Precog time
  try:
    # args.username/args.dsn presumably come from ConnectionStringAction —
    # TODO confirm against that action's implementation.
    schema_name = args.username
    if args.schema:
      schema_name = args.schema

    if args.alias:
      for alias in args.alias:
        add_schema_alias(*alias.split(':'))

    if args.file:
      database = Database.from_file(args.file, schema_name)

      diffs = database.diff_to_db(args.connect_string)

      if diffs:
        retry = '.'
        # Apply/retry loop: each pass applies what it can; failures are
        # collected and retried until everything applies or nothing did.
        while True:
          # Only diffs with a priority count as user-visible "changes".
          changes = sum(1 for diff in diffs if diff.priority)
          change_str = pluralize(changes, 'change')
          print("Found {}{}\n".format(change_str, retry),
                file=sys.stderr)
          print("\n\n".join(diff.formatted(nosnip=args.no_apply, udiff=True,
                                           list_only=args.list_plsql_only)
                            for diff in diffs))
          print()

          if args.no_apply:
            break

          if not args.apply:
            doit = input("Apply {}? [yN] ".format(change_str))
          else:
            doit = 'y'

          errors = 0
          errored_objs = set()
          unapplied_diffs = []
          if 'y' == doit.lower():
            print("Applying {}...".format(change_str), file=sys.stderr)
            for diff in progress_print(diffs, "Applied {} of changes."):
              try:
                # Skip (and record) diffs whose dependencies already failed.
                if diff.dependencies & errored_objs:
                  raise UnappliedDependencyError(
                    "Unable to apply change due to an error in a dependency\n"
                    "SQL: {}".format(diff.sql))
                diff.apply()
              except PrecogError as e:
                print(e, file=sys.stderr)
                errored_objs.add(diff)
                unapplied_diffs.append(diff)
                if diff.produces:
                  # Anything this diff would have produced is now tainted.
                  errored_objs.update(diff.produces)
                if diff.priority:
                  errors += 1
            if errors:
              print("\nUnable to apply {}.".format(pluralize(errors, 'change')),
                    file=sys.stderr)
            successes = changes - errors
            print("Successfully applied {}."
                  .format(pluralize(successes, 'change')), file=sys.stderr)
          if not errors:
            # We're done here
            break
          # NOTE: `successes` is only bound in the apply branch above, but
          # errors > 0 implies that branch ran, so this is safe.
          if not successes:
            # Terminate with a nonzero status code
            sys.exit(1)
          # Let's retry those that didn't work
          diffs = unapplied_diffs
          retry = ' that can be retried.'

      else:
        print('Database "{}@{}" is up to date with {}'.format(args.username,
                                                              args.dsn,
                                                              args.file.name),
            file=sys.stderr)

    elif args.dump:
      diffs = Database.dump_schema(args.connect_string, schema_name, args.table)
      print("\n\n".join(str(diff) for diff in diffs))

  except PrecogError as e:
    print(e, file=sys.stderr)
    # With -vv, re-raise so the full traceback is shown.
    if args.verbose and len(args.verbose) > 1:
      raise