Example #1
 def diff (self, other):
   diffs = []
   types = (set(self.objects) | set(other.objects)) - {Column, Constraint}
   for t in progress_log(types, self.log, lambda cur_t:
                         "Compared {{}} of schema {}.{}".format(
                           self.name.schema, " Comparing {}...".format(
                             _plural_type(cur_t))
                           if cur_t else '')):
     rename = t not in {Sequence, Synonym}
     diffs.extend(self.diff_subobjects(other, lambda o: o.objects.get(t, {}),
                                       rename=rename))
   return diffs
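
All of the examples on this page iterate through progress_log, whose implementation is not shown here. Below is a minimal sketch of a compatible generator, assuming it takes an iterable, a logger, a message (either a format string or a callable applied to the current item, as in example #1), and an optional count keyword as seen in example #4; the interval parameter and the exact logging behaviour are assumptions, not the precog code.

def progress_log (iterable, log, message, count=None, interval=1000):
  # Sketch only: yield each item and periodically log an "i/total" progress
  # message.  `message` is either a format string whose '{}' receives the
  # progress fraction, or a callable taking the current item and returning
  # such a string.
  items = list(iterable) if count is None else iterable
  total = len(items) if count is None else count
  for i, item in enumerate(items, 1):
    yield item
    if i % interval == 0 or i == total:
      text = message(item) if callable(message) else message
      log.info(text.format("{}/{}".format(i, total)))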
Example #2
 def drop_invalid_objects (self, invalid_objs):
   self.log.info("Invalidating {}...".format(pluralize(len(invalid_objs),
                                                       'out of date object')))
   for obj in progress_log(invalid_objs, self.log,
                           "Purged {} of invalidated objects."):
     if obj.name.schema == self.name.schema:
       referenced_by = {ref.from_ for ref in obj._referenced_by}
       if referenced_by.difference(invalid_objs):
         if self.log.isEnabledFor(logging.DEBUG):
           self.log.debug("{} will revert to deferred. Referenced by [{}]"
                          .format(obj.pretty_name,
                                  ", ".join(ref_obj.pretty_name
                                            for ref_obj in referenced_by)))
         self.make_deferred(obj)
       else:
         self.log.debug("{} can be discarded.".format(obj.pretty_name))
         # no references go outside the invalidated set
         obj._clear_dependencies()
         self.drop(obj)
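
The drop/defer decision in drop_invalid_objects comes down to plain set difference: an invalidated object can only be dropped when everything that references it is itself in the invalidated set; otherwise it reverts to deferred. A tiny standalone illustration of that test (the object names are made up):

invalid_objs = {'PKG_A', 'VIEW_B'}

# VIEW_B is referenced only by PKG_A, which is also invalidated -> safe to drop.
print({'PKG_A'}.difference(invalid_objs))   # set() -> drop

# PKG_A is also referenced by TRG_C, which is not invalidated -> defer instead.
print({'TRG_C'}.difference(invalid_objs))   # {'TRG_C'} -> defer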
Example #3
File: diff.py  Project: glennimoss/precog
def order_diffs (diffs):
  log = logging.getLogger('precog.diff.order_diffs')

  merged_diffs = {}
  for diff in progress_log(diffs, log, "Merging {} complete."):
    sql = tuple(diff.sql)
    if sql in merged_diffs:
      merged_diffs[sql].add_dependencies(diff._dependencies)
    else:
      merged_diffs[sql] = diff
  diffs = merged_diffs.values()

  # filter diffs to remove object drops that are autodropped by other diffs
  applicable_diffs = set(diffs)
  dropping = {diff.dropping: diff
              for diff in progress_log(diffs, log,
                                       "Gathering DROPs {} complete.")
              if diff.dropping}
  applicable_diffs = set()
  for diff in progress_log(diffs, log, "Filtering DROPs {} complete."):
    if diff.dropping:
      autodroppers = [obj for obj in diff.dropping.dependencies_with(
        Reference.AUTODROP) if obj in dropping]
      if autodroppers:
        if log.isEnabledFor(logging.DEBUG):
          log.debug(
            "Filtering {}: (Depends on {}) autodropped when dropping {}."
            .format(diff.pretty_name,
                    diff.dropping.dependencies_with(Reference.AUTODROP),
                    [a.pretty_name for a in autodroppers]))

        for d in autodroppers:
          # The other diff "produces" this diff
          dropping[d].produces.add(diff)
        continue
    applicable_diffs.add(diff)

  # Filter diffs to remove duplicate creates when a more encompassing statement
  # is creating the same thing as an individual one.
  creates = {}
  unnecessary_creates = set()
  for diff in progress_log(applicable_diffs, log,
                           "Filtering CREATEs {} complete."):
    if diff.priority == Diff.CREATE and diff.produces:
      for product in diff.produces:
        if product in creates:
          other_diff = creates[product]
          if other_diff.produces != diff.produces:
            if other_diff.produces.issuperset(diff.produces):
              if log.isEnabledFor(logging.DEBUG):
                log.debug("Filtering {}, covered by {}"
                          .format(diff, other_diff))
              unnecessary_creates.add(diff)
              break
            elif other_diff.produces.issubset(diff.produces):
              if log.isEnabledFor(logging.DEBUG):
                log.debug("Filtering {}, covered by {}"
                          .format(other_diff, diff))
              unnecessary_creates.add(other_diff)
          else:
            raise DuplicateCreationError(diff, other_diff)
        creates[product] = diff
  applicable_diffs.difference_update(unnecessary_creates)


  if log.isEnabledFor(logging.DEBUG):
    log.debug("All diffs:\n{}".format(pprint.pformat(
      {diff.pretty_name: {'sql': diff.sql,
                          'dependencies':
                            {dep.pretty_name for dep in diff._dependencies},
                          'produces': {product.pretty_name
                                       for product in diff.produces},
                          'dropping': diff.dropping and
                            diff.dropping.pretty_name,
                          'autodrop chain': diff.dropping and
                            {dep.pretty_name
                                for dep in diff.dropping
                                  .dependencies_with(Reference.AUTODROP)},
                          'created': diff.created
                         }
      for diff in diffs})))

  sort_by = (lambda x: x.priority + (10
                                     if isinstance(x, ErrorCheckingDiff) else 0)
             if isinstance(x, Diff) else 0)
  # edges is dict of obj: [dependencies, ...]
  edges = {}
  # Applicable diffs to be sorted, in priority order
  S = sorted(applicable_diffs, key=sort_by)

  # create edge list
  def add_edge (from_, to):
    if from_ not in edges:
      edges[from_] = set()
    try:
      edges[from_].update(to)
    except TypeError:
      edges[from_].add(to)

  for diff in progress_log(diffs, log, "Edge list {} complete."):
    add_edge(diff, diff.dependencies)
    for product in diff.produces:
      add_edge(product, diff)

  for k,v in edges.items():
    edges[k] = sorted(v, key=sort_by)
  if log.isEnabledFor(logging.DEBUG):
    log.debug("Edge list:\n{}".format(_edge_list(edges)))

  # list of sorted diffs
  L = []
  visited = set()

  indent = ''
  def visit (node, this_visit=()):
    nonlocal indent
    cycle = False
    if node in this_visit:
      cycle = True
    this_visit += (node,)
    if cycle:
      raise DiffCycleError(edges, this_visit)

    if node not in visited:
      visited.add(node)

      # A diff may have been filtered out of applicable_diffs, but it still has
      # to be visited so that anything depending on it is ordered correctly;
      # only applicable diffs are actually emitted.
      applicable = isinstance(node, Diff) and node in applicable_diffs
      if log.isEnabledFor(logging.DEBUG):
        if applicable:
          debugstr = "{}Can I apply {}?"
        else:
          debugstr = "{}Visiting {}"
        log.debug(debugstr.format(indent, node.pretty_name))

      if node in edges:
        for dependent in edges[node]:
          indent += '  '
          visit(dependent, this_visit)
          indent = indent[:-2]

      if applicable:
        if log.isEnabledFor(logging.DEBUG):
          log.debug("{}Apply {}".format(indent, node.pretty_name))
        L.append(node)

  for node in progress_log(S, log, "Ordering {} complete."):
    visit(node)

  return L
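
order_diffs is, at its core, a depth-first topological sort over the edges dict (node -> the nodes it depends on), with the this_visit tuple doubling as cycle detection. Stripped of the diff-specific bookkeeping, the pattern looks roughly like this (a sketch for illustration, not the precog code):

def toposort (edges):
  # edges: {node: iterable of nodes it depends on}
  ordered, visited = [], set()

  def visit (node, path=()):
    if node in path:
      raise ValueError("dependency cycle: {}".format(path + (node,)))
    if node in visited:
      return
    visited.add(node)
    for dep in edges.get(node, ()):
      visit(dep, path + (node,))
    ordered.append(node)   # every dependency has already been emitted

  for node in edges:
    visit(node)
  return ordered

print(toposort({'c': ['b'], 'b': ['a'], 'a': []}))   # ['a', 'b', 'c']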
Example #4
  def from_db (self):
    owner = self.name.schema

    self.log.info("Fetching schema {}...".format(owner))

    schema = {
      'objects': db.query_all(
        """ SELECT object_name
                 , object_type
                 , last_ddl_time
            FROM dba_objects
            WHERE owner = :o
              AND subobject_name IS NULL
              AND object_type IN ( 'FUNCTION'
                                 , 'INDEX'
                                 , 'PACKAGE'
                                 , 'PACKAGE BODY'
                                 , 'PROCEDURE'
                                 , 'SEQUENCE'
                                 , 'SYNONYM'
                                 , 'TABLE'
                                 , 'TRIGGER'
                                 , 'TYPE'
                                 , 'TYPE BODY'
                              -- , 'VIEW'
                                 )
            UNION ALL
            SELECT constraint_name
                 , 'CONSTRAINT'
                 , last_change
            FROM dba_constraints
            WHERE owner = :o
              -- Ignore constraints on tables in the recyclebin
              AND NOT (LENGTH(table_name) = 30
                   AND table_name LIKE 'BIN$%')
        """, o=owner, oracle_names=['object_name']),
      'columns': db.query_all(
        """ SELECT table_name
                 , COUNT(*) AS num_columns
            FROM dba_tab_cols
            WHERE owner = :o
              -- Ignore columns on tables in the recyclebin
              AND NOT (LENGTH(table_name) = 30
                   AND table_name LIKE 'BIN$%')
            GROUP BY table_name
        """, o=owner, oracle_names=['table_name']),
      'grants': 0,
      # db.query_one(
      # """ SELECT COUNT(*)
      #     FROM (SELECT DISTINCT owner, table_name
      #           FROM dba_tab_privs
      #           WHERE grantee = :o)
      # """, o=owner),
    }

    self.log.debug("Query complete.")
    total_objects = (len(schema['objects']) +
                     sum(table['num_columns'] for table in schema['columns']) +
                     schema['grants'])

    modified_times = {}
    for obj in schema['objects']:
      object_name = OracleFQN(owner, obj['object_name'])
      object_type = _to_type(obj['object_type'], object_name)
      if issubclass(object_type, PlsqlCode):
        object_name = _mangle_plsql_name(object_type, object_name)
        object_type = PlsqlCode
      if object_type not in modified_times:
        modified_times[object_type] = {}
      modified_times[object_type][object_name] = obj['last_ddl_time']

    self.log.info("Schema {} has {}.".format(owner, pluralize(total_objects,
                                                                'object')))
    to_refresh = self.read_cache(modified_times)

    if schema['grants']:
      # Refresh all grants, but only if there are actually any grants out there
      to_refresh[Grant] = None


    change_count = 0
    for obj_type, names in to_refresh.items():
      if obj_type is Column:
        for table in schema['columns']:
          if names is None or table['table_name'] in names:
            change_count += table['num_columns']
      elif names is None:
        if obj_type in modified_times:
          change_count += len(modified_times[obj_type])
        elif obj_type is Grant:
          change_count += schema['grants']
      else:
        change_count += len(names)

    if to_refresh:
      def progress_message (o):
        return "Fetched {{}} of schema {}.{}".format(owner,
          " Currently fetching {}...".format(_plural_type(o))
          if o else '')

      actual = 0
      for obj in progress_log((obj for obj_type, names in to_refresh.items()
                               for obj in obj_type.from_db(
                                 self.name.schema, self.database, names)),
                              self.log, progress_message, count=change_count):
        actual += 1
        self.add(obj)
      self.log.info("Fetching schema {} complete.".format(owner))
      self.cache(modified_times)
    else:
      self.log.info('Using cached schema.')
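
read_cache and cache are not shown on this page; the idea is to compare the freshly queried modified_times against a stored copy and only refetch objects whose last_ddl_time has changed. A minimal sketch of that comparison, assuming the cached copy has the same {type: {name: timestamp}} shape (the function and variable names here are assumptions):

def stale_names (modified_times, cached_times):
  # Return {obj_type: names to refetch}; None means "refresh everything of
  # this type", e.g. when the type has never been cached before.
  to_refresh = {}
  for obj_type, names in modified_times.items():
    cached = cached_times.get(obj_type)
    if cached is None:
      to_refresh[obj_type] = None
      continue
    changed = {name for name, ts in names.items() if cached.get(name) != ts}
    if changed:
      to_refresh[obj_type] = changed
  return to_refresh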