Пример #1
0
   def insert_bulk_byways(qb, bulk_list):
      '''Insert one node_byway row per VALUES-tuple in bulk_list using a
         single INSERT statement. Entries in bulk_list are preformatted
         SQL value tuples.'''

      # BUG nnnn: Branch Merge: When merging two branches, you have to
      # recalculate the whole node_byway table for the branch that's
      # merged to. I'm not sure there's an easier way: since we use stacked
      # branching, we don't want to record a parent branch byway in
      # node_byway unless we edit the byway, since we want the record
      # from the last_merge_rid; then when we merge, all the parent branch
      # byways that were not edited in the leafy branch have to be
      # recalculated (so maybe part of merge is to collect all the stack IDs
      # of things from last_merge_rid that we've updated to the
      # working_rid, and then we can just repair only those).

      # The node_byway table is always the latest revision, so no need for
      # revision_id. And we use the leafy branch ID and not the byway's.

      log.verbose('insert_bulk_byways: node_byway: insert: bulk_list: %s'
                  % (bulk_list,))

      values_clause = ','.join(bulk_list)
      insert_sql = (
         """
         INSERT INTO node_byway
            (branch_id
             , node_stack_id
             , byway_stack_id
             , node_vertex_xy)
         VALUES
            %s
         """ % (values_clause,))

      # An INSERT produces no result rows.
      rows = qb.db.sql(insert_sql)
      g.assurt(rows is None)
Пример #2
0
   def attempt_save(self, db, *args, **kwargs):
      '''Record the user's route feedback in the route_feedback table.'''

      # The caller must hand us the same db connection as the request's.
      g.assurt(id(db) == id(self.req.db))

      # No need to lock tables, as we only INSERT. The worst that'll happen is
      # multiple, duplicate rows, since route_feedback is primary keyed by an
      # id sequence.
      self.req.db.transaction_begin_rw()

      # VERIFY: [lb:] You must be be logged in to provide feedback, right?
      g.assurt(self.req.client.username != conf.anonymous_username)

      username = self.req.client.username

      # First dict identifies the row; second dict carries the feedback.
      self.req.db.insert('route_feedback',
                         { 'route_id': self.route_id,
                           'route_version': self.route_version,
                           'username': username },
                         { 'purpose': self.purpose,
                           'satisfaction': self.satisfaction,
                           'comments': self.comments })

      self.req.db.transaction_commit()
Пример #3
0
def timestamp_age(db, tmstamp, other=None, calc_secs=False):
   '''Compute the age of tmstamp relative to the current time, or relative
      to another timestamp when other is given. PSQL returns, e.g.,
      "-03:22:34.724414", "10 years", "8 mons", "1 day", "23:59:00", which
      pyscopg2 converts to a timedelta. Note the odd looking way that
      negatives are indicated:
         >>> datetime.timedelta(microseconds=-1)
         datetime.timedelta(-1, 86399, 999999)
      And here's an example of an event happening in 1 minute:
         >>> datetime.timedelta(minutes=1)
         datetime.timedelta(0, 60)
      If calc_secs is True, we'll figure out the number of seconds elapsed
      instead of fetching a timedelta.
       '''
   # NOTE: If you've started a transation session with the db connection,
   #       CURRENT_TIME is static and reflects the time when the transaction
   #       was started. That is, you can call CURRENT_TIME over and over again
   #       and it'll always return the same value.
   if other:
      other = "'%s'::TIMESTAMP WITH TIME ZONE" % (other,)
   else:
      other = "CURRENT_TIMESTAMP"
   if calc_secs:
      sql_tstamp_age = (
         """
         SELECT (EXTRACT(EPOCH FROM '%s'::TIMESTAMP WITH TIME ZONE)
                 - EXTRACT(EPOCH FROM %s)) AS age
         """ % (tmstamp, other,))
   else:
      sql_tstamp_age = (
         "SELECT AGE('%s'::TIMESTAMP WITH TIME ZONE, %s) AS age"
         % (tmstamp, other,))
   # Exactly one row comes back from a scalar SELECT.
   rows = db.sql(sql_tstamp_age)
   g.assurt(len(rows) == 1)
   return rows[0]['age']
Пример #4
0
   def search_for_orphan_query(self, qb):
      '''Returns all attachments that aren't marked as deleted but don't have 
         any non-deleted link_values'''
      # See also the note in link_value.search_for_orphan_query. 

      g.assurt(False) # not tested

      # FIXME: remove this:
      #cols_item_versioned = ','.join([("iv.%s" % (attr_defn[0],)) 
      #                     for attr_defn in item_versioned.One.local_defns])

      # The %s is the attachment child table; the %d placeholders are the
      # infinity revision ID, so only current (valid_until_rid = rid_inf)
      # rows are considered, both for the attachment and its link_values.
      sql = (
         """
         SELECT 
            iv.stack_id
         FROM 
            item_versioned AS iv
         JOIN
            %s AS at_child
               USING (system_id)
         WHERE
            NOT iv.deleted
            AND iv.valid_until_rid = %d
            AND NOT EXISTS (
               SELECT lv.stack_id 
               FROM link_value AS lv
               JOIN item_versioned AS iv_2
                  USING (system_id)
               WHERE lv.lhs_stack_id = iv.stack_id
                     AND iv_2.valid_until_rid = %d
                     AND NOT iv_2.deleted)
         """ % (self.one_class.item_type_table,
                conf.rid_inf,
                conf.rid_inf))
      # Hand the assembled query to the common search plumbing.
      self.sql_search(qb, sql)
Пример #5
0
   def validize(self, qb, is_new_item, dirty_reason, ref_item):
      '''Prepare this item for saving: record why it's dirty, hydrate
         missing values from ref_item (the existing database row, if any),
         and mark the item valid. Called once per item per save sequence.'''

      # An item is only validized once per save sequence.
      g.assurt(not self.valid)

      # Note the clobbery of dirty, which is okay: this is the first time we'll
      # set dirty on an item. After this, callers will use dirty_reason_add so
      # that the dirty reasons are |ored together.
      g.assurt(self.dirty == One.dirty_reason_none)
      # This is now okay: g.assurt(dirty_reason != One.dirty_reason_none)
      #self.dirty = dirty_reason
      self.dirty_reason_add(dirty_reason)

      # NOTE: Python's id() returns an object's ID... akin to &object in C.
      if (not is_new_item) and (ref_item is not None):
         # This item exists. For values the client does not indicate in GWIS, 
         # copy the value from the database.
         comparable = self.validize_consume_item(ref_item)
         #if comparable:
         #   #log.debug('validize: user item comparable to db item')
         #   # 2011.08.18: Curious if this'll happen much
         #   # FIXME: validize_compare ignores self.groups_access
         #   # NOTE: mark_deleted comes through here.
         #   # NOTE: MetC import comes through here.
         #   log.debug('validize: user item comparable to db item')

      # else, is_new_item, meaning (a) this item was not found in the 
      #       database, or (b) it was found in the database, and the user
      #       [comment was left unfinished -- presumably: ...and the user is
      #       creating a fresh copy of it; TODO confirm against callers.]
      self.valid = True
Пример #6
0
 def save_core(self, qb):
    # Run the common work-item persistence first, then, for brand new
    # items, write the first row into the 'merge_job' table.
    work_item.One.save_core(self, qb)
    if self.fresh:
       # A fresh item is at its first version and cannot be deleted.
       g.assurt(self.version == 1)
       g.assurt(not self.deleted)
       self.save_insert(qb, One.item_type_table, One.psql_defns)
Пример #7
0
   def add_members_to_group(self, basemap_qb, common_row,
                                  group_sid, usernames):
      '''Add each named user to the group with stack ID group_sid, skipping
         usernames that don't resolve and users already in the group.
         common_row supplies the shared membership-row values and is
         updated in place for each user.'''

      log.debug('add_members_to_group: group_sid: %d.' % (group_sid,))

      # Load the group's current memberships so we can detect duplicates.
      grp_mmbs = group_membership.Many()
      grp_mmbs.search_by_group_id(basemap_qb, group_sid)

      # Map user_id -> existing membership record.
      group_uids = {}
      for gm in grp_mmbs:
         group_uids[gm.user_id] = gm

      for uname in usernames:
         try:
            user_id = User.user_id_from_username(basemap_qb.db, uname)
         except GWIS_Warning, e:
            # Unknown username: warn and skip, rather than abort the batch.
            user_id = None
            log.warning('add_members_to_group: no such user: %s' % (uname,))
         if user_id:
            if not (user_id in group_uids):
               # New member: build and save a fresh membership record.
               common_row.update({
                  'stack_id'  : basemap_qb.item_mgr.get_next_client_id(),
                  'group_id'  : group_sid,
                  'user_id'   : user_id,
                  'username'  : uname,
                  })
               new_mmbrship = group_membership.One(qb=basemap_qb,
                                                   row=common_row)
               self.add_members_save_mmbrship(basemap_qb, new_mmbrship)
            else:
               # Already a member; expected to be at editor access.
               existing_gm = group_uids[user_id]
               g.assurt(existing_gm.access_level_id == Access_Level.editor)
               log.info('add_members: user already member: %s in %s'
                        % (existing_gm.username, existing_gm.group_name,))
Пример #8
0
 def __init__(self, qb=None, row=None, req=None, copy_from=None):
    # 'is_public_basemap' is derived below, never read from a row.
    g.assurt((row is None) or ('is_public_basemap' not in row))
    g.assurt(copy_from is None) # Not supported for this class.
    item_user_watching.One.__init__(self, qb, row, req, copy_from)
    # Flag the one branch whose stack ID matches the baseline branch.
    if qb is not None:
       if self.stack_id == Many.baseline_id(qb.db):
          self.is_public_basemap = True
Пример #9
0
   def user_validate(self, variant=None):
      '''
      Check the username and password/token included in the GWIS request.

         - If not provided          set self.username to anonymous username
         - If provided and valid    set self.username to validated username
         - If provided and invalid  raise GWIS_Error.

      '''

      log.verbose1('user_validate: variant: %s' % (str(variant),))

      # Look for auth data in the request document, if there is one.
      user = None
      if self.req.doc_in is not None:
         user = self.req.doc_in.find('metadata/user')

      if user is not None:
         # Parse and validate the username and credentials; raises on error.
         self.user_validate_parse(user, variant)
      else:
         # No auth data; fall back to the anonymous user.
         log.info('user_validate: anon: %s / %s'
                  % (conf.anonymous_username,
                     self.str_remote_ip_and_host_and_local_host(),))
         self.username = conf.anonymous_username
         self.user_group_id = User.private_group_id(self.req.db,
                                                    conf.anonymous_username)
         g.assurt(self.user_group_id > 0)

      if self.username is not None:
         # Check user's access to branch. Raises GWIS_Error if access denied.
         self.req.branch.branch_hier_enforce()
Пример #10
0
 def branch_id_from_branch_name(db, branch_name):
    # This script only finds current branches that have not been deleted.
    branch_id = None
    try:
       # FIXME: Instead of =, use LIKE %% so we can loosely match?
       rows = db.sql(
          """
          SELECT
             DISTINCT(br.stack_id)
          FROM
             branch AS br
          JOIN
             item_versioned AS br_iv
                USING (system_id)
          WHERE
             br_iv.name = %s
             AND NOT br_iv.deleted
             AND br_iv.valid_until_rid = %s
          """, (branch_name,
                conf.rid_inf,))
       if not rows:
          raise GWIS_Error('Branch "%s" is not recognized.' % (branch_name,))
       elif len(rows) != 1:
          raise GWIS_Error('Branch named "%s" not unique.' % (branch_name,))
       else:
          branch_id = int(rows[0]['stack_id'])
    except psycopg2.ProgrammingError, e:
       #raise GWIS_Error('Unanticipated SQL error: "%s" on branch "%s".'
       #                 % (str(e), branch_name,))
       g.assurt(False)
Пример #11
0
   def branch_enforce_permissions(qb, min_access):
      '''
      Check the user's rights to access the branch at the given revision ID.
      Since the user might belong to more than one group, uses min() to get the
      user's greatest level of access.
      '''

      log.verbose('branch_enforce_permissions: br_id: %d / rev: %s / min: %s'
                  % (qb.branch_hier[0][0], qb.revision, min_access,))

      # Fetch the branch itself; the search applies the user's group access.
      branch_many = Many()
      branch_many.search_by_stack_id(qb.branch_hier[0][0], qb)

      access_level_id = None
      if len(branch_many) > 0:
         # Stack IDs are unique, so at most one hit.
         g.assurt(len(branch_many) == 1)
         log.verbose('branch_many: %s' % (branch_many,))
         access_level_id = branch_many[0].access_level_id
         log.verbose('access_level_id: %s' % (access_level_id,))

      # No access record at all, or an access level weaker than min_access
      # (smaller IDs mean more access), means the user is denied.
      if (access_level_id is None) or (
            (min_access is not None) and (min_access < access_level_id)):
         raise GWIS_Error('Insufficient privileges or unknown branch.')
Пример #12
0
 def cp_group_shared_id(db, group_name):
    '''Return the stack ID of the current shared-scope group with the given
       name, or None if no such group exists.'''
    rows = db.sql(
       """
       SELECT 
          grp.stack_id AS group_id 
       FROM 
          group_ AS grp
       WHERE 
              grp.name = %s
          AND grp.access_scope_id = %s
          AND grp.valid_until_rid = %s
          AND grp.deleted IS FALSE
       """, (group_name,
             Access_Scope.shared,
             conf.rid_inf,))
    if not rows:
       return None
    if len(rows) != 1:
       # Group names within a scope are expected to be unique.
       log.error('cp_group_shared_id: found %d rows for "%s"'
                 % (len(rows), group_name,))
       g.assurt(False)
    group_id = int(rows[0]['group_id'])
    g.assurt(group_id > 0)
    return group_id
Пример #13
0
 def save_core_get_branch_id(self, qb):
    # A fresh branch cannot already be in qb.branch_hier; an existing
    # branch must be.
    hier_stack_ids = [entry[0] for entry in qb.branch_hier]
    if self.fresh:
       g.assurt(self.stack_id not in hier_stack_ids)
    else:
       g.assurt(self.stack_id in hier_stack_ids)
    return self.stack_id
Пример #14
0
   def auth_failure(self, username, kind):
      '''
      Responds to an authentication failure of the given kind ('password'
      or 'token'): logs the event and checks for excessive recent failures.
      '''
      g.assurt(kind in ('password', 'token',))

      # Bundle the values the failure-logging sql calls need.
      event_args = {
         'username': username,
         'is_password': (kind == 'password'),
         'client_host': self.ip_addr,
         'instance': conf.instance_name,
         }

      # Recording the failure needs a r/w transaction, so close out the
      # current one first.
      # BUG 2688: Use transaction_retryable?
      self.req.db.transaction_commit()
      self.req.db.transaction_begin_rw()

      # 2012.06.08: [lb] In CcpV1, I see 24 of these in a row for myself.
      #                  What gives?
      #
      # EXPLAIN: The daily.runic.sh nightly cron will look at user login
      # failures, and it'll complain/email if there are more than a certain
      # amount per day per user.
      # BUG nnnn: Do we need a better mechanism for detecting username attacks?
      #           Have we tested brute-force password attacks?
      #           What about other attacks....?
      self.req.p_notice('auth failed for "%s" (%s)' % (username, kind))
      log.info('auth_failure: username: %s / kind: %s' % (username, kind,))

      # Record the failure, then see if there have been too many recently.
      self.auth_failure_log_event(event_args)
      self.auth_failure_check_recent(kind, event_args)

      # Commit now; we'll raise an exception shortly
      # BUG 2688: Use transaction_retryable?
      self.req.db.transaction_commit()
Пример #15
0
 def cp_group_private_id(db, username):
    '''Return the stack ID of the user's private group, or None if the user
       has no current, not-denied private-group membership.'''
    rows = db.sql(
       """
       SELECT 
          grp.stack_id AS group_id
       FROM 
          user_ AS usr
       JOIN 
          group_membership AS gmp
             ON gmp.user_id = usr.id
       JOIN 
          group_ AS grp
             ON grp.stack_id = gmp.group_id
       WHERE
              usr.username = %s 
          AND grp.access_scope_id = %s
          AND gmp.access_level_id < %s
          AND gmp.valid_until_rid = %s
          AND gmp.deleted IS FALSE
       """, (username,
             Access_Scope.private,
             Access_Level.denied,
             conf.rid_inf,))
    if not rows:
       return None
    # A user has exactly one private group.
    g.assurt(len(rows) == 1)
    group_id = int(rows[0]['group_id'])
    g.assurt(group_id > 0)
    return group_id
Пример #16
0
   def geocode_metrogis(addr):

      g.assurt(False) # Not used; not updated to CcpV2

      gis_ns = "http://www.metrogis.org/geocode"
      gml_ns = "http://www.opengis.net/gml"
      gis_url = "http://geoserver.state.mn.us/geocoder/geocode_response"
      hits_limit = conf.geocode_hit_limit

      # Have to split so that we can separate the address number.
      split_street = addr.street.split()
      split_street_mod = " ".join(split_street[1:len(split_street)])
      # FIXME: Is there a more efficient way to build this long string?
      url2 = (gis_url
              + "?methodName=GeocodeRequest&Version=1.1&CountryCode=US"
              + "&maximumResponses=" + str(hits_limit)
              + "&CompleteAddressNumber=" + urllib.quote(split_street[0])
              + "&CompleteStreetName=" + urllib.quote(split_street_mod)
              + "&PlaceName=" + urllib.quote(addr.city)
              + "&StateName=" + urllib.quote(addr.state)
              + "&ResponseFormat=XML")

      try:
         resp = misc.urllib2_urlopen_readall(url2)
      except Exception, e:
         log.error('Is this an error? %s / %s' % (str(e), resp_f,))
         raise GWIS_Error('Error finding location')
Пример #17
0
def process_check_output(cmd_and_args):
   try:
      log.debug('Running cmd [>=2.7]: %s' % (cmd_and_args,))
      resp = subprocess.check_output(cmd_and_args)
   except AttributeError:
      # < Python 2.7
      cmd = ' '.join(cmd_and_args)
      log.debug('Running cmd [<2.7]: %s' % (cmd,))
      p = subprocess.Popen(cmd,
                           shell=True,
                           # bufsize=bufsize,
                           stdin=subprocess.PIPE,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT,
                           close_fds=True)
      (resp_in, resp_out_and_err) = (p.stdin, p.stdout)
      resp = resp_out_and_err.read()
      resp_in.close()
      resp_out_and_err.close()
      p.wait()
      #(sout, serr) = subprocess.Popen(cmd,
      #                                stdout=subprocess.PIPE,
      #                                stderr=subprocess.STDOUT).communicate()
   except Exception, e:
      g.assurt(False)
Пример #18
0
 def sql_inner_where_extra(self, qb, branch_hier, br_allow_deleted,
                                 min_acl_id, job_classes=None):
    # Callers never supply job_classes; this subclass pins its own class.
    g.assurt(job_classes is None)
    job_classes = [One.job_class,]
    return merge_job.Many.sql_inner_where_extra(
       self, qb, branch_hier, br_allow_deleted, min_acl_id, job_classes)
Пример #19
0
 def __init__(self, qb=None, row=None, req=None, copy_from=None):
    '''Base item constructor: sanity-check inputs, reset save-state flags,
       then hydrate column attributes from row or copy_from.'''
    # 
    self.assurt_on_init(qb, row, req, copy_from)
    # 
    self.dirty = One.dirty_reason_none
    # An item object is fresh once we set its stack ID if its version=0 (and
    # the current stack ID is negative, i.e., set to a client ID). 
    # The takeaway: self.fresh is only True during a save sequence, from 
    # stack_id_lookup_cached() through save().
    self.fresh = False
    # An item is valid once we start a save sequence and prepare it for 
    # saving (see prepare_and_save_item() and validize()). This, e.g., 
    # sets an item's valid_start/until_rid, hydrates missings columns, etc.
    self.valid = False
    # The req is our Apache request wrapper. Much of the data can be found
    # in qb, since we don't require a request to make an item (just a qb).
    self.req = req
    # Setup the column members
    # (row and copy_from are mutually exclusive sources of column values).
    g.assurt(not ((row is not None) and (copy_from is not None)))
    for attr_defn in self.attr_defns:
       self.col_to_attr(row, attr_defn, copy_from)
       # FIXME: We should complain about keys in row that we don't recognize.
    # BUG 2641: Is it costly to define these for all items?
    # attrs: attribute-name -> value; tagged: set of tag names on the item.
    self.attrs = {}
    self.tagged = set()
Пример #20
0
 def search_get_sql(self, qb):
    # Leafiness confirmation doesn't apply here. Restrict the query to
    # just the first branch in the hierarchy, then restore the caller's
    # setting before returning.
    g.assurt(not qb.confirm_leafiness)
    saved_limit = qb.branch_hier_limit
    qb.branch_hier_limit = 1
    sql = item_user_watching.Many.search_get_sql(self, qb)
    qb.branch_hier_limit = saved_limit
    return sql
Пример #21
0
 def verify_handler(self):
    '''Validate the script's CLI options: exactly one output target
       (--output or --bug-number) and at most one recipient source
       (--emails, --usernames, or --userids). Returns True when the
       options are coherent.'''
    ok = Ccp_Script_Args.verify_handler(self)
    if self.cli_opts.recipient_file and self.cli_opts.bug_number:
       log.error(
          'Please specify either --output or --bug-number, not both')
       ok = False
    elif self.cli_opts.bug_number:
       # Use the server name in the path.
       hostname = socket.gethostname() # E.g., 'runic', 'my.home.domain'
       dot_index = hostname.find('.')
       if dot_index > 0:
          hostname = hostname[0:dot_index]
       # else, dot_index is 0 (unexpected, e.g., ".home.domain"?)
       #    or dot_index is -1 (not found)
       # BUG was: the path interpolated socket.gethostname() again, which
       # discarded the domain-truncated hostname computed above (cf. the
       # short name 'runic' in the commented-out example path).
       self.cli_opts.recipient_file = (
          #'/ccp/bin/ccpdev/private/runic/schema/bug_%s/recipient_file'
          '/ccp/bin/ccpdev/private/%s/schema/bug_%s/recipient_file'
          % (hostname, self.cli_opts.bug_number,))
    elif not self.cli_opts.recipient_file:
       log.error('Please specify either --output or --bug-number.')
       ok = False
    # Count the mutually-exclusive recipient sources.
    num_inputs = 0
    num_inputs += 1 if self.cli_opts.email_addrs else 0
    num_inputs += 1 if self.cli_opts.usernames else 0
    num_inputs += 1 if self.cli_opts.userids else 0
    if num_inputs > 1:
       log.error(
          'Cannot specify more than one of --emails, --usernames, --userids')
       ok = False
    g.assurt(self.cli_opts.split_count >= 1)
    return ok
Пример #22
0
   def __init__(self, copy_this=None, log_freq=1, loop_max=None, callee=None):
      '''Build a progress logger, optionally cloning another logger's
         settings. log_freq is how often (in loop iterations) to log;
         loop_max, if set, is the expected total number of iterations.'''

      # Copy the copy_this's attrs. We copy all of them now, but some of them
      # we reset following this block.
      if copy_this is not None:
         # Clone every slot; relies on __slots__ enumerating all attrs.
         for mbr in Debug_Progress_Logger.__slots__:
            setattr(self, mbr, getattr(copy_this, mbr))
      else:
         # Optional callback invoked when progress is logged.
         self.log_listen = None
         # The caller can attach a payload. But currently none do.
         self.user_data = None
         # Debug knobs for breaking out of long loops early.
         self.debug_break_loops = False
         self.debug_break_loop_cnt = 1
         self.debug_break_loop_off = 0

      g.assurt(log_freq >= 1)
      self.log_freq = log_freq
      self.freq_msg = ''
      self.loop_max = loop_max

      # Zero the per-run loop counters (also done when a run restarts).
      self.loops_reset()

      self.log_silently = False

      # Name of the calling code, for log messages.
      self.callee = callee

      # Speed-reporting thresholds ('beging' is a long-standing attribute
      # name typo (sic); renaming it would break external readers).
      self.info_print_speed_enable = False
      self.info_print_speed_beging = 1
      self.info_print_speed_during = 10
      self.info_print_speed_finish = 1

      self.runtime_guess = 0
Пример #23
0
   def load_all_link_values__(self, qb, links, lhs, rhs, heavywt):
      '''Load every link_value attached to this item on the indicated side
         (exactly one of lhs/rhs) and wire each one to the item.'''

      # Search without user filtering; per-lval access is loaded below.
      userless_qb = qb.get_userless_qb()
      userless_qb.filters.include_item_stack = True

      # Exactly one side must be requested.
      g.assurt(lhs ^ rhs)
      if lhs:
         links.search_by_stack_id_lhs(self.stack_id, userless_qb)
      elif rhs:
         links.search_by_stack_id_rhs(self.stack_id, userless_qb)

      # Complain (quietly) if link_values were already loaded; the attr
      # may not exist at all yet, hence the default.
      if getattr(self, 'link_values', None) is not None:
         log.verbose(
            'load_all_link_values_: overwriting link_values: self: %s'
            % (self,))

      self.link_values_reset(qb)
      for lval in links:
         lval.groups_access_load_from_db(qb)
         self.wire_lval(qb, lval, heavywt)
      self.lvals_wired_ = True
      self.lvals_inclusive_ = True
Пример #24
0
   def search_center_sql(geom=None, table_name=None):
      '''Return a SQL CASE expression computing a representative center
         point for a geometry: the centroid when it falls inside the
         geometry, otherwise the nearest point on the exterior ring.'''
      # Exactly one source: a geometry literal or a table's geometry column.
      g.assurt((geom is not None) or (table_name is not None))
      if table_name:
         geom_expr = '%s.geometry' % (table_name,)
      else:
         # A geom, like '010100002023690000AE47E1FA4E3B1D4184EB516056005341'
         geom_expr = "'%s'::GEOMETRY" % (geom,)

      # BUG nnnn: (Better) MULTIPOLYGON support. For now, using ST_GeometryN,
      # otherwise ST_ExteriorRing fails: 
      # BUG nnnn: Upgrade to PostGIS 2.1.x. (Requires later Psql than on svr.)
      # Also: ST_Line_Locate_Point renamed ST_LineLocatePoint in PostGIS 2.1.0.
      # The same geometry expression fills all six placeholders.
      as_center_sql = (
         """  CASE
               WHEN ST_Within(ST_Centroid(%s), %s) THEN
                  ST_AsText(ST_Centroid(%s))
               ELSE
                  ST_AsText(ST_line_interpolate_point(
                              ST_ExteriorRing(ST_GeometryN(%s, 1)),
                              ST_line_locate_point(
                                 ST_ExteriorRing(ST_GeometryN(%s, 1)),
                                 ST_Centroid(%s))))
               END """ % ((geom_expr,) * 6))
      return as_center_sql
Пример #25
0
def xy_to_xy_line__deprecated(ln_xy, tolerance, pretemper=None):
   '''Convert a list of (x, y) tuples to a list with specified precision.'''

   # This fcn. used to convert to string and then quantize, but that
   # approach has two problems:
   #
   # 1. Two equivalentish values of differing precision can round
   #    differently:
   #
   # >>> Decimal('369426.250000').quantize(Decimal('.1'))
   # Decimal('369426.2')
   # >>> Decimal('369426.250000915').quantize(Decimal('.1'))
   # Decimal('369426.3')
   #
   #    Quantizing twice is a possible work-around (that's what the
   #    pretemper parameter enables):
   #
   # >>> Decimal('369426.250000915').quantize(Decimal('.01')) \
   #     .quantize(Decimal('.1'))
   # Decimal('369426.2')
   #
   #    but it's probably better to find a better solution.
   #
   # 2. Converting to string defaults to just six digits of precision, so
   #    rounding, then rounding again, again makes equivalentish values
   #    disagree:
   #
   # >>> str(372573.945000904)
   # '372573.945001'
   # >>> str(372573.9450000003)
   # '372573.945'
   #
   # >>> Decimal('372573.945001').quantize(Decimal('.01')) \
   #     .quantize(Decimal('.1'))
   # Decimal('372574.0')
   # >>> Decimal('372573.945').quantize(Decimal('.01')) \
   #     .quantize(Decimal('.1'))
   # Decimal('372573.9')
   #
   # You might be more deliberate about the string conversion
   # ('%.28f' % (372573.945000904,)), but again, it's probably better
   # to find a better solution.

   g.assurt(False) # deprecated fcn.

   # Pick the per-coordinate rounding fcn. once, outside the loop.
   if pretemper is None:
      refine = lambda val: Decimal(str(val)).quantize(tolerance)
   else:
      refine = lambda val: (
         Decimal(str(val)).quantize(pretemper).quantize(tolerance))
   return [(refine(xy[0]), refine(xy[1]),) for xy in ln_xy]
Пример #26
0
 def __init__(self, qb=None, row=None, req=None, copy_from=None):
    # Copy-construction is not supported for this class.
    g.assurt(copy_from is None)
    self.ride = None
    work_item.One.__init__(self, qb, row, req, copy_from)
    # Identify the job type and the fcn. the jobs daemon should invoke.
    self.job_class = 'conflation_job'
    self.job_fcn = (
       'conflation.conflation:Conflation:process_request')
Пример #27
0
   def sql_apply_query_filters(self, qb, where_clause="", conjunction=""):
      '''Extend where_clause with item_findability ("squelch") filtering
         where applicable, then defer to the base class for the remaining
         query filters.'''

      g.assurt((not conjunction) or (conjunction == "AND"))

      # 2013.12.23: Moving item_findability-related filtering from route.py,
      #             so every item can use it.
      #
      # MAYBE: Add client support for more than just routes? Maybe, tracks?
      #        Regions? A list of all recently viewed items, of all types?

      if self.sql_enabled_squelch_circuitry():
         if not qb.filters.use_stealth_secret:
            gwis_errs = [] # Ignored, but oh, well.
            # itype is, e.g., Item_Type.ROUTE, or 'route'.
            itype = self.one_class.item_type_id
            # Only squelch when the client is browsing, i.e., not asking
            # for specific stack IDs.
            if not qb.filters.get_id_count(itype, gwis_errs):
               where_clause, conjunction = self.sql_apply_squelch_filters(qb,
                                                   where_clause, conjunction)
            # else, searching by stack_id, so ignore item_findability.
         # else, using stealth_secret, so ignore item_findability.
      elif (   (qb.filters.findability_recent)
            or (qb.filters.findability_ignore)):
         # Findability filters were requested for a type with no squelch
         # support: that's a client error.
         raise GWIS_Error('Wrong item type for findability_recent')
      # else, item type doesn't squelch, so don't tune squelch filter.

      return item_stack.Many.sql_apply_query_filters(
                  self, qb, where_clause, conjunction)
Пример #28
0
 def item_type_str(self):
    # Every concrete class is expected to have set item_type_id.
    if not self.item_type_id:
       g.assurt(False) # I think all classes set the item_type_id, right?
       return 'N/a'
    return Item_Type.id_to_str(self.item_type_id)
Пример #29
0
 def stats_write_bucket_stat_(self, stat_bucket, threshold_fcn):
    '''Write one histogram-style stat. stat_bucket maps a bucket value to
       either a count (int) or a list of payloads. When threshold_fcn is
       given, only buckets it accepts are emitted (with their payloads,
       for ArcMap use); otherwise every bucket is logged.'''
    # NOTE: py2 idiom -- keys() returns a list we can sort in place.
    bucket_values = stat_bucket.keys()
    bucket_values.sort()
    last_log = ''
    if threshold_fcn is not None:
       # Header emitted lazily, just before the first qualifying bucket.
       last_log = '-- Repeat of previous for ArcMap cxpx --'
    for bucket_value in bucket_values:
       bucket_payload = stat_bucket[bucket_value]
       payloads = ''
       if isinstance(bucket_payload, int):
          bucket_len = bucket_payload
       else:
          g.assurt(isinstance(bucket_payload, list))
          bucket_len = len(bucket_payload)
          # Include the payloads for outliers.
          if ((threshold_fcn is not None)
              and threshold_fcn(bucket_value, bucket_len)):
             # FIXME: For use in ArcMap, maybe just print() this, and
             # maybe do it after stats so it's not so messy on my screen.
             payloads = (' (%s)'
                         % (','.join([str(x) for x in bucket_payload]),))
       if threshold_fcn is None:
          self.log('  >> [%9d] => used x %9d%s'
                   % (bucket_value, bucket_len, payloads,))
       elif payloads:
          # Emit the one-time header, then the outlier row, to both the
          # console and the stats file (if open).
          if last_log:
             self.log(last_log)
             last_log = ''
          msg = ('  >> [%9d] => used x %9d%s'
                 % (bucket_value, bucket_len, payloads,))
          print(msg)
          if self.stats_f is not None:
             self.stats_f.write(msg)
             self.stats_f.write('\n')
0
 def append_gml(self, elem, need_digest, new=None, extra_attrs=None,
                      include_input_only_attrs=False):
    # Callers never pass their own element; build a fresh one named by
    # this item type's GWIS abbreviation.
    g.assurt(new is None)
    new = etree.Element(One.item_gwis_abbrev)
    return groupy_base.One.append_gml(
       self, elem, need_digest, new, extra_attrs, include_input_only_attrs)
Пример #31
0
def append_LineString(elem, geometry):
   '''Append a GML LineString representation of Geometry to elem. geometry
      must be an SVG path string.'''

   # The geometry is empty (None) if the item is deleted, or empty ('')
   # if the item is a route; nothing to append in either case.
   if not geometry:
      return

   # Postgis has always started svg linestrings with 'M '.
   g.assurt(geometry.startswith('M '))
   g.assurt(elem.text is None)
   # In PostGIS >= 1.4, Postgis added the 'L' to conform to svg standards;
   # strip both markers, leaving just the coordinate list.
   elem.text = geometry[2:].replace('L ', '')
Пример #32
0
 def sql_inner_where_extra(self, qb, branch_hier, br_allow_deleted,
                           min_acl_id, job_classes=None):
     # Callers never supply job_classes; this subclass pins its own class.
     g.assurt(job_classes is None)
     job_classes = [One.job_class,]
     return merge_job.Many.sql_inner_where_extra(
         self, qb, branch_hier, br_allow_deleted, min_acl_id, job_classes)
Пример #33
0
 def attribute_load(self, ta_def):
     '''Return the existing attribute named by ta_def.attr_source, or None
        when ta_def names no source or the attribute doesn't exist yet.'''
     attr = None
     if ta_def.attr_source:
         # See if the attribute already exists.
         internal_name = ta_def.attr_source
         found = attribute.Many()
         found.search_by_internal_name(internal_name, self.qb_cur)
         if len(found) == 1:
             log.debug('attr_load: %s' % (found[0], ))
             attr = found[0]
         else:
             # Internal names are unique, so zero or one matches.
             g.assurt(len(found) == 0)
     return attr
Пример #34
0
 def attrs_to_dict(self, extra_attrs=None):
     '''Return a dict of this item's non-None attribute values.

        When classes indicate their schema, they also indicate which
        columns should be sent to the client using the [2] slot; that
        flag is ignored here.'''
     g.assurt(False)  # Delete this fcn.?
     attrs = [col_tuple[One.scol_pyname] for col_tuple in self.attr_defns]
     if extra_attrs is not None:
         attrs = attrs + extra_attrs
     return dict((attr, getattr(self, attr))
                 for attr in attrs
                 if getattr(self, attr) is not None)
Пример #35
0
def svg_line_to_xy(svg):
    '''Convert an SVG line string to a list of (x, y) tuples. For example:
      "M 100 200 L 102 205 Z" -> [(100, 200), (102, 205)]'''
    # PostGIS always starts SVG linestrings with 'M '.
    g.assurt(svg[:2] == 'M ')
    # Strip the SVG command letters (and the space following each) so that
    # only whitespace-separated coordinate values remain.
    svg = re.sub(r'[MLZ] ?', '', svg)
    coords = svg.split(' ')
    xys = list()
    # NOTE: Python 2 integer division; an odd number of values silently
    # drops the trailing coordinate.
    for i in xrange(len(coords) / 2):
        xys.append((
            float(coords[i * 2]),
            float(coords[i * 2 + 1]),
        ))
    return xys
Пример #36
0
def send(db, touser, toaddr, subject, body):
    '''Send an e-mail to username touser at address toaddr.

      If toaddr is None, look it up in the database db. Otherwise, send to the
      given toaddr regardless of the address's validity or whether or not the
      user wants e-mail (use with caution!).'''

    # Bug 2717 - Security Problem: Server Blindly Accepts Usernames to Email
    # FIXME: Can we add extra constraints herein so we don't get hacked and
    #        email 100s of people?

    # NOTE: This fcn. is dead code — the assurt below always fires.
    g.assurt(False)  # Deprecated: See: util_.emailer.Emailer.send_email.

    fromaddr = conf.mail_from_addr
    if (toaddr is None):
        # No explicit address: honor the user's stored e-mail preference.
        toaddr = addr_get(db, touser)
        if (toaddr is None):
            log.info("can't send e-mail to %s by prefs" % (touser))
            return
    # version, server_addy and contact_addy are consumed by the message
    # template below via locals().
    version = VERSION.major

    server_addy = conf.server_name
    contact_addy = conf.mail_from_addr

    msg = ('''\
To: %(toaddr)s
From: %(fromaddr)s
Subject: %(subject)s
X-Cyclopath-Flamingo: Yes
X-Cyclopath-User-To: %(touser)s

%(body)s

Thank you,
Cyclopath

This email was automatically sent to you because you are a Cyclopath user and your user preferences indicate that you would like to receive these types of email. You can change your preferences at http://%(server_addy)s or you can email %(contact_addy)s.
--
Sent by Cyclopath server software version %(version)s
''' % locals())
    # FIXME: See where else the server addy is hard-coded and use conf.server_name
    #        instead. Do the same for [email protected].

    g.assurt(False)  # See: Emailer.send_email. And: mail_ok_addrs: ALL_OKAY
    # A mail_ok_addrs of None means every address is acceptable.
    if ((conf.mail_ok_addrs is None) or (toaddr in conf.mail_ok_addrs)):
        server = smtplib.SMTP(conf.mail_host)
        server.sendmail(fromaddr, toaddr, msg)
        server.quit()
        log.info('e-mail sent: %s/%s %s' % (touser, toaddr, subject))
    else:
        log.warning('e-mail suppressed: %s/%s %s' % (touser, toaddr, subject))
Пример #37
0
    def kick_mr_do(qb):
        """Nudge the Mr. Do! jobs daemon over a local socket so it picks up
        newly queued work. Best-effort: socket errors are logged, not raised.
        """

        # Kick the task daemon.
        try:

            # The daemon's port may be overridden per dev checkout via the
            # key_value_pair table; fall back to the configured default.
            mr_do_port_num_key_name = 'mr_do_port_num_%s' % (
                conf.ccp_dev_path, )
            port_num_sql = (
                "SELECT value FROM key_value_pair WHERE key = '%s'" %
                (mr_do_port_num_key_name, ))
            rows = qb.db.sql(port_num_sql)
            if len(rows) == 1:
                jobs_queue_port = int(rows[0]['value'])
            else:
                # key is unique, so only 0 or 1 rows are possible.
                g.assurt(len(rows) == 0)
                jobs_queue_port = conf.jobs_queue_port

            log.debug('kick_mr_do_: kicking task daemon on port: %d' %
                      (jobs_queue_port, ))

            # Open connection
            sock = socket.socket()
            sock.connect((
                'localhost',
                jobs_queue_port,
            ))
            sockf = sock.makefile('r+')

            # Write commands
            sockf.write('kick\n')
            sockf.flush()

            # Read XML response
            byte_count_str = sockf.readline().rstrip()
            log.debug('routed_fetch: kicked! byte_count_str: %s' %
                      (byte_count_str, ))
            # FIXME: Check for error?
            #if (byte_count_str == ''):
            #   raise GWIS_Error(Op_Handler.error_message
            #                    % ('No response from', '',))

            # Close connection (must close both to avoid "Connection reset by
            # peer" on server).
            sockf.close()
            sock.close()

        except socket.error, e:
            err_s = 'There was a problem kicking the jobs daemon: %s' % (
                str(e), )
            #raise GWIS_Error(err_s)
            log.warning(err_s)
Пример #38
0
 def from_gml_defn(self, elem, defn, attr_synonyms, req_code, pass_number):
     """Read one attribute off GML element elem per its column defn.

     Each name in attr_synonyms is tried in order. If req_code is None,
     the attribute must not appear in the input at all (pass 1 raises
     GWIS_Error if it does). Otherwise, on pass 1 the value is parsed
     via the defn's intype (or the default is substituted) and set on
     self; on pass 2 only presence is checked.

     Returns True if the attribute was found on elem.
     Raises GWIS_Error on illegal or unparseable input.
     """
     found_attr = False
     attr_value = None
     for attr_name in attr_synonyms:
         # If required is None or not set, it means we shouldn't expect this
         # attribute. In fact, if it's set, kvetch.
         if req_code is None:
             if pass_number == 1:
                 attr_value = elem.get(attr_name, None)
                 if attr_value is not None:
                     found_attr = True  # A no-op, since we're about to raise...
                     raise GWIS_Error('Illegal input attr: "%s".' %
                                      (attr_name, ))
             # else, pass_number is 2, so we're just checking for missing
             #       mandatory create attrs.
         else:
             # Otherwise, look for the attribute.
             attr_value = elem.get(attr_name, None)
             if pass_number == 1:
                 if attr_value is not None:
                     if defn[One.scol_intype] == bool:
                         # Parse via int() so "0" maps to False.
                         attr_value = bool(int(attr_value))
                     else:
                         try:
                             g.assurt(defn[One.scol_intype] is not None)
                             attr_value = defn[One.scol_intype](attr_value)
                         except ValueError:
                             # Is this a programmer error or could malformed XML
                             # cause this? I'm think malformed XML could... so we
                             # should raise an error.
                             raise GWIS_Error(
                                 'Bad Attr Type: attr: %s / value: %s / type: %s'
                                 % (
                                     attr_name,
                                     attr_value,
                                     defn[One.scol_intype],
                                 ))
                     found_attr = True
                 else:
                     # We could leave whatever value is already set, but this
                     # fcn. is destructive (i.e., it's not an update).
                     attr_value = defn[One.scol_dfault]
                 setattr(self, defn[One.scol_pyname], attr_value)
             else:
                 g.assurt(pass_number == 2)
                 # Just see if the attr exists.
                 if attr_value is not None:
                     found_attr = True
             if found_attr:
                 break
     return found_attr
Пример #39
0
   def route_steps_update(self, rid, reverted_revs):
      """Copy route_step and route_stop rows forward for routes touched by
      reverting revisions reverted_revs (rid is the new revision).

      For each route row created at rid: if the route is not deleted, the
      steps/stops of its newest unreverted version are copied to the new
      version; if deleted, the immediately prior version is the source.
      """
      # fetch route ids and deleted status from rid
      res = self.req.db.sql("""SELECT id, deleted, version FROM route
                               WHERE valid_starting_rid = %d""" % rid)
      # The revision just before the earliest reverted one.
      rev_last_unreverted = min(reverted_revs) - 1

      for route in res:
         route_id = route['id']
         dst_version = route['version']

         if (not route['deleted']):
            # get the oldest unreverted version of the route, since that
            # was the source of this route
            v_rows = self.req.db.sql("""
SELECT version FROM route
WHERE id = %(route_id)d
      AND valid_before_rid > %(rev_last_unreverted)d
      AND valid_starting_rid <= %(rev_last_unreverted)d""" % (locals()))
            src_version = v_rows[0]['version']
         else:
            # route represents the deletion of a route created in the
            # reverted revisions, so use the version just before this route
            src_version = dst_version - 1
            g.assurt(src_version >= 1)

         # copy all route_steps from src_version to dst_version
         route_step_cols = self.req.db.table_columns('route_step')
         route_step_cols.remove('route_version')
         cstr = ','.join(route_step_cols)

         self.req.db.sql("""
INSERT INTO route_step (route_version, %(cstr)s)
SELECT %(dst_version)d as route_version,
       %(cstr)s
FROM route_step
WHERE route_id = %(route_id)d
      AND route_version = %(src_version)d""" % (locals()))

# FIXME: This seems very simplistic... just copying rows?
         # Copy all route_stops from src_version to dst_version.
         route_stop_cols = self.req.db.table_columns('route_stop')
         route_stop_cols.remove('route_version')
         cstr = ','.join(route_stop_cols)

         self.req.db.sql("""
INSERT INTO route_stop (route_version, %(cstr)s)
SELECT %(dst_version)d as route_version,
       %(cstr)s
FROM route_stop
WHERE route_id = %(route_id)d
      AND route_version = %(src_version)d""" % (locals()))
Пример #40
0
    def repair_node_ids(qb, route_steps):
        """Fill in missing beg/fin node IDs along a sequence of route steps.

        Null IDs are replaced with fresh negative client IDs, and known IDs
        are propagated backwards onto the previous step's matching endpoint
        so consecutive steps stay connected. Mutates route_steps in place.
        """
        # Fix all beg_node_id and fin_node_id in the sequence of steps to remove
        # any null ids. Null ids are replaced with consecutive negative ids
        # consistent with the ordering of the steps.
        # 2014.04.29: Could someone please explain why this fcn exists:
        #             "Repair" means something broke. What?

        for step_i in xrange(len(route_steps)):
            step = route_steps[step_i]
            if not step.beg_node_id:
                # get a new beg_node_id
                if (step.forward) and (step_i > 0):
                    # a previous step should have a node id already
                    # (forward steps begin where the previous step ended).
                    prev = route_steps[step_i - 1]
                    step.beg_node_id = (prev.fin_node_id
                                        if prev.forward else prev.beg_node_id)
                else:
                    # We still didn't find a matching route stop, so assign a new
                    # client ID.
                    step.beg_node_id = qb.item_mgr.get_next_client_id()
            else:
                # push beg_node_id onto previous step to ensure connectivity
                # (only overwrite placeholder negative client IDs).
                if (step.forward) and (step_i > 0):
                    prev = route_steps[step_i - 1]
                    if (prev.forward and prev.fin_node_id < 0):
                        prev.fin_node_id = step.beg_node_id
                    elif (not prev.forward and prev.beg_node_id < 0):
                        prev.beg_node_id = step.beg_node_id

            if not step.fin_node_id:
                # get a new fin_node_id
                if (not step.forward) and (step_i > 0):
                    # a previous step should have a node id already
                    prev = route_steps[step_i - 1]
                    step.fin_node_id = (prev.fin_node_id
                                        if prev.forward else prev.beg_node_id)
                else:
                    # We didn't find a matching route stop, so just assign an id.
                    step.fin_node_id = qb.item_mgr.get_next_client_id()
            else:
                # push fin_node_id onto previous step to ensure connectivity
                if (not step.forward) and (step_i > 0):
                    prev = route_steps[step_i - 1]
                    if (prev.forward) and (prev.fin_node_id < 0):
                        prev.fin_node_id = step.fin_node_id
                    elif (not prev.forward) and (prev.beg_node_id < 0):
                        prev.beg_node_id = step.fin_node_id

            # Every step must leave here with both endpoints assigned.
            g.assurt(step.beg_node_id)
            g.assurt(step.fin_node_id)
Пример #41
0
    def resolve_item_sid(qb, item_name_or_id, item_type):
        """Resolve an item name to its stack ID.

        Currently only attributes are supported, and only lookup by their
        special internal name. Returns None for a falsy input.
        """
        item_stack_id = None

        if item_name_or_id:

            # For now, we only resolve attributes by their special name.
            # MAYBE: Use stack ID or name to find item of any item type.
            g.assurt(item_type == attribute.One.item_type_table)
            g.assurt(isinstance(item_name_or_id, basestring))

            matches = attribute.Many()
            matches.search_by_internal_name(item_name_or_id, qb)
            if len(matches) == 1:
                log.verbose('resolve_item_sid: found attribute: %s' %
                            (matches[0],))
                item_stack_id = matches[0].stack_id
            else:
                g.assurt(len(matches) == 0)
                log.error('resolve_item_sid: unknown attribute: %s' %
                          (item_name_or_id,))
                # We can assert because we're only run via a dev script.
                g.assurt(False)

        return item_stack_id
Пример #42
0
 def __init__(self, qb=None, row=None, req=None, copy_from=None):
    # Copy-construction is not supported for this class.
    g.assurt(copy_from is None)
    work_item.One.__init__(self, qb, row, req, copy_from)
    self.job_class = 'route_analysis_job'
    self.job_fcn = (
       'route_analysis.route_analysis:Route_Analysis:process_request')
    # If the user passed just one region's name and tag, shift it into the
    # first slot so downstream code can rely on slot one being populated.
    first_slot_empty = (not self.regions_ep_name_1
                        and not self.regions_ep_tag_1)
    second_slot_used = (self.regions_ep_name_2 or self.regions_ep_tag_2)
    if first_slot_empty and second_slot_used:
       self.regions_ep_name_1 = self.regions_ep_name_2
       self.regions_ep_name_2 = ''
       self.regions_ep_tag_1 = self.regions_ep_tag_2
       self.regions_ep_tag_2 = ''
Пример #43
0
 def get_zipname(self):
     """Return the zipfile basename for this job's class.

     This is really hacky: merge_job_download ultimately derives from
     work_item, so it is kind of a work_item type but incomplete — it
     knows nothing about merge_job, merge_import_job, or merge_export_job.
     """
     if self.job_class == 'merge_import_job':
         # E.g., = 'Cyclopath-Import'.
         return merge_import_job.One.merge_job_zipname
     # Only two job classes are possible here.
     g.assurt(self.job_class == 'merge_export_job')
     # E.g., = 'Cyclopath-Export'.
     return merge_export_job.One.merge_job_zipname
Пример #44
0
   def geocode_microsoft_parse_try(addr):
      '''Try to parse address using the Microsoft MapPoint service.'''

      # NOTE: Dead code — the assurt below always fires before the request
      # is built (not updated for CcpV2).

      mp_ns = "http://s.mappoint.net/mappoint-30/"
      mp_url = "http://findv3.staging.mappoint.net/Find-30/FindService.asmx"
      # SOAP envelope template; inputAddress is filled in after parsing.
      data = (
         '''
         <?xml version="1.0" encoding="UTF-8"?>
         <soap:Envelope
            xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
            xmlns:soapenc="http://schemas.xmlsoap.org/soap/encoding/"
            xmlns:xsd="http://www.w3.org/2001/XMLSchema"
            soap:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/"
            xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
            <soap:Header>
               <UserInfoHeader xmlns="%(mp_ns)s">
                  <DefaultDistanceUnit>km</DefaultDistanceUnit>
               </UserInfoHeader>
            </soap:Header>
            <soap:Body>
               <ParseAddress xmlns="%(mp_ns)s">
                  <inputAddress></inputAddress>
                  <countryRegion>United States</countryRegion>
               </ParseAddress>
            </soap:Body>
         </soap:Envelope>
         ''' % locals())

      g.assurt(False) # Not used; not updated to CcpV2

      soap_out = etree.XML(data)

      # Inject the caller's address into the envelope.
      Geocode.xfind(soap_out, 'inputAddress', mp_ns).text = addr

      req = urllib2.Request(mp_url)
      req.add_data(etree.tostring(soap_out, pretty_print=True))
      req.add_header('Content-Type',
                     'text/xml; charset=utf-8')
      req.add_header('SOAPAction',
                     'http://s.mappoint.net/mappoint-30/ParseAddress')
      auth = urllib2.HTTPDigestAuthHandler()
      auth.add_password('MapPoint',
                        'findv3.staging.mappoint.net',
                        conf.mappoint_user,
                        conf.mappoint_password)
      try:
         urllib2.install_opener(urllib2.build_opener(auth))
         resp = misc.urllib2_urlopen_readall(req)
      except urllib2.HTTPError, e:
         raise GWIS_Error(Geocode.error_msg_template % (str(e),))
Пример #45
0
 def least_of(levels):
     """Return the least-privileged valid access level in levels.

     Falls back to Access_Level.denied when levels is empty (or yields
     no valid first entry).
     """
     level = Access_Level.invalid
     for candidate in levels:
         if level == Access_Level.invalid:
             # First entry seeds the running minimum; it must be valid.
             g.assurt(Access_Level.is_valid(candidate))
             level = candidate
         elif Access_Level.is_same_or_less_privileged(candidate, level):
             level = candidate
             if level == Access_Level.denied:
                 # Nothing is less privileged than denied; stop early.
                 break
     if level == Access_Level.invalid:
         level = Access_Level.denied
     g.assurt(Access_Level.is_valid(level))
     return level
Пример #46
0
    def gtfs_download(self):
        """Fetch the GTFS transit feed via wget into self.dname_gtfsdb.

        Sets self.tfeed_not_retrieved by scanning wget's output against
        self.regex_not_retrieved (e.g., when the remote file is unchanged
        or the fetch failed).
        """

        # E.g., wget -N --directory-prefix=$ccp/var/transit/metc/ \
        #            ftp://gisftp.metc.state.mn.us/google_transit.zip

        time_0 = time.time()

        local_dir = self.dname_gtfsdb
        remote_file = conf.transit_db_source
        g.assurt(remote_file)

        # FIXME: Instead of using wget, use internal Python functions?
        # -N: only re-download when the remote file is newer.
        the_cmd = ('wget -N -P %s %s' % (
            local_dir,
            remote_file,
        ))

        log.debug('gtfs_download: downloading: %s' % (the_cmd, ))
        p = subprocess.Popen(
            [the_cmd],
            shell=True,
            # bufsize=bufsize,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            close_fds=True)
        (sin, sout_err) = (p.stdin, p.stdout)

        # Scan wget's combined stdout/stderr for any "not retrieved" marker.
        self.tfeed_not_retrieved = False
        while not self.tfeed_not_retrieved:
            line = sout_err.readline()
            if not line:
                break
            else:
                line = line.strip()
                #log.debug(line)
                for regex in self.regex_not_retrieved:
                    if regex.search(line):
                        self.tfeed_not_retrieved = True
                        break
                if self.tfeed_not_retrieved:
                    break
        sin.close()
        sout_err.close()
        p.wait()

        log.debug('gtfs_download: %s in: %s' % (
            'downloaded' if not self.tfeed_not_retrieved else 'not retrieved',
            misc.time_format_elapsed(time_0),
        ))
Пример #47
0
 def field_val_setup_all(self, old_byway, context):
     """Populate export-layer field values for old_byway; return the feature."""
     export_layer = self.target_layers_final[Export_Base.tlyr_export]
     try:
         dst_feat = self.defs.field_val_setup_all(
             export_layer,
             old_byway,
             None,   # src_feat
             context,
             False,  # just_copy_from
             bad_geom_okay=True)
     except Feat_Skipped:
         # With bad_geom_okay=True, a skip here is unexpected.
         g.assurt(False)
     return dst_feat
Пример #48
0
 def as_sql_where(self, rev, alias):
     'Return an SQL WHERE snippet representing the include and exclude rects.'
     geom_col = Query_Base.table_name_prefixed(alias) + "geometry"
     where_clause = ""
     if self.include is not None:
         where_clause = self.include.sql_intersect(geom_col)
     if self.exclude is not None:
         # An exclude rect only makes sense alongside an include rect.
         g.assurt(self.include is not None)
         where_clause = ("(%s) AND NOT (%s)"
                         % (where_clause,
                            self.exclude.sql_intersect(geom_col),))
     return where_clause
Пример #49
0
 def get_system_attr(qb, internal_name):
     """Fetch the system attribute named internal_name, or None if absent."""
     the_attr = None
     found = Many()
     found.search_by_internal_name(internal_name, qb)
     if len(found) == 1:
         the_attr = found[0]
         log.debug('get_sys_attr: %s' % (the_attr.friendly_name(),))
         log.debug('              %s' % (the_attr.__str_deets__(),))
         #log.debug('get_system_attr: found attribute: %s' % (the_attr.name,))
         g.assurt(Item_Type.ATTRIBUTE == the_attr.item_type_id)
     else:
         # Internal names are unique, so only 0 or 1 hits are possible.
         g.assurt(len(found) == 0)
         # It is expected that callers can handle the attribute not existing.
     return the_attr
Пример #50
0
def timestamp_parse(ts, offset=None):
   '''Parse ts, which is in the form "11/Jan/2009:09:04:12", and return a UNIX
      timestamp (integer seconds since the epoch). Assume that ts is in UTC,
      unless offset is given, in which case use that as the offset from UTC
      (e.g. CST is "-0600").'''
   import calendar
   # BUG FIX: time.mktime interprets a struct_time in the server's *local*
   # zone, contradicting the documented assumption that ts is UTC (results
   # varied with the host TZ). calendar.timegm treats it as UTC.
   t = calendar.timegm(time.strptime(ts, '%d/%b/%Y:%H:%M:%S'))
   if offset is not None:
      m = RE_ts_offset.search(offset)
      if m is None:
         raise Exception('invalid time zone offset %s' % (offset,))
      g.assurt(m.group('minute') == '00') # nonzero minute offsets unsupported
      # Convert the offset to seconds and apply it in the right direction.
      t += -1 * 3600 * int(m.group('hour'))
   return int(t)
Пример #51
0
   def byways_get_many_by_id(self, fts_lookup, prog_log, processing_fcn):
      """Load source byways by stack ID and feed each to processing_fcn.

      fts_lookup is either a dict keyed by stack ID or a plain list of
      stack IDs. prog_log may trim the ID list for debugging runs.
      """

      # Make a list of IDs to lookup. We can use the SELECT statment's WHERE
      # clause or JOIN clause. The latter is probably preferrable.

      # 2012.07.01: substage_cleanup being called early?
      g.assurt(self.qb_src is not None)

      # The source query must not already carry a stack-ID restriction,
      # whichever filtering mechanism is configured.
      if Import_Geowiki.filter_using_where_in:
         g.assurt(len(self.qb_src.filters.only_stack_ids) == 0)
      else:
         g.assurt(not self.qb_src.filters.stack_id_table_ref)

      if isinstance(fts_lookup, dict):
         id_list = fts_lookup.keys()
      else:
         g.assurt(isinstance(fts_lookup, list))
         id_list = fts_lookup

      # Trim the stack ID list if we are de-veloping/bugging.
      if prog_log.debug_break_loops:
         max_ids = prog_log.log_freq * prog_log.debug_break_loop_cnt
         beg_i = prog_log.debug_break_loop_off
         fin_i = beg_i + max_ids
         id_list = id_list[beg_i:fin_i]

      if id_list:
         self.byways_get_many_by_id_(id_list, prog_log, processing_fcn)
      else:
         log.info('byways_get_many_by_id: no stack IDs; nothing to load.')
Пример #52
0
    def shapefile_release_targets(self):
        """Flush and close the target shapefile layers and data source.

        Syncs every layer to disk (twice, deliberately), clears
        target_layers_final, then releases the OGR data source and driver
        handles so the files are fully written.
        """

        log.info('Saving target shapefile layers...')

        # We could iterate through the individual layers in target_layers_final
        # or we could just tell the data source to save. But because I'm [lb]
        # O.C.D., let's do both.

        g.assurt(self.outp_datasrc is not None)
        g.assurt(self.file_driver is not None)

        layer_nums = xrange(self.outp_datasrc.GetLayerCount())
        for layer_i in layer_nums:
            layer = self.outp_datasrc.GetLayer(layer_i)
            #lname = layer.GetName()
            layer.SyncToDisk()

        # This does the exact same thing.
        for lname, layer in self.target_layers_final.iteritems():
            layer.SyncToDisk()

        self.target_layers_final = {}

        # Temp layers should have been dealt with before final release.
        g.assurt(not self.target_layers_temp)
        g.assurt(not self.target_layers_temp_names)

        self.outp_datasrc.SyncToDisk()

        log.debug('shapefile_release_targets: Closing target data source.')

        self.outp_datasrc.Release()
        self.outp_datasrc = None

        # The C OGR supports self.file_driver.Release() but not the Py OGR.
        self.file_driver = None
Пример #53
0
    def attrs_define_string(
        self,
        attr_source='',
        #attr_type=str,
        byway_source='',
        field_target='',
        #field_type=ogr.OFTString,
        field_source='',

        # FIXME: What's the OGR default? I think it's 50.
        field_width=50,
        field_clone=False,
        val_callback=None,
        attr_edit=True,
        deleted_on=None,
        comparable=False,
        stackable=False,
        settable=False,
        cmp_ignore_empties=False,
        by_branch='',
    ):
        """Register a string-typed attribute/field mapping (TA_Defn).

        Builds a TA_Defn with attr_type=str / field_type=ogr.OFTString,
        appends it to both attrs_by_branch[by_branch] and attrs_metadata,
        and returns it. field_target is required. deleted_on defaults to
        ('', None): the attr is considered deleted when empty or None.
        """
        # deleted_on = ('', None,) is implied
        g.assurt(field_target)
        if deleted_on is None:
            # Delete attr if string is empty or None.
            deleted_on = (
                '',
                None,
            )
        ta_def = Branch_Defs_Base.TA_Defn(
            attr_source=attr_source,  #
            attr_type=str,  #
            byway_source=byway_source,
            field_target=field_target,
            field_type=ogr.OFTString,  #
            field_source=field_source,
            field_width=field_width,
            field_clone=field_clone,
            val_callback=val_callback,
            attr_edit=attr_edit,
            deleted_on=deleted_on,
            comparable=comparable,
            stackable=stackable,
            settable=settable,
            cmp_ignore_empties=cmp_ignore_empties,
        )
        self.attrs_by_branch[by_branch].append(ta_def)
        self.attrs_metadata.append(ta_def)
        return ta_def
Пример #54
0
    def append_gpx(self, db, elem, step_number):
        """Append this route step's points to GPX element elem as trkpt nodes.

        Re-queries the step geometry transformed to lat/lon (conf.srid_latlon)
        — byway geometry for bicycle steps, transit_geometry otherwise — and
        reverses the point order when the step runs against the byway
        direction.
        """

        # NOTE: we must re-query the geometry so that we can transform it into
        # WGS84 for the gpx spec
        # FIXME: EXPLAIN: Do we care about permissions here?
        # FIXME: If we know we're exporting to GPX, can't we do this on fetch?
        if self.travel_mode == Travel_Mode.bicycle:
            rows = db.sql("""
            SELECT
               ST_AsText(ST_Transform(geometry, %d)) as geometry
            FROM 
               geofeature
            WHERE
               system_id = %d
            """ % (
                conf.srid_latlon,
                self.byway_id,
            ))
        else:
            # Non-bicycle (transit) steps address geometry by step number.
            g.assurt(self.step_number)
            rows = db.sql("""
            SELECT
               ST_AsText(ST_Transform(transit_geometry, %d)) as geometry
            FROM 
               route_step
            WHERE
               route_id = %d 
               AND step_number = %d
            """ % (
                conf.srid_latlon,
                self.route_id,
                self.step_number,
            ))

        wgs_xy = geometry.wkt_line_to_xy(rows[0]['geometry'])
        if not self.forward:
            wgs_xy.reverse()

        for lonlat in wgs_xy:
            # Parsed pair is [longitude, latitude]
            # FIXME: Search this element name. Why 'trk'? Because GPX?
            new = etree.Element('trkpt')
            misc.xa_set(new, 'lat', lonlat[1])
            misc.xa_set(new, 'lon', lonlat[0])

            name = etree.SubElement(new, 'name')
            name.text = self.step_name

            elem.append(new)
Пример #55
0
   def get_style_bike_facility(self, args, extra_parms=''):

      g.assurt(not 'extra_parms' in args)
      args['extra_parms'] = extra_parms

      style_text = (
"""
         STYLE
            COLOR %(dashon_color)s
            WIDTH %(pen_width)s%(extra_parms)s
         END""") % args

      del args['extra_parms']

      return style_text
Пример #56
0
 def attr_defns_assemble(base_class, addlocal_defns):
     """Merge base_class's One.attr_defns with addlocal_defns.

     A local tuple overrides (replaces) a parent tuple of the same name;
     names within addlocal_defns itself must be unique.
     """
     local_names = [defn[0] for defn in addlocal_defns]
     unique_names = set(local_names)
     # Don't let a class use the same name twice in its own local_defns.
     g.assurt(len(local_names) == len(unique_names))
     # Keep only the parent tuples not overridden locally...
     attr_defns = [defn for defn in base_class.One.attr_defns
                   if defn[0] not in unique_names]
     # ...then append the descendant's tuples.
     attr_defns += addlocal_defns
     return attr_defns
Пример #57
0
    def get_tile_skin(self, skin_name, skin_path):
        """Load the skins.<skin_name> module and return its skin object.

        NOTE: skin_path is currently unused — the source_path local it fed
        was dead code (computed, never read) and has been removed; the
        parameter is kept so existing callers keep working.
        """

        # This is a little bit C.f. tilecache_update.get_tile_skin.

        module_path = ('skins.%s' % (skin_name, ))
        skin_module = Mod_Loader.load_package_module(module_path)

        tile_skin = skin_module.get_skin()
        g.assurt(tile_skin is not None)

        log.debug('Loaded skin: %s' % (skin_name, ))

        return tile_skin
Пример #58
0
 def job_cleanup(self):
     """Finalize the job's status: mark it 'complete' unless a final
     status (failed/aborted/canceled/suspended) was already recorded."""
     # If the job failed, it's marked as such, otherwise, it's still marked
     # 'working'.
     if (self.wtem.latest_step.status_text in Job_Status.finished_statuses):
         # If we caught an error, we marked the job failed. If Mr. Do! is being
         # shutdown, we marked 'aborted'. If the user clicked Cancel in the
         # client, we mark 'canceled' (or 'suspended' for Suspend). So we only
         # won't have marked 'complete' if we're here
         g.assurt(self.wtem.latest_step.status_text != 'complete')
         pass  # Already in a final state.
     else:
         log.debug('job_cleanup: marking complete.')
         # Anything not finished must still be 'working'.
         g.assurt(self.wtem.latest_step.status_text == 'working')
         qb = None
         self.mr_do.stage_create_next(qb, self.wtem, 'complete')
Пример #59
0
 def byway_transition_cost(self, state):
     """Return the turn penalty (seconds) for entering this byway from
     state.prev_edge, keyed off the counter-clockwise turn angle."""
     # FIXME: Change routing_penalty_* in CONFIG: was meters, now seconds
     # FIXME: These seem too costly? Maybe don't apply to short byways? Or
     #        devise a mechanism to know if the vertex is really an
     #        intersection, and what kind of traffic controls exist.
     turn_angle = geometry.rotation_ccw(state.prev_edge.dir_exit,
                                        self.dir_entry)
     if turn_angle > Payload_Byway.PI_EIGHTH_POSITIVE:
         return 22.2  # secs. # conf.routing_penalty_left # 100 (meters) / 4.5 m/s (10mph)
     if turn_angle > Payload_Byway.PI_EIGHTH_NEGATIVE:
         return 4.44  # secs. # conf.routing_penalty_straight # 20 (meters)
     g.assurt(turn_angle >= Payload_Byway.PI_NEGATIVE)
     # FIXME: Why is a right more costly than forward? Because you slow
     # down?
     return 8.88  # secs. # conf.routing_penalty_right # 40 (meters)
Пример #60
0
 def save(self, qb, rid):
     """Save this attribute at revision rid and refresh item_mgr's caches."""
     attachment.One.save(self, qb, rid)
     # Tell item_mgr we've changed. This might mean we're a new version of
     # an existing item, so the old version is in the lookup, or this might
     # mean we're a new item altogether that doesn't exist in the lookups.
     if qb.item_mgr.loaded_cache:
         g.assurt(qb.item_mgr.cache_attrs is not None)
         qb.item_mgr.cache_attrs[self.stack_id] = self
         qb.item_mgr.cache_attrnames[self.value_internal_name] = self
         qb.item_mgr.attr_and_tag_ids.add(self.stack_id)
     else:
         # 2012.08.14: Remove this warning, maybe; [lb] doesn't think this is
         # an error but he's curious if/when this path happens.
         log.warning(
             'What code path is this? Save but not item_mgr.loaded_cache.')