Example 1
def setup(opts={}, args=[], conf_path=None):
    conf_root = None
    conf_over = {}

    # Process command line
    if hasattr(opts, 'option'):
        for o in opts.option:
            # split only on the first '=' so values may themselves contain one
            p = o.split('=', 1)
            if len(p) == 2:
                conf_over[p[0]] = p[1]
    if hasattr(opts, 'days') and opts.days is not None:
        conf_over['days'] = opts.days
    if hasattr(opts, 'formatter') and opts.formatter is not None:
        conf_over['formatter'] = opts.formatter
    if hasattr(opts, 'grabber') and opts.grabber is not None:
        conf_over['grabber'] = opts.grabber
    if hasattr(opts, 'debug') and opts.debug is not None:
        conf_over['debug_level'] = opts.debug
    if hasattr(opts, 'logpath') and opts.logpath is not None:
        conf_over['log_path'] = opts.logpath
    if hasattr(opts, 'syslog') and opts.syslog is not None:
        conf_over['syslog'] = opts.syslog
    if hasattr(opts, 'config') and opts.config is not None:
        conf_path = opts.config
    if hasattr(opts, 'confdir') and opts.confdir is not None:
        conf_root = opts.confdir

    # Defaults
    if conf_root is None:
        conf_root = os.path.expanduser('~/.pyepg')
    if conf_path is None:
        conf_path = os.path.join(conf_root, 'config')
    cache_path = os.path.join(conf_root, 'cache')

    # Load configuration
    conf.init(conf_path, conf_over)

    # Initialise log
    log.init(conf.get('log_path', None), conf.get('syslog', False),
             conf.get('debug_level', -1))

    # Initialise the cache
    cache.init(cache_path)
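
A minimal usage sketch, assuming setup() is driven by an optparse command line whose attribute names match the hasattr() checks above; the parser below is illustrative, not PyEPG's actual CLI:

# Hypothetical driver: optparse stores each option as an attribute on the
# returned values object, which is exactly what setup() probes with hasattr().
from optparse import OptionParser

parser = OptionParser()
parser.add_option('--days', type='int', default=None)
parser.add_option('--formatter', default=None)
parser.add_option('--grabber', default=None)
parser.add_option('--debug', type='int', default=None)
parser.add_option('--config', default=None)
parser.add_option('--confdir', default=None)
parser.add_option('--option', action='append', default=[],
                  help='key=value configuration override')
opts, args = parser.parse_args()

# Command-line overrides beat the config file, which beats the built-in
# defaults under ~/.pyepg
setup(opts, args)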
Example 2
def get_channels(package=None):

    # Get defaults
    if package is None: package = get_package()

    # Map channels
    channels = [Channel(c) for c in conf.get('channel[]', [])]
    if package:
        channels = package.channels(channels)

    return channels
Example 3
def configure():
    print ''
    print 'Atlas Configuration'
    print '-' * 60

    # API key
    apikey = conf.get('atlas_apikey', '')
    print ''
    print 'API Key [%s]: ' % apikey,
    apikey = sys.stdin.readline().strip()
    if apikey: conf.set('atlas_apikey', apikey)

    # Publishers to be used
    p_pubs = ['bbc.co.uk', 'five.tv', 'channel4.com', 'itv.com', 'tvblob.com']
    s_pubs = ['pressassociation.com']
    conf.set('atlas_primary_publishers',
             conf.get('atlas_primary_publishers', p_pubs))
    conf.set('atlas_secondary_publishers',
             conf.get('atlas_secondary_publishers', s_pubs))

    # Hidden settings
    conf.set('atlas_channel_chunk', conf.get('atlas_channel_chunk', 32))
    conf.set('atlas_time_chunk', conf.get('atlas_time_chunk', 86400))
Example 4
def publisher_overlay(a, b, pubs):
    ignore_keys = conf.get('atlas_overlay_ignore', ['uri'])
    pa = a['publisher']['key']
    pb = b['publisher']['key']

    # Rank each publisher by its position in the priority list
    # (unknown publishers rank lowest)
    ia = -1
    ib = -1
    try:
        ia = pubs.index(pa)
    except ValueError:
        pass
    try:
        ib = pubs.index(pb)
    except ValueError:
        pass

    def _overlay(a, b):
        if isinstance(b, dict):
            for k in b:
                if k not in a:
                    a[k] = b[k]
                elif k not in ignore_keys:
                    a[k] = _overlay(a[k], b[k])
            return a
        elif isinstance(b, list):
            for i in range(len(b)):
                if i < len(a):
                    a[i] = _overlay(a[i], b[i])
                else:
                    a.append(b[i])
            return a
        else:
            return b

    # Ensure b is the higher-priority publisher: _overlay() lets b's
    # values win any scalar conflicts
    if ib < ia:
        a, b = b, a
    args = (a['uri'],
            a['broadcasts'][0]['transmission_time'].strftime('%H:%M'),
            a['broadcasts'][0]['transmission_end_time'].strftime('%H:%M'),
            b['uri'],
            b['broadcasts'][0]['transmission_time'].strftime('%H:%M'),
            b['broadcasts'][0]['transmission_end_time'].strftime('%H:%M'))
    log.debug('overlay %s @ %s-%s with %s @ %s-%s' % args, 6)
    return _overlay(a, b)
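
The inner _overlay() is the heart of this function: after the swap, b is the higher-priority publisher, so b's scalar values replace a's, keys missing from a are filled in, and ignored keys (plus keys only present in a) keep a's value. A standalone paraphrase on plain dicts (names here are illustrative):

# Paraphrase of the _overlay() rule: 'top' is the higher-priority side.
def overlay(base, top, ignore=('uri',)):
    if isinstance(top, dict):
        for k in top:
            if k not in base:
                base[k] = top[k]          # fill in missing key
            elif k not in ignore:
                base[k] = overlay(base[k], top[k], ignore)
        return base
    if isinstance(top, list):
        for i in range(len(top)):
            if i < len(base):
                base[i] = overlay(base[i], top[i], ignore)
            else:
                base.append(top[i])       # extend the shorter list
        return base
    return top                            # scalar conflict: top wins

a = {'uri': 'pa://1', 'title': 'Old', 'people': ['x']}
b = {'uri': 'bbc://1', 'title': 'New', 'synopsis': 's', 'people': ['x', 'y']}
print overlay(a, b)
# title replaced, synopsis added, people extended; uri keeps the base value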
Example 5
def grab(opts, args):

    # Initialise EPG
    epg = EPG()

    # Get config
    days = conf.get('days', 7)
    today = datetime.datetime.today()

    # Get grabber/formatter
    grabber = get_grabber()
    formatter = get_formatter()

    # Channels
    channels = get_channels()

    # Get EPG
    log.info('grabbing EPG for %d days' % days)
    grabber.grab(epg, channels, today, today + datetime.timedelta(days=days))

    # Attempt to deal with missing +N channels
    fix_plus_n(epg, channels)

    # Finish the EPG (will tidy it up)
    epg.finish()

    # Output
    formatter.format(epg, sys.stdout)

    # Stats
    log.info('')
    log.info('Statistics:')
    log.info('--------------------------------------')
    log.info('Channel  Count: %d' % len(epg.get_channels()))
    log.info('Brand    Count: %d' % len(epg.get_brands()))
    log.info('Series   Count: %d' % len(epg.get_series()))
    log.info('Episode  Count: %d' % len(epg.get_episodes()))
    log.info('Schedule Count: %d' % epg.get_sched_count())
Example 6
def _get_file(name, ttl=None):
    import time
    log.debug('cache: get file %s' % name, 3)
    ok = False
    data = None
    meta = None
    valid = False
    path = os.path.join(CACHE_PATH, name)

    # Default TTL
    if ttl is None: ttl = conf.get('default_cache_ttl', 7 * 86400)

    # Check age
    if os.path.exists(path) and os.path.exists(path + '.meta'):
        log.debug('cache: %s in cache' % name, 4)
        st = os.stat(path)
        # the .meta sidecar stores a repr()'d dict (including an 'md5' key);
        # eval() turns it back into a dict
        meta = eval(open(path + '.meta').read())
        data = open(path).read()

        # OK
        if (st.st_mtime + ttl) > time.time():
            log.debug('cache: %s ttl ok' % name, 4)
            ok = True

        # TTL passed
        else:
            log.debug('cache: %s ttl expired' % name, 4)

        # Validate
        if 'md5' in meta and meta['md5'] == md5(data):
            log.debug('cache: %s md5 ok' % name, 4)
            valid = True
        else:
            log.debug('cache: %s md5 mismatch' % name, 4)

    # Return data
    return (data, meta, ok, valid)
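
The four-element return value separates freshness from integrity, so a caller can prefer stale-but-valid data over nothing. A hedged sketch of a consumer (use() and refetch() are hypothetical helpers, not part of PyEPG's cache module):

data, meta, ok, valid = _get_file('atlas/schedule.json')
if ok and valid:
    use(data)                                  # fresh and checksum-verified
elif valid:
    fresh = refetch('atlas/schedule.json')     # TTL expired: try the network
    use(fresh if fresh is not None else data)  # stale data beats no data
else:
    use(refetch('atlas/schedule.json'))        # missing or corrupt: refetch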
Example 7
def grab(epg, channels, start, stop):
    import multiprocessing as mp

    # Filter the channel list (only include those we have listing for)
    channels = filter_channels(channels)
    days = util.total_seconds(stop - start) / 86400
    channels = sorted(channels, key=lambda c: c.number)
    log.info('atlas - epg grab %d channels for %d days' %
             (len(channels), days))

    # Config
    grab_thread_cnt = conf.get('atlas_grab_threads', 32)
    data_thread_cnt = conf.get('atlas_data_threads', 0)
    if grab_thread_cnt <= 0:
        grab_thread_cnt = len(channels)
    if data_thread_cnt <= 0:
        data_thread_cnt = mp.cpu_count() * 2
    data_thread_cnt = min(data_thread_cnt, len(channels))
    grab_thread_cnt = min(grab_thread_cnt, len(channels))

    # Create input/output queues
    inq = ChannelQueue(channels)
    outq = DataQueue(len(channels))

    # Create grab threads
    grab_threads = []
    for i in range(grab_thread_cnt):
        t = GrabThread(i, inq, outq, start, stop)
        grab_threads.append(t)

    # Create data threads
    data_threads = []
    for i in range(data_thread_cnt):
        t = DataThread(i, outq, epg)
        data_threads.append(t)

    # Start threads
    for t in grab_threads:
        t.start()
    for t in data_threads:
        t.start()

    # Wait for completion (inq first)
    ins = outs = len(channels)
    while True:
        s = inq.remain()
        if s != ins:
            ins = s
            log.info('atlas - grab %3d/%3d channels remain' %
                     (s, len(channels)))
        s = outq.remain()
        if s != outs:
            outs = s
            log.info('atlas - proc %3d/%3d channels remain' %
                     (s, len(channels)))
        if not ins and not outs: break

        # Safety checks
        i = 0
        for t in grab_threads:
            if t.isAlive(): i = i + 1
        if not i and ins:
            log.error('atlas - grab threads have died prematurely')
            break
        i = 0
        for t in data_threads:
            if t.isAlive(): i = i + 1
        if not i and outs:
            log.error('atlas - proc threads have died prematurely')
            break
        time.sleep(1.0)
Example 8
    def run(self):
        conn = None
        log.debug('atlas - grab thread %3d started' % self._idx, 0)

        # Create connection
        import httplib
        retry = conf.get('atlas_conn_retry_limit', 5)
        while not conn and retry:
            try:
                conn = httplib.HTTPConnection(ATLAS_API_HOST)
                log.debug('atlas - grab thread %3d conn created' % self._idx,
                          1)
            except Exception:
                retry = retry - 1
                time.sleep(conf.get('atlas_conn_retry_period', 2.0))
        if not conn:
            log.error('atlas - grab thread %3d failed to connect' % self._idx)
            return

        # Config
        key = conf.get('atlas_apikey', None)
        p_pubs = conf.get('atlas_primary_publishers',
                          ['bbc.co.uk', 'itv.com', 'tvblob.com',
                           'channel4.com'])
        s_pubs = conf.get('atlas_secondary_publishers',
                          ['pressassociation.com'])
        anno = ['broadcasts', 'extended_description', 'series_summary',
                'brand_summary', 'people']
        # atlas_time_chunk is in seconds; if unset, fetch the whole range in
        # a single chunk (converted from the timedelta, since tsize is added
        # to epoch timestamps below)
        tsize = conf.get('atlas_time_chunk', None)
        if tsize is None:
            tsize = util.total_seconds(self._stop - self._start)

        # Time
        tm_from = time.mktime(self._start.timetuple())
        tm_to = time.mktime(self._stop.timetuple())

        # URL base
        url = 'schedule.json?'
        url = url + 'annotations=' + ','.join(anno)
        if key: url = url + '&apiKey=' + key

        # Until queue exhausted
        while True:

            # Get next entry
            c = None
            try:
                c = self._inq.get_nowait()
            except Empty:
                break
            log.debug(
                'atlas - grab thread %3d fetch   %s' % (self._idx, c.title), 0)
            sched = []

            # By time
            tf = tm_from
            while tf < tm_to:
                tt = min(tf + tsize, tm_to)
                a  = (time.strftime('%Y-%m-%d %H:%M', time.localtime(tf)),\
                      time.strftime('%Y-%m-%d %H:%M', time.localtime(tt)))
                #log.info('atlas -     period %s to %s' % a)

                # Process each publisher
                pubs = []
                for p in s_pubs:
                    pubs.append(p)
                for p in p_pubs:
                    if p in c.publisher: pubs.append(p)
                log.debug('PUBS: %s' % pubs, 0)
                for p in pubs:
                    #log.info('atlas -       publisher %s' % p)
                    u = url + '&from=%d&to=%d' % (tf, tt)
                    u = u + '&publisher=' + p
                    u = u + '&channel_id=' + c.shortid

                    # Fetch data
                    data = atlas_fetch(u, conn)

                    # Process
                    if data and 'schedule' in data:
                        for s in data['schedule']:
                            if 'items' in s:
                                sched.extend(s['items'])

                # Update
                tf = tf + tsize

            # Put into the output queue
            log.debug(
                'atlas - grab thread %3d fetched %s' % (self._idx, c.title), 1)
            self._outq.put((c, pubs, sched))
            self._inq.task_done()

        # Done
        if conn: conn.close()
        log.debug('atlas - grab thread %3d complete' % self._idx, 0)
0
def get_grabber():
    return _import('pyepg.grabber.%s', conf.get('grabber', 'atlas'))
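
get_grabber(), get_formatter() and get_package() all funnel through an _import() helper that is not shown in these examples. A plausible reconstruction, assuming it simply formats the plugin name into a module path and imports it:

# Hypothetical sketch of the _import() helper: 'pyepg.grabber.%s' % 'atlas'
# becomes the module path 'pyepg.grabber.atlas'.
def _import(fmt, name):
    import importlib
    return importlib.import_module(fmt % name)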
Example 10
def configure(opts, args, conf_path=None):

    #
    # Global
    #

    print 'System Configuration'
    print '-' * 60

    # Number of days to grab
    days = conf.get('days', 7)
    while True:
        print 'Days to grab [%d]: ' % days,
        t = sys.stdin.readline().strip()
        if not t: break
        try:
            days = int(t)
            break
        except ValueError:
            pass
    conf.set('days', days)

    # Postcode
    print '\nPostcode (for regional TV) [%s]: ' % conf.get('postcode', ''),
    pc = sys.stdin.readline().strip()
    if pc:
        conf.set('postcode', pc)

    #
    # Grabber
    #

    grabbers = get_grabbers()
    if not grabbers:
        log.error('no grabbers available')
        sys.exit(1)
    options = [g[0] for g in grabbers]
    idx = get_select('\nSelect grabber:', options)
    grabber = grabbers[idx][1]
    conf.set('grabber', grabbers[idx][0])
    print ''
    print 'Grabber: %s' % grabbers[idx][0]

    #
    # Formatter
    #

    formatters = get_formatters()
    if not formatters:
        log.error('no formatters available')
        sys.exit(1)
    options = [f[0] for f in formatters]
    idx = get_select('\nSelect formatter:', options)
    formatter = formatters[idx][1]
    conf.set('formatter', formatters[idx][0])
    print ''
    print 'Formatter: %s' % formatters[idx][0]

    #
    # Grabber/Formatter config
    #

    if hasattr(grabber, 'configure'):
        grabber.configure()
    if hasattr(formatter, 'configure'):
        formatter.configure()

    #
    # Channels
    #
    channels = []

    print ''
    print 'Channel Configuration'
    print '-' * 60

    # Get packages
    packages = grabber.packages()
    options = ['Skip']
    options.extend([p.title() for p in packages])
    idx = get_select('Select Platform:', options)

    # Platform
    if idx:
        idx = idx - 1
        package = packages[idx]
        conf.set('package', package.id())

        # Exclusions
        # Exclusions
        a = None
        while a not in ['y', 'n', 'yes', 'no']:
            print '\nWould you like to add exclusions (y/n)? ',
            a = sys.stdin.readline().strip().lower()

        # Gather the titles of channels to exclude
        exclude = []
        if a in ['y', 'yes']:
            for c in package.channels():
                a = None
                while a not in ['y', 'n', 'yes', 'no']:
                    print '\n  %s (y/n)? ' % c.title,
                    a = sys.stdin.readline().strip().lower()
                if a in ['y', 'yes']: exclude.append(c.title)

        # Store every channel in the package that was not excluded
        channels = []
        for c in package.channels():
            if c.title not in exclude:
                channels.append(c.uri)
        conf.set('channel[]', channels)

    #
    # Output summary and get confirmation
    #

    # TODO

    #
    # Save
    #
    conf.save()
Example 11
def get_package():
    ret = None
    package = conf.get('package', None)
    if package:
        ret = _import('pyepg.package.%s', package)
    return ret
Example 12
def get_formatter():
    return _import('pyepg.formatter.%s', conf.get('formatter', 'epg'))
Example 13 (fragment)
                continue

            # Regional channel
            if p[7] == '1':
                regional.append(c)

            # Store
            elif c.extra['stream'][0][0]:
                chns.append(c)

        except Exception, e:
            log.error('failed to process [%s] [e=%s]' % (l, str(e)))

    # Process regional channels
    regions = process_region_data(reg_data)
    pc = conf.get('postcode', '')
    for c in regional:
        t = find_regional(c, pc, chns, regions)
        if t:
            c.uri = t.uri
            c.extra['stream'] = t.extra['stream']
            chns.insert(0, c)

    # Filter duplicates
    ret = []
    for c in chns:
        if c not in ret:
            ret.append(c)

    return ret
Example 14
def get_data(name, ttl=None):
    url = conf.get('data_url',
                   'http://cloud.github.com/downloads/adamsutton/PyEPG')
    return get_url(url + '/' + name, True, ttl)
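
A usage sketch with a hypothetical file name; get_data() simply namespaces the file under the configured data_url and delegates to get_url (not shown), which presumably sits on top of the cache layer from Example 6:

# Hypothetical usage: download a supporting data file through the cache,
# re-fetching only once the one-hour TTL lapses.
listing = get_data('channels.dat', ttl=3600)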