Example #1
File: kuveja.py Project: joneskoo/kuveja
class MetadataReader(object):
    """Get metadata from images"""
    def __init__(self, globstring, cache=None):
        super(MetadataReader, self).__init__()
        self.globstring = globstring
        if cache:
            from cache import Cache
            self.cache = Cache(cache)
        else:
            self.cache = None

    def read(self):
        files = glob.glob(self.globstring)
        if self.cache:
            self.cache.update(files, metadata_read)
            metadatas = self.cache.get_metadatas()
        else:
            metadatas = []
            for fname in files:
                timestamp, meta, mtime = metadata_read(fname)
                d = dict(file=os.path.basename(fname),
                         meta=meta,
                         timestamp=unicode(timestamp))
                metadatas.append(d)
        return metadatas
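A minimal usage sketch for the reader above; the glob pattern and cache path are illustrative assumptions, not values from the original project:

reader = MetadataReader("photos/*.jpg", cache="metadata.db")  # hypothetical arguments
for entry in reader.read():
    print(entry["file"], entry["timestamp"])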
Example #2
    def __init__(self, inidir, inifile, amount):
        """Commence download operation.
        
        Arguments
        inidir -- working directory
        inifile -- config file
        amount -- number of items to download

        """
        print('Download data for display 3...')
        self._data  = []
    
        #Create dummy GUI
        root = tki.Tk()
        settings = Settings3(inidir, inifile)
        dsdblog = InifileDataSourceDescription(sBlog, inidir, inifile)   
    
        itemarg = (dsdblog.cachedir, (settings.previewx, settings.previewy), 
                   (settings.smallpreviewx, settings.smallpreviewy), settings.library, 
                   settings.booksearchprefix, settings.booksearchsuffix)
        cache = Cache(dsdblog.cachedir, BlogspotItemWithIsbn, itemarg)
        harvester = BlogspotHarvester(dsdblog, self._addandcheck, BlogspotItemWithIsbn)
        harvester.itemarg = itemarg
        harvester.newestId = ''
        
        harvester.update(amount)
        cache.updateContents(self._data, harvester.newestId)
        print('Done!')
Example #3
 def __init__(self, q, cached=True):
     
     url = "http://api.urbandictionary.com/soap"
     key = "91cf66fb7f14bbf7fb59c7cf5e22155f"
     
     # Live connect for uncached queries 
     # or queries we do not have in cache.
     cache = Cache("urbandictionary", ".pickle")
     if not cached or not cache.exists(q):
         server = soap.SOAPProxy(url)
         definitions = server.lookup(key, q)
         data = []
         for item in definitions:
             ubd = UrbanDictionaryDefinition(
                 item.word, item.url, item.definition, item.example, item.author
             )
             self.append(ubd)
             data.append( [item.word, item.word, item.definition, item.example, item.author] )
         # Cache a pickled version of the response.
         if cached:
             data = pickle.dumps(data)
             cache.write(q, data)
     
     # For cached queries,
     # unpack the pickled version in the cache.
     else:
         definitions = cache.read(q)
         definitions = pickle.loads(definitions)
         for item in definitions:
             ubd = UrbanDictionaryDefinition(
                 item[0], item[1], item[2], item[3], item[4]
             )
             self.append(ubd)
Example #4
def main(send=False):
  key = get_value('key')
  html = None
  # get movie info for all categories
  for cat in CATEGORIES:
    td = Tmdb(key, cat)
    movies = td.get_movies(NUM_RES)
    ca = Cache(os.path.basename(cat))
    newMovies = ca.shelve_results(movies)  # shelve once; a duplicate call would return no new movies
    movieObjects = ca.shelve_get_items(newMovies) # only new ones
    op = Output(movieObjects)
    if html is None:
      html = [op.generate_header()]
    catPrettified = cat.title().replace("_", " ")
    html.append(op.generate_category_title(catPrettified))
    html.append(op.generate_movie_html_div())
  # save html
  f = open(OUTFILE, "w")
  f.write("\n".join(html))
  f.close() 
  # email
  if send:
    subject = "Sharemovi.es / %s movies / week %s" % (", ".join(CATEGORIES), str(THIS_WEEK))
    sender = get_value('sender')
    recipients = load_emails('recipients')
    ma = Mail(sender)
    ma.mail_html(recipients, subject, "\n".join(html))
Example #5
def get(url, dest):
    """Get file from <url> and save it to <dest>.

    Tries to retrieve <url> from cache, otherwise stores it in
    cache following retrieval.
    """
    url = urllib.unquote(url)
    if url.endswith("/"):
        raise Error("illegal url - can't get a directory")

    if os.path.isdir(dest):
        dest = os.path.join(dest, os.path.basename(url))
    else:
        if dest.endswith("/"):
            raise Error("no such directory: " + dest)

    if os.path.lexists(dest):
        raise Error("won't overwrite already existing file: " + dest)

    cache = Cache()
    cached_path = cache.retrieve(url, dest)
    if cached_path:
        print "* get: retrieved file from cache"
    else:
        print "* get: retrieving file from network..."
        system("curl -L -f %s -o %s" % (mkarg(url), mkarg(dest)))
        cached_path = cache.store(url, dest)

    return cached_path
Example #6
class BaseHandler(tornado.web.RequestHandler):
    def __init__(self, *arg, **arg_key_word):
        super(BaseHandler, self).__init__(*arg, **arg_key_word)
        self.cache = Cache()
        self.dbm = DataBaseManager()

    @tornado.gen.coroutine
    def response_as_json(self, res):
        self.set_header("Content-Type", 'application/json; charset="utf-8"')
        self.write(json.dumps(res))
        self.finish()

    def md5_code(self, string):
        # Hash each input independently; reusing a single md5 instance
        # across calls would accumulate updates and give wrong digests.
        return hashlib.md5(string).hexdigest()

    def set_cache(self, key, value, time_out):
        self.cache.set_cache(
            key=key,
            value=value,
            time_out=time_out
        )

    def get_cache(self, key):
        return self.cache.get_cache(key)

    def clear_cache(self, key):
        self.cache.clear_cache(key)
Example #7
File: track.py Project: pscn/yampdqmgr
  def __init__(self, db=None, artist=None, album=None, min_cache=100,
      max_cache=1000, commit_after=100):
    """Create a new track factory.
    >>> a = Track(min_cache=1, max_cache=1)
    """
    self.__cache_key = Cache(min_cache, max_cache)
    self.__cache_id = Cache(min_cache, max_cache)
    if db is None:
      db = sqlite3.connect(':memory:')
    self.__db = db
    self.__db.row_factory = dict_factory
    self.__db.isolation_level = 'Immediate'
    self.__cursor = self.__db.cursor()

    if artist is None:
      artist = Artist(db=db, min_cache=min_cache, max_cache=max_cache,
          commit_after=commit_after)
    self.__artist = artist
    if album is None:
      album = Album(db=db, artist=artist, min_cache=min_cache,
          max_cache=max_cache, commit_after=commit_after)
    self.__album = album

    self.__tbl_name = 'track'
    self.__pending_changes = 0
    self.__commit_after = commit_after
    self.__init_db__()
Example #8
File: core.py Project: bigwhite/buildc
    def cache_remove(cmode):
        if cmode == None:
            Cache.cache_remove()
        else:
            Cache.cache_remove_by_cmode(cmode)

        return True
Example #9
def main():
  (opts, args) = cli()
  key = get_value('key')
  td = Tmdb(key, opts.category)
  if opts.listing:
    li = Listing(opts.category)
    movies = li.get_movies()
    prefix = "list_"
    subject = "Week %s: %s" % (THIS_WEEK, li.title)
  else:
    movies = td.get_movies(opts.numres) 
    prefix = ""
    subject = "%s movies - week %s" % (opts.category.title().replace("_", " "), THIS_WEEK)
  ca = Cache(prefix + os.path.basename(opts.category))
  newMovies = ca.shelve_results(movies)
  if opts.listing:
    movieObjects = ca.shelve_get_items(movies) # allow dups
  else:
    movieObjects = ca.shelve_get_items(newMovies) # only new ones
  op = Output(movieObjects)
  html = [op.generate_header()]
  html.append(op.generate_movie_html_div())
  if opts.printres:
    print "\n".join(html)
  if opts.mailres:
    sender = get_value('sender')
    recipients = load_emails('recipients')
    ma = Mail(sender)
    ma.mail_html(recipients, subject, "\n".join(html))
Example #10
 def testCacheWithPrefix(self):
     s = Storage({'application': 'admin',
                  'folder': 'applications/admin'})
     cache = Cache(s)
     prefix = cache.with_prefix(cache.ram,'prefix')
     self.assertEqual(prefix('a', lambda: 1, 0), 1)
     self.assertEqual(prefix('a', lambda: 2, 100), 1)
     self.assertEqual(cache.ram('prefixa', lambda: 2, 100), 1)
Example #11
class UTKhashmir(khashmir.KhashmirBase):
    _Node = UTNode

    def setup(self, host, port, data_dir, rlcount, checkpoint=True):
        khashmir.KhashmirBase.setup(self, host, port,data_dir, rlcount, checkpoint)
        self.cur_token = self.last_token = sha('')
        self.tcache = Cache()
        self.gen_token(loop=True)
        self.expire_cached_tokens(loop=True)
        
    def expire_cached_tokens(self, loop=False):
        self.tcache.expire(time() - TOKEN_UPDATE_INTERVAL)
        if loop:
            self.rawserver.external_add_task(self.expire_cached_tokens, TOKEN_UPDATE_INTERVAL, (True,))
                                
    def gen_token(self, loop=False):
        self.last_token = self.cur_token
        self.cur_token = sha(newID())
        if loop:
            self.rawserver.external_add_task(self.gen_token, TOKEN_UPDATE_INTERVAL, (True,))

    def get_token(self, host, port):
        x = self.cur_token.copy()
        x.update("%s%s" % (host, port))
        h = x.digest()
        return h

        
    def val_token(self, token, host, port):
        x = self.cur_token.copy()
        x.update("%s%s" % (host, port))
        a = x.digest()
        if token == a:
            return True

        x = self.last_token.copy()
        x.update("%s%s" % (host, port))
        b = x.digest()
        if token == b:
            return True

        return False

    def addContact(self, host, port, callback=None):
        # use dns on host, then call khashmir.addContact
        Thread(target=self._get_host, args=[host, port, callback]).start()

    def _get_host(self, host, port, callback):

        # this exception catch can go away once we actually fix the bug
        try:
            ip = gethostbyname(host)
        except TypeError, e:
            raise TypeError(str(e) + (": host(%s) port(%s)" % (repr(host), repr(port))))
        
        self.rawserver.external_add_task(self._got_host, 0, (ip, port, callback))
Example #12
File: s3iterable.py Project: cioc/DAL
class S3Iterable(object):
  def __init__(self):
    '''
    Subclasses must handle setting up config including:
    * bucketname
    * parser
    '''
    self.bucketname = None
    self.parser = None
    self.cache = Cache()
    self.iterator = iter
    self.decompress = None

  def subsets(self):
    l = self.cache.s3listcontents(self.bucketname)
    o = []
    for i in l:
      o.append(i.key)
    return o

  def iter(self, subset):
    h = self.cache.directhandle(self.bucketname, subset, decompress=self.decompress)
    for l in self.iterator(h):
      if self.parser is None:
        yield l
      else:
        yield self.parser(l)

  def filter(self, subset, f):
    h = self.cache.directhandle(self.bucketname, subset, decompress=self.decompress)
    for l in self.iterator(h):
      if self.parser is None:
        j = l
      else:
        j = self.parser(l)
      if f(j):
        yield j

  def byid(self, index):
    (subset, i) = index
    h = self.cache.directhandle(self.bucketname, subset, decompress=self.decompress)
    c = 0
    for l in self.iterator(h):
      if c == i:
        if self.parser is None:
          return l
        else:
          return self.parser(l)
      else:
        c += 1
    return None 

  def display(self, items):
    for i in items:
      print i
Example #13
def export_network(data, cache=None, **kwargs):
    if cache is None:
        from cache import Cache
        cache = Cache()
    #log.debug('CreateNetwork {0}'.format(data))

    # We'll deal with two additional attributes, '_network' and '_uid'.
    # These two attributes let us find the network from the value and vice versa.
    # Note that since '_uid' refers to the current Python context,
    # its value could be wrong when calling import_network.
    # However, collisions are extremely improbable, so checking the type of the
    # Python variable is sufficient.
    # Please feel free to provide a better design if possible.

    # todo: after refactoring, the network cache will be merged with the import cache
    data_id = id(data)
    result = cache.get_network_by_id(data_id)
    if result is not None:
        return result

    # Create network
    # Optimisation: Use existing network if already present in scene
    #if hasattr(data, '_network') and is_valid_PyNode(data._network):
    #    network = data._network
    #else:
    # Automatically name the network whenever possible
    try:
        network_name = data.__getNetworkName__()
    except (AttributeError, TypeError):
        network_name = data.__class__.__name__

    network = pymel.createNode('network', name=network_name)

    # Monkey patch the network in a _network attribute if supported.
    if isinstance(data, object) and not isinstance(data, dict):
        data._network = network

    # Ensure the network has the current Python id stored
    if not network.hasAttr('_uid'):
        pymel.addAttr(network, longName='_uid', niceName='_uid', at='long')  # todo: validate attributeType
    # network._uid.set(id(_data))

    # Cache as soon as possible since we'll use recursivity soon.
    cache.set_network_by_id(data_id, network)

    # Convert _pData to basic data dictionary (recursive for now)
    data_dict = core.export_dict(data, recursive=False, cache=cache, **kwargs)
    assert (isinstance(data_dict, dict))

    fnNet = network.__apimfn__()
    for key, val in data_dict.items():
        if _can_export_attr_by_name(key):
            _add_attr(fnNet, key, val, cache=cache)

    return network
Example #14
    def test_loadCache(self):
        self.assertTrue( self.g.login(self.username, self.password) )

        c = self.g.loadCache("GC4808G")
        self.assertTrue( isinstance(c, Cache) )
        self.assertEquals( "GC4808G", Cache.__str__(c) )

        # Cache with non-ascii chars
        c = self.g.loadCache("GC4FRG5")
        self.assertTrue( isinstance(c, Cache) )
        self.assertEquals( "GC4FRG5", Cache.__str__(c) )
Example #15
def delete(url):
    """Delete <url> from cache"""
    url = urllib.unquote(url)
    if url.endswith("/"):
        raise Error("illegal url - can't delete a directory")

    print "* del: removing file from cache..."
    cache = Cache()
    result = cache.delete(url)

    return result
Example #16
def import_dict(data, cache=None, **kwargs):
    """
    Rebuild any instance of a python object instance that have been serialized using export_dict.

    Args:
        _data: A dict instance containing only basic data types.
        **kwargs:

    Returns:

    """

    if cache is None:
        from cache import Cache
        cache = Cache()

    #assert (data is not None)
    if isinstance(data, dict) and '_class' in data:
        # Handle Serializable object
        cls_path = data['_class']
        cls_name = cls_path.split('.')[-1]
        cls_module = data.get('_class_module', None)
        #cls_namespace = data.get('_class_namespace')

        # HACK: Previously we were storing the complete class namespace.
        # However, this was not very flexible when we played with the class hierarchy.
        # If we find a '_class_module' attribute, it means we are doing things
        # the new way. Otherwise we'll let it slip for now.

        if cls_module:
            cls_def = cache.get_class_by_name(cls_name, module_name=cls_module)
        else:
            cls_def = cache.get_class_by_namespace(cls_name)

        if cls_def is None:
            logging.error("Can't create class instance for {0}, did you import to module?".format(cls_path))
            return None

        instance = create_class_instance(cls_def)

        for key, val in data.items():
            if key != '_class':
                instance.__dict__[key] = import_dict(val, cache=cache)
        return instance

    # Handle array
    elif is_data_list(data):
        return [import_dict(v, cache=cache) for v in data]

    # Handle other types of data
    else:
        return data
Example #17
 def __init__(self, walrusinterface, config):
     self.walrus = walrusinterface
     pollfreq = config.getint('server', 'pollfreq')
     try:
         freq = config.getint('server', 'pollfreq.buckets')
     except ConfigParser.NoOptionError:
         freq = pollfreq
     self.buckets = Cache(freq)
     try:
         freq = config.getint('server', 'pollfreq.objects')
     except ConfigParser.NoOptionError:
         freq = pollfreq
     self.objects = Cache(freq)
Example #18
 def __init__(self, balanceinterface, config):
     self.bal = balanceinterface
     pollfreq = config.getint('server', 'pollfreq')
     try:
         freq = config.getint('server', 'pollfreq.balancers')
     except ConfigParser.NoOptionError:
         freq = pollfreq
     self.balancers = Cache(freq)
     try:
         freq = config.getint('server', 'pollfreq.elb_instances')
     except ConfigParser.NoOptionError:
         freq = pollfreq
     self.instances = Cache(freq)
Example #19
 def test_parse_magnet(self):
     m="magnet:?xt=urn:btih:47DF370B841D1477C160A96E20A887C5C458010C&dn=The+Zero+Theorem+%282013%29+%5B720p%5D&tr=http%3A%2F%2Ftracker.yify-torrents.com%2Fannounce&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A80&tr=udp%3A%2F%2Ftracker.publicbt.org%3A80&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Fopen.demonii.com%3A1337&tr=udp%3A%2F%2Fp4p.arenabg.ch%3A1337&tr=udp%3A%2F%2Fp4p.arenabg.com%3A1337"
     ih=Cache.hash_from_magnet(m)
     self.assertEqual(len(ih), 40)
     self.assertEqual(ih, "47DF370B841D1477C160A96E20A887C5C458010C" )
     m="magnet:?xt=urn:btih:440008e244e8398522d2271318afc2f938274d56&dn=Alela+Diane+-++The+Pirate%27s+Gospel&tr=udp%3A%2F%2Fopen.demonii.com%3A1337&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Fexodus.desync.com%3A6969"
     ih=Cache.hash_from_magnet(m)
     self.assertEqual(len(ih), 40)
     self.assertEqual(ih, "440008e244e8398522d2271318afc2f938274d56".upper() )
     m='magnet:?xt=urn:btih:VQV6ME7OQAENNYPFHITASWARN6PXEO6I&dn=Game.of.Thrones.S01E05.720p.HDTV.x264-CTU&tr=udp://tracker.openbittorrent.com:80&tr=udp://open.demonii.com:80&tr=udp://tracker.coppersurfer.tk:80&tr=udp://tracker.leechers-paradise.org:6969&tr=udp://exodus.desync.com:6969'
     ih=Cache.hash_from_magnet(m)
     self.assertEqual(len(ih), 40)
     self.assertTrue(re.match('^[0-9A-F]+$',ih))
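The assertions above pin down what Cache.hash_from_magnet must do: extract the btih parameter, base32-decode 32-character hashes, and return 40 uppercase hex characters. A minimal sketch consistent with those assertions, not the project's actual implementation:

import base64, binascii, re

def hash_from_magnet_sketch(magnet):
    # Pull the info hash out of the xt=urn:btih:... parameter.
    match = re.search(r'xt=urn:btih:([0-9A-Za-z]+)', magnet)
    if not match:
        raise ValueError('not a magnet link with a btih hash')
    ih = match.group(1)
    if len(ih) == 32:
        # Base32-encoded info hash: decode to 20 bytes, then hex-encode.
        ih = binascii.hexlify(base64.b32decode(ih)).decode('ascii')
    return ih.upper()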
Example #20
    def __init__(self, clcinterface, config):
        self.clc = clcinterface
        pollfreq = config.getint('server', 'pollfreq')
        try:
            freq = config.getint('server', 'pollfreq.zones')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.zones = Cache(freq)

        try:
            freq = config.getint('server', 'pollfreq.images')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.images = Cache(freq)

        try:
            freq = config.getint('server', 'pollfreq.instances')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.instances = Cache(freq)

        try:
            freq = config.getint('server', 'pollfreq.keypairs')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.keypairs = Cache(freq)

        try:
            freq = config.getint('server', 'pollfreq.groups')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.groups = Cache(freq)

        try:
            freq = config.getint('server', 'pollfreq.addresses')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.addresses = Cache(freq)

        try:
            freq = config.getint('server', 'pollfreq.volumes')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.volumes = Cache(freq)

        try:
            freq = config.getint('server', 'pollfreq.snapshots')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.snapshots = Cache(freq)

        try:
            freq = config.getint('server', 'pollfreq.tags')
        except ConfigParser.NoOptionError:
            freq = pollfreq
        self.tags = Cache(freq)
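The nine try/except blocks above repeat one lookup-with-fallback pattern. A hedged refactoring sketch; the helper name is an assumption, and it relies only on the config.getint call already used above:

import ConfigParser  # Python 2, as in the snippets above

def poll_freq(config, name, default):
    # Per-resource poll frequency, falling back to the global 'pollfreq'.
    try:
        return config.getint('server', 'pollfreq.' + name)
    except ConfigParser.NoOptionError:
        return default

# e.g. self.zones = Cache(poll_freq(config, 'zones', pollfreq))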
Example #21
def add(src, url):
    """Add file from <src> to the cache using <url> for future retrieval"""
    url = urllib.unquote(url)
    if url.endswith("/"):
        raise Error("illegal url - can't add a directory")

    if not os.path.exists(src):
        raise Error("no such file: " + src)

    print "* add: storing file in cache..."
    cache = Cache()
    cached_path = cache.store(url, src)

    return cached_path
Example #22
File: test.py Project: adcaes/Wrpp
	def testCache(self, n):
		m = Cache(n)

		# Insert n elements, (0, n]
		for i in range(0, n):
			m.set(i, i+1)
		
		# Retrieve n elements, (0, n]
		for i in range(0, n):
			r = random.randrange(0, n)
			assert r+1 == m.get(r)
		
		# Insert n elements, (n, n+n]
		for i in range(n, n+n):
			m.set(i, i+1)
		
		# Retrieve n elements, (n, n+n]
		for i in range(0, n):
			r = random.randrange(n, n+n)
			assert r+1 == m.get(r)

		# Retrieve n elements, (0, n]
		# Elements should have been deleted from cache
		for i in range(0, n):
			assert m.get(i) == None
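The test implies a fixed-capacity cache of n entries that evicts the oldest entries as new ones are inserted, with get returning None for missing keys. A minimal sketch of such a cache, assuming insertion-order eviction; this is not the project's actual Cache:

from collections import OrderedDict

class FixedSizeCache(object):
    def __init__(self, capacity):
        self.capacity = capacity
        self.items = OrderedDict()

    def set(self, key, value):
        if key in self.items:
            del self.items[key]
        elif len(self.items) >= self.capacity:
            self.items.popitem(last=False)  # evict the oldest entry
        self.items[key] = value

    def get(self, key):
        return self.items.get(key)  # None for missing or evicted keys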
Example #23
File: core.py Project: bigwhite/buildc
    def cache_update(cmode, ignore_error):
        if cmode == None:
            result = Cache.cache_update(Glo.BIT32, ignore_error)
            if result == False:
                return False
            result = Cache.cache_update(Glo.BIT64, ignore_error)
            if result == False:
                return False
        else:
            result = Cache.cache_update(cmode, ignore_error)
            if result == False:
                return False

        return True
Example #24
 def geocode(self, localities):
     for loc in localities:
         loc.feature_geocodes = {}
         loc.parts['feature_geocodes'] = {}
         for feature in loc.parts['features']:              
             logging.info('Geocoding feature "%s"' % feature)  
             key = 'geocode-%s' % feature
             geocode = Cache.get(key)
             if not geocode:
                 geocode = self.geocoder.geocode(feature)
                 Cache.put(key, geocode)
             loc.parts['feature_geocodes'][feature] = geocode 
             logging.info('Geocoded feature "%s"' % feature)
     return localities
Example #25
 def predict(self, localities):
     """Predict locality type for each locality in a list."""
     for loc in localities:
         logging.info('Predicting locality type for "%s"' % loc.name)
         key = 'loctype-%s' % loc.name
         prediction = Cache.get(key)
         if not prediction:
             loctype, scores = self.predictor.get_type(loc.name)
             prediction = dict(locname=loc.name, loctype=loctype, scores=scores)
             Cache.put(key, prediction)
         loc.type = prediction['loctype']
         loc.type_scores = prediction['scores']
         logging.info('Predicted "%s" for "%s"' % (loc.type, loc.name))
     return localities
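Examples #24 and #25 share the same get-or-compute-and-store pattern against the global Cache. A small helper capturing it; the function name is an assumption, and only the Cache.get/Cache.put calls shown above are used:

def cached_call(key, compute):
    # Return the cached value for key, computing and storing it on a miss.
    value = Cache.get(key)
    if not value:
        value = compute()
        Cache.put(key, value)
    return value

# e.g. geocode = cached_call('geocode-%s' % feature,
#                            lambda: self.geocoder.geocode(feature))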
Example #26
    def start_url(self, uri):
        if self._th:
            raise Exception('Torrent is already started')
        
        def info_from_file(uri):
            if os.access(uri,os.R_OK):
                info = lt.torrent_info(uri)
                tp= {'ti':info} 
                resume_data= self._cache.get_resume(info_hash=str(info.info_hash()))
                if resume_data:
                    tp['resume_data']=resume_data
                return tp
            raise ValueError('Invalid torrent path %s' % uri)
        
        if uri.startswith('http://') or uri.startswith('https://'):
            self._url=uri
            stored=self._cache.get_torrent(url=uri)
            if stored:
                tp=info_from_file(stored)
            else:
                tp={'url':uri}
                resume_data=self._cache.get_resume(url=uri)
                if resume_data:
                    tp['resume_data']=resume_data
        elif uri.startswith('magnet:'):
            self._url=uri
            stored=self._cache.get_torrent(info_hash=Cache.hash_from_magnet(uri))
            if stored:
                tp=info_from_file(stored)
            else:
                tp={'url':uri}
                resume_data=self._cache.get_resume(info_hash=Cache.hash_from_magnet(uri))
                if resume_data:
                    tp['resume_data']=resume_data
        elif os.path.isfile(uri):
            tp=info_from_file(uri)
        else:
            raise ValueError("Invalid torrent %s" %uri)
        
        tp.update(self._torrent_params)
        self._th = self._ses.add_torrent(tp)
        for tr in INITIAL_TRACKERS:
            self._th.add_tracker({'url':tr})
        self._th.set_sequential_download(True)
#         if tp.has_key('ti'):
#             self._meta_ready(self._th.get_torrent_info())
        
        self._monitor.start()
        self._dispatcher.do_start(self._th, self._ses)
Example #27
File: main.py Project: xyzwvut/CacheFS
def main(argv):
    args = parse_cmdline(argv)

    config.load_config(args.config)

    apply_cmdline_overwrites(args)

    # TODO: Path not expanded; used before the cache sanity-checked it
    back = backend.create(config.config['back'], config.config['cache']['dir'])
    cache = Cache(config.config['cache'], back)

    if config.config.getboolean('main', 'console'):
        CacheFSConsole(cache).cmdloop()

    cache.shutdown()
Example #28
class Cmd(object):
    def __init__(self):
        self.cache = Cache()

    def get_address(self, statement):
        address = raw_input("What address would you like {}?\n".format(statement))
        return address

    def get_action(self):
        action = raw_input("(R)ead, (W)rite, or (D)isplay Cache?\n")
        if action == '':
            action = 'q'
        return action.lower()[0]

    def read_address(self, address):
        result = self.cache.read_address(address)
        print "At that byte there is the value {0:X} ({1})".format(result[0],
                                                                   result[1])
        return

    def write_data(self, address):
        datum = raw_input("What datum would you like to write at that address?\n")
        datum = int(datum, 16)
        result = self.cache.write_address(address, datum)
        print ("Value {:X} has been written to address {:X}. ({})".format(
            datum,
            address,
            result))
        return

    def display_cache(self):
        print str(self.cache)
        return

    def run(self):
        while True:
            action = self.get_action()
            if action == "r":
                address = int(self.get_address("read"), 16)
                self.read_address(address)
            elif action == "w":
                address = int(self.get_address("to write to"), 16)
                self.write_data(address)
            elif action == "d":
                self.display_cache()
            else:
                break
        return
Example #29
  def fill_dungeons(self, dungeons):
    assert(self._info)
    dungeon_id = Cache.get_daily_dungeon_id()
    if dungeon_id > 0: dungeons.daily = dungeon_id

    dungeon_id = Cache.get_survival_dungeon_id()
    if dungeon_id > 0: dungeons.survival = dungeon_id

    dungeon_id = Cache.get_event_dungeon_id()
    if dungeon_id > 0: dungeons.event = dungeon_id

    dungeons.epic_progress = self.epic_progress()

    if self._last_epic_stage:
      dungeons.last_epic_stage_id = self._last_epic_stage.stage_id()
      dungeons.last_epic_stage_difficulty = self._last_epic_stage.last_level()
Example #30
    def __save(self):
        """
        Saves the current object state in a json file

        @rtype: None
        @return: Nothing
        """
        categ_mtime = Cache.getDirModifiedTimeService(MENU_BASE_DIR)
        items_mtime = Cache.getDirModifiedTimeService(DESKTOP_DIR)

        data = { 
                MENU_BASE_DIR: { categ_mtime: self.__categories },
                DESKTOP_DIR: { items_mtime : self.__desktop_files }
               }

        self.__cache.save(data, self.__convert)
Example #31
def get_html(url):
    # type: (str) -> Optional[BeautifulSoup]
    """Gets cached or live HTML from the url"""
    headers = {
        "Accept": "text/html",
        "Accept-encoding": "gzip"
    }
    with Cache() as c:
        cached = c.get(url)
        if cached:
            if cached["fresh"]:
                return BeautifulSoup(cached["blob"], "html.parser")
            headers.update(conditional_headers(cached))
        r = requests.get(url, headers=headers, timeout=SEARCH_TIMEOUT)
        if 200 == r.status_code:
            soup = BeautifulSoup(r.content, "html.parser")
            c.set(url, r.content, r.headers)
            return soup
        if 304 == r.status_code:
            c.touch(url, r.headers)
            return BeautifulSoup(cached["blob"], "html.parser")
        logger.debug("get_html error: {} {}".format(r.status_code, url))
        return None
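get_html relies on a conditional_headers helper to revalidate stale cache entries. A hedged sketch of what such a helper might look like, assuming the cached record keeps the original response headers under a "headers" key; the field names are assumptions, not the project's API:

def conditional_headers_sketch(cached):
    # Build HTTP revalidation headers from a cached response's metadata.
    headers = {}
    etag = cached["headers"].get("etag")
    last_modified = cached["headers"].get("last-modified")
    if etag:
        headers["If-None-Match"] = etag
    if last_modified:
        headers["If-Modified-Since"] = last_modified
    return headers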
Example #32
 def find_best_scheme(self, json_path):
     self.parse_json(json_path)
     print("Optimizing " + self.workload + "...")
     if self.cache_read_enabled or self.cache_write_enabled:
         self.cache_instance = Cache(
             self.damoos_path + "/scheme_adapters/polyfit_adapter/",
             self.cache_file, self.cache_option)
     self.run_orig()
     self.generate_search_param()
     res1 = self.collect_data(self.search_param)
     self.collect_score(res1, "append")
     res = self.exploit_best_region()
     res2 = self.collect_data(res)
     self.collect_score(res2, "append")
     res3 = res1 + res2
     self.workload_info.append(res3)
     best_point = self.fit(res)
     print("Best point:", best_point)
     subprocess.call([
         "sudo", "DAMOOS=" + self.damoos_path, "bash",
         self.damoos_path + "/frontend/cleanup.sh"
     ])
     return best_point
Example #33
def stony_brook_scraper(name):
    print('Retrieving ' + name + "'s email...")
    cache = Cache()
    try:
        email = cache[name]
        return email
    except KeyError:
        pass
    query_link = DIRECTORIES.get('stony brook')
    name = name.replace(" ", "%20")
    query_link = query_link.format(name)
    driver = get_driver()
    driver.get(query_link)
    driver.implicitly_wait(5)
    time.sleep(3)
    tree = fromstring(driver.page_source)
    email = tree.xpath('//tr[@class="data"]//a[@class="email"]/text()')
    print(email)
    driver.quit()
    email = email[0] if email else None
    if email is not None:
        cache[name] = email
    return email
Example #34
def build_environment(request, response, session):
    """
    Build the environment dictionary into which web2py files are executed.
    """

    environment = {}
    for key in html.__all__:
        environment[key] = getattr(html, key)

    # Overwrite the URL function with a proxy
    # url function which contains this request.
    environment['URL'] = html._gURL(request)

    for key in validators.__all__:
        environment[key] = getattr(validators, key)
    if not request.env:
        request.env = Storage()
    environment['T'] = translator(request)
    environment['HTTP'] = HTTP
    environment['redirect'] = redirect
    environment['request'] = request
    environment['response'] = response
    environment['session'] = session
    environment['cache'] = Cache(request)
    environment['DAL'] = DAL
    environment['Field'] = Field
    environment['SQLDB'] = SQLDB  # for backward compatibility
    environment['SQLField'] = SQLField  # for backward compatibility
    environment['SQLFORM'] = SQLFORM
    environment['SQLTABLE'] = SQLTABLE
    environment['LOAD'] = LoadFactory(environment)
    environment['local_import'] = \
        lambda name, reload=False, app=request.application:\
        local_import_aux(name,reload,app)
    BaseAdapter.set_folder(os.path.join(request.folder, 'databases'))
    response._view_environment = copy.copy(environment)
    return environment
Example #35
def play(targets, seen_key, args, kwargs):
    redis = Cache()
    redis.set(AUTO_LOCK, now())
    print('STARTING PLAYBACK...')
    play_msg = json.dumps({
        'type': 'play',
        'targets': targets,
        'seen_key': seen_key,
        'args': args,
        'kwargs': kwargs
    })
    fade_msg = json.dumps({
        'type': 'fade',
        'targets': targets,
        'seen_key': seen_key
    })
    redis.publish(PLAY_QUEUE, play_msg)
    redis.publish(FADE_QUEUE, fade_msg)
Example #36
 def __init__(self, scaleinterface, config):
     self.scaling = scaleinterface
     pollfreq = config.getint('server', 'pollfreq')
     try:
         freq = config.getint('server', 'pollfreq.scalinggroups')
     except ConfigParser.NoOptionError:
         freq = pollfreq
     self.groups = Cache(freq)
     try:
         freq = config.getint('server', 'pollfreq.scalinginstances')
     except ConfigParser.NoOptionError:
         freq = pollfreq
     self.instances = Cache(freq)
     try:
         freq = config.getint('server', 'pollfreq.launchconfigs')
     except ConfigParser.NoOptionError:
         freq = pollfreq
     self.launchconfigs = Cache(freq)
Example #37
    def artist_b(self, artist, callback):
        a = Cache.Instance().getArtist(artist.link.uri, True)
        if a != None:
            callback(a)
        else:

            def artistBrowsed(artist):
                por = None
                try:
                    por = artist.artist.portrait_link().uri
                except:
                    pass
                a = {
                    "name": artist.artist.name,
                    "uri": artist.artist.link.uri,
                    "portrait": por,
                    "bio": artist.biography,
                    "toptracks": self.tracks(artist.tophit_tracks,
                                             artist.artist.link.uri),
                    "similar": self.artists(artist.similar_artists)
                }

                def albumsBrowsed(albums):
                    a["albums"] = albums
                    Cache.Instance().addArtist(a, True)
                    callback(a)

                self.albums_b(artist.albums, albumsBrowsed,
                              artist.artist.link.uri)

            artist = artist.browse(spotify.ArtistBrowserType.NO_TRACKS,
                                   artistBrowsed)
Example #38
def build_environment(request, response, session):
    """
    Build and return the environment dictionary for controller and view.
    """

    environment = {}
    for key in html.__all__:
        environment[key] = getattr(html, key)
    for key in validators.__all__:
        environment[key] = getattr(validators, key)
    environment['T'] = translator(request)
    environment['HTTP'] = HTTP
    environment['redirect'] = redirect
    environment['request'] = request
    environment['response'] = response
    environment['session'] = session
    environment['cache'] = Cache(request)
    environment['SQLDB'] = SQLDB
    SQLDB._set_thread_folder(os.path.join(request.folder, 'databases'))
    environment['SQLField'] = SQLField
    environment['SQLFORM'] = SQLFORM
    environment['SQLTABLE'] = SQLTABLE
    response._view_environment = copy.copy(environment)
    return environment
Example #39
class Core(SchedComponent):
    def __init__(self, core_id, num_compute_units, sys, clk, logger_on,
                 lower_compute_id, lower_mem_id):
        super().__init__("Core", clk, sys, lower_compute_id)
        self.core_id = core_id
        self.compute_units = []
        self.l1c = Cache(sys, clk, core_id, 0, 16, 8, 16384, 64, 1, logger_on,
                         lower_mem_id)
        for i in range(num_compute_units):
            self.compute_units.append(
                ComputeUnit(sys, clk, i, logger_on,
                            self.l1c.get_component_id()))

        self.__is_idle = True
        self.threadgroup = None

    def is_idle(self):
        return self.__is_idle

    def schedule(self, threadgroup):
        pass

    def complete_from_lower(self):
        pass
Example #40
def test_multithreaded_different_keys(mock_redis):

    mock_redis.return_value.get = mock.Mock(side_effect=mock_redis_get_b)
    cache = Cache('fake-host', 1234, None, 1, 3)
    cache.cache_storage.set('foo2', 'bar2')

    def call_get(key, result, idx):
        res = cache.get(key)
        result[idx] = (res, time.time())

    keys = ['foo1', 'foo2']
    threads = [None] * 2
    results = [None] * 2
    for i in range(len(threads)):
        threads[i] = Thread(target=call_get, args=(keys[i], results, i))
        threads[i].start()
        time.sleep(.02)
    for i in range(len(threads)):
        threads[i].join()
    assert results[0][0] == 'bar1'
    assert results[1][0] == 'bar2'
    assert results[0][1] > results[1][1]  # second call finishes first

    mock_redis.return_value.get.assert_called_once_with('foo1')
Example #41
File: pack.py Project: shilang/buildc
    def pack_build(cmode, tag, force_update):
        build_home = os.getcwd()

        attribute_lists = Pack.__pack_init()
        distribution = attribute_lists.distribution
        source = attribute_lists.source

        Pack.__do_clean(build_home)

        if "dependences" in list(dir(attribute_lists)):
            dependences = attribute_lists.dependences

            result = Cache.cache_build_by_external_libs(
                dependences, cmode, force_update)
            if result == False:
                return False

            dotrc = Glo.dot_buildc_rc_path()
            if not os.path.exists(dotrc):
                print('Can not found ' + dotrc)
                print('Please run buildc init and then config .buildc.rc!')
                sys.exit(Errors.conf_file_not_found)
            buildc_rc = Load.load_dot_buildc_rc(dotrc)

            dotrepository = Glo.dot_buildc_repository_path()
            svn_tree = SvnTree()
            svn_tree.import_format_tree_from_file(dotrepository)
            for dependence in dependences:
                Pack.__copy_dependent_library(dependence, svn_tree, buildc_rc,
                                              build_home, cmode)

        result = Pack.__do_pack(build_home, source, distribution, cmode, tag,
                                force_update)
        if result == False:
            return False
        return True
Example #42
    def SimConfig(self):
        """ Setup and process simulation """

        self.sim_configs = []

        self.title = '2-Way Set Associative Replacement Policy Comparison (' + self.mem_pattern + ' Memory Pattern)'

        self.sim_configs.append(
            Cache(config_name='LRU Cache (2-way)', repl='LRU', ways=2))
        self.sim_configs.append(
            Cache(config_name='RR Cache (2-way)', repl='RR', ways=2))
        self.sim_configs.append(
            Cache(config_name='LFRU Cache (2-way)', repl='LFRU', ways=2))
        self.sim_configs.append(
            Cache(config_name='LFU Cache (2-way)', repl='LFU', ways=2))
        self.sim_configs.append(
            Cache(config_name='FIFO Cache (2-way)', repl='FIFO', ways=2))
        self.sim_configs.append(
            Cache(config_name='MRU Cache (2-way)', repl='MRU', ways=2))
Example #43
File: test_util.py Project: kkaszyk/tcs
class TestCache(unittest.TestCase):
    def setUp(self):
        self.sys = MemSys("Test Platform", 64)
        self.memory = Memory(self.sys, 4, 4, 4, 2, 64, 1866, True, None)
        self.l2c = Cache(self.sys, 1037000, 0, 1, 16, 8, 534288, 64, 3, True,
                         self.memory.get_component_id())
        self.l1c0 = Cache(self.sys, 1037000, 0, 0, 16, 8, 16384, 64, 1, True,
                          self.l2c.get_component_id())
        self.cu0 = ComputeUnit(self.sys, 1037000, 0, True,
                               self.l1c0.get_component_id())
        self.sys.build_map()

    def test_cache_line_addr_calc(self):
        self.assertEqual(self.l1c0.get_cache_line(0x4000), 0x100)
        self.assertEqual(self.l1c0.get_cache_line(0x4010), 0x100)
        self.assertEqual(self.l1c0.get_cache_line(0x4110), 0x104)
        self.assertEqual(self.l1c0.get_cache_line(0x4112), 0x104)
        self.assertEqual(self.l1c0.get_cache_line(0x41100000), 0x1044000)
        self.assertEqual(self.l1c0.get_cache_line(0x4110000000000000),
                         0x104400000000000)
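Every assertion above is consistent with a plain block-index shift: with 64-byte cache lines, the line index is the address divided by the line size. A one-line sketch derived from the test data, an assumption about the real implementation:

def get_cache_line_sketch(addr, line_size=64):
    # 0x4000 // 64 == 0x100, 0x4110 // 64 == 0x104, and so on.
    return addr // line_size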
Example #44
    def __init__(self, q, cached=True):

        url = "http://api.urbandictionary.com/soap?wsdl"
        key = "91cf66fb7f14bbf7fb59c7cf5e22155f"

        # Live connect for uncached queries
        # or queries we do not have in cache.
        cache = Cache("urbandictionary", ".pickle")
        if not cached or not cache.exists(q):
            server = soap.SOAPProxy(url)
            try:
                definitions = server.lookup(key, q)
            except soap.faultType:
                raise UrbanDictionaryError("the API is no longer supported")
            data = []
            for item in definitions:
                ubd = UrbanDictionaryDefinition(item.word, item.url,
                                                item.definition, item.example,
                                                item.author)
                self.append(ubd)
                data.append([
                    item.word, item.word, item.definition, item.example,
                    item.author
                ])
            # Cache a pickled version of the response.
            if cached:
                data = pickle.dumps(data)
                cache.write(q, data)

        # For cached queries,
        # unpack the pickled version in the cache.
        else:
            definitions = cache.read(q)
            definitions = pickle.loads(definitions)
            for item in definitions:
                ubd = UrbanDictionaryDefinition(item[0], item[1], item[2],
                                                item[3], item[4])
                self.append(ubd)
Example #45
    def worker(cls):
        while True:
            if len(cls.tasks) < 1:
                time.sleep(.1)
                continue

            c = cls.tasks.pop(0)
            info = od.list_items_with_cache(c['full_path'], True)

            for f in info.files:
                p = f['full_path']

                if not Cache.has(p):
                    continue

                file = Cache.get(p).files[0]
                if file['hash'] != f['hash']:
                    print('expired file: %s' % p)
                    Cache.rem(p)

            for f in info.folders:
                p = f['full_path']

                if not Cache.has(p):
                    print('no cached: %s' % p)
                    new = od.list_items_with_cache(p, True)

                    cls.cache_all(new)
                    cls.tasks += new.folders[1:]
                    continue

                folder = Cache.get(p).folders[0]
                if folder['hash'] != f['hash']:
                    print('expired folder: %s' % p)
                    new = od.list_items_with_cache(p, True)

                    cls.cache_all(new)
                    cls.tasks += new.folders[1:]
Example #46
class Downloader:

    def __init__(self, cache_dir):
        self.cache = Cache(cache_dir)

    def get(self, url):
        data = self.cache.try_get(Cache.GET_METHOD, url)
        if data is None:
            response = self._do_get(url)
            data = response.text
            self.cache.save(Cache.GET_METHOD, url, data)
        return data

    def post(self, url, req_data):
        data = self.cache.try_get(Cache.POST_METHOD, url, req_data)
        if data is None:
            response = self._do_post(url, req_data)
            data = response.text
            self.cache.save(Cache.POST_METHOD, url, data, params=req_data)
        return data

    def _do_get(self, url):
        # print(f'fetching {url}')
        response = requests.get(url)
        self._check_is_ok(response)
        return response

    def _do_post(self, url, req_data):
        # print(f'fetching {url} with {req_data}')
        response = requests.post(url, data=req_data)
        self._check_is_ok(response)
        return response

    def _check_is_ok(self, response):
        status = response.status_code
        if status != 200:
            print(f'Unexpected response status: {status}')
            os.sys.exit(1)
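A brief usage sketch for the Downloader above; the cache directory and URL are illustrative:

d = Downloader('/tmp/http-cache')
page = d.get('https://example.com/')        # first call hits the network and caches
page_again = d.get('https://example.com/')  # second call is served from the cache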
Example #47
    def refresh_folders(cls):
        tasks = [{'full_path': config.start_directory}]

        while len(tasks) > 0:
            c = tasks.pop(0)
            info = od.list_items_with_cache(c['full_path'], True)

            for f in info.files:
                p = f['full_path']

                if not Cache.has(p):
                    continue

                file = Cache.get(p).files[0]
                if file['hash'] != f['hash']:
                    print('expired file: %s' % p)
                    Cache.rem(p)

            for f in info.folders:
                p = f['full_path']

                if not Cache.has(p):
                    print('no cached: %s' % p)
                    new = od.list_items_with_cache(p, True)

                    cls.cache_all(new)
                    tasks += new.folders[1:]
                    continue

                folder = Cache.get(p).folders[0]
                if folder['hash'] != f['hash']:
                    print('expired folder: %s' % p)
                    new = od.list_items_with_cache(p, True)

                    cls.cache_all(new)
                    tasks += new.folders[1:]
        print('refresh folders finish!')
Example #48
"""Main module."""
from gui import start_gui
import logging
from cache import Cache
import argparse
import os

logging.basicConfig(format="%(asctime)s : %(levelname)s:%(message)s",
                    filename="test.log",
                    level=logging.INFO)

parser = argparse.ArgumentParser()
parser.add_argument("-c",
                    "--cache-dir",
                    help="Choose directory to store cache in")
parser.add_argument("-n",
                    "--no-cache",
                    action="store_true",
                    help="Disable caching of results")
args = parser.parse_args()

if not args.no_cache:
    if args.cache_dir:
        cache = Cache(cfile=os.path.join(args.cache_dir, 'studentinfo.json'))
    else:
        cache = Cache()
else:
    cache = Cache(use_cache=False)
start_gui(cache)
Example #49
from globals import current
from html import *
from validators import *
from http import redirect, HTTP
from dal import DAL, Field
from sqlhtml import SQLFORM, SQLTABLE
from compileapp import LOAD

# Dummy code to enable code completion in IDE's.
if 0:
    from globals import Request, Response, Session
    from cache import Cache
    from languages import translator
    from tools import Auth, Crud, Mail, Service, PluginManager

    # API objects
    request = Request()
    response = Response()
    session = Session()
    cache = Cache(request)
    T = translator(request)

    # Objects commonly defined in application model files
    # (names are conventions only -- not part of API)
    db = DAL()
    auth = Auth(db)
    crud = Crud(db)
    mail = Mail()
    service = Service()
    plugins = PluginManager()
Example #50
                    choices=write_policies,
                    help="Write policy for cache {" +
                    ", ".join(write_policies) + "}")

args = parser.parse_args()

mem_size = 2**args.MEMORY
cache_size = 2**args.CACHE
block_size = 2**args.BLOCK
mapping = 2**args.MAPPING

hits = 0
misses = 0

memory = Memory(mem_size, block_size)
cache = Cache(cache_size, mem_size, block_size, mapping, args.REPLACE,
              args.WRITE)

mapping_str = "2^{0}-way associative".format(args.MAPPING)
print("\nMemory size: " + str(mem_size) + " bytes (" +
      str(mem_size // block_size) + " blocks)")
print("Cache size: " + str(cache_size) + " bytes (" +
      str(cache_size // block_size) + " lines)")
print("Block size: " + str(block_size) + " bytes")
print("Mapping policy: " + ("direct" if mapping == 1 else mapping_str) + "\n")

# Setup Readline for history and completion
# See: https://pymotw.com/2/readline/
#  and https://pewpewthespells.com/blog/osx_readline.html
if 'libedit' in readline.__doc__:
    # macOS (libedit)
    readline.parse_and_bind("bind ^I rl_complete")
else:
    # GNU readline (the conventional binding per the pymotw reference above)
    readline.parse_and_bind("tab: complete")
Example #51
class Client(object):
    INITIAL_TRACKERS = [
        'udp://tracker.openbittorrent.com:80', 'udp://tracker.istole.it:80',
        'udp://open.demonii.com:80', 'udp://tracker.coppersurfer.tk:80',
        'udp://tracker.leechers-paradise.org:6969',
        'udp://exodus.desync.com:6969', 'udp://tracker.publicbt.com:80',
        'http://tracker.torrentbay.to:6969/announce',
        'http://tracker.pow7.com/announce', 'udp://tracker.ccc.de:80/announce',
        'udp://open.demonii.com:1337', 'http://9.rarbg.com:2710/announce',
        'http://bt.careland.com.cn:6969/announce',
        'http://explodie.org:6969/announce',
        'http://mgtracker.org:2710/announce',
        'http://tracker.best-torrents.net:6969/announce',
        'http://tracker.tfile.me/announce',
        'http://tracker1.wasabii.com.tw:6969/announce',
        'udp://9.rarbg.com:2710/announce', 'udp://9.rarbg.me:2710/announce',
        'udp://coppersurfer.tk:6969/announce',
        'http://www.spanishtracker.com:2710/announce',
        'http://www.todotorrents.com:2710/announce'
    ]  ### Added some trackers from MCT

    VIDEO_EXTS = {
        '.avi': 'video/x-msvideo',
        '.mp4': 'video/mp4',
        '.mkv': 'video/x-matroska',
        '.m4v': 'video/mp4',
        '.mov': 'video/quicktime',
        '.mpg': 'video/mpeg',
        '.ogv': 'video/ogg',
        '.ogg': 'video/ogg',
        '.webm': 'video/webm',
        '.ts': 'video/mp2t',
        '.3gp': 'video/3gpp',
        '.rar': 'video/unrar'
    }

    def __init__(self,
                 url=None,
                 port=None,
                 ip=None,
                 auto_shutdown=True,
                 wait_time=20,
                 timeout=5,
                 auto_delete=True,
                 temp_path=None,
                 is_playing_fnc=None,
                 print_status=False):

        # server
        if port:
            self.port = port
        else:
            self.port = random.randint(8000, 8099)
        if ip:
            self.ip = ip
        else:
            self.ip = "127.0.0.1"
        self.server = Server((self.ip, self.port), Handler, client=self)

        # Options
        if temp_path:
            self.temp_path = temp_path
        else:
            self.temp_path = DOWNLOAD_PATH
        self.is_playing_fnc = is_playing_fnc
        self.timeout = timeout
        self.auto_delete = auto_delete
        self.wait_time = wait_time
        self.auto_shutdown = auto_shutdown
        self.buffer_size = BUFFER
        self.first_pieces_priorize = BUFFER
        self.last_pieces_priorize = 5
        self.state_file = "state"
        try:
            self.torrent_paramss = {
                'save_path': self.temp_path,
                'storage_mode': lt.storage_mode_t.storage_mode_allocate
            }
        except Exception as e:
            try:
                do = xbmcgui.Dialog()
                do.ok(
                    config.get_localized_string(30035) + 'BT Libtorrent',
                    config.get_localized_string(30036),
                    config.get_localized_string(60015), str(e))
            except:
                pass
            return

        # State
        self.has_meta = False
        self.meta = None
        self.start_time = None
        self.last_connect = 0
        self.connected = False
        self.closed = False
        self.file = None
        self.files = None
        self._th = None
        self.seleccion = 0
        self.index = 0

        # Session
        self._cache = Cache(self.temp_path)
        self._ses = lt.session()
        #self._ses.listen_on(0, 0)                                              ### ALFA: it blocks repro of some .torrents
        # Load the state file (if it exists)
        """                                                                     ### ALFA: it blocks repro of some .torrents
        if os.path.exists(os.path.join(self.temp_path, self.state_file)):
            try:
                f = open(os.path.join(self.temp_path, self.state_file), "rb")
                state = pickle.load(f)
                self._ses.load_state(state)
                f.close()
            except:
                pass
        """

        self._start_services()

        # Monitor & Dispatcher
        self._monitor = Monitor(self)
        if print_status:
            self._monitor.add_listener(self.print_status)
        self._monitor.add_listener(self._check_meta)
        self._monitor.add_listener(self.save_state)
        self._monitor.add_listener(self.priorize_start_file)
        self._monitor.add_listener(self.announce_torrent)

        if self.auto_shutdown:
            self._monitor.add_listener(self._auto_shutdown)

        self._dispatcher = Dispatcher(self)
        self._dispatcher.add_listener(self._update_ready_pieces)

        # Start the URL, if given
        if url:
            self.start_url(url)

    def set_speed_limits(self, download=0, upload=0):
        """
        Función encargada de poner límites a la velocidad de descarga o subida
        """
        if isinstance(download, int) and download > 0:
            self._th.set_download_limit(download * 1024)
        if isinstance(upload, int) and upload > 0:
            self._th.set_upload_limit(upload * 1024)

    def get_play_list(self):
        """
        Función encargada de generar el playlist
        """
        # Wait for the metadata
        while not self.has_meta:
            time.sleep(1)

        # Check that there are video files
        if self.files:
            if len(self.files) > 1:
                return "http://" + self.ip + ":" + str(
                    self.port) + "/playlist.pls"
            else:
                return "http://" + self.ip + ":" + str(
                    self.port) + "/" + urllib.quote(self.files[0].path)

    def get_files(self):
        """
        Función encargada de genera el listado de archivos
        """
        # Wait for the metadata
        while not self.has_meta:
            time.sleep(1)
        files = []

        # Check that there are video files
        if self.files:
            # Build the dict with the files
            for file in self.files:
                n = file.path
                u = "http://" + self.ip + ":" + str(
                    self.port) + "/" + urllib.quote(n)
                s = file.size
                files.append({"name": n, "url": u, "size": s})

        return files

    def _find_files(self, files, search=None):
        """
        Función encargada de buscar los archivos reproducibles del torrent
        """
        self.total_size = 0
        # Keep the files whose extension is in the list
        videos = filter(
            lambda f: self.VIDEO_EXTS.has_key(os.path.splitext(f.path)[1]),
            files)

        if not videos:
            raise Exception('No video files in torrent')
        for v in videos:
            self.total_size += v.size  ### ALFA
            videos[videos.index(v)].index = files.index(v)
        return videos

    def set_file(self, f):
        """
        Función encargada de seleccionar el archivo que vamos a servir y por tanto, priorizar su descarga
        """
        # Select the file we are going to serve
        fmap = self.meta.map_file(f.index, 0, 1)
        self.file = File(f.path, self.temp_path, f.index, f.size, fmap,
                         self.meta.piece_length(), self)
        if self.seleccion < 0:  ### ALFA
            self.file.first_piece = 0  ### ALFA
            self.file.last_piece = self.meta.num_pieces()  ### ALFA
            self.file.size = self.total_size  ### ALFA
        self.prioritize_file()

    def prioritize_piece(self, pc, idx):
        """
        Función encargada de priorizar una determinada pieza
        """
        piece_duration = 1000
        min_deadline = 2000
        dl = idx * piece_duration + min_deadline
        """                                                                     ### ALFA
        try:
            self._th.set_piece_deadline(pc, dl, lt.deadline_flags.alert_when_available)
        except:
            pass
        """

        if idx == 0:
            tail_pieces = 9
            # Pieces before the first one are deactivated
            if (self.file.last_piece - pc) > tail_pieces:
                for i in xrange(self.file.first_piece, pc):
                    self._th.piece_priority(i, 0)
                    self._th.reset_piece_deadline(i)

            # Pieces after the first one are activated
            for i in xrange(pc + 1, self.file.last_piece + 1):
                #self._th.piece_priority(i, 0)
                self._th.piece_priority(i, 1)

    def prioritize_file(self):
        """
        Función encargada de priorizar las piezas correspondientes al archivo seleccionado en la funcion set_file()
        """
        priorities = []
        for i in xrange(self.meta.num_pieces()):
            if i >= self.file.first_piece and i <= self.file.last_piece:
                priorities.append(1)
            else:
                if self.index < 0:
                    priorities.append(1)  ### ALFA
                else:
                    priorities.append(0)  ### ALFA

        self._th.prioritize_pieces(priorities)

        x = 0
        for i, _set in enumerate(self._th.piece_priorities()):
            if _set > 0: x += 1
            #logger.info("***** Nº Pieza: %s: %s" % (i, str(_set)))
        logger.info("***** Piezas %s : Activas: %s" % (str(i + 1), str(x)))
        logger.info("***** first_piece %s : last_piece: %s" %
                    (str(self.file.first_piece), str(self.file.last_piece)))

    def download_torrent(self, url):
        """
        Función encargada de descargar un archivo .torrent
        """
        from core import httptools

        data = httptools.downloadpage(url).data
        return data

    def start_url(self, uri):
        """
        Función encargada iniciar la descarga del torrent desde la url, permite:
          - Url apuntando a un .torrent
          - Url magnet
          - Archivo .torrent local
        """

        if self._th:
            raise Exception('Torrent is already started')

        if uri.startswith('http://') or uri.startswith('https://'):
            torrent_data = self.download_torrent(uri)
            info = lt.torrent_info(lt.bdecode(torrent_data))
            tp = {'ti': info}
            resume_data = self._cache.get_resume(
                info_hash=str(info.info_hash()))
            if resume_data:
                tp['resume_data'] = resume_data

        elif uri.startswith('magnet:'):
            tp = {'url': uri}
            resume_data = self._cache.get_resume(
                info_hash=Cache.hash_from_magnet(uri))
            if resume_data:
                tp['resume_data'] = resume_data

        elif os.path.isfile(uri):
            if os.access(uri, os.R_OK):
                info = lt.torrent_info(uri)
                tp = {'ti': info}
                resume_data = self._cache.get_resume(
                    info_hash=str(info.info_hash()))
                if resume_data:
                    tp['resume_data'] = resume_data
            else:
                raise ValueError('Invalid torrent path %s' % uri)
        else:
            raise ValueError("Invalid torrent %s" % uri)

        tp.update(self.torrent_paramss)
        self._th = self._ses.add_torrent(tp)

        for tr in self.INITIAL_TRACKERS:
            self._th.add_tracker({'url': tr})

        self._th.set_sequential_download(True)
        self._th.force_reannounce()
        self._th.force_dht_announce()

        self._monitor.start()
        self._dispatcher.do_start(self._th, self._ses)
        self.server.run()

    def stop(self):
        """
        Función encargada de de detener el torrent y salir
        """
        self._dispatcher.stop()
        self._dispatcher.join()
        self._monitor.stop()
        self.server.stop()
        if self._ses:
            self._ses.pause()
            if self._th:
                self.save_resume()
            self.save_state()
        self._stop_services()
        self._ses.remove_torrent(self._th, self.auto_delete)
        del self._ses
        self.closed = True

    def pause(self):
        """
        Función encargada de de pausar el torrent
        """
        self._ses.pause()

    def _start_services(self):
        """
        Función encargada de iniciar los servicios de libtorrent: dht, lsd, upnp, natpnp
        """
        self._ses.add_dht_router("router.bittorrent.com", 6881)
        self._ses.add_dht_router("router.bitcomet.com", 554)
        self._ses.add_dht_router("router.utorrent.com", 6881)
        self._ses.add_dht_router("dht.transmissionbt.com", 6881)  ### from MCT
        self._ses.start_dht()
        self._ses.start_lsd()
        self._ses.start_upnp()
        self._ses.start_natpmp()

    def _stop_services(self):
        """
        Función encargada de detener los servicios de libtorrent: dht, lsd, upnp, natpnp
        """
        self._ses.stop_natpmp()
        self._ses.stop_upnp()
        self._ses.stop_lsd()
        self._ses.stop_dht()

    def save_resume(self):
        """
        Función encargada guardar los metadatos para continuar una descarga mas rapidamente
        """
        if self._th.need_save_resume_data() and self._th.is_valid(
        ) and self.meta:
            r = ResumeData(self)
            start = time.time()
            while (time.time() - start) <= 5:
                if r.data or r.failed:
                    break
                time.sleep(0.1)
            if r.data:
                self._cache.save_resume(self.unique_file_id,
                                        lt.bencode(r.data))

    @property
    def status(self):
        """
        Función encargada de devolver el estado del torrent
        """
        if self._th:
            s = self._th.status()
            # Download Rate
            s._download_rate = s.download_rate / 1024

            # File progress
            if self.file:
                pieces = s.pieces[self.file.first_piece:
                                  self.file.last_piece]  ### ALFA
                progress = float(sum(pieces)) / len(pieces)
                s.pieces_len = len(pieces)  ### ALFA
                s.pieces_sum = sum(pieces)  ### ALFA
                #logger.info('***** Piece states: %s' % pieces)
            else:
                progress = 0
                s.pieces_len = 0  ### ALFA
                s.pieces_sum = 0  ### ALFA

            s.progress_file = progress * 100

            # File size
            s.file_name = ''  ### ALFA
            s.seleccion = ''  ### ALFA

            if self.file:
                s.seleccion = self.seleccion  ### ALFA
                s.file_name = self.file.path  ### ALFA
                s.file_size = self.file.size / 1048576.0
            else:
                s.file_size = 0

            # Buffer state
            if self.file and self.file.cursor:  # With an active connection: available data vs. player position
                percent = len(self.file.cursor.cache)
                percent = percent * 100 / self.buffer_size
                s.buffer = int(percent)

            elif self.file:  # Without an active connection: pre-buffer before starting
                # The pre-buffer has two parts:
                # 1. A buffer at the start of the file so the player can start without stuttering
                # 2. A buffer at the end of the file (some players inspect the end of the file before starting)
                bp = []

                # The start-buffer size is the total buffer size minus the end-buffer size
                first_pieces_priorize = self.buffer_size - self.last_pieces_priorize

                # Check which parts of the start buffer are available
                for x in range(first_pieces_priorize):
                    if self._th.have_piece(self.file.first_piece + x):
                        bp.append(True)
                    else:
                        bp.append(False)

                # Check which parts of the end buffer are available
                for x in range(self.last_pieces_priorize):
                    if self._th.have_piece(self.file.last_piece - x):
                        bp.append(True)
                    else:
                        bp.append(False)

                s.buffer = int(sum(bp) * 100 / self.buffer_size)

            else:  # No file selected: no buffer
                s.buffer = 0

            # Time left before closing when the timeout is active
            if self.auto_shutdown:
                if self.connected:
                    if self.timeout:
                        s.timeout = int(self.timeout -
                                        (time.time() - self.last_connect - 1))
                        if self.file and self.file.cursor:
                            s.timeout = self.timeout
                        if s.timeout < 0: s.timeout = "Closing"
                    else:
                        s.timeout = "---"
                else:
                    if self.start_time and self.wait_time:
                        s.timeout = int(self.wait_time -
                                        (time.time() - self.start_time - 1))
                        if s.timeout < 0: s.timeout = "Closing"
                    else:
                        s.timeout = "---"

            else:
                s.timeout = "Off"

            # Download state
            STATE_STR = ['Queued', 'Checking', 'Downloading Metadata', \
                         'Downloading', 'Finalized', 'Seeding', 'Allocating', 'Checking Fastresume']
            s.str_state = STATE_STR[s.state]

            # DHT state
            if self._ses.dht_state() is not None:
                s.dht_state = "On"
                s.dht_nodes = self._ses.status().dht_nodes
            else:
                s.dht_state = "Off"
                s.dht_nodes = 0

            # Number of trackers
            s.trackers = len(self._th.trackers())

            # Peer sources
            s.dht_peers = 0
            s.trk_peers = 0
            s.pex_peers = 0
            s.lsd_peers = 0

            for peer in self._th.get_peer_info():
                if peer.source & 1:
                    s.trk_peers += 1
                if peer.source & 2:
                    s.dht_peers += 1
                if peer.source & 4:
                    s.pex_peers += 1
                if peer.source & 8:
                    s.lsd_peers += 1

            return s

    """
    Servicios:
      - Estas funciones se ejecutan de forma automatica cada x tiempo en otro Thread.
      - Estas funciones son ejecutadas mientras el torrent esta activo algunas pueden desactivarse 
        segun la configuracion como por ejemplo la escritura en el log
    """

    def _auto_shutdown(self, *args, **kwargs):
        """
        Servicio encargado de autoapagar el servidor
        """
        if self.file and self.file.cursor:
            self.last_connect = time.time()
            self.connected = True

        if self.is_playing_fnc and self.is_playing_fnc():
            self.last_connect = time.time()
            self.connected = True

        if self.auto_shutdown:
            # shutdown because the player was closed
            if self.connected and self.is_playing_fnc and not self.is_playing_fnc(
            ):
                if time.time() - self.last_connect - 1 > self.timeout:
                    self.stop()

            # shutdown because no connection was ever made
            if (
                    not self.file or not self.file.cursor
            ) and self.start_time and self.wait_time and not self.connected:
                if time.time() - self.start_time - 1 > self.wait_time:
                    self.stop()

            # shutdown after the last connection
            if (
                    not self.file or not self.file.cursor
            ) and self.timeout and self.connected and not self.is_playing_fnc:
                if time.time() - self.last_connect - 1 > self.timeout:
                    self.stop()

    def announce_torrent(self):
        """
        Servicio encargado de anunciar el torrent
        """
        self._th.force_reannounce()
        self._th.force_dht_announce()

    def save_state(self):
        """
        Servicio encargado de guardar el estado
        """
        state = self._ses.save_state()
        with open(os.path.join(self.temp_path, self.state_file), 'wb') as f:
            pickle.dump(state, f)

    def _update_ready_pieces(self, alert_type, alert):
        """
        Servicio encargado de informar que hay una pieza disponible
        """
        if alert_type == 'read_piece_alert' and self.file:
            self.file.update_piece(alert.piece, alert.buffer)

    def _check_meta(self):
        """
        Servicio encargado de comprobar si los metadatos se han descargado
        """
        if self.status.state >= 3 and self.status.state <= 5 and not self.has_meta:

            # Save the metadata
            self.meta = self._th.get_torrent_info()

            # Get the file list from the metadata
            fs = self.meta.files()
            if isinstance(fs, list):
                files = fs
            else:
                files = [fs.at(i) for i in xrange(fs.num_files())]

            # Save the file list
            self.files = self._find_files(files)

            # If there are several videos (and no RAR), pick one video or "all"
            lista = []
            seleccion = 0
            for file in self.files:
                if '.rar' in str(file.path):
                    seleccion = -9
                lista += [os.path.split(str(file.path))[1]]
            if len(lista) > 1 and seleccion >= 0:
                d = xbmcgui.Dialog()
                seleccion = d.select(
                    msg_header + config.get_localized_string(30034), lista)

            if seleccion < 0:
                index = 0
                self.index = seleccion
            else:
                index = seleccion
                self.index = self.files[index].index
            self.seleccion = seleccion

            # Mark the first file as active
            self.set_file(self.files[index])

            # Consider the download started
            self.start_time = time.time()

            # Save the .torrent in the cache
            self._cache.file_complete(self._th.get_torrent_info())

            self.has_meta = True

    def priorize_start_file(self):
        '''
        Service in charge of prioritizing the start and end of the file when there is no connection.
        '''
        if self.file and not self.file.cursor:
            num_start_pieces = self.buffer_size - self.last_pieces_priorize  # Number of pieces to prioritize at the start
            num_end_pieces = self.last_pieces_priorize  # Number of pieces to prioritize at the end

            pieces_count = 0
            # Prioritize the last pieces
            for y in range(self.file.last_piece - num_end_pieces,
                           self.file.last_piece + 1):
                if not self._th.have_piece(y):
                    self.prioritize_piece(y, pieces_count)
                    pieces_count += 1

            # Prioritize the first pieces
            for y in range(self.file.first_piece, self.file.last_piece + 1):
                if not self._th.have_piece(y):
                    if pieces_count == self.buffer_size:
                        break
                    self.prioritize_piece(y, pieces_count)
                    pieces_count += 1

    def print_status(self):
        '''
        Service in charge of logging the download status.
        '''
        s = self.status  ### ALFA
        if self.seleccion >= 0:
            archivo = self.seleccion + 1
        else:
            archivo = self.seleccion

        logger.info(
            '%.2f%% of %.1fMB %s | %.1f kB/s | #%s %d%% | AutoClose: %s | S: %d(%d) P: %d(%d) | TRK: %d DHT: %d PEX: %d LSD %d | DHT:%s (%d) | Trackers: %d | Pieces: %d (%d)' % \
            (s.progress_file, s.file_size, s.str_state, s._download_rate, archivo, s.buffer, s.timeout, s.num_seeds, \
             s.num_complete, s.num_peers, s.num_incomplete, s.trk_peers, s.dht_peers, s.pex_peers, s.lsd_peers,
             s.dht_state, s.dht_nodes, s.trackers, s.pieces_sum, s.pieces_len)) ### ALFA
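
A minimal usage sketch for the Client above (hypothetical values; it assumes the same module-level Server, Handler, Monitor, Dispatcher, Cache and File helpers referenced in the example, and that Server.run() returns control to the caller, which the snippet does not show):

    client = Client(url='magnet:?xt=urn:btih:...', print_status=True)
    playlist_url = client.get_play_list()  # blocks until the metadata arrives
    for f in client.get_files():
        print f["name"], f["url"], f["size"]
    client.stop()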
示例#52
0
def test_store():
    c = Cache(16, 4, 2)
    c.store(0x112, 1)
    assert c.cache == [[-1, -1, -1, -1], [0x112, -1, -1, -1]]
示例#53
0
def test_get_tag():
    c = Cache(16, 4, 2)
    assert c.get_tag(0x0000) == 0x000
    assert c.get_tag(0x2200) == 0x110
    assert c.get_tag(0x113c) == 0x089
示例#54
0
def test_get_cache_offset():
    c = Cache(16, 1, 8)
    assert c.get_cache_offset(0x0084) == 4
    assert c.get_cache_offset(0x113c) == 12
示例#55
0
def test_get_set_number():
    c = Cache(16, 2, 4)
    assert c.get_set_number(0x0000) == 0
    assert c.get_set_number(0x113c) == 3
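
The Cache exercised by examples #52-#55 is not shown. A minimal sketch consistent with those assertions, assuming a Cache(block_size, ways, num_sets) constructor (an inference from the tests, not the original code):

    import math

    class Cache(object):
        def __init__(self, block_size, ways, num_sets):
            self.block_size = block_size
            self.num_sets = num_sets
            self.offset_bits = int(math.log(block_size, 2))
            self.set_bits = int(math.log(num_sets, 2))
            # one row per set, one slot per way, -1 meaning "empty"
            self.cache = [[-1] * ways for _ in xrange(num_sets)]

        def get_cache_offset(self, addr):
            return addr % self.block_size

        def get_set_number(self, addr):
            return (addr >> self.offset_bits) % self.num_sets

        def get_tag(self, addr):
            return addr >> (self.offset_bits + self.set_bits)

        def store(self, tag, set_number):
            # place the tag in the first free way of the set
            for i, slot in enumerate(self.cache[set_number]):
                if slot == -1:
                    self.cache[set_number][i] = tag
                    return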
示例#56
0
class Client(object):
    INITIAL_TRACKERS=['udp://tracker.openbittorrent.com:80',
                  'udp://tracker.istole.it:80',
                  'udp://open.demonii.com:80',
                  'udp://tracker.coppersurfer.tk:80',
                  'udp://tracker.leechers-paradise.org:6969',
                  'udp://exodus.desync.com:6969',
                  'udp://tracker.publicbt.com:80',
                  "http://exodus.desync.com:6969/announce",
                  "udp://tracker.publicbt.com:80/announce",
                  "udp://tracker.openbittorrent.com:80/announce",
                  "http://tracker.torrentbay.to:6969/announce",
                  "http://fr33dom.h33t.com:3310/announce",
                  "http://tracker.pow7.com/announce",
                  "udp://tracker.ccc.de:80/announce",
                  "http://tracker.bittorrent.am:80/announce",
                  "http://denis.stalker.h3q.com:6969/announce",
                  "udp://tracker.prq.to:80/announce",
                  "udp://tracker.istole.it:80/announce",
                  "udp://open.demonii.com:1337",                  
                  "http://9.rarbg.com:2710/announce",
                  "http://announce.torrentsmd.com:6969/announce",
                  "http://bt.careland.com.cn:6969/announce",
                  "http://explodie.org:6969/announce",
                  "http://mgtracker.org:2710/announce",
                  "http://tracker.best-torrents.net:6969/announce",
                  "http://tracker.tfile.me/announce",
                  "http://tracker.torrenty.org:6969/announce",
                  "http://tracker1.wasabii.com.tw:6969/announce",
                  "udp://9.rarbg.com:2710/announce",
                  "udp://9.rarbg.me:2710/announce",
                  "udp://coppersurfer.tk:6969/announce",
                  "udp://tracker.btzoo.eu:80/announce",                  
                  "http://www.spanishtracker.com:2710/announce",
                  "http://www.todotorrents.com:2710/announce",
                  ]                  

    VIDEO_EXTS={'.avi':'video/x-msvideo','.mp4':'video/mp4','.mkv':'video/x-matroska',
            '.m4v':'video/mp4','.mov':'video/quicktime', '.mpg':'video/mpeg','.ogv':'video/ogg',
            '.ogg':'video/ogg', '.webm':'video/webm', '.ts': 'video/mp2t', '.3gp':'video/3gpp'}

    def __init__(self, url = None, port=None, ip=None, auto_shutdown=True, wait_time=35, timeout=15, auto_delete=False, temp_path=None, is_playing_fnc=None, print_status = False):

        #server
        self.port = port if port else random.randint(8000,8099)
        self.ip = ip if ip else "127.0.0.1"
        self.server = Server((self.ip,self.port), Handler, client=self)


        #Options
        self.temp_path = temp_path if temp_path else os.path.join(os.path.dirname(__file__),"tmp")
        self.is_playing_fnc = is_playing_fnc
        self.timeout = timeout
        self.auto_delete = auto_delete
        self.wait_time= wait_time
        self.auto_shutdown = auto_shutdown
        self.buffer_size = 10
        self.state_file="state"
        self.torrent_paramss={'save_path':self.temp_path,'storage_mode':lt.storage_mode_t.storage_mode_sparse}



        #State
        self.has_meta = False
        self.meta = None
        self.start_time = None
        self.last_connect = 0
        self.connected = False
        self.closed = False
        self.file = None
        self.files = None
        self._th=None


        #Session
        self._cache=Cache(self.temp_path)
        self._ses=lt.session()
        self._ses.listen_on(0,0)
        #Load the state file (if it exists)
        if os.path.exists(os.path.join(self.temp_path,self.state_file)):
            try:
                with open(os.path.join(self.temp_path,self.state_file), "rb") as f:
                    state=pickle.load(f)
                    self._ses.load_state(state)
            except:
                pass
                
        
        self._start_services()
        
        #Monitor & Dispatcher
        self._monitor= Monitor(self)
        if print_status:
            self._monitor.add_listener(self.print_status)
        self._monitor.add_listener(self._check_meta)
        self._monitor.add_listener(self.save_state)
        self._monitor.add_listener(self.priorize_start_file)
        self._monitor.add_listener(self.announce_torrent)
        
        if self.auto_shutdown:
            self._monitor.add_listener(self._auto_shutdown)
        self._dispatcher=Dispatcher(self)
        self._dispatcher.add_listener(self._update_ready_pieces)

        #Start from the URL
        if url:
            self.start_url(url)

    def get_play_list(self):
        #Wait for the metadata
        while not self.has_meta:
            time.sleep(1)

        #Check that there are video files
        if self.files:
            if len(self.files) > 1:
                return "http://" + self.ip + ":" + str(self.port) + "/playlist.pls"
            else:
                return "http://" + self.ip + ":" + str(self.port) + "/" + urllib.quote(self.files[0].path)

    def get_files(self):
        #Wait for the metadata
        while not self.has_meta:
            time.sleep(1)
        files = []

        #Check that there are video files
        if self.files:
            #Build a dict for each file
            for file in self.files:
                n = file.path
                u = "http://" + self.ip + ":" + str(self.port) + "/" + urllib.quote(n)
                s = file.size
                files.append({"name":n,"url":u,"size":s})

        return files

    def announce_torrent(self):
        self._th.force_reannounce()
        self._th.force_dht_announce()

    def _auto_shutdown(self, *args, **kwargs):
        if self.file and self.file.cursor:
            self.last_connect = time.time()
            self.connected = True

        if self.is_playing_fnc and self.is_playing_fnc():
            self.last_connect = time.time()
            self.connected = True

        if self.auto_shutdown:
            #shutdown because the player was closed
            if self.connected and self.is_playing_fnc and not self.is_playing_fnc():
                if time.time() - self.last_connect - 1 > self.timeout:
                    self.stop()

            #shutdown because no connection was ever made
            if (not self.file or not self.file.cursor) and self.start_time and self.wait_time and not self.connected:
                if time.time() - self.start_time - 1 > self.wait_time:
                    self.stop()

            #shutdown after the last connection
            if (not self.file or not self.file.cursor) and self.timeout and self.connected and not self.is_playing_fnc:
                if time.time() - self.last_connect - 1 > self.timeout:
                    self.stop()

    def save_state(self):
        state=self._ses.save_state()
        with open(os.path.join(self.temp_path,self.state_file), 'wb') as f:
            pickle.dump(state,f)

     
    def _update_ready_pieces(self, alert_type, alert):
        if alert_type == 'read_piece_alert' and self.file:
            self.file.update_piece(alert.piece, alert.buffer)
            
            
    def _check_meta(self):
        if self.status.state>=3 and  self.status.state <= 5 and not self.has_meta:

            #Save the metadata
            self.meta = self._th.get_torrent_info()

            #Get the file list from the metadata
            fs=self.meta.files()
            files=fs if isinstance(fs, list) else [fs.at(i) for i in xrange(fs.num_files())]

            #Save the file list
            self.files=self._choose_files(files)

            #Mark the first file as active
            self.set_file(self.files[0])

            #Consider the download started
            self.start_time = time.time()

            #Save the .torrent in the cache
            self._cache.file_complete(self._th.get_torrent_info())

            self.has_meta = True



    def _choose_files(self, files, search=None):
        #Keep only the files whose extension is in the video list
        videos=filter(lambda f: self.VIDEO_EXTS.has_key(os.path.splitext(f.path)[1]), files)

        if not videos:
            raise Exception('No video files in torrent')
        for v in videos:
            videos[videos.index(v)].index = files.index(v)
        return videos


    def set_file(self, f):
        #Select the file we are going to serve
        fmap=self.meta.map_file(f.index, 0,1)
        self.file=File(f.path, self.temp_path, f.index, f.size, fmap, self.meta.piece_length(), self)
        self.prioritize_file()
        
    def priorize_start_file(self):
        #If a file is selected but there is no connection, prioritize the start of the file
        if self.file and not self.file.cursor:
            for x in range(self.file.first_piece, self.file.last_piece):
                if not self._th.have_piece(x):
                    for y in range(x, x + self.buffer_size):
                        if y == x + self.buffer_size - 1 and not self._th.have_piece(self.file.last_piece):
                            self.prioritize_piece(self.file.last_piece, y - x)
                        else:
                            self.prioritize_piece(y, y - x)
                    break
        



    def prioritize_piece(self, pc, idx):
        piece_duration=1000
        min_deadline=2000
        dl=idx*piece_duration+min_deadline
        self._th.set_piece_deadline(pc, dl,lt.deadline_flags.alert_when_available)
        
        if idx==0:  # it is enough to do this when the first piece is prioritized; no need to repeat it for the following pieces
            # we do not need to download pieces lower than the current index,
            # but the last few pieces are special because players often look at the end of the file
            # for mp4 metadata - so in that case we do not want to stop downloading the previous pieces
            tail_pieces=9
            if (self.file.last_piece - pc) > tail_pieces:
                for i in xrange(self.file.first_piece,pc):
                    self._th.piece_priority(i,0)
                    self._th.reset_piece_deadline(i)
                
            # If we are skipping back we'd like to re-enable all pieces after this one
            # e.g. give them at least priority 1
            for i in xrange(pc+1, self.file.last_piece+1):
                self._th.piece_priority(i,1)

    def prioritize_file(self):
        priorities=[1 if i>= self.file.first_piece and i<= self.file.last_piece else 0 \
                    for i in xrange(self.meta.num_pieces())]
        self._th.prioritize_pieces(priorities)


    def download_torrent(self,url):
        from core import scrapertools
        
        data = scrapertools.downloadpage(url)
        return data


    def start_url(self, uri):
        if self._th:
            raise Exception('Torrent is already started')

        if uri.startswith('http://') or uri.startswith('https://'):
            torrent_data = self.download_torrent(uri)
            info = lt.torrent_info(lt.bdecode(torrent_data))
            tp = {'ti':info}
            resume_data= self._cache.get_resume(info_hash=str(info.info_hash()))
            if resume_data:
                tp['resume_data']=resume_data

        elif uri.startswith('magnet:'):
            tp={'url':uri}
            resume_data=self._cache.get_resume(info_hash=Cache.hash_from_magnet(uri))
            if resume_data:
                tp['resume_data']=resume_data

        elif os.path.isfile(uri):
            if os.access(uri,os.R_OK):
                info = lt.torrent_info(uri)
                tp= {'ti':info}
                resume_data= self._cache.get_resume(info_hash=str(info.info_hash()))
                if resume_data:
                    tp['resume_data']=resume_data
            else:
                raise ValueError('Invalid torrent path %s' % uri)
        else:
            raise ValueError("Invalid torrent %s" %uri)
        
        tp.update(self.torrent_paramss)
        self._th = self._ses.add_torrent(tp)
        

        for tr in self.INITIAL_TRACKERS:
            self._th.add_tracker({'url':tr})

        self._th.set_sequential_download(True)
        self._th.force_reannounce()
        self._th.force_dht_announce()

        self._monitor.start()
        self._dispatcher.do_start(self._th, self._ses)
        self.server.run()

        
    def stop(self):
        self._dispatcher.stop()
        self._dispatcher.join()
        self._monitor.stop()
        self.server.stop()
        if self._ses:
            self._ses.pause()
            if self._th:
                self.save_resume()
            self.save_state()
        self._stop_services()
        self._ses.remove_torrent(self._th, self.auto_delete)
        del self._ses
        self.closed = True

    def _start_services(self):
        self._ses.add_dht_router("router.bittorrent.com",6881)
        self._ses.add_dht_router("router.utorrent.com",6881)
        self._ses.add_dht_router("router.bitcomet.com",554)
        self._ses.add_dht_router("router.utorrent.com",6881)
        self._ses.add_dht_router("dht.transmissionbt.com",6881)            
        self._ses.start_dht()
        self._ses.start_lsd()
        self._ses.start_upnp()
        self._ses.start_natpmp()
        
        
    def _stop_services(self):
        self._ses.stop_natpmp()
        self._ses.stop_upnp()
        self._ses.stop_lsd()
        self._ses.stop_dht()


    def save_resume(self):
        if self._th.need_save_resume_data() and self._th.is_valid() and self.meta:
            r = ResumeData(self)
            start=time.time()
            while (time.time() - start) <= 5 :
                if r.data or r.failed:
                    break
                time.sleep(0.1)
            if r.data:
                self._cache.save_resume(self.unique_file_id,lt.bencode(r.data))

    @property
    def status(self):
        if self._th:
            s = self._th.status()
            s._download_rate = s.download_rate / 1000

            if self.file:
                pieces=s.pieces[self.file.first_piece:self.file.last_piece]
                progress= float(sum(pieces))/len(pieces)
            else:
                progress=0

            s.progress_file=progress * 100
            s.file_size=self.file.size / 1048576.0 if self.file else 0

            if self.file and self.file.cursor:
                percent = len(self.file.cursor.cache)
                percent = percent * 100 / self.buffer_size
                s.buffer = int(percent)

            elif self.file:
                bp = [True if (x == self.buffer_size -1 and self._th.have_piece(self.file.last_piece)) or (x < self.buffer_size -1 and self._th.have_piece(x)) else False for x in range(self.buffer_size ) ]
                percent = len([a for a in bp if a == True])
                percent = percent * 100 / self.buffer_size
                s.buffer = int(percent)

            else:
                s.buffer = 0

            if self.auto_shutdown:
                if self.connected:
                    if self.timeout:
                        s.timeout = int(self.timeout - (time.time() - self.last_connect -1))
                        if self.file and self.file.cursor:
                             s.timeout = self.timeout
                        if s.timeout < 0: s.timeout = "Closing"
                    else:
                        s.timeout = "---"
                else:
                    if self.start_time and self.wait_time:
                        s.timeout = int(self.wait_time - (time.time() - self.start_time -1))
                        if s.timeout < 0: s.timeout = "Closing"
                    else:
                        s.timeout = "---"

            else:
                s.timeout = "Off"

            STATE_STR = ['Queued', 'Checking', 'Downloading metadata', \
                    'Downloading', 'Finished', 'Seeding', 'Allocating', 'Checking fastresume']

            s.str_state = STATE_STR[s.state]

            if self._ses.dht_state() is not None:
                s.dht_state = "On"
                s.dht_nodes=self._ses.status().dht_nodes
            else:
                s.dht_state = "Off"
                s.dht_nodes = 0

            s.trackers= len(self._th.trackers())

            s.dht_peers = 0
            s.trk_peers = 0
            s.pex_peers = 0
            s.lsd_peers = 0

            for peer in self._th.get_peer_info():
                if peer.source & 1:
                    s.trk_peers +=1
                if peer.source & 2:
                    s.dht_peers +=1
                if peer.source & 4:
                    s.pex_peers +=1
                if peer.source & 8:
                    s.lsd_peers +=1

            return s

    def print_status(self):
        s = self.status

        archivo = "N/D"
        percent = s.buffer

        if percent > 75:
            color = "32"
        elif percent < 40:
            color = "31"
        else:
            color = "33"

        buffer = "\033[%sm%s%s\033[39m" % ( color ,u"\u25A0" * (percent/10), u"\u25A1" *(10-percent/10))

        print '\r\033[39;m%.2f%% of %.1fMB %s | %.1f kB/s | #%s %s | AutoClose: %s | S: %d(%d) P: %d(%d) | TRK: %d DHT: %d PEX: %d LSD %d | DHT:%s (%d) | Trackers: %d' % \
            (s.progress_file, s.file_size, s.str_state, s._download_rate, archivo, buffer, s.timeout, s.num_seeds, s.num_complete, s.num_peers, s.num_incomplete, s.trk_peers, s.dht_peers, s.pex_peers, s.lsd_peers, s.dht_state, s.dht_nodes, s.trackers),
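
The save_resume() method in this example polls a ResumeData helper whose implementation is not shown. A plausible minimal sketch, inferred only from the .data and .failed attributes used above and from libtorrent's alert API (the listener signature matches the Dispatcher listeners seen in these examples):

    class ResumeData(object):
        """Collects the result of an asynchronous save_resume_data() request."""
        def __init__(self, client):
            self.data = None
            self.failed = False
            client._th.save_resume_data()  # ask libtorrent to build resume data
            client._dispatcher.add_listener(self._on_alert)

        def _on_alert(self, alert_type, alert):
            if alert_type == 'save_resume_data_alert':
                self.data = alert.resume_data
            elif alert_type == 'save_resume_data_failed_alert':
                self.failed = True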
示例#57
0
    def __init__(self, url = None, port=None, ip=None, auto_shutdown=True, wait_time=35, timeout=15, auto_delete=False, temp_path=None, is_playing_fnc=None, print_status = False):

        #server
        self.port = port if port else random.randint(8000,8099)
        self.ip = ip if ip else "127.0.0.1"
        self.server = Server((self.ip,self.port), Handler, client=self)


        #Options
        self.temp_path = temp_path if temp_path else os.path.join(os.path.dirname(__file__),"tmp")
        self.is_playing_fnc = is_playing_fnc
        self.timeout = timeout
        self.auto_delete = auto_delete
        self.wait_time= wait_time
        self.auto_shutdown = auto_shutdown
        self.buffer_size = 10
        self.state_file="state"
        self.torrent_paramss={'save_path':self.temp_path,'storage_mode':lt.storage_mode_t.storage_mode_sparse}



        #State
        self.has_meta = False
        self.meta = None
        self.start_time = None
        self.last_connect = 0
        self.connected = False
        self.closed = False
        self.file = None
        self.files = None
        self._th=None


        #Session
        self._cache=Cache(self.temp_path)
        self._ses=lt.session()
        self._ses.listen_on(0,0)
        #Load the state file (if it exists)
        if os.path.exists(os.path.join(self.temp_path,self.state_file)):
            try:
                with open(os.path.join(self.temp_path,self.state_file), "rb") as f:
                    state=pickle.load(f)
                    self._ses.load_state(state)
            except:
                pass
                
        
        self._start_services()
        
        #Monitor & Dispatcher
        self._monitor= Monitor(self)
        if print_status:
            self._monitor.add_listener(self.print_status)
        self._monitor.add_listener(self._check_meta)
        self._monitor.add_listener(self.save_state)
        self._monitor.add_listener(self.priorize_start_file)
        self._monitor.add_listener(self.announce_torrent)
        
        if self.auto_shutdown:
            self._monitor.add_listener(self._auto_shutdown)
        self._dispatcher=Dispatcher(self)
        self._dispatcher.add_listener(self._update_ready_pieces)

        #Start from the URL
        if url:
            self.start_url(url)
示例#58
0
    def __init__(self,
                 url=None,
                 port=None,
                 ip=None,
                 auto_shutdown=True,
                 wait_time=20,
                 timeout=5,
                 auto_delete=True,
                 temp_path=None,
                 is_playing_fnc=None,
                 print_status=False):

        # server
        if port:
            self.port = port
        else:
            self.port = random.randint(8000, 8099)
        if ip:
            self.ip = ip
        else:
            self.ip = "127.0.0.1"
        self.server = Server((self.ip, self.port), Handler, client=self)

        # Options
        if temp_path:
            self.temp_path = temp_path
        else:
            self.temp_path = DOWNLOAD_PATH
        self.is_playing_fnc = is_playing_fnc
        self.timeout = timeout
        self.auto_delete = auto_delete
        self.wait_time = wait_time
        self.auto_shutdown = auto_shutdown
        self.buffer_size = BUFFER
        self.first_pieces_priorize = BUFFER
        self.last_pieces_priorize = 5
        self.state_file = "state"
        try:
            self.torrent_paramss = {
                'save_path': self.temp_path,
                'storage_mode': lt.storage_mode_t.storage_mode_allocate
            }
        except Exception as e:
            try:
                do = xbmcgui.Dialog()
                do.ok(
                    config.get_localized_string(30035) + 'BT Libtorrent',
                    config.get_localized_string(30036),
                    config.get_localized_string(60015), str(e))
            except:
                pass
            return

        # State
        self.has_meta = False
        self.meta = None
        self.start_time = None
        self.last_connect = 0
        self.connected = False
        self.closed = False
        self.file = None
        self.files = None
        self._th = None
        self.seleccion = 0
        self.index = 0

        # Session
        self._cache = Cache(self.temp_path)
        self._ses = lt.session()
        #self._ses.listen_on(0, 0)                                              ### ALFA: it blocks repro of some .torrents
        # Load the state file (if it exists)
        """                                                                     ### ALFA: it blocks repro of some .torrents
        if os.path.exists(os.path.join(self.temp_path, self.state_file)):
            try:
                f = open(os.path.join(self.temp_path, self.state_file), "rb")
                state = pickle.load(f)
                self._ses.load_state(state)
                f.close()
            except:
                pass
        """

        self._start_services()

        # Monitor & Dispatcher
        self._monitor = Monitor(self)
        if print_status:
            self._monitor.add_listener(self.print_status)
        self._monitor.add_listener(self._check_meta)
        self._monitor.add_listener(self.save_state)
        self._monitor.add_listener(self.priorize_start_file)
        self._monitor.add_listener(self.announce_torrent)

        if self.auto_shutdown:
            self._monitor.add_listener(self._auto_shutdown)

        self._dispatcher = Dispatcher(self)
        self._dispatcher.add_listener(self._update_ready_pieces)

        # Start from the URL
        if url:
            self.start_url(url)
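
This __init__ variant relies on module-level names that the snippet does not show (DOWNLOAD_PATH, BUFFER, msg_header, plus the config and xbmcgui Kodi modules). Hypothetical definitions, just to make the assumptions explicit (values are illustrative, not from the original project):

    BUFFER = 25  # number of pieces to pre-buffer before playback starts
    DOWNLOAD_PATH = os.path.join(os.path.dirname(__file__), 'tmp')
    msg_header = 'Torrent client: '  # prefix for the selection dialog title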
示例#59
0
def main():
    if len(sys.argv) != 7:
        print 'usage: sim_cache.py <BLOCKSIZE> <SIZE> <ASSOC> <REPLACEMENT_POLICY> <WRITE_POLICY> <trace_file>'
        print '<BLOCKSIZE>          Block size in bytes. Positive Integer, Power of two'
        print '<SIZE>               Total CACHE size in bytes. Positive Integer'
        print '<ASSOC>              Associativity, 1 if direct mapped, N if fully associative'
        print '<REPLACEMENT_POLICY> 0 for LRU, 1 for LFU'
        print '<WRITE_POLICY>       0 for WBWA, 1 for WTNA'
        print 'Example: 8KB 4-way set-associative cache with 32B block size, LRU replacement policy and WTNA write policy, gcc_trace input file'
        print 'Command: $ python sim_cache.py 32 8192 4 0 1 gcc_trace.txt'
        # print 'Command: $ ./sim_cache 32 8192 4 0 1 gcc_trace'
        sys.exit(1)
    else:
        blockSize = int(sys.argv[1])
        cacheSize = int(sys.argv[2])
        associativity = int(sys.argv[3])
        replacementPolicy = int(sys.argv[4])
        writePolicy = int(sys.argv[5])
        fileName = sys.argv[6]

        def printContents(cache):
            print ' ', '{:=^35}'.format(' Simulator configuration ')
            print '{: <24}'.format('  L1_BLOCKSIZE:'), '{: >12}'.format(
                cache.config['blockSize'])
            print '{: <24}'.format('  L1_SIZE:'), '{: >12}'.format(
                cache.config['cacheSize'])
            print '{: <24}'.format('  L1_ASSOC:'), '{: >12}'.format(
                cache.config['associativity'])
            print '{: <24}'.format(
                '  L1_REPLACEMENT_POLICY:'), '{: >12}'.format(
                    cache.config['replacementPolicy'])
            print '{: <24}'.format('  L1_WRITE_POLICY:'), '{: >12}'.format(
                cache.config['writePolicy'])
            print '{: <24}'.format('  trace_file:'), '{: >12}'.format(fileName)
            print '{:=<37}'.format('  ')
            print ""
            print "===== L1 contents ====="
            for row in xrange(cache.rows):
                print "set%d:\t" % (row),
                for col in xrange(cache.cols):
                    val = hex(cache.TAG_MAT[row][col])
                    print "%s" % (val[2:len(val)]),
                    if cache.DIRTY_MAT[row][col] == 1:
                        print "D \t",
                    else:
                        print "\t",
                print ""
            print ""
            print ' ', '{:=^38}'.format(' Simulation results (raw) ')
            print '{: <31}'.format(
                '  a. number of L1 reads:'), '{: >8}'.format(
                    cache.stats['Reads'])
            print '{: <31}'.format(
                '  b. number of L1 read misses:'), '{: >8}'.format(
                    cache.stats['ReadMisses'])
            print '{: <31}'.format(
                '  c. number of L1 writes:'), '{: >8}'.format(
                    cache.stats['Writes'])
            print '{: <31}'.format(
                '  d. number of L1 write misses:'), '{: >8}'.format(
                    cache.stats['WriteMisses'])
            totalMisses = cache.stats['ReadMisses'] + cache.stats['WriteMisses']
            totalAccesses = cache.stats['Reads'] + cache.stats['Writes']
            missRate = totalMisses / float(totalAccesses)
            print '{: <31}'.format('  e. L1 miss rate:'), '{: >8}'.format(
                '%.4f' % missRate)
            print '{: <31}'.format(
                '  f. number of writebacks from L1:'), '{: >5}'.format(
                    cache.stats['WriteBacks'])
            print '{: <31}'.format(
                '  g. total memory traffic:'), '{: >8}'.format(
                    cache.stats['MemTraffic'])
            print ""
            print ' ', '{:=^42}'.format(' Simulation results (performance) ')
            # FIXME: logic for access time
            # L1 Cache Hit Time(in ns) = 0.25ns + 2.5ns * (L1_Cache Size / 512kB) + 0.025ns * (L1_BLOCKSIZE / 16B) + 0.025ns * L1_SET_ASSOCIATIVITY
            # L1 miss penalty(in ns) = 20 ns + 0.5 * (L1_BLOCKSIZE / 16 B / ns))
            # avg_access_time = (l1_hit_time + (l1_miss_rate * (miss_penalty))
            hitTime = 0.25 + (2.5 * (cache.config['cacheSize'] / float(512 * 1024))) + \
                (0.025 * (cache.config['blockSize'] / 16.0)) + (0.025 * cache.config['associativity'])
            missPenalty = 20 + 0.5 * (cache.config['blockSize'] / 16.0)
            AAT = hitTime + (missRate * missPenalty)
            print '{: <23}'.format(
                '  1. average access time:'), '{: >18}'.format('%.4f ns' %
                                                               AAT),
            sys.stdout.flush()

        # L1 cache instantiation & initialization
        L1 = Cache(blockSize, cacheSize, associativity, replacementPolicy,
                   writePolicy)
        # parsing trace file
        instr_list = traceParse(fileName)
        for i in instr_list:
            if i[0] == 'r':
                L1.readFromAddress(i[1])
            elif i[0] == 'w':
                L1.writeToAddress(i[1])
        printContents(L1)
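
main() also depends on a traceParse helper that the example omits. A minimal sketch, assuming each trace line holds an operation letter and a hex address (e.g. "r 7fc61248"):

    def traceParse(fileName):
        # returns a list of (operation, address) tuples, e.g. ('r', 0x7fc61248)
        instr_list = []
        with open(fileName) as f:
            for line in f:
                parts = line.split()
                if len(parts) == 2:
                    instr_list.append((parts[0].lower(), int(parts[1], 16)))
        return instr_list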
示例#60
0
文件: app.py 项目: n28div/itAIRQ
import threading
import re
import logging
from flask import Flask
from flask_cors import CORS
from flask_apscheduler import APScheduler
from datetime import datetime
# settings, Cache and Fetcher are project-local modules; their imports are not shown in this snippet

logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

app = Flask(__name__)
app.secret_key = settings.flask['SECRET_KEY']
# Setup cors header
CORS(app)

cache = Cache()
fetcher = Fetcher(cache)


def absolute_url_for(url_name: str, date: datetime, **values) -> str:
    """
    Calculate the absolute url for url_name joined with values

    :param url_name: The url name
    :param **values: The values required by url_for
    :return: The absolute url
    """
    with app.app_context():
        day = date.strftime('%d')
        month = date.strftime('%m')
        year = date.strftime('%Y')