def __init__(self, keypair=None):
        """ Constructor for LIVE_AUTHMETHOD_ECDSA authentication of the
        live source. If no keypair is specified, one is generated.

        @param keypair  (Optional) An M2Crypto.EC keypair.
        """
        LiveSourceAuthConfig.__init__(self, LIVE_AUTHMETHOD_ECDSA)
        if keypair is None:
            self.keypair = permidmod.generate_keypair()
        else:
            self.keypair = keypair
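A minimal usage sketch for the constructor above. The name of the enclosing class (written here as ECDSALiveSourceAuthConfig) and the import path are assumptions taken from the surrounding snippets; only the keypair handling mirrors the code shown.

from Tribler.Core.Overlay.permid import generate_keypair  # import path assumed from the snippets below

auth_cfg = ECDSALiveSourceAuthConfig()                    # a fresh EC keypair is generated internally
own_keypair = generate_keypair()
auth_cfg_reused = ECDSALiveSourceAuthConfig(own_keypair)  # reuse an existing M2Crypto.EC keypair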
    def __init__(self, keypair=None):
        """ Constructor for LIVE_AUTHMETHOD_ECDSA authentication of the
        live source. If no keypair is specified, one is generated.

        @param keypair  (Optional) An M2Crypto.EC keypair.
        """
        LiveSourceAuthConfig.__init__(self, LIVE_AUTHMETHOD_ECDSA)
        if keypair is None:
            self.keypair = generate_keypair()
        else:
            self.keypair = keypair
def generatePermIds(numOfPermids):
    permids = list()
    keypair = generate_keypair()
    
    #two equal permids for ease of testing
    permids.append((str(keypair.pub().get_der()), keypair))
    permids.append((str(keypair.pub().get_der()), keypair))
#    for i in range(numOfPermids):
#        keypair = generate_keypair()
#        permids.append((str(keypair.pub().get_der()), keypair))
    return permids
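The helper above deliberately returns two tuples built from a single keypair (the loop that would create distinct permids is commented out), so both entries share the same permid string. A quick sketch of consuming its return value:

permids = generatePermIds(2)
permid_str, keypair = permids[0]
assert permid_str == str(keypair.pub().get_der())
assert permids[0][0] == permids[1][0]  # intentionally equal, for ease of testing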
Example #4
def generate_cs_keypair(keypair_filename=None, pubkey_filename=None):
    """
    Generate a keypair suitable for a Closed Swarm
    
    Saves to the given files if specified, returns keypair, pubkey
    """
    keypair = permid.generate_keypair()
    if keypair_filename:
        permid.save_keypair(keypair, keypair_filename)

    pubkey = encodestring(str(keypair.pub().get_der())).replace("\n", "")
    if pubkey_filename:
        permid.save_pub_key(keypair, pubkey_filename)

    return keypair, pubkey
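A hedged example of calling generate_cs_keypair; the file names below are placeholders. As the body shows, the returned pubkey is the base64 (encodestring) form of the DER-encoded public key with newlines stripped, while the full keypair is written out via permid.save_keypair:

keypair, pubkey = generate_cs_keypair("cs_keypair.pem", "cs_pubkey.pub")  # placeholder file names
print pubkey  # one-line base64 string of the DER-encoded public key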
def generate_cs_keypair(keypair_filename=None, pubkey_filename=None):
    """
    Generate a keypair suitable for a Closed Swarm
    
    Saves to the given files if specified, returns keypair, pubkey
    """
    keypair = permid.generate_keypair()
    if keypair_filename:
        permid.save_keypair(keypair, keypair_filename)

    pubkey = encodestring(str(keypair.pub().get_der())).replace("\n","")
    if pubkey_filename:
        permid.save_pub_key(keypair, pubkey_filename)
    
    return keypair, pubkey
    def setUpPostSession(self):
        """ override TestAsServer """
        TestAsServer.setUpPostSession(self)

        self.mypermid = str(self.my_keypair.pub().get_der())
        self.hispermid = str(self.his_keypair.pub().get_der())
        
        self.another_keypair = generate_keypair()
        self.anotherpermid = str(self.another_keypair.pub().get_der())
        
        self.testInfohash = hashlib.sha1("yoman!").digest()
        
        #copy subtitles into the collecting dir
        nldName = SubUtils.getSubtitleFileRelativeName(self.anotherpermid, self.testInfohash, "nld")
        engName = SubUtils.getSubtitleFileRelativeName(self.anotherpermid, self.testInfohash, "eng")
        
        self.sub1 = os.path.join(self.collecting_dir, nldName)
        self.sub2 = os.path.join(self.collecting_dir, engName)
        
        shutil.copyfile(self.src1, self.sub1)
    def setUp(self):
        self.keyfiles = [
            ".node_a_keypair", ".node_b_keypair", ".torrent_keypair"
        ]
        for filename in self.keyfiles:
            if not os.path.exists(filename):
                keypair = permid.generate_keypair()
                permid.save_keypair(keypair, filename)

        self.node_a_keypair = permid.read_keypair(".node_a_keypair")
        self.node_b_keypair = permid.read_keypair(".node_b_keypair")
        self.torrent_keypair = permid.read_keypair(".torrent_keypair")

        self.torrent_id = "1234"

        # Shortcuts
        self.node_a_pub_permid = str(self.node_a_keypair.pub().get_der())
        self.node_b_pub_permid = str(self.node_b_keypair.pub().get_der())
        self.torrent_pubkeys = [
            encodestring(str(self.torrent_keypair.pub().get_der())).replace(
                "\n", "")
        ]

        # Create the certificate for this torrent ("proof of access")
        self.poa_a = ClosedSwarm.create_poa(self.torrent_id,
                                            self.torrent_keypair,
                                            self.node_a_pub_permid)

        self.poa_b = ClosedSwarm.create_poa(self.torrent_id,
                                            self.torrent_keypair,
                                            self.node_b_pub_permid)

        self.cs_a = ClosedSwarm.ClosedSwarm(self.node_a_keypair,
                                            self.torrent_id,
                                            self.torrent_pubkeys, self.poa_a)

        self.cs_b = ClosedSwarm.ClosedSwarm(self.node_b_keypair,
                                            self.torrent_id,
                                            self.torrent_pubkeys, self.poa_b)
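In the fixture above, the torrent keypair signs a proof-of-access (POA) binding each node's permid to the torrent id, and each node then wraps its own keypair, the torrent's public keys and its POA in a ClosedSwarm instance. A condensed sketch of that flow, reusing only calls shown above (the torrent id is a placeholder and permid/encodestring/ClosedSwarm are assumed imported as in the test):

torrent_keypair = permid.generate_keypair()
node_keypair = permid.generate_keypair()

torrent_id = "1234"  # placeholder
torrent_pubkeys = [encodestring(str(torrent_keypair.pub().get_der())).replace("\n", "")]
node_permid = str(node_keypair.pub().get_der())

poa = ClosedSwarm.create_poa(torrent_id, torrent_keypair, node_permid)
cs = ClosedSwarm.ClosedSwarm(node_keypair, torrent_id, torrent_pubkeys, poa)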
Example #8
    def __init__(self, availableLangs, infohash = None):

        self._keypair = generate_keypair()

        self._permId = str(self._keypair.pub().get_der())

        if infohash == None :
            hasher = hashlib.sha1()
            hasher.update(self._permId + "a")
            infohash = hasher.digest()

        self.channel = self._permId
        self.infohash = infohash
        self.description = u""
        self.resetTimestamp()
        self._subtitles = {}

        hasher = hashlib.sha1() #fake checksums for subs

        for lang in availableLangs:
            hasher.update(lang + "123")
            checksum = hasher.digest()
            self.addSubtitle(SubtitleInfo(lang, None, checksum))
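A brief sketch of driving the constructor above (the class is referred to as MockMetadataDTO in the database tests further down; that name is inferred): when no infohash is passed, a fake one is derived from the instance's own permid, and one SubtitleInfo with a fake checksum is registered per requested language.

dto = MockMetadataDTO(["eng", "nld"])  # infohash derived from the generated permid
assert dto.channel == dto._permId
eng_sub = dto.getSubtitle("eng")       # SubtitleInfo with a fake checksum and no path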
Example #9
    def __init__(self, availableLangs, infohash=None):

        self._keypair = generate_keypair()

        self._permId = str(self._keypair.pub().get_der())

        if infohash is None:
            hasher = hashlib.sha1()
            hasher.update(self._permId + "a")
            infohash = hasher.digest()

        self.channel = self._permId
        self.infohash = infohash
        self.description = u""
        self.resetTimestamp()
        self._subtitles = {}

        hasher = hashlib.sha1()  # fake checksums for subs

        for lang in availableLangs:
            hasher.update(lang + "123")
            checksum = hasher.digest()
            self.addSubtitle(SubtitleInfo(lang, None, checksum))
Example #10
    def setUp(self):
        self.keyfiles = [".node_a_keypair",".node_b_keypair",".torrent_keypair"]
        for filename in self.keyfiles:
            if not os.path.exists(filename):
                keypair = permid.generate_keypair()
                permid.save_keypair(keypair, filename)
                
        self.node_a_keypair = permid.read_keypair(".node_a_keypair")
        self.node_b_keypair = permid.read_keypair(".node_b_keypair")
        self.torrent_keypair = permid.read_keypair(".torrent_keypair")

        self.torrent_id = "1234"

        # Shortcuts
        self.node_a_pub_permid = str(self.node_a_keypair.pub().get_der())
        self.node_b_pub_permid = str(self.node_b_keypair.pub().get_der())
        self.torrent_pubkeys = [encodestring(str(self.torrent_keypair.pub().get_der())).replace("\n","")]
        
        # Create the certificate for this torrent ("proof of access")
        self.poa_a = ClosedSwarm.create_poa(self.torrent_id,
                                            self.torrent_keypair,
                                            self.node_a_pub_permid)

        self.poa_b = ClosedSwarm.create_poa(self.torrent_id,
                                            self.torrent_keypair,
                                            self.node_b_pub_permid)
        
        self.cs_a = ClosedSwarm.ClosedSwarm(self.node_a_keypair,
                                            self.torrent_id,
                                            self.torrent_pubkeys,
                                            self.poa_a)
        
        self.cs_b = ClosedSwarm.ClosedSwarm(self.node_b_keypair,
                                            self.torrent_id,
                                            self.torrent_pubkeys,
                                            self.poa_b)
    def setUpPostSession(self):
        """ override TestAsServer """
        TestAsServer.setUpPostSession(self)

        self.mypermid = str(self.my_keypair.pub().get_der())
        self.hispermid = str(self.his_keypair.pub().get_der())

        self.another_keypair = generate_keypair()
        self.anotherpermid = str(self.another_keypair.pub().get_der())

        self.testInfohash = hashlib.sha1("yoman!").digest()

        #copy subtitles into the collecting dir
        nldName = SubUtils.getSubtitleFileRelativeName(self.anotherpermid,
                                                       self.testInfohash,
                                                       "nld")
        engName = SubUtils.getSubtitleFileRelativeName(self.anotherpermid,
                                                       self.testInfohash,
                                                       "eng")

        self.sub1 = os.path.join(self.collecting_dir, nldName)
        self.sub2 = os.path.join(self.collecting_dir, engName)

        shutil.copyfile(self.src1, self.sub1)
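The fixture above relies on the collecting-dir naming convention: SubUtils.getSubtitleFileRelativeName(permid, infohash, lang) produces a file name encoding publisher, torrent and language, which is then joined onto the collecting directory. A hedged sketch reusing the names from the fixture (the directory path is a placeholder):

publisher_keypair = generate_keypair()
publisher_permid = str(publisher_keypair.pub().get_der())
infohash = hashlib.sha1("yoman!").digest()

rel_name = SubUtils.getSubtitleFileRelativeName(publisher_permid, infohash, "nld")
subtitle_path = os.path.join("/tmp/collecting_dir", rel_name)  # placeholder directory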
Example #12
    def __init__(self,scfg=None,ignore_singleton=False):
        """
        A Session object is created which is configured following a copy of the
        SessionStartupConfig scfg. (copy constructor used internally)
        
        @param scfg SessionStartupConfig object or None, in which case we
        look for a saved session in the default location (state dir). If
        we can't find it, we create a new SessionStartupConfig() object to 
        serve as startup config. Next, the config is saved in the directory
        indicated by its 'state_dir' attribute.
        
        In the current implementation only a single session instance can exist
        at a time in a process. The ignore_singleton flag is used for testing.
        """
        
        # ProxyService 90s Test_
#        self.start_time = time.time()
        # _ProxyService 90s Test
        
        if not ignore_singleton:
            if Session.__single:
                raise RuntimeError, "Session is singleton"
            Session.__single = self
        
        self.sesslock = NoDispersyRLock()

        # Determine startup config to use
        if scfg is None: # If no override
            try:
                # Then try to read from default location
                state_dir = Session.get_default_state_dir()
                cfgfilename = Session.get_default_config_filename(state_dir)
                scfg = SessionStartupConfig.load(cfgfilename)
            except:
                # If that fails, create a fresh config with factory defaults
                print_exc()
                scfg = SessionStartupConfig()
            self.sessconfig = scfg.sessconfig
        else: # overrides any saved config
            # Work from copy
            self.sessconfig = copy.copy(scfg.sessconfig)
            
        #Niels: 11/05/2012, turning off overlay
        self.sessconfig['overlay'] = 0
        self.sessconfig['crawler'] = 0
        
        # Create dir for session state, if not exist    
        state_dir = self.sessconfig['state_dir']
        if state_dir is None:
            state_dir = Session.get_default_state_dir()
            self.sessconfig['state_dir'] = state_dir
            
        if not os.path.isdir(state_dir):
            os.makedirs(state_dir)

        collected_torrent_dir = self.sessconfig['torrent_collecting_dir']
        if not collected_torrent_dir:
            collected_torrent_dir = os.path.join(self.sessconfig['state_dir'], STATEDIR_TORRENTCOLL_DIR)
            self.sessconfig['torrent_collecting_dir'] = collected_torrent_dir
            
        collected_subtitles_dir = self.sessconfig.get('subtitles_collecting_dir',None)
        if not collected_subtitles_dir:
            collected_subtitles_dir = os.path.join(self.sessconfig['state_dir'], STATEDIR_SUBSCOLL_DIR)
            self.sessconfig['subtitles_collecting_dir'] = collected_subtitles_dir
            
        if not os.path.exists(collected_torrent_dir):
            os.makedirs(collected_torrent_dir)
            
        if not self.sessconfig['peer_icon_path']:
            self.sessconfig['peer_icon_path'] = os.path.join(self.sessconfig['state_dir'], STATEDIR_PEERICON_DIR)
            
        # PERHAPS: load default TorrentDef and DownloadStartupConfig from state dir
        # Let user handle that, he's got default_state_dir, etc.

        # Core init
        #print >>sys.stderr,'Session: __init__ config is', self.sessconfig

        if GOTM2CRYPTO:
            permidmod.init()

            #
            # Set params that depend on state_dir
            #
            # 1. keypair
            #
            pairfilename = os.path.join(self.sessconfig['state_dir'],'ec.pem')
            if self.sessconfig['eckeypairfilename'] is None:
                self.sessconfig['eckeypairfilename'] = pairfilename
            
            if os.access(self.sessconfig['eckeypairfilename'],os.F_OK):
                # May throw exceptions
                self.keypair = permidmod.read_keypair(self.sessconfig['eckeypairfilename'])
            else:
                self.keypair = permidmod.generate_keypair()

                # Save keypair
                pubfilename = os.path.join(self.sessconfig['state_dir'],'ecpub.pem')
                permidmod.save_keypair(self.keypair,pairfilename)
                permidmod.save_pub_key(self.keypair,pubfilename)
        
        # 2. Downloads persistent state dir
        dlpstatedir = os.path.join(self.sessconfig['state_dir'],STATEDIR_DLPSTATE_DIR)
        if not os.path.isdir(dlpstatedir):
            os.mkdir(dlpstatedir)
        
        # 3. tracker
        trackerdir = self.get_internal_tracker_dir()
        if not os.path.exists(trackerdir):
            os.mkdir(trackerdir)

        if self.sessconfig['tracker_dfile'] is None:
            self.sessconfig['tracker_dfile'] = os.path.join(trackerdir,'tracker.db')    

        if self.sessconfig['tracker_allowed_dir'] is None:
            self.sessconfig['tracker_allowed_dir'] = trackerdir    
        
        if self.sessconfig['tracker_logfile'] is None:
            if sys.platform == "win32":
                # Not "Nul:" but "nul" is /dev/null on Win32
                sink = 'nul'
            else:
                sink = '/dev/null'
            self.sessconfig['tracker_logfile'] = sink

        # 4. superpeer.txt and crawler.txt
        if self.sessconfig['superpeer_file'] is None:
            self.sessconfig['superpeer_file'] = os.path.join(self.sessconfig['install_dir'],LIBRARYNAME,'Core','superpeer.txt')
        if 'crawler_file' not in self.sessconfig or self.sessconfig['crawler_file'] is None:
            self.sessconfig['crawler_file'] = os.path.join(self.sessconfig['install_dir'], LIBRARYNAME,'Core','Statistics','crawler.txt')

        # 5. peer_icon_path
        if self.sessconfig['peer_icon_path'] is None:
            self.sessconfig['peer_icon_path'] = os.path.join(self.sessconfig['state_dir'],STATEDIR_PEERICON_DIR)
            if not os.path.isdir(self.sessconfig['peer_icon_path']):
                os.mkdir(self.sessconfig['peer_icon_path'])

        # 6. Poor man's versioning of SessionConfig, add missing
        # default values. Really should use PERSISTENTSTATE_CURRENTVERSION 
        # and do conversions.
        for key,defvalue in sessdefaults.iteritems():
            if key not in self.sessconfig:
                self.sessconfig[key] = defvalue

        # 7. proxyservice_dir
        if self.sessconfig['overlay']: #NIELS: proxyservice_on/off is set at runtime, always make sure proxyservice_ and self.sessconfig['proxyservice_status'] == PROXYSERVICE_ON:
            if self.sessconfig['proxyservice_dir'] is None:
                self.sessconfig['proxyservice_dir'] = os.path.join(get_default_dest_dir(), PROXYSERVICE_DESTDIR)
            # Jelle: under linux, default_dest_dir can be /tmp. Then proxyservice_dir can be deleted in between
            # sessions.
            if not os.path.isdir(self.sessconfig['proxyservice_dir']):
                os.makedirs(self.sessconfig['proxyservice_dir'])

        if not 'live_aux_seeders' in self.sessconfig:
            # Poor man's versioning, really should update PERSISTENTSTATE_CURRENTVERSION
            self.sessconfig['live_aux_seeders'] = sessdefaults['live_aux_seeders']

        if not 'nat_detect' in self.sessconfig:
            self.sessconfig['nat_detect'] = sessdefaults['nat_detect']
        if not 'puncturing_internal_port' in self.sessconfig:
            self.sessconfig['puncturing_internal_port'] = sessdefaults['puncturing_internal_port']
        if not 'stun_servers' in self.sessconfig:
            self.sessconfig['stun_servers'] = sessdefaults['stun_servers']
        if not 'pingback_servers' in self.sessconfig:
            self.sessconfig['pingback_servers'] = sessdefaults['pingback_servers']
        if not 'mainline_dht' in self.sessconfig:
            self.sessconfig['mainline_dht'] = sessdefaults['mainline_dht']

        # SWIFTPROC
        if self.sessconfig['swiftpath'] is None:
            if sys.platform == "win32":
                self.sessconfig['swiftpath'] = os.path.join(self.sessconfig['install_dir'],"swift.exe")
            else:
                self.sessconfig['swiftpath'] = os.path.join(self.sessconfig['install_dir'],"swift")

        # Checkpoint startup config
        self.save_pstate_sessconfig()

        # Create handler for calling back the user via separate threads
        self.uch = UserCallbackHandler(self)

        # Create engine with network thread
        self.lm = TriblerLaunchMany()
        self.lm.register(self,self.sesslock)
        self.lm.start()
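A short sketch of how this constructor is typically invoked, derived only from the docstring and the singleton check above: passing scfg=None makes the Session load a saved config from the default state dir (or fall back to factory defaults), and constructing a second Session in the same process raises RuntimeError unless ignore_singleton is set, as the tests do.

scfg = SessionStartupConfig()                        # factory defaults
session = Session(scfg)                              # first, singleton instance
try:
    Session(scfg)                                    # a second instance violates the singleton
except RuntimeError:
    pass
test_session = Session(scfg, ignore_singleton=True)  # allowed, e.g. for testing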
Example #13
    def __init__(self,scfg=None,ignore_singleton=False):
        """
        A Session object is created which is configured following a copy of the
        SessionStartupConfig scfg. (copy constructor used internally)
        
        @param scfg SessionStartupConfig object or None, in which case we
        look for a saved session in the default location (state dir). If
        we can't find it, we create a new SessionStartupConfig() object to 
        serve as startup config. Next, the config is saved in the directory
        indicated by its 'state_dir' attribute.
        
        In the current implementation only a single session instance can exist
        at a time in a process. The ignore_singleton flag is used for testing.
        """
        if not ignore_singleton:
            if Session.__single:
                raise RuntimeError, "Session is singleton"
            Session.__single = self
        
        self.sesslock = RLock()

        # Determine startup config to use
        if scfg is None: # If no override
            try:
                # Then try to read from default location
                state_dir = Session.get_default_state_dir()
                cfgfilename = Session.get_default_config_filename(state_dir)
                scfg = SessionStartupConfig.load(cfgfilename)
            except:
                # If that fails, create a fresh config with factory defaults
                print_exc()
                scfg = SessionStartupConfig()
            self.sessconfig = scfg.sessconfig
        else: # overrides any saved config
            # Work from copy
            self.sessconfig = copy.copy(scfg.sessconfig)
        
        # Create dir for session state, if not exist    
        state_dir = self.sessconfig['state_dir']
        if state_dir is None:
            state_dir = Session.get_default_state_dir()
            self.sessconfig['state_dir'] = state_dir
            
        if not os.path.isdir(state_dir):
            os.makedirs(state_dir)

        collected_torrent_dir = self.sessconfig['torrent_collecting_dir']
        if not collected_torrent_dir:
            collected_torrent_dir = os.path.join(self.sessconfig['state_dir'], STATEDIR_TORRENTCOLL_DIR)
            self.sessconfig['torrent_collecting_dir'] = collected_torrent_dir
            
        if not os.path.exists(collected_torrent_dir):
            os.makedirs(collected_torrent_dir)
            
        if not self.sessconfig['peer_icon_path']:
            self.sessconfig['peer_icon_path'] = os.path.join(self.sessconfig['state_dir'], STATEDIR_PEERICON_DIR)
            
        # PERHAPS: load default TorrentDef and DownloadStartupConfig from state dir
        # Let user handle that, he's got default_state_dir, etc.

        # Core init
        #print >>sys.stderr,time.asctime(),'-', 'Session: __init__ config is', self.sessconfig

        if GOTM2CRYPTO:
            permidmod.init()

            #
            # Set params that depend on state_dir
            #
            # 1. keypair
            #
            pairfilename = os.path.join(self.sessconfig['state_dir'],'ec.pem')
            if self.sessconfig['eckeypairfilename'] is None:
                self.sessconfig['eckeypairfilename'] = pairfilename
            
            if os.access(self.sessconfig['eckeypairfilename'],os.F_OK):
                # May throw exceptions
                self.keypair = permidmod.read_keypair(self.sessconfig['eckeypairfilename'])
            else:
                self.keypair = permidmod.generate_keypair()

                # Save keypair
                pubfilename = os.path.join(self.sessconfig['state_dir'],'ecpub.pem')
                permidmod.save_keypair(self.keypair,pairfilename)
                permidmod.save_pub_key(self.keypair,pubfilename)
        
        # 2. Downloads persistent state dir
        dlpstatedir = os.path.join(self.sessconfig['state_dir'],STATEDIR_DLPSTATE_DIR)
        if not os.path.isdir(dlpstatedir):
            os.mkdir(dlpstatedir)
        
        # 3. tracker
        trackerdir = self.get_internal_tracker_dir()
        if not os.path.isdir(trackerdir):
            os.mkdir(trackerdir)

        if self.sessconfig['tracker_dfile'] is None:
            self.sessconfig['tracker_dfile'] = os.path.join(trackerdir,'tracker.db')    

        if self.sessconfig['tracker_allowed_dir'] is None:
            self.sessconfig['tracker_allowed_dir'] = trackerdir    
        
        if self.sessconfig['tracker_logfile'] is None:
            if sys.platform == "win32":
                # Not "Nul:" but "nul" is /dev/null on Win32
                sink = 'nul'
            else:
                sink = '/dev/null'
            self.sessconfig['tracker_logfile'] = sink

        # 4. superpeer.txt and crawler.txt
        if self.sessconfig['superpeer_file'] is None:
            self.sessconfig['superpeer_file'] = os.path.join(self.sessconfig['install_dir'],LIBRARYNAME,'Core','superpeer.txt')
        if 'crawler_file' not in self.sessconfig or self.sessconfig['crawler_file'] is None:
            self.sessconfig['crawler_file'] = os.path.join(self.sessconfig['install_dir'], LIBRARYNAME,'Core','Statistics','crawler.txt')

        # 5. download_help_dir
        if self.sessconfig['overlay'] and self.sessconfig['download_help']:
            if self.sessconfig['download_help_dir'] is None:
                self.sessconfig['download_help_dir'] = os.path.join(get_default_dest_dir(),DESTDIR_COOPDOWNLOAD)
            # Jelle: under linux, default_dest_dir can be /tmp. Then download_help_dir can be deleted in between
            # sessions.
            if not os.path.isdir(self.sessconfig['download_help_dir']):
                os.makedirs(self.sessconfig['download_help_dir'])

        # 6. peer_icon_path
        if self.sessconfig['peer_icon_path'] is None:
            self.sessconfig['peer_icon_path'] = os.path.join(self.sessconfig['state_dir'],STATEDIR_PEERICON_DIR)
            if not os.path.isdir(self.sessconfig['peer_icon_path']):
                os.mkdir(self.sessconfig['peer_icon_path'])

        # 7. Poor man's versioning of SessionConfig, add missing
        # default values. Really should use PERSISTENTSTATE_CURRENTVERSION 
        # and do conversions.
        for key,defvalue in sessdefaults.iteritems():
            if key not in self.sessconfig:
                self.sessconfig[key] = defvalue

        if not 'live_aux_seeders' in self.sessconfig:
            # Poor man's versioning, really should update PERSISTENTSTATE_CURRENTVERSION
            self.sessconfig['live_aux_seeders'] = sessdefaults['live_aux_seeders']

        if not 'nat_detect' in self.sessconfig:
            self.sessconfig['nat_detect'] = sessdefaults['nat_detect']
        if not 'puncturing_internal_port' in self.sessconfig:
            self.sessconfig['puncturing_internal_port'] = sessdefaults['puncturing_internal_port']
        if not 'stun_servers' in self.sessconfig:
            self.sessconfig['stun_servers'] = sessdefaults['stun_servers']
        if not 'pingback_servers' in self.sessconfig:
            self.sessconfig['pingback_servers'] = sessdefaults['pingback_servers']
        if not 'mainline_dht' in self.sessconfig:
            self.sessconfig['mainline_dht'] = sessdefaults['mainline_dht']
            
        # Checkpoint startup config
        self.save_pstate_sessconfig()

        # Create handler for calling back the user via separate threads
        self.uch = UserCallbackHandler(self)

        # Create engine with network thread
        self.lm = TriblerLaunchMany()
        self.lm.register(self,self.sesslock)
        self.lm.start()
from __future__ import with_statement
from Tribler.Core.Overlay.permid import generate_keypair
from Tribler.Test.Core.Subtitles.simple_mocks import  \
    MockOverlayBridge, MockSubsMsgHander, MockMetadataDBHandler, MockSession
from Tribler.Core.Utilities.Crypto import sha
from Tribler.Core.Subtitles.MetadataDomainObjects.Languages import LanguagesProvider
import logging
import os
import unittest
import codecs
from Tribler.Core.Subtitles.SubtitlesHandler import SubtitlesHandler,\
    getSubtitleFileRelativeName
from Tribler.Core.Overlay.SecureOverlay import OLPROTO_VER_FOURTEENTH

logging.basicConfig(level=logging.DEBUG)
_keypairs = (generate_keypair(), generate_keypair(), generate_keypair())
testChannelId = str(_keypairs[0].pub().get_der())
testDestPermId = str(_keypairs[1].pub().get_der())
testMyPermId = str(_keypairs[2].pub().get_der())

testInfohash = sha("yoman!").digest()
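# Note on the fixtures above: each PermID used in these tests is simply the DER
# encoding of an EC public key (str(keypair.pub().get_der())), and the test
# infohash is a 20-byte SHA-1 digest, matching the size of real torrent infohashes.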

RES_DIR = os.path.join('..', '..', 'subtitles_test_res')


class TestSubtitlesHandler(unittest.TestCase):
    def setUp(self):

        self._session = MockSession()
        self.ol_bridge = MockOverlayBridge()
        self.rmdDBHandler = MockMetadataDBHandler()
import unittest
import logging
import time
from struct import pack
from Tribler.Core.Subtitles.SubtitleHandler.SubsMessageHandler import SubsMessageHandler
from Tribler.Test.Core.Subtitles.simple_mocks import MockOverlayBridge, MockTokenBucket, MockMsgListener
from Tribler.Core.Overlay.permid import generate_keypair
from Tribler.Core.Utilities.Crypto import sha
from Tribler.Core.Subtitles.MetadataDomainObjects.Languages import LanguagesProvider
from Tribler.Core.MessageID import GET_SUBS, SUBS
from Tribler.Core.Utilities.bencode import bencode, bdecode
from Tribler.Core.Overlay.SecureOverlay import OLPROTO_VER_FOURTEENTH


logging.basicConfig(level=logging.DEBUG)
_keypairs = (generate_keypair(), generate_keypair(), generate_keypair())
testChannelId = str(_keypairs[0].pub().get_der())
testDestPermId = str(_keypairs[1].pub().get_der())
testMyPermId = str(_keypairs[2].pub().get_der())

testInfohash = sha("yoman!").digest()

class TestSubtitlesMsgHandlerIsolation(unittest.TestCase):
    
    def setUp(self):
        self.ol_bridge = MockOverlayBridge()
        self.tokenBucket = MockTokenBucket()
        self.underTest = SubsMessageHandler(self.ol_bridge,self.tokenBucket,1000000)
        
    def test_addToRequestedSubtitles(self):
        langUtil = LanguagesProvider.getLanguagesInstance()
Example #16
    def __init__(self, scfg=None, ignore_singleton=False):
        """
        A Session object is created which is configured following a copy of the
        SessionStartupConfig scfg. (copy constructor used internally)
        
        @param scfg SessionStartupConfig object or None, in which case we
        look for a saved session in the default location (state dir). If
        we can't find it, we create a new SessionStartupConfig() object to 
        serve as startup config. Next, the config is saved in the directory
        indicated by its 'state_dir' attribute.
        
        In the current implementation only a single session instance can exist
        at a time in a process. The ignore_singleton flag is used for testing.
        """

        # ProxyService 90s Test_
        #        self.start_time = time.time()
        # _ProxyService 90s Test

        if not ignore_singleton:
            if Session.__single:
                raise RuntimeError, "Session is singleton"
            Session.__single = self

        self.sesslock = RLock()

        # Determine startup config to use
        if scfg is None:  # If no override
            try:
                # Then try to read from default location
                state_dir = Session.get_default_state_dir()
                cfgfilename = Session.get_default_config_filename(state_dir)
                scfg = SessionStartupConfig.load(cfgfilename)
            except:
                # If that fails, create a fresh config with factory defaults
                print_exc()
                scfg = SessionStartupConfig()
            self.sessconfig = scfg.sessconfig
        else:  # overrides any saved config
            # Work from copy
            self.sessconfig = copy.copy(scfg.sessconfig)

        # Create dir for session state, if not exist
        state_dir = self.sessconfig["state_dir"]
        if state_dir is None:
            state_dir = Session.get_default_state_dir()
            self.sessconfig["state_dir"] = state_dir

        if not os.path.isdir(state_dir):
            os.makedirs(state_dir)

        collected_torrent_dir = self.sessconfig["torrent_collecting_dir"]
        if not collected_torrent_dir:
            collected_torrent_dir = os.path.join(self.sessconfig["state_dir"], STATEDIR_TORRENTCOLL_DIR)
            self.sessconfig["torrent_collecting_dir"] = collected_torrent_dir

        collected_subtitles_dir = self.sessconfig.get("subtitles_collecting_dir", None)
        if not collected_subtitles_dir:
            collected_subtitles_dir = os.path.join(self.sessconfig["state_dir"], STATEDIR_SUBSCOLL_DIR)
            self.sessconfig["subtitles_collecting_dir"] = collected_subtitles_dir

        if not os.path.exists(collected_torrent_dir):
            os.makedirs(collected_torrent_dir)

        if not self.sessconfig["peer_icon_path"]:
            self.sessconfig["peer_icon_path"] = os.path.join(self.sessconfig["state_dir"], STATEDIR_PEERICON_DIR)

        # PERHAPS: load default TorrentDef and DownloadStartupConfig from state dir
        # Let user handle that, he's got default_state_dir, etc.

        # Core init
        # print >>sys.stderr,'Session: __init__ config is', self.sessconfig

        if GOTM2CRYPTO:
            permidmod.init()

            #
            # Set params that depend on state_dir
            #
            # 1. keypair
            #
            pairfilename = os.path.join(self.sessconfig["state_dir"], "ec.pem")
            if self.sessconfig["eckeypairfilename"] is None:
                self.sessconfig["eckeypairfilename"] = pairfilename

            if os.access(self.sessconfig["eckeypairfilename"], os.F_OK):
                # May throw exceptions
                self.keypair = permidmod.read_keypair(self.sessconfig["eckeypairfilename"])
            else:
                self.keypair = permidmod.generate_keypair()

                # Save keypair
                pubfilename = os.path.join(self.sessconfig["state_dir"], "ecpub.pem")
                permidmod.save_keypair(self.keypair, pairfilename)
                permidmod.save_pub_key(self.keypair, pubfilename)

        # 2. Downloads persistent state dir
        dlpstatedir = os.path.join(self.sessconfig["state_dir"], STATEDIR_DLPSTATE_DIR)
        if not os.path.isdir(dlpstatedir):
            os.mkdir(dlpstatedir)

        # 3. tracker
        trackerdir = self.get_internal_tracker_dir()
        if not os.path.isdir(trackerdir):
            os.mkdir(trackerdir)

        if self.sessconfig["tracker_dfile"] is None:
            self.sessconfig["tracker_dfile"] = os.path.join(trackerdir, "tracker.db")

        if self.sessconfig["tracker_allowed_dir"] is None:
            self.sessconfig["tracker_allowed_dir"] = trackerdir

        if self.sessconfig["tracker_logfile"] is None:
            if sys.platform == "win32":
                # Not "Nul:" but "nul" is /dev/null on Win32
                sink = "nul"
            else:
                sink = "/dev/null"
            self.sessconfig["tracker_logfile"] = sink

        # 4. superpeer.txt and crawler.txt
        if self.sessconfig["superpeer_file"] is None:
            self.sessconfig["superpeer_file"] = os.path.join(
                self.sessconfig["install_dir"], LIBRARYNAME, "Core", "superpeer.txt"
            )
        if "crawler_file" not in self.sessconfig or self.sessconfig["crawler_file"] is None:
            self.sessconfig["crawler_file"] = os.path.join(
                self.sessconfig["install_dir"], LIBRARYNAME, "Core", "Statistics", "crawler.txt"
            )

        # 5. peer_icon_path
        if self.sessconfig["peer_icon_path"] is None:
            self.sessconfig["peer_icon_path"] = os.path.join(self.sessconfig["state_dir"], STATEDIR_PEERICON_DIR)
            if not os.path.isdir(self.sessconfig["peer_icon_path"]):
                os.mkdir(self.sessconfig["peer_icon_path"])

        # 6. Poor man's versioning of SessionConfig, add missing
        # default values. Really should use PERSISTENTSTATE_CURRENTVERSION
        # and do conversions.
        for key, defvalue in sessdefaults.iteritems():
            if key not in self.sessconfig:
                self.sessconfig[key] = defvalue

        # 7. proxyservice_dir
        if self.sessconfig[
            "overlay"
        ]:  # NIELS: proxyservice_on/off is set at runtime, always make sure proxyservice_ and self.sessconfig['proxyservice_status'] == PROXYSERVICE_ON:
            if self.sessconfig["proxyservice_dir"] is None:
                self.sessconfig["proxyservice_dir"] = os.path.join(get_default_dest_dir(), PROXYSERVICE_DESTDIR)
            # Jelle: under linux, default_dest_dir can be /tmp. Then proxyservice_dir can be deleted in between
            # sessions.
            if not os.path.isdir(self.sessconfig["proxyservice_dir"]):
                os.makedirs(self.sessconfig["proxyservice_dir"])

        if not "live_aux_seeders" in self.sessconfig:
            # Poor man's versioning, really should update PERSISTENTSTATE_CURRENTVERSION
            self.sessconfig["live_aux_seeders"] = sessdefaults["live_aux_seeders"]

        if not "nat_detect" in self.sessconfig:
            self.sessconfig["nat_detect"] = sessdefaults["nat_detect"]
        if not "puncturing_internal_port" in self.sessconfig:
            self.sessconfig["puncturing_internal_port"] = sessdefaults["puncturing_internal_port"]
        if not "stun_servers" in self.sessconfig:
            self.sessconfig["stun_servers"] = sessdefaults["stun_servers"]
        if not "pingback_servers" in self.sessconfig:
            self.sessconfig["pingback_servers"] = sessdefaults["pingback_servers"]
        if not "mainline_dht" in self.sessconfig:
            self.sessconfig["mainline_dht"] = sessdefaults["mainline_dht"]

        # SWIFTPROC
        if self.sessconfig["swiftpath"] is None:
            if sys.platform == "win32":
                self.sessconfig["swiftpath"] = os.path.join(self.sessconfig["install_dir"], "swift.exe")
            else:
                self.sessconfig["swiftpath"] = os.path.join(self.sessconfig["install_dir"], "swift")

        # Checkpoint startup config
        self.save_pstate_sessconfig()

        # Create handler for calling back the user via separate threads
        self.uch = UserCallbackHandler(self)

        # Create engine with network thread
        self.lm = TriblerLaunchMany()
        self.lm.register(self, self.sesslock)
        self.lm.start()
Example #17
class TestMetadataDBHandler(unittest.TestCase):
    _keypair1 = generate_keypair()
    aPermId = str(_keypair1.pub().get_der())
    _keypair2 = generate_keypair()
    anotherPermId = str(_keypair2.pub().get_der())

    def setUp(self):
        #createDB = not os.path.isfile(SQL_DB)
        self.db = SimpleMetadataDB(CREATE_SQL_FILE, SQL_DB)
        self.underTest = MetadataDBHandler(self.db)

    def tearDown(self):
        self.db.close()
        #if os.path.isfile(SQL_DB) :
            #os.remove(SQL_DB)



    def testInitHandler(self):
        self.assertTrue(self.underTest is not None)

    def testSingleton(self):

        instance1 = MetadataDBHandler.getInstance()
        instance2 = MetadataDBHandler.getInstance()
        self.assertTrue(instance1 is instance2)

    def testInsertNewMetadataSubs(self):
        metadataDTO = MockMetadataDTO(["nld","ita"])
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        testquery = "SELECT * FROM Metadata WHERE publisher_id=?" \
            + " AND infohash=?;"
        results = self.db.fetchall(testquery, (bin2str(metadataDTO.channel),bin2str(metadataDTO.infohash)))

        self.assertTrue(len(results) == 1)
        tupl = results[0]
        self.assertTrue(tupl[0] is not None and isinstance(tupl[0], int))
        self.assertEquals(bin2str(metadataDTO.channel),tupl[1])
        self.assertEquals(bin2str(metadataDTO.infohash),tupl[2])
        self.assertEquals(metadataDTO.description, tupl[3])
        self.assertEquals(metadataDTO.timestamp, tupl[4])
        self.assertEquals(bin2str(metadataDTO.signature), tupl[5])

        subtitlesQuery = "SELECT * FROM Subtitles WHERE metadata_id_fk=?;"

        subtitles = self.db.fetchall(subtitlesQuery, (tupl[0],))
        self.assertEquals(2,len(subtitles))

        for lang in ("ita", "nld"):
            found = False
            foundSub = None
            for subtuple in subtitles:
                if subtuple[1] == lang:
                    found = True
                    foundSub = subtuple
                    break

            self.assertTrue(found)
            self.assertEquals(bin2str(metadataDTO.getSubtitle(lang).checksum), foundSub[3])


    def testGetMetadataInstance(self):
        metadataDTO = MockMetadataDTO(["nld","ita"])
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        retrievedMetadata = self.underTest.getMetadata(metadataDTO.channel,
                                                       metadataDTO.infohash)

        self.assertFalse(retrievedMetadata is None)
        self.assertFalse(retrievedMetadata is metadataDTO)
        self.assertEquals(metadataDTO,retrievedMetadata)




    def testInsertNewMetadataNoSubs(self):
        metadataDTO = MockMetadataDTO([])
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        testquery = "SELECT * FROM Metadata WHERE publisher_id=?" \
            + " AND infohash=?;"

        channel = bin2str(metadataDTO.channel)
        infohash = bin2str(metadataDTO.infohash)
        results = self.db.fetchall(testquery, (channel,infohash))

        self.assertTrue(len(results) == 1)
        tupl = results[0]
        self.assertTrue(tupl[0] is not None and isinstance(tupl[0], int))
        self.assertEquals(channel,tupl[1])
        self.assertEquals(infohash,tupl[2])
        self.assertEquals(metadataDTO.description, tupl[3])
        self.assertEquals(metadataDTO.timestamp, tupl[4])
        self.assertEquals(bin2str(metadataDTO.signature), tupl[5])

        subtitlesQuery = "SELECT * FROM Subtitles WHERE metadata_id_fk=?;"

        subtitles = self.db.fetchall(subtitlesQuery, (tupl[0],))
        self.assertEquals(0,len(subtitles))

    def testUpdateExistingWithOlder(self):


        metadataDTO = MockMetadataDTO(["nld", "ita"])
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        olderMetadataDTO = copy.copy(metadataDTO)
        olderMetadataDTO.timestamp = 1 #*really* old
        olderMetadataDTO.sign(olderMetadataDTO._keypair)

        self.underTest.insertMetadata(olderMetadataDTO)

        #assert that the older entry did not replace the newer one
        testquery = "SELECT * FROM Metadata WHERE publisher_id=?" \
            + " AND infohash=?;"
        channel = bin2str(metadataDTO.channel)
        infohash = bin2str(metadataDTO.infohash)
        results = self.db.fetchall(testquery, (channel,infohash))

        self.assertTrue(len(results) == 1)
        tupl = results[0]
        self.assertTrue(tupl[0] is not None and isinstance(tupl[0], int))
        self.assertEquals(channel,tupl[1])
        self.assertEquals(infohash,tupl[2])
        self.assertEquals(metadataDTO.description, tupl[3])
        self.assertEquals(metadataDTO.timestamp, tupl[4])
        self.assertEquals(bin2str(metadataDTO.signature), tupl[5])

        subtitlesQuery = "SELECT * FROM Subtitles WHERE metadata_id_fk=?;"

        subtitles = self.db.fetchall(subtitlesQuery, (tupl[0],))
        self.assertEquals(2,len(subtitles))

        for lang in ("ita", "nld"):
            found = False
            foundSub = None
            for subtuple in subtitles:
                if subtuple[1] == lang:
                    found = True
                    foundSub = subtuple
                    break

            self.assertTrue(found)
            self.assertEquals(bin2str(metadataDTO.getSubtitle(lang).checksum), foundSub[3])


    def testUpdateExistingWithNewerSameSub(self):
        metadataDTO = MockMetadataDTO(["nld", "ita"])
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        newerMetadataDTO = copy.copy(metadataDTO)
        newerMetadataDTO.description = u"I'm newer!"
        newerMetadataDTO.timestamp = newerMetadataDTO.timestamp +1 #newer
        newerMetadataDTO.sign(newerMetadataDTO._keypair)


        self.underTest.insertMetadata(newerMetadataDTO)

        #assert that the older entry has been replaced by the newer one
        testquery = "SELECT * FROM Metadata WHERE publisher_id=?" \
            + " AND infohash=?;"

        channel = bin2str(metadataDTO.channel)
        infohash = bin2str(metadataDTO.infohash)
        results = self.db.fetchall(testquery, (channel,infohash))

        self.assertTrue(len(results) == 1)
        tupl = results[0]
        self.assertTrue(tupl[0] is not None and isinstance(tupl[0], int))
        self.assertEquals(channel,tupl[1])
        self.assertEquals(infohash,tupl[2])
        self.assertEquals(newerMetadataDTO.description, tupl[3])
        self.assertEquals(newerMetadataDTO.timestamp, tupl[4])
        self.assertEquals(bin2str(newerMetadataDTO.signature), tupl[5])

        #check the subtitles against the old ones, since they are unchanged
        subtitlesQuery = "SELECT * FROM Subtitles WHERE metadata_id_fk=?;"

        subtitles = self.db.fetchall(subtitlesQuery, (tupl[0],))
        self.assertEquals(2,len(subtitles))

        for lang in ("ita", "nld"):
            found = False
            foundSub = None
            for subtuple in subtitles:
                if subtuple[1] == lang:
                    found = True
                    foundSub = subtuple
                    break

            self.assertTrue(found)
            self.assertEquals(bin2str(metadataDTO.getSubtitle(lang).checksum), foundSub[3])



    def testUpdateExistingWithNewerNewSubs(self):
        metadataDTO = MockMetadataDTO(["nld", "ita"])
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        newerMetadataDTO = MockMetadataDTO(["nld","ita","eng"])
        newerMetadataDTO.channel = metadataDTO.channel
        newerMetadataDTO.infohash = metadataDTO.infohash
        newerMetadataDTO._keypair = metadataDTO._keypair
        newerMetadataDTO.timestamp = metadataDTO.timestamp +1 #newer
        newerMetadataDTO.sign(newerMetadataDTO._keypair)


        self.underTest.insertMetadata(newerMetadataDTO)

        #assert that the older entry has been replaced by the newer one
        testquery = "SELECT * FROM Metadata WHERE publisher_id=?" \
            + " AND infohash=?;"

        channel = bin2str(metadataDTO.channel)
        infohash = bin2str(metadataDTO.infohash)
        results = self.db.fetchall(testquery, (channel,infohash))

        self.assertTrue(len(results) == 1)
        tupl = results[0]
        self.assertTrue(tupl[0] is not None and isinstance(tupl[0], int))
        self.assertEquals(channel,tupl[1])
        self.assertEquals(infohash,tupl[2])
        self.assertEquals(newerMetadataDTO.description, tupl[3])
        self.assertEquals(newerMetadataDTO.timestamp, tupl[4])
        self.assertEquals(bin2str(newerMetadataDTO.signature), tupl[5])

        subtitlesQuery = "SELECT * FROM Subtitles WHERE metadata_id_fk=?;"

        subtitles = self.db.fetchall(subtitlesQuery, (tupl[0],))
        self.assertEquals(3,len(subtitles))

        for lang in ("ita", "nld","eng"):
            found = False
            foundSub = None
            for subtuple in subtitles:
                if subtuple[1] == lang:
                    found = True
                    foundSub = subtuple
                    break

            self.assertTrue(found)
            self.assertEquals(bin2str(newerMetadataDTO.getSubtitle(lang).checksum), foundSub[3])

    def testUpdateExistingWithNewerSubsDeleted(self):
        metadataDTO = MockMetadataDTO(["nld", "ita"])
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        newerMetadataDTO = MockMetadataDTO(["nld","eng"])
        newerMetadataDTO.channel = metadataDTO.channel
        newerMetadataDTO.infohash = metadataDTO.infohash
        newerMetadataDTO._keypair = metadataDTO._keypair
        newerMetadataDTO.timestamp = metadataDTO.timestamp +1 #newer
        newerMetadataDTO.sign(newerMetadataDTO._keypair)


        self.underTest.insertMetadata(newerMetadataDTO)

        #assert that the older entry has been replaced by the newer one
        testquery = "SELECT * FROM Metadata WHERE publisher_id=?" \
            + " AND infohash=?;"
        channel = bin2str(metadataDTO.channel)
        infohash = bin2str(metadataDTO.infohash)
        results = self.db.fetchall(testquery, (channel,infohash))

        self.assertTrue(len(results) == 1)
        tupl = results[0]
        self.assertTrue(tupl[0] is not None and isinstance(tupl[0], int))
        self.assertEquals(channel,tupl[1])
        self.assertEquals(infohash,tupl[2])
        self.assertEquals(newerMetadataDTO.description, tupl[3])
        self.assertEquals(newerMetadataDTO.timestamp, tupl[4])
        self.assertEquals(bin2str(newerMetadataDTO.signature), tupl[5])

        subtitlesQuery = "SELECT * FROM Subtitles WHERE metadata_id_fk=?;"

        subtitles = self.db.fetchall(subtitlesQuery, (tupl[0],))
        self.assertEquals(2,len(subtitles))

        for lang in ("nld","eng"):
            found = False
            foundSub = None
            for subtuple in subtitles:
                if subtuple[1] == lang:
                    found = True
                    foundSub = subtuple
                    break

            self.assertTrue(found)
            self.assertEquals(bin2str(newerMetadataDTO.getSubtitle(lang).checksum), foundSub[3])

    def testGetAllMetadataForInfohashEmpty(self):
        metadataDTO = MockMetadataDTO(["nld", "ita"])
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)


        otherinfohash = _generateFakeInfohash()

        results = self.underTest.getAllMetadataForInfohash(otherinfohash)
        self.assertTrue(len(results)==0)

    def testGetAllMetadataForInfohashNotEmpty(self):
        infohash = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["nld", "ita"],infohash)
        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        #different channels since the channel is automatically
        #generated by MockMetadataDTO
        metadataDTO2 = MockMetadataDTO(["rus", "eng"],infohash)
        metadataDTO2.sign(metadataDTO2._keypair)
        self.underTest.insertMetadata(metadataDTO2)

        #a 3rd instance with different channel and infohash
        metadataDTO3 = MockMetadataDTO(["rus", "spa", "jpn"])
        metadataDTO3.sign(metadataDTO3._keypair)
        self.underTest.insertMetadata(metadataDTO3)

        results = self.underTest.getAllMetadataForInfohash(infohash)
        self.assertTrue(len(results)==2)

        #'in' checks for value equality, not reference equality
        self.assertTrue(metadataDTO1 in results)
        self.assertTrue(metadataDTO2 in results)
        self.assertFalse(metadataDTO3 in results)



    def testDeleteSubtitle(self):
        infohash = _generateFakeInfohash()
        metadataDTO = MockMetadataDTO(["eng","kor"], infohash)

        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        res = self.underTest.getAllSubtitles(metadataDTO.channel, infohash)
        self.assertTrue("eng" in res and "kor" in res)

        #delete a subtitle that does not exist
        self.underTest._deleteSubtitleByChannel(metadataDTO.channel, infohash, "ita")
        res = self.underTest.getAllSubtitles(metadataDTO.channel, infohash)
        self.assertTrue("eng" in res and "kor" in res)

        self.underTest._deleteSubtitleByChannel(metadataDTO.channel, infohash, "eng")
        res = self.underTest.getAllSubtitles(metadataDTO.channel, infohash)
        self.assertTrue("kor" in res and not "eng" in res)


    def testSelectLocalSubtitles(self):

        infohash1 = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["eng","kor"], infohash1)

        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        res = self.underTest.getAllLocalSubtitles()

        self.assertTrue(len(res) == 0)

        infohash2 = _generateFakeInfohash()
        metadataDTO2 = MockMetadataDTO(["nld","spa"], infohash2)

        metadataDTO2.getSubtitle("nld").path = "/bla/bla"

        metadataDTO2.sign(metadataDTO2._keypair)
        self.underTest.insertMetadata(metadataDTO2)

        res = self.underTest.getAllLocalSubtitles()

        self.assertTrue(len(res) == 1)

        self.assertTrue(metadataDTO2.channel in res)

        self.assertTrue(infohash2 in res[metadataDTO2.channel])
        self.assertEquals(1, len(res[metadataDTO2.channel][infohash2]))

        self.assertEquals(metadataDTO2.getSubtitle("nld"), res[metadataDTO2.channel][infohash2][0])

    def testSelectLocalSubtitles2(self):
        infohash1 = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["eng","kor", "nld"], infohash1)

        metadataDTO1.getSubtitle("nld").path = "/bla/bla"
        metadataDTO1.getSubtitle("eng").path = "/bla/bla"
        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        infohash2 = _generateFakeInfohash()
        metadataDTO2 = MockMetadataDTO(["ita","spa"], infohash2)
        metadataDTO2.getSubtitle("ita").path = "/a/b"
        metadataDTO2.getSubtitle("spa").path = "/c/d"
        metadataDTO2.sign(metadataDTO2._keypair)
        self.underTest.insertMetadata(metadataDTO2)


        res = self.underTest.getLocalSubtitles(metadataDTO1.channel, infohash1)
        self.assertEquals(2, len(res))

        self.assertTrue("eng" in res)
        self.assertEquals(metadataDTO1.getSubtitle("eng"), res["eng"])

        self.assertTrue("nld" in res)
        self.assertEquals(metadataDTO1.getSubtitle("nld"), res["nld"])

        self.assertFalse("kor" in res)

    def testUpdateSubtitlesWithNonePathValue(self):


        infohash1 = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["eng","kor"], infohash1)

        metadataDTO1.getSubtitle("eng").path = os.path.abspath(os.path.join("bla","bla"))
        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        sub = self.underTest.getSubtitle(metadataDTO1.channel, infohash1, "eng")
        self.assertEquals(os.path.abspath(os.path.join("bla","bla")), sub.path)

        self.underTest.updateSubtitlePath(metadataDTO1.channel, infohash1,
                                          "eng", None, True)

        sub = self.underTest.getSubtitle(metadataDTO1.channel, infohash1, "eng")
        self.assertEquals(None, sub.path)


    def testUpdateSubtitles(self):
        sub1path= os.path.join(RES_DIR,"fake0.srt")
        sub2path=os.path.join(RES_DIR,"fake1.srt")
        infohash = _generateFakeInfohash()
        metadataDTO = MockMetadataDTO([], infohash)
        sub1 = SubtitleInfo("ita", None, _computeSHA1(sub1path))
        sub2 = SubtitleInfo("eng",None,_computeSHA1(sub2path))

        metadataDTO.addSubtitle(sub1)
        metadataDTO.addSubtitle(sub2)
        metadataDTO.sign(metadataDTO._keypair)
        self.underTest.insertMetadata(metadataDTO)

        res1 = self.underTest.getSubtitle(metadataDTO.channel, infohash,"ita")
        self.assertEquals(sub1,res1)

        res2 = self.underTest.getSubtitle(metadataDTO.channel, infohash, "eng")
        self.assertEquals(sub2,res2)

        sub1bis = copy.copy(sub1)
        sub1bis.path = sub1path
        sub2bis = copy.copy(sub2)
        sub2bis.path = sub2path

        self.underTest.updateSubtitlePath(metadataDTO.channel, infohash,
                                      sub1bis.lang, sub1bis.path, False)
        self.underTest.updateSubtitlePath(metadataDTO.channel, infohash,
                                      sub2bis.lang, sub2bis.path , False)


        self.underTest.commit()

        #the paths should now be updated, since the changes were committed above
        res1 = self.underTest.getSubtitle(metadataDTO.channel, infohash,"ita")
        self.assertTrue(sub1== res1 and sub1.path != res1.path)
        self.assertTrue(sub1bis == res1 and sub1bis.path == res1.path)

        res2 = self.underTest.getSubtitle(metadataDTO.channel, infohash, "eng")
        self.assertTrue(sub2 == res2 and sub2.path != res2.path)
        self.assertTrue(sub2bis == res2 and sub2bis.path == res2.path)


    # 30-05-2010 Testing of the newly added table (SubtitlesHave) manipulation
    # methods.

    def testInsertAndGetHaveMask(self):


        infohash = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["nld","spa"], infohash)
        channel = metadataDTO1.channel

        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        peer_id = TestMetadataDBHandler.anotherPermId

        #inserting a negative mask has to be refused
        havemask = -1
        funcToTest =\
            lambda : self.underTest.insertHaveMask(channel, infohash,
                                                   peer_id, havemask)

        self.assertRaises(MetadataDBException, funcToTest)

        #also a bitmask must be smaller than 2**32
        havemask = 2**32

        funcToTest =\
            lambda : self.underTest.insertHaveMask(channel, infohash,
                                                   peer_id, havemask)

        self.assertRaises(MetadataDBException, funcToTest)


        #now it's time for a correct value
        havemask1=0x80000001
        self.underTest.insertHaveMask(channel, infohash, peer_id, havemask1)

        mask = self.underTest.getHaveMask(channel, infohash,peer_id)
        self.assertEqual(mask,havemask1)

        #duplicate insertions should raise an error
        havemask2=0xffffffff
        funcToTest = \
           lambda : self.underTest.insertHaveMask(channel, infohash,
                                                  peer_id, havemask2)

        self.assertRaises(MetadataDBException, funcToTest)

        #insertion for another peer should go fine
        self.underTest.insertHaveMask(channel, infohash, channel, havemask2)

        mask1 = self.underTest.getHaveMask(channel, infohash,peer_id)
        self.assertEqual(mask1,havemask1)
        mask2 = self.underTest.getHaveMask(channel, infohash,channel)
        self.assertEqual(mask2,havemask2)

        #getting a have mask for a nonexistent (channel, infohash) pair shall
        #return None
        mask1 = \
            self.underTest.getHaveMask(channel, _generateFakeInfohash(),peer_id)
        self.assertTrue(mask1 is None)

        #the same should happen when asking for a nonexistent peer_id
        mask1 = self.underTest.getHaveMask(channel, infohash,
                                           TestMetadataDBHandler.aPermId)
        self.assertTrue(mask1 is None)
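
    # Illustrative sketch (not part of the original tests): since a have mask
    # is just an unsigned 32-bit integer, the constants used above can be
    # composed from bit positions with plain shifts; e.g. bits 0 and 31 give
    # 0x80000001.
    @staticmethod
    def _exampleHaveMask(bit_positions):
        mask = 0
        for pos in bit_positions:
            assert 0 <= pos < 32, "a have mask must fit in 32 bits"
            mask |= 1 << pos
        return mask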

    def testUpdateHaveMask(self):
        infohash = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["nld","spa"], infohash)
        channel = metadataDTO1.channel

        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        peer_id = TestMetadataDBHandler.anotherPermId


        #adding a have mask to the db
        havemask1=0x80000001
        self.underTest.insertHaveMask(channel, infohash, peer_id, havemask1)

        mask = self.underTest.getHaveMask(channel, infohash,peer_id)
        self.assertEqual(mask,havemask1)

        #updating it to a different value
        new_havemask = 0x1111ffff
        self.underTest.updateHaveMask(channel, infohash, peer_id,
                                      new_havemask)
        mask = self.underTest.getHaveMask(channel, infohash,peer_id)
        self.assertEqual(mask,new_havemask)

        #trying to update a non-existing row should cause an error
        # -- currently this doesn't happen;
        # implementing this behaviour would slow down the db
        #funcToTest = \
        #    lambda: self.underTest.updateHaveMask(channel, infohash,
        #                                         channel, new_havemask)
        # self.assertRaises(MetadataDBException, funcToTest)


    def testDeleteHaveEntry(self):
        infohash = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["nld","spa"], infohash)
        channel = metadataDTO1.channel

        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        peer_id = TestMetadataDBHandler.anotherPermId


        #adding a have mask to the db
        havemask1=0x80000001
        self.underTest.insertHaveMask(channel, infohash, peer_id, havemask1)

        havemask2=0x02324123
        self.underTest.insertHaveMask(channel, infohash, channel, havemask2)

        self.underTest.deleteHaveEntry(channel, infohash, peer_id)

        mask = self.underTest.getHaveMask(channel, infohash, peer_id)
        self.assertTrue(mask is None)

        mask = self.underTest.getHaveMask(channel, infohash, channel)
        self.assertEquals(havemask2,mask)

        # deleting an entry that does not exist should leave
        # the db unchanged
        self.underTest.deleteHaveEntry(channel, infohash, peer_id)

        mask = self.underTest.getHaveMask(channel, infohash, channel)
        self.assertEquals(havemask2,mask)


    def testGetAllHaveEntries(self):

        infohash = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["nld","spa"], infohash)
        channel = metadataDTO1.channel

        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        peer_id = TestMetadataDBHandler.anotherPermId


        #adding a have mask to the db
        havemask1=0x80000001
        self.underTest.insertHaveMask(channel, infohash, peer_id, havemask1)

        time.sleep(1) # otherwise they would have the same timestamp
        havemask2=0x02324123
        self.underTest.insertHaveMask(channel, infohash, channel, havemask2)

        d = self.underTest.getHaveEntries(channel, infohash)

        #the second inserted havemask has to be returned first
        # since it is newer
        firstTuple = d[0]
        self.assertEquals(channel, firstTuple[0])
        self.assertEquals(havemask2,firstTuple[1])
        self.assertTrue(firstTuple[2] is not None)

        self.assertEquals(peer_id, d[1][0])
        self.assertEquals(havemask1,d[1][1])
        self.assertTrue(d[1][2] is not None)
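
        # getHaveEntries seems to return a list of (peer_id, havemask,
        # timestamp) tuples sorted by timestamp, newest first, which is what
        # the assertions above rely on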



    def testCleanUpAllHave(self):
        infohash1 = _generateFakeInfohash()
        metadataDTO1 = MockMetadataDTO(["nld","spa"], infohash1)
        channel1 = metadataDTO1.channel

        metadataDTO1.sign(metadataDTO1._keypair)
        self.underTest.insertMetadata(metadataDTO1)

        infohash2 = _generateFakeInfohash()
        metadataDTO2 = MockMetadataDTO(["nld","spa"], infohash2)
        channel2 = metadataDTO2.channel

        metadataDTO2.sign(metadataDTO2._keypair)
        self.underTest.insertMetadata(metadataDTO2)


        peer_id1 = TestMetadataDBHandler.anotherPermId
        peer_id2 = TestMetadataDBHandler.aPermId

        #inserting some data: 4 have masks for each of the two channels with
        # custom timestamps
        # older than 1275295300
        self.underTest.insertHaveMask(channel1, infohash1, channel1, 0x42, 1275295290)
        self.underTest.insertHaveMask(channel1, infohash1, peer_id1, 0x42, 1275295291)
        # newer than 1275295300
        self.underTest.insertHaveMask(channel1, infohash1, peer_id2, 0x42, 1275295300)
        self.underTest.insertHaveMask(channel1, infohash1, channel2, 0x42, 1275295301)


        # older than 1275295300
        self.underTest.insertHaveMask(channel2, infohash2, channel1, 0x42, 1275295290)
        self.underTest.insertHaveMask(channel2, infohash2, peer_id1, 0x42, 1275295291)

        # newer than 1275295300
        self.underTest.insertHaveMask(channel2, infohash2, peer_id2, 0x42, 1275295300)
        self.underTest.insertHaveMask(channel2, infohash2, channel2, 0x42, 1275295301)

        self.underTest.cleanupOldHave(1275295300)
        haveForEntry1 = self.underTest.getHaveEntries(channel1, infohash1)
        expectedList1 = [(channel2,0x42,1275295301), (peer_id2, 0x42, 1275295300),
                         (channel1, 0x42, 1275295290)]
        self.assertEquals(expectedList1, haveForEntry1)

        haveForEntry2 = self.underTest.getHaveEntries(channel2, infohash2)
        expectedList2 = [(channel2, 0x42, 1275295301),(peer_id2, 0x42, 1275295300)]
        self.assertEquals(expectedList2,haveForEntry2)
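
        # Note: judging by the expected lists above, cleanupOldHave drops have
        # entries strictly older than the given timestamp, except the entry
        # owned by the channel itself (channel1's own entry from 1275295290
        # survives, while peer_id1's equally old entries are removed)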
import unittest
from Tribler.Core.Subtitles.MetadataDomainObjects.MetadataDTO import MetadataDTO
import Tribler.Core.Subtitles.MetadataDomainObjects.MetadataDTO as MDUtil
from Tribler.Core.Overlay.permid import generate_keypair
from Tribler.Core.CacheDB.sqlitecachedb import bin2str
import time
from Tribler.Core.Utilities.bencode import bdecode
from Tribler.Core.Subtitles.MetadataDomainObjects.SubtitleInfo import SubtitleInfo
from Tribler.Core.Subtitles.MetadataDomainObjects.Languages import LanguagesProvider
from Tribler.Core.Utilities.utilities import str2bin
import os.path
from struct import pack

RES_DIR = os.path.join('..', '..', '..', 'subtitles_test_res')

test_keypair = generate_keypair()
test_perm_id = str(test_keypair.pub().get_der())


class TestMetadataDTO(unittest.TestCase):
    def setUp(self):
        self._srtSubs = {
            "eng": os.path.join(RES_DIR, "fake.srt"),
            "ita": os.path.join(RES_DIR, "fake1.srt"),
            "rus": os.path.join(RES_DIR, "fake2.srt")
        }

    def testMetadataDTOInit(self):
        badInfohash = str2bin("GEh/o8rtTLB1wZJzFcSZSS4u9qo=")
        dto = MetadataDTO(test_perm_id, badInfohash)
        self.assertFalse(dto is None)
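
        # str2bin/bin2str appear to convert between the base64 text form used
        # in the cache db and a raw binary string, so the decoded value above
        # is a syntactically valid (if meaningless) 20-byte infohash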