def __init__(self, username, password, url):
    """Construct the peer, then best-effort apply socialscan's network timeout.

    @param username: peer account name, forwarded to the f3ds base class.
    @param password: peer password, forwarded to the f3ds base class.
    @param url: peer RPC URL, forwarded to the f3ds base class.
    """
    super(Peer, self).__init__(username, password, url)
    # Override f3ds default network timeout if set in socialscan config.
    # Deliberately best-effort: if the socialscan config package is absent or
    # lacks core.network_timeout, keep the f3ds default. The original used a
    # bare "except:", which also swallowed KeyboardInterrupt/SystemExit;
    # narrowed to the two failures this code can actually produce.
    try:
        from socialscan.config import loadDefaultConfig
        self.timeout = loadDefaultConfig().core.network_timeout
    except (ImportError, AttributeError):
        pass
def setUpClass(cls):
    """Initialize an in-memory sqlite database and shared fixtures for the tests.

    Creates the schema from Base.metadata, a bound session, a sample SigInfo
    (timestamped now, microseconds trimmed to match DB precision), and a config
    whose owner is a throwaway test Peer.
    """
    cls.engine = create_engine('sqlite:///:memory:')
    Session = sessionmaker(bind=cls.engine)
    cls.session = Session()
    Base.metadata.create_all(cls.engine)
    now = trim_microseconds(datetime.utcnow())
    cls.siginfo = SigInfo('Test Model Scanner v1.0', 'Generic Signature 0.3', now)
    cls.config = loadDefaultConfig()
    # Fixed: original had an accidental duplicated assignment ("owner = owner = ...").
    owner = Peer('slmgrtest-owner', 'slmgrtest-owner', 'slmgrtest-owner')
    cls.config.owner = owner
def main():
    """
    Start the system and twisted reactor.

    Wires up, in order: config, DB session, local peer identity, the digest and
    scan-log background managers, the Twisted web resources other peers talk to,
    and finally the TCP listeners — then blocks in reactor.run().
    """
    from twisted.internet import reactor
    # Mirror log output to stdout for interactive runs.
    log.stdoutlog = True
    logger = log.Logger("Main")
    logger.log("Loading configuration")
    config = loadDefaultConfig()
    logger.log("Setting up DB")
    session, engine = setupDB(config.database.url)
    # The local peer identity is looked up by the configured name and attached
    # to the config so downstream components can reach it.
    owner = Peer.getByName(session, config.general.localpeer)
    config.owner = owner
    # start the digest manager
    logger.log("Start digest manager")
    digestmanager = DigestManager(config, session)
    digestmanager._initJobs()
    # start the scanlog manager
    logger.log("Start scan log manager")
    scanlogmanager = ScanLogManager(config, session)
    scanlogmanager._initJobs()
    # construct the sharing system that other peers can download from and talk to
    logger.log("Initialize sharing system")
    root = resource.Resource()
    root.putChild('RPC2', SocialScanRPCCommands(config, session))
    # add the local static content under shared/ that other peers can download digests and such from
    sharedir = os.path.realpath("data/shared/")
    #root.putChild('shared', File(sharedir))
    if not os.path.exists(sharedir):
        os.makedirs(sharedir)
    # dn(dn(sharedir)) climbs two levels from data/shared/ — presumably the
    # project root where url_malicious.html lives; TODO confirm layout.
    dn = os.path.dirname
    sourcedir = dn(dn(sharedir))
    shutil.copy2(os.path.join(sourcedir, 'url_malicious.html'), os.path.join(sourcedir, 'data', 'malicious.html'))
    #root.putChild('malicious', File("url_malicious.html"))
    logger.log("Initializing TCP listening")
    # RPC/web interface on the configured shared port...
    reactor.listenTCP(port=int(config.sharing.rpcport), interface=config.sharing.bindhost, factory=Site(root))
    # ...and the core control channel, loopback-only by design.
    endpoint = TCP4ServerEndpoint(reactor, int(config.scanning._core_port), interface="127.0.0.1")
    endpoint.listen(SocialScanCore(config, session, digestmanager, scanlogmanager))
    logger.log("Running reactor")
    reactor.run()
def set_hash_function(algorithm=''):
    """Choose a hashlib algorithm from those always supported. Default is sha256.

    @param algorithm: name of the algorithm ('md5', 'sha1', 'sha224', 'sha256',
        'sha384', 'sha512'). When empty, the name is read from the socialscan
        config (sethash.algorithm).
    @return: the hashlib constructor; sha256 when the name is unknown or the
        config cannot be read.
    """
    local_supported = {'md5': hashlib.md5,
                       'sha1': hashlib.sha1,
                       'sha224': hashlib.sha224,
                       'sha256': hashlib.sha256,
                       'sha384': hashlib.sha384,
                       'sha512': hashlib.sha512}
    if not algorithm:
        # Best-effort config lookup; original used a bare "except:" which hid
        # every failure — narrowed to the two this path can actually raise.
        try:
            from socialscan.config import loadDefaultConfig
            algorithm = loadDefaultConfig().sethash.algorithm
        except (ImportError, AttributeError):
            pass
    # Unknown (or still-empty) names fall back to the documented sha256 default.
    return local_supported.get(algorithm, hashlib.sha256)
def sqlite_to_mysql():
    """Copy every row of the socialscan tables from the local DB to the remote MySQL DB.

    Opens the local (config.database.url) and remote (coredev) sessions, then
    merges each row of each mapped class into the remote session and commits.
    """
    from socialscan.db import setupDB
    from socialscan.config import loadDefaultConfig
    from socialscan.model import Scan, ScanDigestFile, SocialRelationship
    from socialscan.model import QueuedRequest, SentScanRequest, Peer
    config = loadDefaultConfig()
    session_local, engine_local = setupDB(config.database.url)
    # HACK: credentials are hard-coded here; they belong in config/secret storage.
    coredev_url = '%s://%s:%s@%s:%s/%s' % ('mysql+pymysql', 'socialscanexp', 't@uc3u6e*h',
                                           '10.214.131.48', '3306', 'socialscanexp')
    config.database.coredev_url = coredev_url
    session_remote, engine_remote = setupDB(config.database.coredev_url)
    # Fixed: the original merged throwaway *instances* into the local session and
    # then called session.query() on an instance (SQLAlchemy queries take mapped
    # classes) and add_all()'d objects still bound to the local session. The
    # supported way to copy rows across sessions is Session.merge() per object.
    for model in [Scan, ScanDigestFile, SocialRelationship,
                  QueuedRequest, SentScanRequest, Peer]:
        for row in session_local.query(model).all():
            session_remote.merge(row)
    session_remote.commit()
        # NOTE(review): tail of a method whose "def" line is above this view —
        # indentation reconstructed assuming a flat statement sequence inside a
        # class method that receives `line`; confirm against the full file.
        self.stop()
        url = self.parseLine(line)
        # An empty/unparseable redirect line is skipped rather than connected.
        if not url:
            self.logger.log("URL empty, ignoring: %r" % url)
            return
        factory = CoreClientFactory(url, self.callback)
        self.endpoint.connect(factory)


def main(config):
    """
    Run the redirector. Uses Twisted's StandardIO to run L{Redirector} as squid expects.

    @param config: initialized config.
    @type config: L{AttributeConfig}
    """
    redirector = Redirector(config)
    # Hook the redirector's line protocol up to stdin/stdout for squid.
    stdio.StandardIO(redirector)
    # Reactor imported late so any earlier reactor installation is respected.
    from twisted.internet import reactor
    reactor.run()
    redirector.logger.log("SocialScan Redirector is exiting...\n")


if __name__ == '__main__':
    # Run relative to this script's directory so relative data paths resolve.
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    config = loadDefaultConfig()
    main(config)
def makepeers():
    """
    Generate a list of peers with random relationships, for testing

    Reads peer addresses from sys.argv[2:], assigns each a name, wipes and
    repopulates the Peer and SocialRelationship tables, and — when one of the
    addresses is this host — writes the assigned local name and sharing
    settings back into socialscan.config.
    """
    # Fixed seed so repeated runs shuffle names identically.
    random.seed(0)
    addresses = deque(sys.argv[2:])
    if not len(addresses):
        print makepeers.__doc__
        return
    # Prefer loopback as "our" address when present; otherwise detect the real IP.
    if "127.0.0.1" in addresses:
        ouraddress = "127.0.0.1"
    else:
        ouraddress = util.getIP()
    ourname = None
    peercount = len(addresses)
    names = """ bebo boidoc merodoc friabo bodoc sabo sobo merido merom sigrin bidoc budoc sodo froigrin merem bado subo sabo budo perigo budoc driam bigrin sebo bebo bubo sido bigo bedoc bebo bugo perubo""".split()[:peercount]
    # NOTE(review): the raw name pool contains duplicates (e.g. "bebo", "sabo",
    # "budoc"), so deduplicating *after* slicing can leave fewer names than
    # addresses — leftover addresses are silently unused; confirm intended.
    names = list(set(names))
    random.shuffle(names)
    # Start from a clean slate: drop all existing peers and relationships.
    session.query(Peer).delete()
    session.query(SocialRelationship).delete()
    peers = []
    config = loadDefaultConfig()
    for name in names:
        address = addresses.pop()
        peers.append(Peer(name, "", "http://%s:%s/" % (address, config.sharing.rpcport)))
        print "making peer %s (%s)" % (name, address)
        # Remember the name handed to this machine's own address.
        if address == ouraddress:
            ourname = name
    session.add_all(peers)
    session.commit()
    # Create one relationship per unordered peer pair (skip self and pairs
    # already related in either direction).
    for peer in peers:
        print "peer %s relationships:" % peer.name
        for otherpeer in peers:
            if peer.getRelationship(session, otherpeer) != None or peer == otherpeer:
                continue
            distance = 0.9 # nextDistance()
            relationship = SocialRelationship(peer.id, otherpeer.id, distance)
            session.add(relationship)
            print " %f -> %s" % (distance, otherpeer.name)
    session.commit()
    # Persist the local peer's identity and sharing settings into the config
    # file next to this script (only when one of the addresses was ours).
    if ourname:
        print "assigned name %r to local peer" % ourname
        parser = ConfigParser.ConfigParser()
        f = os.path.join(os.path.dirname(__file__), "socialscan.config")
        parser.readfp(open(f, "r"))
        parser.set("general", "localpeer", ourname)
        try:
            parser.add_section("sharing")
        except ConfigParser.DuplicateSectionError:
            pass
        parser.set("sharing", "bindhost", ouraddress)
        parser.set("sharing", "port", config.sharing.port)
        parser.set("sharing", "rpcport", config.sharing.rpcport)
        parser.write(open(f, "w"))