Example #1
    def siteAnnounce(self, address):
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        logging.info("Opening a simple connection server")
        global file_server
        from File import FileServer
        file_server = FileServer("127.0.0.1", 1234)
        file_server.start()

        logging.info("Announcing site %s to tracker..." % address)
        site = Site(address)

        s = time.time()
        site.announce()
        print "Response time: %.3fs" % (time.time() - s)
        print site.peers
Example #2
    def need(self, address, all_file=True):
        from Site import Site
        site = self.get(address)
        if not site:  # Site does not exist yet
            if not self.isAddress(address):
                return False  # Not address: %s % address
            logging.debug("Added new site: %s" % address)
            site = Site(address)
            self.sites[address] = site
            if not site.settings["serving"]:  # Maybe it was deleted before
                site.settings["serving"] = True
                site.saveSettings()
            if all_file:  # Also download user files on first sync
                site.download(blind_includes=True)
        else:
            if all_file:
                site.download()

        return site
Example #3
	def load(self):
		from Site import Site
		if not self.sites: self.sites = {}
		address_found = []
		added = 0
		# Load new addresses
		for address in json.load(open("data/sites.json")):
			if address not in self.sites and os.path.isfile("data/%s/content.json" % address):
				self.sites[address] = Site(address)
				added += 1
			address_found.append(address)

		# Remove deleted addresses
		for address in self.sites.keys():
			if address not in address_found: 
				del(self.sites[address])
				logging.debug("Removed site: %s" % address)

		if added: logging.debug("SiteManager added %s sites" % added)
Example #4
File: main.py Project: Moustikitos/ZeroNet
    def siteSign(self,
                 address,
                 privatekey=None,
                 passphrase=False,
                 inner_path="content.json",
                 publish=False,
                 remove_missing_optional=False):
        from Site import Site
        from Site import SiteManager
        from Debug import Debug
        SiteManager.site_manager.load()
        address = getPublicKey(address)
        logging.info("Signing site: %s..." % address)
        site = Site(address, allow_create=False)

        if passphrase:
            import getpass
            passphrase = getpass.getpass(
                "Type your passphrase (input hidden): ")
            privatekey = hashlib.sha256(
                passphrase if isinstance(passphrase, bytes)
                else passphrase.encode("utf-8")).hexdigest()
        elif not privatekey:  # If no privatekey defined
            from User import UserManager
            user = UserManager.user_manager.get()
            if user:
                site_data = user.getSiteData(address)
                privatekey = site_data.get("privatekey")
            else:
                privatekey = None
            if not privatekey:
                # Not found in users.json, ask from console
                import getpass
                privatekey = getpass.getpass("Private key (input hidden):")
        try:
            succ = site.content_manager.sign(
                inner_path=inner_path,
                privatekey=privatekey,
                update_changed_files=True,
                remove_missing_optional=remove_missing_optional)
        except Exception, err:
            logging.error("Sign error: %s" % Debug.formatException(err))
            succ = False
Example #5
File: main.py Project: wsunxa/ZeroNet
    def siteNeedFile(self, address, inner_path):
        from Site import Site

        def checker():
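            # Prints how long each 1-second sleep actually took (useful for spotting when the gevent loop is blocked)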
            while 1:
                s = time.time()
                time.sleep(1)
                print "Switch time:", time.time() - s

        gevent.spawn(checker)

        logging.info("Opening a simple connection server")
        global file_server
        from Connection import ConnectionServer
        file_server = ConnectionServer("127.0.0.1", 1234)

        site = Site(address)
        site.announce()
        print site.needFile(inner_path, update=True)
Example #6
    def siteCreate(self):
        logging.info("Generating new privatekey...")
        from Crypt import CryptBitcoin
        privatekey = CryptBitcoin.newPrivatekey()
        logging.info(
            "----------------------------------------------------------------------"
        )
        logging.info("Site private key: %s" % privatekey)
        logging.info(
            "                  !!! ^ Save it now, required to modify the site ^ !!!"
        )
        address = CryptBitcoin.privatekeyToAddress(privatekey)
        logging.info("Site address:     %s" % address)
        logging.info(
            "----------------------------------------------------------------------"
        )

        while not config.batch:
            if raw_input("? Have you secured your private key? (yes, no) > "
                         ).lower() == "yes":
                break
            else:
                logging.info(
                    "Please secure it now, you are going to need it to modify your site!"
                )

        logging.info("Creating directory structure...")
        from Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        os.mkdir("%s/%s" % (config.data_dir, address))
        open("%s/%s/index.html" % (config.data_dir, address),
             "w").write("Hello %s!" % address)

        logging.info("Creating content.json...")
        site = Site(address)
        site.content_manager.sign(privatekey=privatekey,
                                  extend={"postmessage_nonce_security": True})
        site.settings["own"] = True
        site.saveSettings()

        logging.info("Site created!")
Example #7
def sign(address, content, zeronet_directory):
    privatekey = None
    with open(zeronet_directory + "data/users.json") as f:
        users = json.loads(f.read())

        try:
            user = users[users.keys()[0]]
            privatekey = user["certs"]["zeroid.bit"]["auth_privatekey"]
        except KeyError:
            raise TypeError(
                "Private key for zeroid.bit not found in users.json")

    from Site import Site
    site = Site(address, allow_create=False)

    site.content_manager.sign(inner_path=content,
                              privatekey=privatekey,
                              update_changed_files=True,
                              remove_missing_optional=False)
Example #8
def parseQafqazInfo(query):
    query = querize(query)

    request = Request('https://qafqazinfo.az/news/search?keyword=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    news_tags = file.find_all('a', class_='https://qafqazinfo.aznews/search', href=True)

    site = Site('QafqazInfo')

    for i in range(len(news_tags)): 
        tag = news_tags[i]
        site.results.append(News(tag['href'], tag.get_text().strip(), '00.00.0000', '00:00'))

    for r in site.results:
        print(r.source + '|' + r.headline + '|' + r.date)

    return site
Example #9
    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", diffs={}):
        global file_server
        from Site import Site
        from Site import SiteManager
        from File import FileServer  # We need fileserver to handle incoming file requests
        from Peer import Peer
        SiteManager.site_manager.load()

        logging.info("Loading site...")
        site = Site(address, allow_create=False)
        site.settings["serving"] = True  # Serve the site even if it is disabled

        logging.info("Creating FileServer....")
        file_server = FileServer()
        site.connection_server = file_server
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
        time.sleep(0.001)

        if not file_server_thread.ready():
            # The file server is still running: no other instance owns the port, so publish directly
            file_server.openport()
            if peer_ip:  # Peer ip specified, add it directly
                site.addPeer(peer_ip, peer_port)
            else:  # Just ask the tracker
                logging.info("Gathering peers from tracker")
                site.announce()  # Gather peers
            published = site.publish(5, inner_path, diffs=diffs)  # Push to peers
            if published > 0:
                time.sleep(3)
                logging.info("Serving files (max 60s)...")
                gevent.joinall([file_server_thread], timeout=60)
                logging.info("Done.")
            else:
                logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
        else:
            # Already running, notify local client on new content
            logging.info("Sending siteReload")
            my_peer = Peer("127.0.0.1", config.fileserver_port)
            logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path}))
            logging.info("Sending sitePublish")
            logging.info(my_peer.request("sitePublish", {"site": site.address, "inner_path": inner_path, "diffs": diffs}))
            logging.info("Done.")
Example #10
def site(request):
    con = pymysql.connect(host='localhost',
                          user='******',
                          password='',
                          db='dan_py',
                          charset='utf8mb4',
                          autocommit=True,
                          cursorclass=pymysql.cursors.DictCursor)
    SITE = Site()
    SITE.db = con.cursor()
    path = request.match_info.get('url', '')
    SITE.path = path
    SITE.p = path.split('/')
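    # Pad the split path out to 7 segments with empty strings so fixed-index lookups into SITE.p don't go out of range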
    i = len(SITE.p)
    while i < 7:
        SITE.p.append('')
        i += 1
    SITE.request = request

    return SITE
Example #11
def parseEastNews(query):
    query = querize(query)
    
    request = Request('https://eastnews.org/search?query=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')
    
    parent = file.find('div', class_='col-1')
    news_tags = parent.find_all('a', {'style' : 'color:#000'}, href=True)

    site = Site("Eastnews.org")

    for i in range(len(news_tags)): 
        tag = news_tags[i]
        site.results.append(News(tag['href'], tag.get_text().strip(), ' ', '00:00'))

    for r in site.results:
        print(r.source + '|' + r.headline + '|' + r.date)

    return site
Example #12
    def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
        from Site import Site
        logging.info("Signing site: %s..." % address)
        site = Site(address, allow_create=False)

        if not privatekey:  # If no privatekey defined
            from User import UserManager
            user = UserManager.user_manager.get()
            if user:
                site_data = user.getSiteData(address)
                privatekey = site_data.get("privatekey")
            else:
                privatekey = None
            if not privatekey:
                # Not found in users.json, ask from console
                import getpass
                privatekey = getpass.getpass("Private key (input hidden):")
        succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True)
        if succ and publish:
            self.sitePublish(address, inner_path=inner_path)
Example #13
def parseAzadliq(query):
    query = querize(query)

    request = Request('https://www.azadliq.org/s?k=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    source_tags = file.find_all('a', class_='img-wrap img-wrap--t-spac img-wrap--size-3 img-wrap--float img-wrap--xs', href=True)
    headline_tags = file.find_all('h4', class_='media-block__title media-block__title--size-3')
    date_tags = file.find_all('span', class_='date date--mb date--size-3')

    site = Site('Azadliq Radiosu')
    
    for i in range(len(source_tags)): 
        source = source_tags[i]
        headline = source['title']
        date = date_tags[i]
        site.results.append(News('https://www.azadliq.org' + source['href'], headline, date.string, '00:00'))

    return site
Example #14
def parseAzToday(query):
    query = querize(query)

    request = Request('https://www.aztoday.az/?s=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    parent = file.find('ul', class_='penci-wrapper-data penci-grid')
    
    news_tags = parent.find_all('a', class_='penci-image-holder penci-lazy', href=True)
    date_tags = parent.find_all('time', class_='entry-date published')

    site = Site('Aztoday.az')

    for i in range(len(news_tags)): 
        tag = news_tags[i]
        date = date_tags[i]
        site.results.append(News(tag['href'], tag['title'], date.get_text().strip(), '00:00'))

    return site
Example #15
def parseRealTV(query):
    query = querize(query)
    
    request = Request('https://www.realtv.az/search/?text=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')
    
    parent_tags = file.find_all('h4', class_='card-title gel-pica-bold')
    news_tags = list()

    site = Site("Real TV")
    
    for tag in parent_tags:
        news_tags.append(tag.find('a', href=True))
    
    for i in range(len(news_tags)): 
        tag = news_tags[i]
        site.results.append(News('https://www.lent.az' + tag['href'], tag.get_text().strip(), ' ', '00:00'))

    return site
Example #16
def get_random_site(problem):
    num_operations = problem.num_operations
    random_site = Site(num_operations)

    # Create a list with one entry per operation; each entry holds the index of the job that operation belongs to
    job_operation_list = []
    for i in range(problem.num_jobs):
        for j in range(problem.num_machines):
            job_operation_list.append(i)

    for i in range(len(random_site.location)):
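        # Pick one of the remaining operations at random; it is removed below so each operation is assigned exactly once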
        random_operation_nr = random.randint(0, len(job_operation_list) - 1)

        # print("random_operation_nr: ", random_operation_nr)
        # print("len(job_operation_list): ", len(job_operation_list))

        random_site.location[i] = job_operation_list[random_operation_nr]
        del job_operation_list[random_operation_nr]

    return random_site
Example #17
File: main.py Project: volker48/ZeroNet
def siteVerify(address):
    from Site import Site
    logging.info("Verifing site: %s..." % address)
    site = Site(address)

    logging.info("Verifing content.json signature...")
    if site.verifyFile("content.json",
                       open(site.getPath("content.json"), "rb"),
                       force=True) != False:  # Force check the signature
        logging.info("[OK] content.json signed by address %s!" % address)
    else:
        logging.error("[ERROR] Content.json not signed by address %s!" %
                      address)

    logging.info("Verifying site files...")
    bad_files = site.verifyFiles()
    if not bad_files:
        logging.info("[OK] All file sha512sum matches!")
    else:
        logging.error("[ERROR] Error during verifying site files!")
Example #18
def parseOrduAz(query):
    query = querize(query)

    request = Request('https://ordu.az/index.php?search=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    super_parent = file.find('div', class_='col-md-9 col-sm-12')
    parent = super_parent.find('div', class_='row')

    source_tags = parent.find_all('a', href=True)
    headline_tags = parent.find_all('div', class_='news-title')
    
    site = Site('Ordu.az')

    for i in range(len(source_tags)): 
        source = source_tags[i]
        headline = headline_tags[i]
        site.results.append(News(source['href'], headline.get_text().strip(), '00.00.0000', '00:00'))

    return site
Example #19
    def siteNeedFile(self, address, inner_path):
        from Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        def checker():
            while 1:
                s = time.time()
                time.sleep(1)
                print "Switch time:", time.time() - s
        gevent.spawn(checker)

        logging.info("Opening a simple connection server")
        global file_server
        from File import FileServer
        file_server = FileServer("127.0.0.1", 1234)
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)

        site = Site(address)
        site.announce()
        print site.needFile(inner_path, update=True)
Example #20
    def need(self, address, all_file=True):
        from Site import Site
        site = self.get(address)
        if not site:  # Site does not exist yet
            # Try to find the site with a different letter case
            for recover_address, recover_site in self.sites.items():
                if recover_address.lower() == address.lower():
                    return recover_site

            if not self.isAddress(address):
                return False  # Not address: %s % address
            self.log.debug("Added new site: %s" % address)
            site = Site(address)
            self.sites[address] = site
            if not site.settings["serving"]:  # Maybe it was deleted before
                site.settings["serving"] = True
            site.saveSettings()
            if all_file:  # Also download user files on first sync
                site.download(check_size=True, blind_includes=True)

        return site
Example #21
def parseFemidaAz(query):
    query = querize(query)

    request = Request('http://femida.az/?search=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    parent = file.find('ul', class_='list')

    news_tags = parent.find_all('a', href=True)

    site = Site('Femida.az')

    for i in range(len(news_tags)): 
        tag = news_tags[i]
        site.results.append(News(tag['href'], tag.get_text().strip(), '00.00.0000', '00:00'))

    for r in site.results:
        print(r.source + '|' + r.headline + '|' + r.date)

    return site
Example #22
def parseDemokratAz(query):
    query = querize(query)

    request = Request('https://demokrat.az/search?q=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    parent = file.find('div', {'id' : 'wrapper'})

    news_tags = parent.find_all('a', href=True)

    site = Site('Demokrat.az')

    for i in range(len(news_tags)): 
        tag = news_tags[i]
        site.results.append(News('https://demokrat.az/' + tag['href'], tag.get_text().strip(), '00.00.0000', '00:00'))

    for r in site.results:
        print(r.source + '|' + r.headline + '|' + r.date)

    return site
Example #23
def parseAzVision(query):
    query = querize(query)

    request = Request('https://azvision.az/search.php?search=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    parent = file.find('ul', 'contents')

    news_tags = file.find_all('a', {'itemprop' : 'name'})

    site = Site('AzVision.az')

    for i in range(len(news_tags)): 
        tag = news_tags[i]
        site.results.append(News('https://azvision.az' + tag['href'], tag.get_text().strip(), '00.00.0000', '00:00'))

    for r in site.results:
        print(r.source + '|' + r.headline + '|' + r.date)

    return site
Example #24
 def getSite(self, siteId=None, dateReq=NOW.strftime("%Y-%m-%d %H:00:00"), end=NOW.strftime("%Y-%m-%d %H:00:00"), db=None, locked=False):
     # Get the site description for the specified time window
     dateReq = str(dateReq)
     end = str(end)
     site = None
     dbStatus = False
     
     if siteId == None:
         return None
     
     if datetime.strptime(dateReq, "%Y-%m-%d %H:00:00") - datetime.strptime(end, "%Y-%m-%d %H:00:00") > timedelta(hours=0) :
         end = dateReq
     
     if db == None:
         db = Database()
         if db.connect():
             dbStatus = True
     else:
         dbStatus = True
             
     if dbStatus :
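         # Read the site row under table read-locks, look up each resource's available amount for the requested window, then unlock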
         try:
             if not locked :
                 db.lock({'site':'READ','schedule':'READ','reservation':'READ','site_reserved':'READ'})
             
             db.execute('SELECT * FROM `site` WHERE `site_id` = "'+str(siteId)+'";')
             data = db.getCursor().fetchone()
 
             site = Site(site=data,db=db)
             res = site.getResources()
 
             for i in range(0,len(res)):
                 res[i].setAvailableAmount(db=db,begin=dateReq,end=end)
          
             site.setRunningAmount(db=db,aTime=dateReq)
             db.unlock()
         finally:
             if not locked :
                 db.close()
             return site
Example #25
def parsePublikaAz(query):
    query = querize(query)

    request = Request('https://publika.az/search.php?query=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    parent = file.find('div', class_='page_layout clearfix')

    news_tags = parent.find_all('a', href=True)
    date_tags = parent.find_all('li', class_='date')

    print(len(news_tags))
    print(len(date_tags))

    length = len(news_tags)
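    # Drop the last two anchors, then keep only every other remaining anchor by overwriting the rest with a sentinel (date_tags[0]) and stripping it out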

    del news_tags[length - 1]
    del news_tags[length - 2]

    i = 0
    
    while (i < len(news_tags)):
        news_tags[i] = date_tags[0]
        i = i + 2

    while date_tags[0] in news_tags:
        news_tags.remove(date_tags[0])

    site = Site('Publika.az')

    for i in range(len(news_tags)): 
        tag = news_tags[i]
        date = date_tags[i]
        site.results.append(News(tag['href'], tag.get_text().strip(), date.get_text().strip(), '00:00'))

    for r in site.results:
        print(r.source + '|' + r.headline + '|' + r.date)

    return site
Example #26
 def load(self, cleanup=True):
     self.log.debug("Loading sites...")
     self.loaded = False
     from Site import Site
     if self.sites is None:
         self.sites = {}
     address_found = []
     added = 0
     # Load new addresses
     for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems():
         if address not in self.sites and os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
             s = time.time()
             try:
                 site = Site(address, settings=settings)
                 site.content_manager.contents.get("content.json")
             except Exception, err:
                 self.log.debug("Error loading site %s: %s" % (address, err))
                 continue
             self.sites[address] = site
             self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s))
             added += 1
         address_found.append(address)
Example #27
def parseQaynarInfo(query):
    query = querize(query)

    request = Request('https://qaynarinfo.az/az/search/?query=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    parent = file.find('div', class_='posts-wrap')
    
    source_tags = parent.find_all('a', href=True)
    headline_tags = parent.find_all('h2', 'post-title')
    date_tags = parent.find_all('div', class_='post-date')

    site = Site('Qaynarinfo.az')

    for i in range(len(source_tags)): 
        source = source_tags[i]
        headline = headline_tags[i]
        date = date_tags[i]
        site.results.append(News(source['href'], headline.get_text().strip(), date.get_text().strip(), '00:00'))

    return site
Example #28
def parseReportAz(query):
    query = querize(query)

    request = Request('https://www.report.az/search?query=' + query, None, HEADERS)
    source = urlopen(request)
    file = bs.BeautifulSoup(source, 'lxml')

    news_tags = file.find_all('a', class_='title', href=True)
    date_tags = file.find_all('div', class_='news-date')

    site = Site('Report.az')

    for i in range(len(news_tags)): 
        tag = news_tags[i]
        date = date_tags[i]
        site.results.append(News("https://report.az/" + tag['href'], tag.get_text().strip(), date.get_text().strip(), '00:00'))

    # to print for debugging
    for r in site.results:
        print(r.source + '|' + r.headline + '|' + r.date)

    return site
Example #29
    def load(self, cleanup=True):
        self.log.debug("Loading sites...")
        self.loaded = False
        from Site import Site
        if self.sites is None:
            self.sites = {}
        address_found = []
        added = 0
        # Load new addresses
        for address, settings in json.load(
                open("%s/sites.json" % config.data_dir)).iteritems():
            if address not in self.sites and os.path.isfile(
                    "%s/%s/content.json" % (config.data_dir, address)):
                s = time.time()
                self.sites[address] = Site(address, settings=settings)
                self.log.debug("Loaded site %s in %.3fs" %
                               (address, time.time() - s))
                added += 1
            address_found.append(address)

        # Remove deleted addresses
        if cleanup:
            for address in self.sites.keys():
                if address not in address_found:
                    del (self.sites[address])
                    self.log.debug("Removed site: %s" % address)

            # Remove orphan sites from content.db
            for row in ContentDb.getContentDb().execute("SELECT * FROM site"):
                if row["address"] not in self.sites:
                    self.log.info("Deleting orphan site from content.db: %s" %
                                  row["address"])
                    ContentDb.getContentDb().execute(
                        "DELETE FROM site WHERE ?",
                        {"address": row["address"]})

        if added:
            self.log.debug("SiteManager added %s sites" % added)
        self.loaded = True
Example #30
    def testVerify():
        from Site import Site
        site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")

        content_manager = ContentManager(site)
        print "Loaded contents:", content_manager.contents.keys()

        file = open(
            site.storage.getPath(
                "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
        print "content.json valid:", content_manager.verifyFile(
            "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json",
            file,
            ignore_same=False)

        file = open(
            site.storage.getPath(
                "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
        print "messages.json valid:", content_manager.verifyFile(
            "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json",
            file,
            ignore_same=False)