def main():
	db = database.get_cursor()
	db.execute("SELECT torrent_id, tracker_id, hash, tracker_url, last_scrape FROM vw_scrape")	
	all_trackers = {}	
	for row in db.fetchall():
		turl = row["tracker_url"]
		hash = row["hash"]
		if turl in all_trackers:
			if hash not in all_trackers[turl]:
				all_trackers[turl].append(hash)
		else:
			all_trackers[turl] = [hash]		

	for tracker, hashes in all_trackers.iteritems():
		try:
			result = scrape(tracker, hashes)	
			if result:
				for hash, stats in result.iteritems():
					db.execute("""UPDATE tracker SET seeds=%s, leechers=%s, completed=%s,
						last_scrape=CURRENT_TIMESTAMP, scrape_error=NULL
						WHERE torrent_id=(SELECT id FROM torrent WHERE hash=%s) AND tracker_url=%s""",
						(stats["seeds"], stats["peers"], stats["complete"], hash, tracker))
				db.commit()
		except (RuntimeError, NameError, ValueError, socket.timeout) as e:
			print "Error: %s" % e
			# build a quoted, comma-separated hash list for the IN (...) clause below
			format_strings = ','.join(["'%s'"] * len(hashes)) % tuple(hashes)
			db.execute("""UPDATE tracker SET last_scrape=CURRENT_TIMESTAMP, scrape_error=%s
				WHERE tracker_url=%s AND torrent_id IN (SELECT id FROM torrent WHERE hash IN (""" + format_strings + "))", (e, tracker))
			db.commit()
	db.close()
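
# scrape() is not shown in this listing; from the loop above it is assumed to
# return a dict keyed by info-hash with "seeds", "peers" and "complete" counts.
# A minimal stand-in with that shape (hypothetical stub, useful only for
# exercising the UPDATE loop without contacting a real tracker):
def fake_scrape(tracker, hashes):
	# return zeroed stats for every requested hash
	return dict((h, {"seeds": 0, "peers": 0, "complete": 0}) for h in hashes)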
Example #2
def index():
    db = database.get_cursor()
    db.execute(
        "SELECT hash, name, total_size_bytes FROM torrent WHERE retrieving_data=0 ORDER BY create_date DESC LIMIT 10"
    )
    lastten = db.fetchall()
    db.close()
    return template("index.html", lastten=lastten)
Example #3
def add_to_database(hash, full_magnet_uri=None, already_exists=False, fetch_metadata=True):
	db = database.get_cursor()
	if not already_exists:
		db.execute("INSERT INTO torrent(hash, retrieving_data) VALUES (%s, 1)", (hash))
		db.commit()
	if fetch_metadata:
		magnet_uri = full_magnet_uri if full_magnet_uri else get_magnet_uri(hash)		
		thread.start_new_thread(fetch_magnet, (magnet_uri,))
	db.close()
Example #4
def get_base64_metadata(hash, decode=False):
	db = database.get_cursor()
	db.execute("SELECT base64_metadata FROM torrent WHERE hash=%s", hash)	
	if (db.rowcount > 0):
		data = db.fetchone()
		db.close()
		metadata = data['base64_metadata']
		return base64.b64decode(metadata) if decode else metadata
	db.close()
	return None
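
# Usage sketch (hypothetical helper, not part of the original code): the
# decoded value is assumed to be the raw bencoded metadata stored by
# add_from_torrent_info(), so it can be written back out as a .torrent file.
def save_torrent_file(hash, path):
	metadata = get_base64_metadata(hash, decode=True)
	if metadata is None:
		return False
	with open(path, "wb") as f:
		f.write(metadata)
	return True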
Example #5
def get_base64_metadata(hash, decode=False):
    db = database.get_cursor()
    db.execute("SELECT base64_metadata FROM torrent WHERE hash=%s", hash)
    if (db.rowcount > 0):
        data = db.fetchone()
        db.close()
        metadata = data['base64_metadata']
        return base64.b64decode(metadata) if decode else metadata
    db.close()
    return None
Example #6
def add_to_database(hash,
                    full_magnet_uri=None,
                    already_exists=False,
                    fetch_metadata=True):
    db = database.get_cursor()
    if not already_exists:
        db.execute("INSERT INTO torrent(hash, retrieving_data) VALUES (%s, 1)",
                   (hash))
        db.commit()
    if fetch_metadata:
        magnet_uri = full_magnet_uri if full_magnet_uri else get_magnet_uri(
            hash)
        thread.start_new_thread(fetch_magnet, (magnet_uri, ))
    db.close()
Example #7
def scrape_trackers(hash, tracker_list):
	db = database.get_cursor()
	for url in tracker_list:
		try:
			result = scrape(url, [hash])				
			for hash, stats in result.iteritems():				
				db.execute("""UPDATE tracker SET seeds=%s, leechers=%s, completed=%s,
					last_scrape=CURRENT_TIMESTAMP, scrape_error=NULL
					WHERE torrent_id=(SELECT id FROM torrent WHERE hash=%s) AND tracker_url=%s""",
					(stats["seeds"], stats["peers"], stats["complete"], hash, url))
			db.commit()
		except (RuntimeError, NameError, ValueError, socket.timeout) as e:			
			db.execute("""UPDATE tracker SET last_scrape=CURRENT_TIMESTAMP, scrape_error=%s
				WHERE tracker_url=%s AND torrent_id=(SELECT id FROM torrent WHERE hash=%s)""", (e, url, hash))
			db.commit()
	db.close()			
Example #8
def add_from_torrent_info(info, torrent_metadata=None):
	db = database.get_cursor()
	torrent_hash = str(info.info_hash())	
	if is_in_database(torrent_hash):
		db.execute("DELETE FROM torrent WHERE hash=%s", (torrent_hash))	
	total_size = sum([f.size for f in info.files()])
	db.execute("INSERT INTO torrent(hash, name, total_size_bytes, retrieving_data, base64_metadata) VALUES (%s, %s, %s, 0, %s)",
		(torrent_hash, info.name(), total_size, base64.b64encode(torrent_metadata)))
	torrent_id = db.lastrowid

	for f in info.files():
		db.execute("INSERT INTO file(torrent_id, name, full_location, length_bytes) VALUES (%s, %s, %s, %s)",
			(torrent_id, os.path.basename(f.path), f.path, f.size))
	for t in info.trackers():		
		db.execute("INSERT INTO tracker(torrent_id, tracker_url) VALUES (%s, %s)",
			(torrent_id, t.url))
	db.commit()	
	db.close()
	scrape_trackers(torrent_hash, [t.url for t in info.trackers()])
Example #9
def scrape_trackers(hash, tracker_list):
    db = database.get_cursor()
    for url in tracker_list:
        try:
            result = scrape(url, [hash])
            for hash, stats in result.iteritems():
                db.execute(
                    """UPDATE tracker SET seeds=%s, leechers=%s, completed=%s,
					last_scrape=CURRENT_TIMESTAMP, scrape_error=NULL
					WHERE torrent_id=(SELECT id FROM torrent WHERE hash=%s) AND tracker_url=%s""",
                    (stats["seeds"], stats["peers"], stats["complete"], hash,
                     url))
            db.commit()
        except (RuntimeError, NameError, ValueError, socket.timeout) as e:
            db.execute(
                """UPDATE tracker SET last_scrape=CURRENT_TIMESTAMP, scrape_error=%s
				WHERE tracker_url=%s AND torrent_id=(SELECT id FROM torrent WHERE hash=%s)""",
                (e, url, hash))
            db.commit()
    db.close()
Example #10
def add_from_torrent_info(info, torrent_metadata=None):
    db = database.get_cursor()
    torrent_hash = str(info.info_hash())
    if is_in_database(torrent_hash):
        db.execute("DELETE FROM torrent WHERE hash=%s", (torrent_hash))
    total_size = sum([f.size for f in info.files()])
    db.execute(
        "INSERT INTO torrent(hash, name, total_size_bytes, retrieving_data, base64_metadata) VALUES (%s, %s, %s, 0, %s)",
        (torrent_hash, info.name(), total_size,
         base64.b64encode(torrent_metadata)))
    torrent_id = db.lastrowid

    for f in info.files():
        db.execute(
            "INSERT INTO file(torrent_id, name, full_location, length_bytes) VALUES (%s, %s, %s, %s)",
            (torrent_id, os.path.basename(f.path), f.path, f.size))
    for t in info.trackers():
        db.execute(
            "INSERT INTO tracker(torrent_id, tracker_url) VALUES (%s, %s)",
            (torrent_id, t.url))
    db.commit()
    db.close()
    scrape_trackers(torrent_hash, [t.url for t in info.trackers()])
Example #11
def main():
    db = database.get_cursor()
    db.execute(
        "SELECT torrent_id, tracker_id, hash, tracker_url, last_scrape FROM vw_scrape"
    )
    all_trackers = {}
    for row in db.fetchall():
        turl = row["tracker_url"]
        hash = row["hash"]
        if turl in all_trackers:
            if hash not in all_trackers[turl]:
                all_trackers[turl].append(hash)
        else:
            all_trackers[turl] = [hash]

    for tracker, hashes in all_trackers.iteritems():
        try:
            result = scrape(tracker, hashes)
            if result:
                for hash, stats in result.iteritems():
                    db.execute(
                        """UPDATE tracker SET seeds=%s, leechers=%s, completed=%s,
						last_scrape=CURRENT_TIMESTAMP, scrape_error=NULL
						WHERE torrent_id=(SELECT id FROM torrent WHERE hash=%s) AND tracker_url=%s""",
                        (stats["seeds"], stats["peers"], stats["complete"],
                         hash, tracker))
                db.commit()
        except (RuntimeError, NameError, ValueError, socket.timeout) as e:
            print "Error: %s" % e
            # build a quoted, comma-separated hash list for the IN (...) clause below
            format_strings = ','.join(["'%s'"] * len(hashes)) % tuple(hashes)
            db.execute(
                """UPDATE tracker SET last_scrape=CURRENT_TIMESTAMP, scrape_error=%s
				WHERE tracker_url=%s AND torrent_id IN (SELECT id FROM torrent WHERE hash IN ("""
                + format_strings + "))", (e, tracker))
            db.commit()
    db.close()
Example #12
def is_in_database(hash):
	db = database.get_cursor()
	db.execute("SELECT id FROM torrent WHERE hash=%s", (hash,))
	ret = db.rowcount > 0
	db.close()
	return ret
Example #13
def info(hash=None):
    """
	<h5>Description:</h5>
	<p>
		Returns the information we have about a torrent that has been added.
		If the torrent metadata is still being retrieved, 'torrent.retrieving_data' will be 1.
		If the torrent metadata has been retrieved, 'torrent.download_link' will have a value.
	</p>

	<h5>Parameters:</h5>
	<ul>
		<li><strong>hash</strong> - The hash returned from the /api/upload call</li>
	</ul>

	<h5>Returns:</h5>
	<p>
		On success (metadata is still downloading):
<pre>
{
    "data": {		        
        "torrent": {		            
            "retrieving_data": 1,
            "hash": "ddceab34ac388ca56b0cdbc6eb726ca1844233c6"		            
        }		        
    },
    "success": true
}
</pre>
		On success (metadata has downloaded):
<pre>
{
    "data": {
        "files": [{
                "size_bytes": 392560640,
                "full_location": "Pioneer.One.S01E03.Xvid-VODO/Pioneer.One.S01E03.Xvid-VODO.avi",
                "name": "Pioneer.One.S01E03.Xvid-VODO.avi"
        }],
        "torrent": {
            "hash": "ddceab34ac388ca56b0cdbc6eb726ca1844233c5",
            "name": "Pioneer.One.S01E03.Xvid-VODO",
            "download_link": "/api/metadata/ddceab34ac388ca56b0cdbc6eb726ca1844233c5.torrent",
            "total_size_bytes": 402477447
        },
        "trackers": [{
                "leechers": 0,
                "completed": 0,
                "seeds": 0,
                "tracker_url": "udp://tracker.openbittorrent.com:80"                
        }]
    },
    "success": true
}
</pre>
		On error:
<pre>
{
    "message": "Torrent ddceab34ac388ca56b0cdbc6eb726ca1844233c6 is not in database! Call /api/upload to add it",
    "success": false
}
</pre>
	</p>

	<h5>Example:</h5>
	<p>
		<strong>Check the info of torrent 'ddceab34ac388ca56b0cdbc6eb726ca1844233c6'</strong><br />
		/api/info/ddceab34ac388ca56b0cdbc6eb726ca1844233c6
	</p>
	"""
    if not hash or not is_hash(hash):
        return api_error("%s is not a valid hash" % hash)
    db = database.get_cursor()
    db.execute(
        "SELECT id, hash, name, total_size_bytes, retrieving_data FROM torrent WHERE hash=%s",
        hash)
    torrent = db.fetchone()
    if not torrent:
        return api_error(
            "Torrent %s is not in database! Call /api/upload to add it" % hash)
    id = torrent["id"]
    del torrent["id"]

    if torrent["retrieving_data"]:
        del torrent["total_size_bytes"]
        del torrent["name"]
        return api_success({"torrent": torrent})
    else:
        del torrent["retrieving_data"]
        torrent["download_link"] = get_url(
            "/api/metadata/<hash:re:[a-fA-F0-9]{40}>.torrent", hash=hash)
        torrent["nice_size"] = size(torrent["total_size_bytes"])

    db.execute(
        "SELECT tracker_url, seeds, leechers, completed, scrape_error FROM tracker WHERE torrent_id=%s",
        id)
    trackers = db.fetchall()
    for tracker in trackers:
        if not tracker["scrape_error"]:
            del tracker["scrape_error"]

    db.execute(
        "SELECT name, full_location, length_bytes as size_bytes FROM file WHERE torrent_id=%s",
        id)
    files = db.fetchall()
    for file in files:
        file["nice_size"] = size(file["size_bytes"])

    db.close()
    return api_success({
        "torrent": torrent,
        "files": files,
        "trackers": trackers
    })
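
# Client-side sketch of the /api/info contract documented in the docstring
# above (hypothetical helper, not part of the server module; the base_url
# default is an assumption):
def check_info(hash, base_url="http://localhost:8080"):
    import json
    import urllib2  # Python 2 stdlib, matching the rest of this codebase
    resp = json.load(urllib2.urlopen("%s/api/info/%s" % (base_url, hash)))
    if not resp["success"]:
        raise RuntimeError(resp["message"])
    torrent = resp["data"]["torrent"]
    if torrent.get("retrieving_data"):
        return None  # metadata is still being fetched
    return torrent["download_link"]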
Example #14
def get_magnet_uri(hash):
	# use these public trackers to help find the torrent via magnet, otherwise it'll never be found
	public_trackers = "tr=udp://tracker.openbittorrent.com:80&tr=udp://tracker.publicbt.com:80&tr=udp://tracker.istole.it:6969&tr=udp://tracker.ccc.de:80"
	return "magnet:?xt=urn:btih:%s&%s" % (hash, public_trackers)

def scrape_trackers(hash, tracker_list):
	db = database.get_cursor()
	for url in tracker_list:
		try:
			result = scrape(url, [hash])				
			for hash, stats in result.iteritems():				
				db.execute("""UPDATE tracker SET seeds=%s, leechers=%s, completed=%s,
					last_scrape=CURRENT_TIMESTAMP, scrape_error=NULL
					WHERE torrent_id=(SELECT id FROM torrent WHERE hash=%s) AND tracker_url=%s""",
					(stats["seeds"], stats["peers"], stats["complete"], hash, url))
			db.commit()
		except (RuntimeError, NameError, ValueError, socket.timeout) as e:			
			db.execute("""UPDATE tracker SET last_scrape=CURRENT_TIMESTAMP, scrape_error=%s
				WHERE tracker_url=%s AND torrent_id=(SELECT id FROM torrent WHERE hash=%s)""", (e, url, hash))
			db.commit()
	db.close()			

#get all torrents that were still trying to retrieve metadata and re-add them
db = database.get_cursor()
db.execute("SELECT hash FROM torrent WHERE retrieving_data = 1")
data = db.fetchall()
for item in data:
	logger.debug("Reloading '%s' since its metadata hasnt been retrieved yet" % item["hash"])
	add_to_database(item["hash"], get_magnet_uri(item["hash"]), True)
db.close()
Example #15
def is_in_database(hash):
    db = database.get_cursor()
    db.execute("SELECT id FROM torrent WHERE hash=%s", (hash, ))
    ret = db.rowcount > 0
    db.close()
    return ret
Example #16
def info(hash=None):
	"""
	<h5>Description:</h5>
	<p>
		Returns the information we have about a torrent that has been added.
		If the torrent metadata is still being retrieved, 'torrent.retrieving_data' will be 1.
		If the torrent metadata has been retrieved, 'torrent.download_link' will have a value.
	</p>

	<h5>Parameters:</h5>
	<ul>
		<li><strong>hash</strong> - The hash returned from the /api/upload call</li>
	</ul>

	<h5>Returns:</h5>
	<p>
		On success (metadata is still downloading):
<pre>
{
    "data": {		        
        "torrent": {		            
            "retrieving_data": 1,
            "hash": "ddceab34ac388ca56b0cdbc6eb726ca1844233c6"		            
        }		        
    },
    "success": true
}
</pre>
		On success (metadata has downloaded):
<pre>
{
    "data": {
        "files": [{
                "size_bytes": 392560640,
                "full_location": "Pioneer.One.S01E03.Xvid-VODO/Pioneer.One.S01E03.Xvid-VODO.avi",
                "name": "Pioneer.One.S01E03.Xvid-VODO.avi"
        }],
        "torrent": {
            "hash": "ddceab34ac388ca56b0cdbc6eb726ca1844233c5",
            "name": "Pioneer.One.S01E03.Xvid-VODO",
            "download_link": "/api/metadata/ddceab34ac388ca56b0cdbc6eb726ca1844233c5.torrent",
            "total_size_bytes": 402477447
        },
        "trackers": [{
                "leechers": 0,
                "completed": 0,
                "seeds": 0,
                "tracker_url": "udp://tracker.openbittorrent.com:80"                
        }]
    },
    "success": true
}
</pre>
		On error:
<pre>
{
    "message": "Torrent ddceab34ac388ca56b0cdbc6eb726ca1844233c6 is not in database! Call /api/upload to add it",
    "success": false
}
</pre>
	</p>

	<h5>Example:</h5>
	<p>
		<strong>Check the info of torrent 'ddceab34ac388ca56b0cdbc6eb726ca1844233c6'</strong><br />
		/api/info/ddceab34ac388ca56b0cdbc6eb726ca1844233c6
	</p>
	"""
	if not hash or not is_hash(hash):
		return api_error("%s is not a valid hash" % hash)
	db = database.get_cursor()
	db.execute("SELECT id, hash, name, total_size_bytes, retrieving_data FROM torrent WHERE hash=%s", hash)
	torrent = db.fetchone()
	if not torrent:
		return api_error("Torrent %s is not in database! Call /api/upload to add it" % hash)
	id = torrent["id"]
	del torrent["id"]

	if torrent["retrieving_data"]:
		del torrent["total_size_bytes"]
		del torrent["name"]
		return api_success({
			"torrent" : torrent
		})
	else:
		del torrent["retrieving_data"]
		torrent["download_link"] = get_url("/api/metadata/<hash:re:[a-fA-F0-9]{40}>.torrent", hash=hash)
		torrent["nice_size"] = size(torrent["total_size_bytes"])

	db.execute("SELECT tracker_url, seeds, leechers, completed, scrape_error FROM tracker WHERE torrent_id=%s", id)
	trackers = db.fetchall()
	for tracker in trackers:
		if not tracker["scrape_error"]:
			del tracker["scrape_error"]

	db.execute("SELECT name, full_location, length_bytes as size_bytes FROM file WHERE torrent_id=%s", id)
	files = db.fetchall()
	for file in files:
		file["nice_size"] = size(file["size_bytes"])

	db.close()
	return api_success({
		"torrent" : torrent, "files" : files, "trackers" : trackers
	})
Example #17
def scrape_trackers(hash, tracker_list):
    db = database.get_cursor()
    for url in tracker_list:
        try:
            result = scrape(url, [hash])
            for hash, stats in result.iteritems():
                db.execute(
                    """UPDATE tracker SET seeds=%s, leechers=%s, completed=%s,
					last_scrape=CURRENT_TIMESTAMP, scrape_error=NULL
					WHERE torrent_id=(SELECT id FROM torrent WHERE hash=%s) AND tracker_url=%s""",
                    (stats["seeds"], stats["peers"], stats["complete"], hash,
                     url))
            db.commit()
        except (RuntimeError, NameError, ValueError, socket.timeout) as e:
            db.execute(
                """UPDATE tracker SET last_scrape=CURRENT_TIMESTAMP, scrape_error=%s
				WHERE tracker_url=%s AND torrent_id=(SELECT id FROM torrent WHERE hash=%s)""",
                (e, url, hash))
            db.commit()
    db.close()


#get all torrents that were still trying to retrieve metadata and re-add them
db = database.get_cursor()
db.execute("SELECT hash FROM torrent WHERE retrieving_data = 1")
data = db.fetchall()
for item in data:
    logger.debug("Reloading '%s' since its metadata hasnt been retrieved yet" %
                 item["hash"])
    add_to_database(item["hash"], get_magnet_uri(item["hash"]), True)
db.close()