def module_test():
    """ Quick test using…

            python -m pyrocore.daemon.webapp
    """
    import pprint
    from pyrocore import connect

    try:
        engine = connect()
        # Print engine status before and after dumping the stats, so a
        # connection drop during the dump is visible in the output.
        print("%s - %s" % (engine.engine_id, engine.open()))
        # NOTE(review): `stats`, `error` and `xmlrpc` must be imported at
        # module level elsewhere in this file — confirm before moving this.
        pprint.pprint(stats.engine_data(engine))
        print("%s - %s" % (engine.engine_id, engine.open()))
    # FIX: `except (...), torrent_exc` is Python-2-only syntax; `as` works
    # on Python 2.6+ and Python 3 alike.
    except (error.LoggableError, xmlrpc.ERRORS) as torrent_exc:
        print("ERROR: %s" % torrent_exc)
def module_test():
    """ Quick test using…

            python -m pyrocore.torrent.jobs
    """
    import pprint
    from pyrocore import connect

    try:
        engine = connect()
        print("%s - %s" % (engine.engine_id, engine.open()))
        data, views = _flux_engine_data(engine)
        # FIX: the old `print "data = ", pprint.pprint(data)` appended a
        # stray "None" to the output (pprint.pprint returns None) and used
        # Python-2-only print statements; label and dump separately instead.
        print("data = ")
        pprint.pprint(data)
        print("views = ")
        pprint.pprint(views)
        print("%s - %s" % (engine.engine_id, engine.open()))
    # FIX: `except (...), torrent_exc` is Python-2-only; `as` is portable.
    except (error.LoggableError, xmlrpc.ERRORS) as torrent_exc:
        print("ERROR: %s" % torrent_exc)
def collectDB():
    """Snapshot each torrent's uploaded counter into the local SQLite DB.

    For every torrent known to rTorrent, insert a timestamped row into
    ``hash_uploaded`` when the hash is new or its uploaded counter changed
    since the most recent recorded row, and ensure a ``hash_alias`` row
    exists for the hash.

    Returns the number of ``hash_uploaded`` rows inserted.
    """
    # setup pyrocore
    from pyrocore import connect
    rt = connect()

    # setup sqlite
    conn = sqlite3.connect(os.path.expanduser('~/.config/rtdb.db'))
    # FIX: close the connection even if an execute() raises (was leaked
    # on error before); also `== None` replaced by the idiomatic `is None`.
    try:
        c = conn.cursor()
        changedUploadedRows = 0
        for t in rt.items():
            c.execute('SELECT hash FROM hash_uploaded WHERE hash = ?', [t.hash])
            if c.fetchone() is None:
                # First sighting of this hash: record the initial counter.
                c.execute(
                    'INSERT OR IGNORE INTO hash_uploaded VALUES (?,CURRENT_TIMESTAMP,?)',
                    [t.hash, str(t.uploaded)])
                changedUploadedRows += c.rowcount
            else:
                # Append a new row only when the counter differs from the
                # most recently timestamped value for this hash.
                c.execute('''
                    INSERT INTO hash_uploaded
                    SELECT hu.hash, CURRENT_TIMESTAMP, ? AS input_uploaded
                    FROM hash_uploaded hu
                    JOIN (SELECT hash, max(timestamp) AS max_timestamp
                          FROM hash_uploaded WHERE hash = ?) huc
                      ON hu.hash = huc.hash
                     AND hu.timestamp = huc.max_timestamp
                     AND hu.uploaded <> ?
                    WHERE hu.hash = ?
                ''', [str(t.uploaded), t.hash, str(t.uploaded), t.hash])
                changedUploadedRows += c.rowcount
            c.execute(
                'INSERT OR IGNORE INTO hash_alias VALUES (?,?,CURRENT_TIMESTAMP)',
                [t.hash, t.alias])
        conn.commit()
    finally:
        conn.close()
    return changedUploadedRows
def updateRT():
    """Push aggregated upload stats from SQLite back into rTorrent.

    Reads the ``v_hash_uploaded_rpt`` view — presumably columns are
    (hash, last_day, last_week, last_month); verify against the view
    definition — and stores each value as a custom field on the matching
    torrent via the XMLRPC proxy.
    """
    # set up pyrocore
    from pyrocore import connect
    rt = connect()
    proxy = rt.open()

    # set up sqlite
    conn = sqlite3.connect(os.path.expanduser('~/.config/rtdb.db'))
    try:
        c = conn.cursor()
        c.execute('SELECT * from v_hash_uploaded_rpt')
        # Idiomatic iteration replaces the manual `while True`/fetchone/break
        # loop; sqlite3 cursors are iterable.
        for r in c:
            hash_key = r[0].encode('ascii', 'replace')
            update_values = [
                [hash_key, 'uploaded_last_day', str(r[1])],
                [hash_key, 'uploaded_last_week', str(r[2])],
                [hash_key, 'uploaded_last_month', str(r[3])],
            ]
            for u in update_values:
                try:
                    result = proxy.d.set_custom(*u)
                    if result != 0:
                        # BUG FIX: `'something happened on ' + u` raised
                        # TypeError (str + list); format it instead.
                        print('something happened on %s' % u)
                # NOTE(review): `Fault` must be imported at module level
                # elsewhere in this file (xmlrpclib.Fault) — confirm.
                except Fault as e:
                    print(e)
    finally:
        # FIX: the connection was never closed before.
        conn.close()
print('Warning: we have two files with the same name (%s)' % fname) print(' %s' % file) print(' %s' % files[fname]) print('Skipping file...') files[fname] = '__CONFLICT__' else: files[fname] = file print('{0} files treated, of which {1} have been skipped due to size conflicts'.format(totalItems, conflictItems)) print('It represents about {0} bytes of data to be re-downloaded'.format(conflictBytes)) rt = connect() proxy = rt.open() torrents = proxy.download_list() print('Treating torrents') for hash in torrents: name = proxy.d.get_name(hash) multi = proxy.d.is_multi_file(hash) # exists = os.path.exists(dir) # print(' %s (%s %s)' % (name, dir, u'\033[92m\u2713\033[0m' if exists else u'\033[91m\u2717\033[0m')) # last = os.path.basename(os.path.dirname(dir)) newPath = '/home/username/torrents/rtorrent/Bulk/' + name if multi == 1 else '' fileCount = proxy.d.size_files(hash)