Example #1
def list_folder(root_folder_name):
    # Recursively walk a folder and restore files deleted inside the cutoff window.
    # Relies on module-level globals: client, f (log file), parser (dateutil),
    # and the datetime bounds cutoff_bottom / cutoff_top.
    f.write("Traversing " + root_folder_name + '\n')

    folder_metadata = client.metadata(root_folder_name, list=True, include_deleted=True)

    for file_name in folder_metadata['contents']:
        if file_name['is_dir']:
            list_folder(file_name['path'])
        else:
            file_mod = parser.parse(file_name['modified'])

            if file_name.get('is_deleted', False):  # file has been deleted
                if cutoff_bottom <= file_mod <= cutoff_top:  # and only recently
                    # Now look for the PREVIOUS version
                    revisions = client.revisions(file_name['path'], 100)
                    #print "Found " + str(len(revisions)) + " revisions: \n"
                    revisions.reverse()
                    revisions.pop()  # ignore the latest, this is the deleted version
                    myrev = revisions.pop()

                    f.write("Recovering " + file_name['path'] + " (" + myrev['modified'] + ", rev " + str(myrev['rev']) + ")\n")

                    result = client.restore(file_name['path'], myrev['rev'])
                    print "Restored " + file_name['path'] + ". (" + myrev['modified'] + ", " + str(result['size']) + ")"
                else:
                    f.write("Not recovering " + file_name['path'] + ' rev ' + file_name['rev'] + '\n')

    return
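The function above depends on globals that the snippet never defines. A minimal sketch of that setup, assuming the legacy Dropbox SDK v1 on Python 2 and a previously obtained OAuth 1 token (APP_KEY, APP_SECRET, TOKEN_KEY and TOKEN_SECRET are placeholders, and the one-week window is only an example):

# Assumed setup, not part of the original example.
import datetime
from dateutil import parser
from dateutil.tz import tzutc
from dropbox import client, session

sess = session.DropboxSession(APP_KEY, APP_SECRET, 'dropbox')
sess.set_token(TOKEN_KEY, TOKEN_SECRET)        # token from an earlier OAuth 1 handshake
client = client.DropboxClient(sess)            # rebinds the module name, as the examples below do

f = open('recovery.log', 'w')                  # log file written by list_folder()
cutoff_top = datetime.datetime.now(tzutc())    # restore only files deleted between
cutoff_bottom = cutoff_top - datetime.timedelta(days=7)  # one week ago and now

list_folder('/')
f.close()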
Example #2
def process_folder(path):
    print 'process folder', path.encode('ascii', 'replace')
    folder_metadata = client.metadata(path, include_deleted=True)
    for meta in folder_metadata['contents']:
        if meta['is_dir']:
            process_folder(meta['path'])
        elif meta.get('is_deleted'):
            path_to_restore = meta['path']
            print 'restore file', path_to_restore.encode('ascii', 'replace')
            # revisions come back newest first, so revs[1] is the version before the delete
            revs = client.revisions(path_to_restore, rev_limit=2)
            if len(revs) >= 2:
                client.restore(path_to_restore, revs[1]['rev'])
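A brief usage note: process_folder only needs a DropboxClient bound to the global client (the session setup in Example #4 would do), so scanning the whole account is, for instance:

process_folder('/')   # walk the entire Dropbox and restore every deleted file it finds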
Example #3
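This example begins mid-way through its token handling: the lines above the snippet presumably check for a cached token in TOKEN_FILE and run the OAuth 1 handshake when none is found. A sketch of that assumed preamble (APP_KEY, APP_SECRET, TOKEN_FILE and FILE_PATH are placeholders; only the code further below is the original):

# Assumed preamble, not part of the original example.
import os
from dropbox import client, session

TOKEN_FILE = "dropbox_token.txt"        # hypothetical cache location
sess = session.DropboxSession(APP_KEY, APP_SECRET, 'dropbox')

token_key = token_secret = None
if os.path.exists(TOKEN_FILE):
    token_key, token_secret = open(TOKEN_FILE).read().strip().split('|')

if not token_key:
    # no cached token: OAuth 1 handshake, as in Example #4
    request_token = sess.obtain_request_token()
    print "Visit and press 'Allow':", sess.build_authorize_url(request_token)
    raw_input()
    access_token = sess.obtain_access_token(request_token)
    token_key = access_token.key
    # ...the original snippet picks up here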
    token_secret = access_token.secret
    
    f = open(TOKEN_FILE, 'w')
    f.write("%s|%s" % (token_key, token_secret) )
    f.close()
    print "wrote new token"
else:
    sess.set_token(token_key, token_secret)

client = client.DropboxClient(sess)

print "client constructed"

recentf, recentmeta = client.get_file_and_metadata(FILE_PATH)

revisions = client.revisions(FILE_PATH)

print "files/revisions obtained"

DAT_PATH = "/.DocumentRevisions-V100/DAT/"

if not os.path.exists(DAT_PATH):
    os.makedirs(DAT_PATH)

for i in revisions:
    rev = i['rev']
    print "revision", rev, "obtained"
    f = client.get_file(FILE_PATH, rev)
    outfile = open(DAT_PATH + rev + "_dat.txt", 'wb')
    outfile.write(f.read())
    outfile.close()
    f.close()
Example #4
# Assumed imports for this snippet: json and the v1 SDK modules.
import json
from dropbox import client, session

# ACCESS_TYPE should be 'dropbox' or 'app_folder' as configured for your app
ACCESS_TYPE = 'dropbox'
sess = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)
# get req token
request_token = sess.obtain_request_token()
url = sess.build_authorize_url(request_token)
print "url:", url
print "Please visit this website and press the 'Allow' button, then hit 'Enter' here."
raw_input()

# This will fail if the user didn't visit the above URL and hit 'Allow'
access_token = sess.obtain_access_token(request_token)

client = client.DropboxClient(sess)

with open('out_grepped.txt', 'rb') as f:
    for line in f:
        delfile = json.loads(line)
        print delfile['path']
        revs = client.revisions(delfile['path'])
        for r in revs:
            print r
            if 'is_deleted' not in r:
                rev = r['rev']
                client.restore(delfile['path'], rev)
                break
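The loop above expects out_grepped.txt to contain one JSON object per line with at least a path key; hypothetical content might look like:

{"path": "/Photos/2013/IMG_0042.jpg", "is_deleted": true}
{"path": "/notes/todo.txt", "is_deleted": true}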




Example #5
        date = datetime.datetime.strptime(filedata["modified"], DATE_FORMAT)

        # this is where we'll restore it to.
        target = os.path.join(recover_to, filedata["path"][1:])

        if os.path.exists(target):
            # already recovered
            pass
        elif date < datetime.datetime.now() - datetime.timedelta(days=MAX_DAYS):
            # not deleted recently
            pass
        else:
            print "  %s is deleted"%(filedata["path"])

            # fetch file history, and pick the first non-deleted revision.
            revisions = client.revisions(filedata["path"], rev_limit=10)
            alive = filter(lambda r: not r.get("is_deleted", False), revisions)[0]

            # create destination folder.
            try:
                os.makedirs(os.path.dirname(target))
            except OSError:
                pass

            if USE_RESTORE:

                restore = client.restore(filedata["path"], alive["rev"])
                print restore
            else:

                # try to download file.
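The example is cut off at this point. A sketch of how the download branch could continue, reusing client.get_file the way Example #3 does (this continuation is an assumption, not the original author's code; indentation adjusted):

# Assumed continuation of the else-branch above: download the last
# non-deleted revision to the local target instead of restoring it.
f = client.get_file(filedata["path"], rev=alive["rev"])
out = open(target, "wb")
out.write(f.read())
out.close()
f.close()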
Example #6
m = client.metadata("/")
if m['is_dir']:
	files = m['contents']
else:
	files = [m]
for (i,f) in enumerate(files):
	print i, f['path']

print "Which file?"
ixFile = int(raw_input())
selectedFile = files[ixFile]
if selectedFile['is_dir']:
	print "Sorry, but only files have revisions."
	sys.exit(2)
print "Getting revisions of %s" % selectedFile['path']
revs = client.revisions(selectedFile['path'])
print "There are %d revisions of %s." % (len(revs), selectedFile['path'])
if len(revs) < 2:
	response = raw_input("There's only one revision of this file. Are you sure you want to export it? [Y/n] ")
	if not response or response.lower()[0] == 'y':
		pass
	else:
		print "Aborting."
		sys.exit(0)

basename = selectedFile['path'].split('/')[-1]
print "basename: %s" % str(basename)

scratchDir = tempfile.mkdtemp()
os.chdir(scratchDir)
f = open(os.path.join(scratchDir, basename), "w")
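The snippet ends right after the scratch file is opened. One plausible way to finish the export is to fetch the newest revision into that file (an assumption, not the original code; the v1 API lists revisions newest first, as Example #2 also relies on):

# Assumed continuation of the example above.
data = client.get_file(selectedFile['path'], rev=revs[0]['rev'])
f.write(data.read())
data.close()
f.close()
print "Wrote newest revision of %s to %s" % (basename, scratchDir)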
Example #7
#print "linked account:", client.account_info()

# To store time stamps of each revision
dictTimeStamps={}

# To Get All Files in a Folder you can use:
#folder_metadata = client.metadata('/Research/ThesisOzanSener/')#name of the folder we are interested
#for i in range(1,len(folder_metadata['contents'])):
#	if folder_metadata['contents'][i]['is_dir']==False:

# Full paths of the files whose revision histories we want
filel = ['file1', 'file2']
for fN in filel:
    dictFS = {}     # word count per revision, before stripping LaTeX comments
    dictFSStr = {}  # word count per revision, after stripping
    revs = client.revisions(fN)
    for i in range(len(revs)):  # at most 1,000 revisions in the default case
        f, metadata = client.get_file_and_metadata(revs[i]['path'], revs[i]['rev'])  # fetch data
        # Write this revision of the file to disk
        out = open("RESULT_DIR" + revs[i]['path'] + str(revs[i]['revision']), 'w')
        out.write(f.read())
        out.close()
        dictTimeStamps[revs[i]['revision']] = revs[i]['modified']
        # Process the file to get the necessary data
        wcBeforeStrip = int(commands.getstatusoutput('wc -w ' + "RESULT_DIR" + revs[i]['path'] + str(revs[i]['revision']) + ' |cut -f1 -d" "')[1])
        os.system("./StripLatexComments.pl " + "RESULT_DIR" + revs[i]['path'] + str(revs[i]['revision']) + " >tmp.txt")
        wcAfterStrip = int(commands.getstatusoutput('wc -w tmp.txt |cut -f1 -d" "')[1])
        # Store the data
        dictFS[revs[i]['revision']] = wcBeforeStrip
        dictFSStr[revs[i]['revision']] = wcAfterStrip
    # Save the story of a single file to a CSV file
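The example stops at that last comment. A sketch of what the per-file CSV save could look like, using the three dictionaries built above (the filename scheme and column layout are assumptions; the block would sit inside the for fN loop):

# Assumed continuation: dump one CSV row per revision of the current file.
import csv
story = open(fN.replace('/', '_') + '_story.csv', 'w')
writer = csv.writer(story)
writer.writerow(['revision', 'modified', 'words_before_strip', 'words_after_strip'])
for rev_id in sorted(dictFS):
    writer.writerow([rev_id, dictTimeStamps[rev_id], dictFS[rev_id], dictFSStr[rev_id]])
story.close()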