Example No. 1
def list_folder(root_folder_name):
	# Walk the folder recursively; relies on module-level client, f (log file),
	# parser (dateutil) and the cutoff_bottom/cutoff_top datetimes.
	f.write("Traversing " + root_folder_name + '\n')

	folder_metadata = client.metadata(root_folder_name, list=True, include_deleted=True)

	for file_name in folder_metadata['contents']:
		if file_name['is_dir']:
			list_folder(file_name['path'])
		else:
			file_mod = parser.parse(file_name['modified'])

			if file_name.get('is_deleted', False):  # file has been deleted
				if cutoff_bottom <= file_mod <= cutoff_top:  # and only recently
					# Now look for the PREVIOUS version
					revisions = client.revisions(file_name['path'], 100)
					#print "Found " + str(len(revisions)) + " revisions: \n"
					revisions.reverse()
					revisions.pop()  # ignore the latest, this is the deleted version
					myrev = revisions.pop()

					f.write("Recovering " + file_name['path'] + " (" + myrev['modified'] + ", rev " + str(myrev['rev']) + ")\n")

					result = client.restore(file_name['path'], myrev['rev'])
					print "Restored " + file_name['path'] + ". (" + myrev['modified'] + ", " + str(result['size']) + ")"
				else:
					f.write("Not recovering " + file_name['path'] + ' rev ' + file_name['rev'] + '\n')

	return
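list_folder depends on several module-level names the snippet does not define: the DropboxClient, the log file f, dateutil's parser, and the cutoff_bottom/cutoff_top datetimes that bound the recovery window. A minimal setup sketch, assuming a five-day window; the token values and the log file name are placeholders, not part of the original example:

# Setup sketch for list_folder (assumed; credentials and file names are placeholders).
from datetime import datetime, timedelta
from dateutil import parser, tz
from dropbox import client, session

sess = session.DropboxSession('APP_KEY', 'APP_SECRET', 'dropbox')
sess.set_token('ACCESS_KEY', 'ACCESS_SECRET')
client = client.DropboxClient(sess)

f = open('recovery.log', 'w')                    # log file written by list_folder
cutoff_top = datetime.now(tz.tzutc())            # restore only files deleted...
cutoff_bottom = cutoff_top - timedelta(days=5)   # ...within the last five days

list_folder('/')
f.close()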
Example No. 2
def app_list(request):
    client = db_client(request)
    if not client:
        return redirect(dropbox_auth)
    folder_metadata = client.metadata('/')
    folder_names = [metadata["path"][1:] for metadata in folder_metadata["contents"] if metadata["is_dir"]]
    return render(request, "app_list.html", {"apps": folder_names})
Example No. 3
File: app.py Project: sirwart/Tou
def dropbox_ls():
    dropbox_access_token = session.get(DROPBOX_ACCESS_KEY, None)
    if dropbox_access_token is None:
        return "please log into dropbox first"

    root = request.args.get('dir', '/')
    dropbox_client = get_client(dropbox_access_token)
    try:
        resp = dropbox_client.metadata(root)
        if 'contents' in resp:
            results = list()
            for f in resp['contents']:
                result = dict()
                result['name'] = os.path.basename(f['path'])
                result['path'] = f['path']
                result['isDirectory'] = f['is_dir']
                result['isFile'] = not f['is_dir']
                results.append(result)
            return json.dumps(results)
        else:
            return "malformed response from dropbox:\n %s" % str(resp)
    except Exception, e:
        return str(e)
Example No. 4
def app_list(request):
    client = db_client(request)
    if not client:
        return redirect(dropbox_auth)
    folder_metadata = client.metadata('/')
    folder_names = [
        metadata["path"][1:] for metadata in folder_metadata["contents"]
        if metadata["is_dir"]
    ]
    return render(request, "app_list.html", {"apps": folder_names})
Example No. 5
File: views.py Project: osa1/dolap
def collect_files(path, client, result):
    """Recursively collect files in path and it's subfolders using an Dropbox client"""
    files = client.metadata(path)['contents']
    print files
    for f in files:
        if f['is_dir']:
            collect_files(f['path'], client, result)
        else:
            result.append(f)

    return result
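Since collect_files both appends to and returns result, callers typically pass in a fresh list. A short usage sketch, assuming client is an authenticated DropboxClient as in the other examples:

all_files = collect_files('/', client, [])
for f in all_files:
    print f['path'], f['bytes']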
Example No. 6
def get_metadata(access_token,d):
  """Returns a metadata listing on directory/file"""
  resp = {}
  message = ''
  try:
    client = get_client(access_token)
    resp = client.metadata(d)
  except rest.ErrorResponse as e:
    message = str(e)

  return (resp,message)
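get_metadata returns an empty dict together with an error message when the call fails, so callers can branch on the message. A possible calling pattern; the access token and path are placeholders:

# access_token is assumed to have been obtained elsewhere; '/Photos' is illustrative.
resp, message = get_metadata(access_token, '/Photos')
if message:
    print "Dropbox error:", message
else:
    for entry in resp.get('contents', []):
        print entry['path']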
Example No. 7
def recover_tree(folder="/", recover_to=recover_to):
    # called recursively. We're going to walk the entire Dropbox
    # file tree, starting at 'folder', files first, and recover anything
    # deleted in the last 5 days.
    print "walking in %s" % folder

    try:
        meta = client.metadata(folder, include_deleted=True, file_limit=10000)
    except rest.ErrorResponse, e:
        print e  # normally "too many files". Dropbox will only list 10000 files in
        # a folder. There is probably a way around this, but I haven't needed it yet.
        return
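The fragment stops after fetching the folder listing. One way the walk could plausibly continue, following the five-day window mentioned in the comment above and assuming dateutil's parser and tz plus datetime/timedelta are imported at module level (this continuation is a sketch, not the author's original code):

    cutoff = datetime.now(tz.tzutc()) - timedelta(days=5)
    for entry in meta['contents']:
        if entry['is_dir']:
            recover_tree(entry['path'], recover_to)
        elif entry.get('is_deleted') and parser.parse(entry['modified']) >= cutoff:
            # revisions are newest first; revs[0] is the delete marker
            revs = client.revisions(entry['path'], rev_limit=2)
            if len(revs) >= 2:
                client.restore(entry['path'], revs[1]['rev'])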
Example No. 8
def get_directory(client, current_path = "/"):
	listing = client.metadata(current_path, list=True, file_limit=10000, hash=None, rev=None, include_deleted=False)
	info = []
	for content in listing['contents']:
		path = content['path'].split(current_path)[1:]
		path = ''.join(path)
		is_dir = content['is_dir']
		insert = {'path' : path, 'is_dir' : is_dir}
		info.append(insert)
	info = sorted(info, key = itemgetter('is_dir'), reverse=True) # sort by type
	data = {'directory' : info, 'current_path' : current_path}
	return data
Example No. 9
def process_folder(path):
    print 'process folder', path.encode('ascii', 'replace')
    folder_metadata = client.metadata(path, include_deleted=True)
    for meta in folder_metadata['contents']:
        if meta['is_dir']:
            process_folder(meta['path'])
        elif meta.get('is_deleted', False):
            f = meta['path']
            print 'restore file', f.encode('ascii', 'replace')
            revs = client.revisions(f, rev_limit=2)
            if len(revs) >= 2:
                client.restore(f, revs[1]['rev'])
Example No. 10
def check_status(job):
    """
    Checks the status of a job, returns metadata if complete, False otherwise.
    """
    client = get_dropbox_client()

    try:
        return client.metadata(
            '/Video Automation Platform/jobs/{job}/{job}.png'.format(job=job))

    except ErrorResponse:
        return False
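check_status returns the PNG's metadata once the job has produced output and False otherwise, which lends itself to a simple polling loop. A usage sketch; the timeout and interval values are arbitrary:

import time

def wait_for_job(job, timeout=300, interval=10):
    # Poll check_status until it returns metadata or the timeout expires.
    deadline = time.time() + timeout
    while time.time() < deadline:
        meta = check_status(job)
        if meta:
            return meta
        time.sleep(interval)
    return False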
Example No. 11
def recover_tree(folder = "/", recover_to=recover_to):
    # called recursively. We're going to walk the entire Dropbox
    # file tree, starting at 'folder', files first, and recover anything
    # deleted in the last 5 days.
    print "walking in %s"%folder

    try:
        meta = client.metadata(folder, include_deleted=True, file_limit=10000)
    except rest.ErrorResponse, e:
        print e # normally "too many files". Dropbox will only list 10000 files in
        # a folder. THere is probably a way around this, but I haven't needed it yet.
        return
Example No. 12
  def posts(self, page=1):
    if self.session.needs_authentication():
      raise cherrypy.HTTPRedirect(self.session.get_auth_url('/set_dropbox_auth'))
    
    client = self.session.get_client()
    posts = []
    contents = sorted(client.metadata('/')['contents'], key=lambda post: parse(post['client_mtime']), reverse=True)
    startIdx = (int(page)-1) * 10
    for f in contents[startIdx:startIdx+10]:
      if(f['path'].endswith('.md')):
        posts.append(self.generator.generate_post(f['path']))  

    num_posts = len(contents)
    are_more_posts =  (int(page) * 10) < num_posts
    
    template = Template(filename='index.html')	

    return template.render(posts=posts, is_index=True, is_post=False, are_more_posts=are_more_posts, page=int(page), analytics=analytics )
Example No. 13
    def retrieve_images(self):
        """List files in the remote directory"""

        dropbox_client = self.get_client()

        # List the contents of the current remote path
        meta = dropbox_client.metadata(self.current_path)
        filelist = []

        for item in meta['contents']:
            if item['is_dir']:
                filelist += self._listfiles(dropbox_client, item['path'])
            else:
                filelist.append(item['path'])

        return filelist
Example No. 14
    def download(self,file,options):
        '''Plugin interface method to download a file and create a new dataset

        :param file: Path to the remote file to download
        :type file: str
        :param options: context parameters
        :type options: list
        :return: tmp file path
        '''
        import logging
        import tempfile

        from dropbox import client
        self.sess.set_token(options['drop_access_token'].key,options['drop_access_token'].secret)
        client = client.DropboxClient(self.sess)
        logging.warn("DropBox - download request for "+file)
        folder_metadata = client.metadata('/')
        logging.warn("/ content "+str(folder_metadata))
        f, metadata = client.get_file_and_metadata(file)
        (out, file_path) = tempfile.mkstemp()
        output_file = open(file_path, 'wb')
        output_file.write(f.read())
        output_file.close()
        return file_path
Example No. 15
def main():
	# Include the Dropbox SDK libraries
	from dropbox import client, rest, session

	# Get your app key and secret from the Dropbox developer website
	APP_KEY = 'INSERT_APP_KEY_HERE'
	APP_SECRET = 'INSERT_SECRET_HERE'

	# ACCESS_TYPE should be 'dropbox' or 'app_folder' as configured for your app
	ACCESS_TYPE = 'INSERT_ACCESS_TYPE_HERE'

	sess = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)

	request_token = sess.obtain_request_token()

	# Make the user sign in and authorize this token
	url = sess.build_authorize_url(request_token)
	print "url:", url
	print "Please authorize in the browser. After you're done, press enter."
	raw_input()

	# This will fail if the user didn't visit the above URL and hit 'Allow'
	access_token = sess.obtain_access_token(request_token)

	client = client.DropboxClient(sess)
	print "linked account:", client.account_info()

	f = open('working-draft.txt')
	response = client.put_file('/magnum-opus.txt', f)
	print "uploaded:", response

	folder_metadata = client.metadata('/')
	print "metadata:", folder_metadata

	f, metadata = client.get_file_and_metadata('/magnum-opus.txt')
	out = open('magnum-opus.txt', 'w')
	out.write(f.read())
	out.close()
	print(metadata)
Example No. 16
def list_files(src):
    response = client.metadata(src)
    #print "Response:\n" + response
    return response
Example No. 17
# Include the Dropbox SDK libraries
from dropbox import client, rest, session
import os

# Get your app key and secret from the Dropbox developer website
APP_KEY = 'e6lpdx1vtw8bc0p'
APP_SECRET = 'dg30b9zm1biajh3'

# ACCESS_TYPE should be 'dropbox' or 'app_folder' as configured for your app
ACCESS_TYPE = 'app_folder'

sess = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)

import simplejson as json
with open('token', 'r') as f:
    access_token = json.load(f, encoding='utf-8')
    
sess.set_token(access_token["key"].encode(), access_token[u"secret"].encode())

client = client.DropboxClient(sess)
#print "linked account:", client.account_info()

# folder metadata
folder_metadata = client.metadata('/')
#print "metadata:", folder_metadata

for content in folder_metadata["contents"]:
    print "%s(%d)" % (content["path"], int(content["bytes"]))

Example No. 18
sess = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)

request_token = sess.obtain_request_token()

url = sess.build_authorize_url(request_token)

print url
raw_input()

access_token = sess.obtain_access_token(request_token)

client = client.DropboxClient(sess)

print "linked account:" 
pprint(client.account_info())

folder_metadata = client.metadata('/')

print "metadata:" 
pprint(folder_metadata)

if folder_metadata['is_dir']:
	print "isdir"
else:
	print "ko"

for folder in [n['path'][1:] for n in client.metadata('/')['contents']]:
	print folder


Example No. 19
def request_cat_media_links():
    files = client.metadata("/")["contents"]  # Get all the files in cat folder
    paths = [file["path"] for file in files]  # Get the paths of all files
    return [client.media(path) for path in paths]  # Get media for each path
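In the v1 API, client.media(path) returns a dict with a temporary 'url' and an 'expires' timestamp, so the result of request_cat_media_links can be iterated directly. A small usage sketch:

for media in request_cat_media_links():
    print media['url'], 'expires', media['expires']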
Example No. 20
def db_client_update_file(client, filename, file):
    try:
        parent_rev = client.metadata(filename)['rev']
    except Exception:
        parent_rev = None
    client.put_file(filename, file, parent_rev=parent_rev)
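db_client_update_file looks up the current rev so that put_file overwrites the existing file rather than creating a conflicted copy when it already exists. A usage sketch, assuming client is an authenticated DropboxClient; the file names are placeholders:

with open('report.csv', 'rb') as local_file:
    db_client_update_file(client, '/reports/report.csv', local_file)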
Example No. 21
client = client.DropboxClient(sess)

# Show account information...method returns a dictionary, store that for further tests -- Not needed for now
#Client_info = client.account_info()



# Now create a dictionary mapping each file name to its size
if os.path.lexists(SYNC_DIR):    
    for file in os.listdir(SYNC_DIR):
        SYNC_DIR_LIST[file] = os.path.getsize(SYNC_DIR + '/' + file)


#Get the same from your dropbox

if any(DEST_DIR in entry['path'] for entry in client.metadata('/')['contents']):
    SYNC_DIR_META = client.metadata(DEST_DIR)
    print 'Destination directory already exists'
else:
    client.file_create_folder(DEST_DIR)
    SYNC_DIR_META = client.metadata(DEST_DIR)
    print 'Just created the destination directory'
    
SYNC_DIR_META_LIST = {}
#Create a dictionary with the same info as SYNC_DIR_LIST
for file in SYNC_DIR_META['contents']:
    SYNC_DIR_META_LIST[os.path.split(file['path'])[1]] = file['bytes']

# Copy only if the file does not exist on Dropbox or its size differs from the source
# (a modified file); skip names starting with '.' (hidden files)
for file in SYNC_DIR_LIST:
    if (file[0] != '.'):
Example No. 22
	print "url:", url
	print "Please authorize in the browser. After you're done, press Enter."
	raw_input()

	access_token = sess.obtain_access_token(request_token)
	try:
		with open("/tmp/dropbox_token.pkl", "w") as f:
			f.write("%s|%s" % (access_token.key, access_token.secret))
	except IOError:
		pass
	except AttributeError as e:
		print "Internal Error: " % e

client = client.DropboxClient(sess)

m = client.metadata("/")
if m['is_dir']:
	files = m['contents']
else:
	files = [m]
for (i,f) in enumerate(files):
	print i, f['path']

print "Which file?"
ixFile = int(raw_input())
selectedFile = files[ixFile]
if selectedFile['is_dir']:
	print "Sorry, but only files have revisions."
	sys.exit(2)
print "Getting revisions of %s" % selectedFile['path']
revs = client.revisions(selectedFile['path'])
Example No. 23
def db_client_update_file(client, filename, file):
    try:
        parent_rev = client.metadata(filename)['rev']
    except Exception:
        parent_rev = None
    client.put_file(filename, file, parent_rev=parent_rev)
    	print "Creating directory:", path 
        os.makedirs(path)
        
        
def lreplace(pattern, sub, string):
    return re.sub('^%s' % pattern, sub, string)

# make connection to dropbox
print "Connecting to dropbox..."
sess = session.DropboxSession(args.app_key, args.app_secret, args.access_type)
sess.set_token(args.access_token_key, args.access_token_secret)
client = client.DropboxClient(sess)

# get list of files for args.dropbox_dir
print "Fetching list of files in:", args.dropbox_dir
metadata = client.metadata(args.dropbox_dir)
def files_only(metadata): return not metadata['is_dir']
files = map(lambda metadata: metadata['path'], filter(files_only, metadata['contents']))

# download files to args.dest_dir
assure_path_exists(args.dest_dir)
for file in files:
    dest_file = args.dest_dir + lreplace(args.dropbox_dir, '', file)
    print "Downloading file: %s to: %s" % (file, dest_file)
    response = client.get_file(file)
    with open(dest_file, 'wb') as out:
        while not response.isclosed():
            out.write(response.read(1024 * 1024))
    
# remove files in args.dropbox_dir
if args.remove_downloaded_files == True:
Example No. 25
def blog_index_handle(template='index.html', content_type='text/html'):
    log.debug('blog_index_handle()')

    target_file = "posts"

    client = dropbox.client.DropboxClient(CONFIG.DROPBOX_PRIVATE_TOKEN)

    dropbox_response = client.metadata(
        target_file, list=True)

    files = dropbox_response['contents']

    files = sorted(
        files,
        key=lambda f: f['path'],
        reverse=True)
    files = files[:10]

    log.debug(files)

    for f in files:
        log.debug(f['path'])

        file_response, dropbox_meta = client.get_file_and_metadata(
            f['path'])

        f.update(dropbox_meta)
        log.debug(f['path'])

        file_content = file_response.read()

        fdata = process_markdown(
            target_file, file_content)

        log.debug(fdata)

        f.update(fdata)
        log.debug(f['path'])

        # fix title
        f.update(f['meta'])
        if 'Title' in f:
            f['title'] = f['Title']

        # permalink
        f['permalink'] = url_for_path(f['path'])

        log.debug(f)

    # log.debug(files)

    template_response, meta = client.get_file_and_metadata(
        'templates/%s' % template)
    template_content = template_response.read()

    page_content = render_template(template_content, {
            'posts': files,
        })

    resp = make_response(page_content)
    resp.headers["Content-Type"] = content_type

    return resp
Example No. 26
def list_files(src):
	response = client.metadata(src)
	#print "Response:\n" + response
	return response
Example No. 27
# coding: utf-8

# https://forum.omz-software.com/topic/3224/tls-ssl-exceptions-from-pythonista-2-1/2

import dropbox
from dropbox import client, session

APP_KEY = 'aaaaaaaaaaaaaaa'
APP_SECRET = 'bbbbbbbbbbbbbbb'
ACCESS_TYPE = 'dropbox'
ACCESS_KEY = 'cccccccccccccccc'
ACCESS_SECRET = 'ddddddddddddddd'

sess = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)
sess.set_token(ACCESS_KEY, ACCESS_SECRET)
client = client.DropboxClient(sess)
folder_metadata = client.metadata('/')
# --------------------
dropbox.dropbox.requests.packages.urllib3.disable_warnings()
# --------------------

Example No. 28
def recursive_get_folder(client, folder):
    folder_metadata = client.metadata(folder)

    for c in folder_metadata["contents"]:
        if c["is_dir"]:
            dirname = BASE_PATH + c["path"]
            sys.stdout.write("Found directory %s ... " % dirname)
            sys.stdout.flush()

            if os.path.exists(dirname):
                sys.stdout.write("Exists local.\n")
            else:
                sys.stdout.write("Creating local...")
                sys.stdout.flush()
                os.makedirs(dirname)
                sys.stdout.write("Done.\n")

            sys.stdout.flush()

            previous_cwd = os.getcwd()
            os.chdir(dirname)
            recursive_get_folder(client, c["path"])
            os.chdir(previous_cwd)
        else:
            # Check if the file exists
            download = True
            local_filename = BASE_PATH + c["path"]
            sys.stdout.write("Checking %s ... " % c["path"])
            sys.stdout.flush()

            if os.path.exists(local_filename):
                sys.stdout.write("Exists ")
                sys.stdout.flush()
                # Compare if they are the same
                # FIXME: Improve reliability, e.g. by also comparing mtimes:
                # if os.path.getmtime(local_filename) == c["client_mtime"]:
                if os.path.getsize(local_filename) == c["bytes"]:
                    sys.stdout.write("and are the same.\n")
                    sys.stdout.flush()
                    download = False
                else:
                    sys.stdout.write("but aren't the same. ")
            else:
                sys.stdout.write("Does not exist. ")
                sys.stdout.flush()

            if download:
                # Download the file
                sys.stdout.write("\nDownloading.. ")
                sys.stdout.flush()

                out = open(local_filename, "wb")
                f, metadata = client.get_file_and_metadata(c["path"])
                total_size = metadata["bytes"]
                written = 0
                while True:
                    data = f.read(CHUNK_SIZE)
                    if data == '':
                        break
                    else:
                        out.write(data)
                        written = written + len(data)

                    percent = written * 30 / total_size

                    #percent_line = "[" + ("*" * percent) +
                    #               (" " * (30 - percent)) +
                    #                "] " + str(written) + "/" +
                    #                str(total_size)
                    percent_line = "[%s%s] %d/%d" % (("*" * percent),
                                                     (" " * (30 - percent)),
                                                     written, total_size)

                    sys.stderr.write(percent_line)
                    sys.stderr.write("\b" * (len(percent_line)))
                    sys.stderr.flush()

                out.close()

                sys.stdout.write("Done. \n")