def sample_test():

    print "Testing sample test"

    clquery = doc_client.DocsQuery(show_collections='true', show_root='true')
    client,token = doc_list_util.login('myname', 'mypassword', 'blah')
    #print query_uri
    # maybe pass in query_uri
    feed = client.GetAllResources(show_root=True, q=clquery)

    item0 = feed[0]

    labels0 = item0.GetLabels()
    print "Labels: {0}".format(labels0)
    type0 = item0.GetResourceType()
    print "Resource Type: {0}".format(type0)
    collections0 = item0.InCollections()
    print "Collections: {0}".format(collections0)
    isTrashed0 = item0.IsTrashed()
    print "Is Trashed: {0}".format(isTrashed0)
    deleted0 = item0.deleted
    print "Is Deleted: {0}".format(deleted0)
    filename0 = item0.filename
    print "Filename: {0}".format(filename0)
    id0 = item0.id
    print "ID: {0}".format(id0)
    resourceid0 = item0.resource_id
    print "Resource ID: {0}".format(resourceid0)
    suggestedfilename0 = item0.suggested_filename
    print "Suggested Filename: {0}".format(suggestedfilename0)
    title0 = item0.title
    print "Title: {0}".format(title0)
    updated0 = item0.updated
    print "Updated: {0}".format(updated0)
    
    print "Done testing sample test"


def main():
    logger.log("Preparing to back up Google documents", i_debug=True)

    """
    last_time = last time the docs were backed up.

    For every resource id in the database file
    that has been deleted or
    trashed on Google docs, remove it from disk.

    Build up a list of unique paths for all gdocs
    collections.
    # This handles new, deleted, trashed,
    # and already-existing collections.
    # Note: this will fail if the list of
    # collection paths cannot fit in memory
    new_collection_map = {}
    new_collection_paths_already_used = []
    for coll in collections on gdocs:
      create_collection_paths(coll, new_collection_map, new_collection_paths_already_used, old_collection_map, old_collection_paths_already_used)


    # Create a list of file locations used before the
    # backup
    old_placed_files = []
    for paths_for_file in old_file_map.values():
      old_placed_files.extend(paths_for_file)

    new_file_map = {}
    new_placed_files = []

    for file in files on gdocs:
      place_file(file, new_file_map, new_placed_files, new_collection_map, old_file_map, old_placed_files)



    Store the updated timestamp.
    """

    # Try opening debug file

    try:
        debug_file = open(_debug_file, 'w')
    except IOError:
        output = "Failed to open the debug file at {0}".format(_debug_file)
        logger.log(output, i_debug=_debug, i_force=True)
        debug_file = None

    output = "Preparing to backup Google Docs."
    logger.log(output, i_debug=_debug, i_force=True)

    # Record the time this backup run started
    start_time = time.localtime()
    output = "Time this backup started: {0}".format(start_time)
    logger.log(output, i_debug=_debug, o_log_file=debug_file, i_force=True)

    output = "Opening config file at {0}".format(_config_file)
    logger.log(output, i_debug=_debug, o_log_file=debug_file, i_force=False)
    try:
        config_map = shelve.open(_config_file)
    except Exception:
        output = "Failed to open the config file at {0}".format(_config_file)
        logger.log(output, i_debug=_debug, i_force=True)
        return
    
    # Get the time the last backup was performed
    output = "Getting the time of the last backup."
    logger.log(output, i_debug=_debug, i_force=True)
    prev_time = config_map.get(_time_key)
    if not prev_time:
        # The script has never been run. Set time
        # to be from 1970, before Google Docs
        # ever existed. This ensures ALL data
        # will be synced
        output = "First time script has been run. "
        output += "Downloading all documents."
        logger.log(output, i_debug=_debug, i_force=True)
        prev_time = time.struct_time((
            1970,  # year
            1,     # month
            1,     # day
            0,     # hour
            0,     # minute
            0,     # second
            -1,    # weekday (unused here)
            -1,    # day of year (unused here)
            0,     # DST flag (unused here)
        ))
        # The remaining attributes in time.struct_time don't matter
    output = "Time of last backup: {0}".format(prev_time)
    logger.log(output, i_debug=_debug, o_log_file=debug_file)
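    # Sketch of how prev_time is meant to be used below (not wired in yet):
    # skip resources whose Atom updated timestamp predates the last backup.
    # This assumes each feed entry exposes entry.updated.text in the usual
    # "2012-01-15T08:30:00.000Z" form; note the stored times come from
    # time.localtime(), so a robust version would keep everything in UTC.
    #
    #   updated_struct = time.strptime(entry.updated.text[:19],
    #                                  "%Y-%m-%dT%H:%M:%S")
    #   if updated_struct <= prev_time:
    #       continue  # unchanged since the last backup; nothing to download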

    #client,token = doc_list_util.login('*****@*****.**', 'mypassword', 'my app')
    # TODO the meat of the algorithm
    old_collection_map = config_map.get(_collections_key, dict())
    old_file_map = config_map.get(_files_key, dict())


    #doc_query = doc_service.DocumentQuery()
    #doc_query['show_collections'] = 'true'
    #query_uri = doc_query.ToUri()
    clquery = doc_client.DocsQuery(show_collections='true', show_root='true')
    client,token = doc_list_util.login('myname', 'mypassword', 'blah')
    #print query_uri
    # maybe pass in query_uri
    feed = client.GetAllResources(show_root=True, q=clquery)
    # /feeds/documents/private/full?show_collections=True

    #for item in feed:
        #print "ha"

    """
    For every resource id in the database file
    that has been deleted or
    trashed on Google docs, remove it from disk
    """
    output = "Removing all deleted or trashed docs from "
    output += "disk"
    logger.log(output, i_debug=_debug, i_force=True)

    #files_on_gdocs = client.GetAllResources(show_root=True)

    # This is very slow because it does a full search
    # of the gdocs ids for every single id in the map
    for id in old_file_map.keys():
        for _item in feed:
            if id == _item.resource_id:
                if _item.deleted or _item.IsTrashed():
                    # File has been deleted or trashed on gdocs
                    output = "{0} has been deleted or trashed on gdocs".format(old_file_map[id])
                    logger.log(output, i_debug=_debug, i_force=True)

                    _remove_file(id, old_file_map)
                else:
                    output = "{0} has not been deleted or trashed on gdocs".format(old_file_map[id])
                    logger.log(output, i_debug=_debug, i_force=True)
                # Found the matching entry; stop scanning the feed
                break
        else:
            # File is no longer on gdocs at all
            output = "{0} is no longer on gdocs".format(old_file_map[id])
            logger.log(output, i_debug=_debug, i_force=True)

            _remove_file(id, old_file_map)

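    # A faster variant of the pass above (sketch only): index the feed once so
    # each stored id becomes a dictionary lookup instead of a rescan of the
    # whole feed. Assumes entries expose .resource_id the same way the loop
    # above already uses it.
    #
    #   feed_by_id = dict((entry.resource_id, entry) for entry in feed)
    #   for id in old_file_map.keys():
    #       entry = feed_by_id.get(id)
    #       if entry is None or entry.deleted or entry.IsTrashed():
    #           _remove_file(id, old_file_map)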

    """
    For every collection in the database, if its path has
    changed in gdocs, update its path in the database
    and move the file to the new path (create the new
    path if necessary)
    """
    """
    for id in old_collection_map.keys():

        # TODO
        if old_collection_map[id].parents == parent collections on gdocs for id:
            # The 
            continue
        gdocs_path = get_
        disk_path_from_gdocs = _create_disk_path(gdocs_path)

        current_collection_path = old_file_map[id]

        if disk_path_from_gdocs == current_disk_path
        
        pass
    """
        

    

    # The collection/file sync above is still a TODO, so no fresh maps have
    # been built yet; carry the (pruned) previous maps forward so the stored
    # state is not lost when the shelve is rewritten.
    new_file_map = old_file_map
    new_collection_map = old_collection_map

    # Store the updated resource id maps
    output = "Storing the updated resource id maps."
    logger.log(output, i_debug=_debug, i_force=True)
    config_map[_files_key] = new_file_map
    config_map[_collections_key] = new_collection_map

    # Store the updated time
    output = "Storing the updated time."
    logger.log(output, i_debug=_debug, i_force=True)
    config_map[_time_key] = start_time

    config_map.close()

    if debug_file:
        debug_file.close()