def __init__(self, provider_class, handler=Source):
    super(SourceService, self).__init__(provider_class)
    self._handler = handler
    addonid = KodiUtils.get_addon_info('id')
    Cache(addonid, 'page', 0).clear()
    Cache(addonid, 'children', 0).clear()
    Cache(addonid, 'items', 0).clear()
def __init__(self, request, client_address, server):
    self._system_monitor = KodiUtils.get_system_monitor()
    self._account_manager = AccountManager(server.service.profile_path)
    self._addonid = KodiUtils.get_addon_info('id')
    expiration = datetime.timedelta(minutes=KodiUtils.get_cache_expiration_time())
    self._page_cache = Cache(self._addonid, 'page', expiration)
    self._children_cache = Cache(self._addonid, 'children', expiration)
    self._items_cache = Cache(self._addonid, 'items', expiration)
    BaseHandler.__init__(self, request, client_address, server)
Example No. 3
def _clear_cache(self):
    Cache(self._addonid, 'page', 0).clear()
    Cache(self._addonid, 'children', 0).clear()
    Cache(self._addonid, 'items', 0).clear()
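
These examples exercise a small Cache surface: the constructor Cache(addon_id, bucket_name, expiration) plus get, set, setmany, remove and clear. The following is a minimal, self-contained sketch of that usage pattern; the in-memory stub class is an assumption standing in for the real clouddrive.common implementation, which persists entries per addon and bucket and honours the expiration passed to the constructor.

import datetime

class Cache(object):
    # In-memory stand-in (an assumption) for clouddrive.common's Cache; only the
    # interface used in these examples is reproduced here.
    _store = {}

    def __init__(self, addon_id, name, expiration):
        self._bucket = (addon_id, name)
        self._expiration = expiration  # datetime.timedelta, or 0 when only clearing
        Cache._store.setdefault(self._bucket, {})

    def get(self, key):
        return Cache._store[self._bucket].get(key)

    def set(self, key, value):
        Cache._store[self._bucket][key] = value

    def setmany(self, pairs):
        for key, value in pairs:
            self.set(key, value)

    def remove(self, key):
        Cache._store[self._bucket].pop(key, None)

    def clear(self):
        Cache._store[self._bucket] = {}

addon_id = 'script.module.clouddrive.common'  # illustrative id
children_cache = Cache(addon_id, 'children', datetime.timedelta(minutes=15))
children_cache.set('driveid/path:children', ['movie.mkv', 'movie.srt'])
assert children_cache.get('driveid/path:children') == ['movie.mkv', 'movie.srt']
Cache(addon_id, 'children', 0).clear()  # same pattern as _clear_cache() above
assert children_cache.get('driveid/path:children') is None
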
class Source(BaseHandler):
    _system_monitor = None
    _account_manager = None

    kilobyte = 1024.0
    megabyte = kilobyte*kilobyte
    gigabyte = megabyte*kilobyte
    
    def __init__(self, request, client_address, server):
        self._system_monitor = KodiUtils.get_system_monitor()
        self._account_manager = AccountManager(server.service.profile_path)
        self._addonid = KodiUtils.get_addon_info('id')
        expiration = datetime.timedelta(minutes=KodiUtils.get_cache_expiration_time())
        self._page_cache = Cache(self._addonid, 'page', expiration)
        self._children_cache = Cache(self._addonid, 'children', expiration)
        self._items_cache = Cache(self._addonid, 'items', expiration)
        BaseHandler.__init__(self, request, client_address, server)
    
    def __del__(self):
        del self._system_monitor
        del self._account_manager
        Logger.debug('Request destroyed.')
    
    def _get_provider(self):
        return self.server.data(source_mode = True)
    
    def open_table(self, title):
        title = urllib.unquote(title)
        html = XHTML('html')
        html.head.title(title)
        body = html.body
        body.h1(title)
        table = body.table()
        row = table.tr
        row.th.a('Name')
        row.th.a('Last modified')
        row.th.a('Size')
        row.th.a('Description')
        row = table.tr
        row.th(colspan='4').hr()
        return html, table
    
    def add_row(self, table, file_name, date='  - ', size='  - ', description=' '):
        row = table.tr
        row.td.a(file_name, href=urllib.quote(file_name))
        row.td(date, align='right')
        row.td(size, align='right')
        row.td(description, escape=False)
    
    def close_table(self, table):
        table.tr.th(colspan='4').hr()
    
    def get_size(self, size):
        unit = ''
        if size > self.gigabyte:
            size = size / self.gigabyte
            unit = 'G'
        elif size > self.megabyte:
            size = size / self.megabyte
            unit = 'M'
        elif size > self.kilobyte:
            size = size / self.kilobyte
            unit = 'K'
        elif size < 0:
            return '-'
        return ("%.2f" % size) + unit
    
    def get_cloud_drive_addons(self):
        addons = []
        response = KodiUtils.execute_json_rpc('Addons.GetAddons', {'type':'xbmc.python.pluginsource', 'enabled': True, 'properties': ['dependencies', 'name']})
        for addon in Utils.get_safe_value(Utils.get_safe_value(response, 'result', {}), 'addons', []):
            for dependency in addon['dependencies']:
                if dependency['addonid'] == self._addonid:
                    addons.append(addon)
                    break
        return addons
    
    def get_addonid(self, addon_name):
        addons = self.get_cloud_drive_addons()
        addonid = None
        for addon in addons:
            if urllib.quote(Utils.str(addon['name'])) == addon_name:
                addonid = addon['addonid']
                break
        return addonid
    
    def show_addon_list(self):
        html, table = self.open_table('Index of /')
        addons = self.get_cloud_drive_addons()
        if addons:
            for addon in addons:
                self.add_row(table, Utils.str(addon['name']) + '/')
        else:
            self.add_row(table, KodiUtils.get_addon_info('name') + '/')
            
        self.close_table(table)
        response = Utils.get_file_buffer()
        response.write(str(html))
        return {'response_code': 200, 'content': response}
        
    def get_drive_list(self):
        drives = []
        accounts = self._account_manager.get_accounts()
        provider = self._get_provider()
        for account_id in accounts:
            account = accounts[account_id]
            for drive in account['drives']:
                drive['display_name'] = self._account_manager.get_account_display_name(account, drive, provider)
                drives.append(drive)
        return drives
    
    def get_driveid(self, drive_name):
        driveid = None
        drives = self.get_drive_list()
        for drive in drives:
            if urllib.quote(Utils.str(drive['display_name'])) == drive_name:
                driveid = drive['id']
                break
        return driveid
    
    def show_drives(self, addon_name):
        html, table = self.open_table('Index of /'+addon_name+'/')
        self.add_row(table, '../')
        drives = self.get_drive_list()
        for drive in drives:
            self.add_row(table, Utils.str(drive['display_name']) + '/')
        self.close_table(table)
        response = Utils.get_file_buffer()
        response.write(str(html))
        return {'response_code': 200, 'content': response}
    
    def process_path(self, addon_name, drive_name, path):
        headers = {}
        response = Utils.get_file_buffer()
        driveid = self.get_driveid(drive_name)
        if driveid:
            parts = self.path.split('/')
            filename = parts[len(parts)-1]
            if filename:
                response_code = 303
                if path:
                    u = urlparse(path)
                    path = u.path
                    Logger.debug('query: %s' % u.query)
                    if u.query == 'subtitles':
                        response_code = 200
                        response.write(json.dumps({'driveid': driveid, 'subtitles': self.get_subtitles(driveid, path)}))
                    else:
                        key = '%s%s:children' % (driveid, path[0:path.rfind('/')],)
                        Logger.debug('reading cache key: ' + key)
                        children = self._children_cache.get(key)
                        if children is None:
                            self.get_folder_items(driveid, path[0:path.rfind('/')+1])
                        url = self.get_download_url(driveid, path)
                        headers['location'] = url
                        Logger.debug('redirect to: ' + url)
                else:
                    url = self.path + '/'
                    headers['location'] = url
            else:
                response_code = 200
                response.write(str(self.show_folder(driveid, path)))
        else:
            response_code = 404
            response.write('Drive "%s" does not exist for addon "%s"' % (drive_name, addon_name))
        return {'response_code': response_code, 'content': response, 'headers': headers}
    
    def get_folder_items(self, driveid, path):
        provider = self._get_provider()
        provider.configure(self._account_manager, driveid)
        cache_path = path[:len(path)-1]
        request_path = cache_path if len(path) > 1 else path
        self.is_path_possible(driveid, request_path)
        key = '%s%s:items' % (driveid, cache_path,)
        items = self._items_cache.get(key)
        if items is None:
            items = provider.get_folder_items(path=request_path, include_download_info=True)
            self._items_cache.set(key, items)
            children_names = []
            cache_items = []
            for item in items:
                quoted_name = urllib.quote(Utils.str(item['name']))
                children_names.append(quoted_name)
                key = '%s%s%s' % (driveid, path, quoted_name,)
                Logger.debug('Adding item in cache for bulk: %s' % key)
                cache_items.append([key, item])
            self._items_cache.setmany(cache_items)
            Logger.debug('Cache in bulk saved')
            key = '%s%s:children' % (driveid, cache_path,)
            Logger.debug('saving children names for: ' + key)
            self._children_cache.set(key, children_names)
        else:
            Logger.debug('items for %s served from cache' % path)
        return items

    def show_folder(self, driveid, path):
        items = self.get_folder_items(driveid, path)
        html, table = self.open_table('Index of ' + self.path)
        self.add_row(table, '../')
        for item in items:
            file_name = Utils.str(item['name'])
            if 'folder' in item:
                file_name += '/'
            date = Utils.default(self.date_time_string(KodiUtils.to_timestamp(Utils.get_safe_value(item, 'last_modified_date'))), '  - ')
            size = self.get_size(Utils.default(Utils.get_safe_value(item, 'size'), -1))
            description = Utils.default(Utils.get_safe_value(item, 'description'), '&nbsp;')
            self.add_row(table, file_name, date, size, description)
        self.close_table(table)
        return html
    
    def is_path_possible(self, driveid, path):
        index = path.rfind('/')
        while index >= 0:
            filename = path[index+1:]
            path = path[0:index]
            key = '%s%s:children' % (driveid, path,)
            Logger.debug('testing possible path key: ' + key)
            children = self._children_cache.get(key)
            if children or type(children) is list:
                if filename and filename not in children:
                    Logger.debug('Not found. From cache.') 
                    raise RequestException('Not found. From cache.', HTTPError(self.path, 404, 'Not found.', None, None), 'Request URL: %s' % self.path, None)
                return True
            index = path.rfind('/')
        return True
    
    def get_item(self, driveid, path):
        key = '%s%s' % (driveid, path,)
        Logger.debug('Testing item from cache: %s' % key)
        item = self._items_cache.get(key)
        if not item:
            provider = self._get_provider()
            provider.configure(self._account_manager, driveid)
            self.is_path_possible(driveid, path)
            item = provider.get_item(path=path, include_download_info = True)
            Logger.debug('Saving item in cache: %s' % key)
            self._items_cache.set(key, item)
        return item
    
    def get_download_url(self, driveid, path):
        item = self.get_item(driveid, path) 
        if 'folder' in item:
            return self.path + '/'
        return item['download_info']['url']
    
    def get_subtitles(self, driveid, path):
        item = self.get_item(driveid, path) 
        key = '%s%s-subtitles' % (driveid, path,)
        Logger.debug('Testing subtitles from cache: %s' % key)
        subtitles = self._items_cache.get(key)
        if not subtitles:
            provider = self._get_provider()
            provider.configure(self._account_manager, driveid)
            self.is_path_possible(driveid, path)
            item_driveid = Utils.default(Utils.get_safe_value(item, 'drive_id'), driveid)
            subtitles = provider.get_subtitles(item['parent'], item['name'], item_driveid)
            Logger.debug('Saving subtitles in cache: %s' % key)
            self._items_cache.set(key, subtitles)
        return subtitles
    
    def handle_resource_request(self, data):
        addon_name = data[2]
        size = len(data)
        cached_page = {}
        if size == 3:
            cached_page['response_code'] = 303
            cached_page['headers'] = {'location': self.path + '/'}
        elif size == 4 and not data[3]:
            cached_page = self.show_drives(addon_name)
        else:
            drive_name = data[3]
            path = self.path[len(self.server.service.name)+len(addon_name)+len(drive_name)+3:]
            cached_page = self.process_path(addon_name, drive_name, path)
        return cached_page
            
    def do_GET(self):
        Logger.debug(self.path + ': Requested')
        if self._system_monitor.abortRequested():
            Logger.debug(self.path + ': abort requested')
            return
        data = self.path.split('/')
        size = len(data)
        cached_page = self._page_cache.get(self.path)
        if cached_page:
            if cached_page['pending']:
                Logger.debug(self.path + ': Already requested. Waiting for original request...')
                max_waiting_time = time.time() + 30
                while not self._system_monitor.abortRequested() and max_waiting_time > time.time() and cached_page['pending']:
                    if self._system_monitor.waitForAbort(1):
                        break
                    cached_page = self._page_cache.get(self.path)

            if not self._system_monitor.abortRequested():
                if cached_page['pending']:
                    self.write_response(504)
                    Logger.debug(self.path + ': 504 - Gateway timeout')
                    self._page_cache.remove(self.path)
                else:
                    if 'content' in cached_page and cached_page['content']:
                        content = Utils.get_file_buffer()
                        content.write(cached_page['content'])
                        cached_page['content'] = content
                    self.write_response(cached_page['response_code'], content=Utils.get_safe_value(cached_page, 'content'), headers=Utils.get_safe_value(cached_page, 'headers', {}))
                    Logger.debug(self.path + ': %d - Served from cache' % cached_page['response_code'])
        else:
            cached_page = {'pending': True}
            self._page_cache.set(self.path, cached_page)
            if size > 1 and data[1] == self.server.service.name:
                try:
                    if size == 2:
                        cached_page['response_code'] = 303
                        cached_page['headers'] = {'location': self.path + '/'}
                    elif size > 2 and data[2]:
                        cached_page = self.handle_resource_request(data)
                    else:
                        cached_page = self.show_addon_list()
                except Exception as e:
                    httpex = ExceptionUtils.extract_exception(e, HTTPError)
                    if httpex:
                        cached_page['response_code'] = httpex.code
                    else:
                        cached_page['response_code'] = 500
                    
                    ErrorReport.handle_exception(e)
                    content = Utils.get_file_buffer()
                    content.write(ExceptionUtils.full_stacktrace(e))
                    
                    cached_page['content'] = content
            else:
                cached_page['response_code'] = 404
            cached_page['pending'] = False
            content_value = None
            if 'content' in cached_page:
                content_value = cached_page['content'].getvalue()
            self.write_response(cached_page['response_code'], content=Utils.get_safe_value(cached_page, 'content'), headers=Utils.get_safe_value(cached_page, 'headers', {}))
            cached_page['content'] = content_value
            if Utils.get_safe_value(cached_page, 'response_code', 0) >= 500:
                self._page_cache.remove(self.path)
            else:
                self._page_cache.set(self.path, cached_page)
            Logger.debug(self.path + ': Response code ' + Utils.str(cached_page['response_code']))
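
do_GET above coalesces concurrent requests for the same path: the first request stores a {'pending': True} marker in the page cache, later requests poll that entry for up to 30 seconds, a page that never resolves is answered with 504, and completed pages are served straight from the cache. The sketch below isolates that idea with a plain dict and threads; page_cache, build_page and serve are illustrative names, not part of the addon.

import threading
import time

page_cache = {}              # path -> {'pending': bool, 'response_code': int, ...}
cache_lock = threading.Lock()

def build_page(path):
    # Stand-in for the real work (listing a folder, resolving a download URL).
    time.sleep(0.5)
    return {'pending': False, 'response_code': 200, 'content': 'listing of %s' % path}

def serve(path, timeout=30):
    with cache_lock:
        cached = page_cache.get(path)
        if cached is None:
            # First request for this path: mark it pending, then do the work.
            page_cache[path] = {'pending': True}
    if cached is None:
        page = build_page(path)
        page_cache[path] = page
        return page['response_code']
    # Another request is already building this page: wait for it to finish.
    deadline = time.time() + timeout
    while cached['pending'] and time.time() < deadline:
        time.sleep(1)
        cached = page_cache[path]
    if cached['pending']:
        page_cache.pop(path, None)   # give up so a later request can retry
        return 504                   # gateway timeout, as in do_GET
    return cached['response_code']

threads = [threading.Thread(target=serve, args=('/addon/drive/folder/',)) for _ in range(3)]
for t in threads: t.start()
for t in threads: t.join()
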
Example No. 5
def __init__(self, source_mode = False):
    super(GoogleDrive, self).__init__('googledrive', source_mode)
    self._items_cache = Cache(KodiUtils.get_addon_info('id'), 'items', datetime.timedelta(minutes=KodiUtils.get_cache_expiration_time()))
Example No. 6
class GoogleDrive(Provider):
    _default_parameters = {'spaces': 'drive', 'prettyPrint': 'false'}
    _is_team_drive = False
    _team_drive_parameters = {'includeTeamDriveItems': 'true', 'supportsTeamDrives': 'true', 'corpora': 'teamDrive', 'teamDriveId': ''}
    _user = None

    def __init__(self, source_mode = False):
        super(GoogleDrive, self).__init__('googledrive', source_mode)
        self._items_cache = Cache(KodiUtils.get_addon_info('id'), 'items', datetime.timedelta(minutes=KodiUtils.get_cache_expiration_time()))
            
    def configure(self, account_manager, driveid):
        super(GoogleDrive, self).configure(account_manager, driveid)
        self._account_manager.load()
        drive = account_manager.get_drive_by_driveid(driveid)
        self._is_team_drive = drive and 'type' in drive and drive['type'] == 'drive#teamDrive'
        
    def _get_api_url(self):
        return 'https://www.googleapis.com/drive/v3'

    def _get_request_headers(self):
        return None
    
    def get_account(self, request_params=None, access_tokens=None):
        me = self.get('/about', parameters={'fields':'user'}, request_params=request_params, access_tokens=access_tokens)
        if not me or 'user' not in me:
            raise Exception('NoAccountInfo')
        self._user = me['user'] 
        return { 'id' : self._user['permissionId'], 'name' : self._user['displayName']}
    
    def get_drives(self, request_params=None, access_tokens=None):
        drives = [{
            'id' : self._user['permissionId'],
            'name' : '',
            'type' : ''
        }]
        try:
            all_teamdrives_fetch = False
            page_token = None
            parameters = {'pageSize': 100}
            while not all_teamdrives_fetch:
                if page_token:
                    # The request parameter is 'pageToken'; 'nextPageToken' only appears in responses.
                    parameters['pageToken'] = page_token
                response = self.get('/teamdrives', parameters=parameters, request_params=request_params, access_tokens=access_tokens)
                if response and 'teamDrives' in response:
                    for drive in response['teamDrives']:
                        drives.append({
                            'id' : drive['id'],
                            'name' : Utils.get_safe_value(drive, 'name', drive['id']),
                            'type' : drive['kind']
                        })
                if response and 'nextPageToken' in response:
                    page_token = response['nextPageToken']
                else:
                    all_teamdrives_fetch = True
        except RequestException as ex:
            httpex = ExceptionUtils.extract_exception(ex, HTTPError)
            if not httpex or httpex.code != 403:
                raise ex
        return drives
    
    def get_drive_type_name(self, drive_type):
        if drive_type == 'drive#teamDrive':
            return 'Team Drive'
        return drive_type
    
    def prepare_parameters(self):
        parameters = copy.deepcopy(self._default_parameters)
        if self._is_team_drive:
            parameters.update(self._team_drive_parameters)
            parameters['teamDriveId'] = self._driveid
        return parameters
    
    def _get_field_parameters(self):
        file_fields = 'id,name,modifiedTime,size,mimeType'
        if not self.source_mode:
            file_fields += ',description,hasThumbnail,thumbnailLink,owners(permissionId),parents,trashed,imageMediaMetadata(width),videoMediaMetadata'
        return file_fields
        
    def get_folder_items(self, item_driveid=None, item_id=None, path=None, on_items_page_completed=None, include_download_info=False):
        item_driveid = Utils.default(item_driveid, self._driveid)
        is_album = item_id and item_id[:6] == 'album-'
        if is_album:
            item_id = item_id[6:]
        parameters = self.prepare_parameters()
        if item_id:
            parameters['q'] = '\'%s\' in parents' % item_id
        elif path == 'sharedWithMe' or path == 'starred':
            parameters['q'] = path
        elif path != 'photos':
            if path == '/':
                parent = self._driveid if self._is_team_drive else 'root'
                parameters['q'] = '\'%s\' in parents' % parent
            elif not is_album:
                item = self.get_item_by_path(path, include_download_info)
                parameters['q'] = '\'%s\' in parents' % item['id']
                
        parameters['fields'] = 'files(%s),kind,nextPageToken' % self._get_field_parameters()
        if 'q' in parameters:
            parameters['q'] += ' and not trashed'
        
        self.configure(self._account_manager, self._driveid)
        provider_method = self.get
        url = '/files'
        if path == 'photos':
            self._photos_provider = GooglePhotos()
            self._photos_provider.configure(self._account_manager, self._driveid)
            parameters = {}
            provider_method = self._photos_provider.get
            url = '/albums'
        elif is_album:
            self._photos_provider = GooglePhotos()
            self._photos_provider.configure(self._account_manager, self._driveid)
            parameters = {'albumId': item_id}
            provider_method = self._photos_provider.post
            url = '/mediaItems:search'
            
        files = provider_method(url, parameters = parameters)
        if self.cancel_operation():
            return
        return self.process_files(files, parameters, on_items_page_completed, include_download_info)
    
    def search(self, query, item_driveid=None, item_id=None, on_items_page_completed=None):
        item_driveid = Utils.default(item_driveid, self._driveid)
        parameters = self.prepare_parameters()
        parameters['fields'] = 'files(%s)' % self._get_field_parameters()
        query = 'fullText contains \'%s\'' % Utils.str(query)
        if item_id:
            query += ' and \'%s\' in parents' % item_id
        parameters['q'] = query + ' and not trashed'
        files = self.get('/files', parameters = parameters)
        if self.cancel_operation():
            return
        return self.process_files(files, parameters, on_items_page_completed)
    
    def process_files(self, files, parameters, on_items_page_completed=None, include_download_info=False, extra_info=None):
        items = []
        if files:
            kind = Utils.get_safe_value(files, 'kind', '')
            collection = []
            if kind == 'drive#fileList':
                collection = files['files']
            elif kind == 'drive#changeList':
                collection = files['changes']
            elif 'albums' in files:
                kind = 'album'
                collection = files['albums']
            elif 'mediaItems' in files:
                kind = 'media_item'
                collection = files['mediaItems']
            if collection:
                for f in collection:
                    f['kind'] = Utils.get_safe_value(f, 'kind', kind)
                    item = self._extract_item(f, include_download_info)
                    if item:
                        items.append(item)
                if on_items_page_completed:
                    on_items_page_completed(items)
            if type(extra_info) is dict:
                if 'newStartPageToken' in files:
                    extra_info['change_token'] = files['newStartPageToken']
            if 'nextPageToken' in files:
                parameters['pageToken'] = files['nextPageToken']
                url = '/files'
                provider_method = self.get
                if kind == 'drive#changeList':
                    url = '/changes'
                elif kind == 'album':
                    url = '/albums'
                    provider_method = self._photos_provider.get
                elif kind == 'media_item':
                    url = '/mediaItems:search'
                    provider_method = self._photos_provider.post
                next_files = provider_method(url, parameters = parameters)
                if self.cancel_operation():
                    return
                items.extend(self.process_files(next_files, parameters, on_items_page_completed, include_download_info, extra_info))
        return items
    
    def _extract_item(self, f, include_download_info=False):
        kind = Utils.get_safe_value(f, 'kind', '')
        if kind == 'drive#change':
            if 'file' in f:
                f = f['file']
            else:
                return {}
        size = long('%s' % Utils.get_safe_value(f, 'size', 0))
        is_album = kind == 'album'
        is_media_items = kind == 'media_item'
        item_id = f['id']
        if is_album:
            mimetype = 'application/vnd.google-apps.folder'
            name = Utils.get_safe_value(f, 'title', item_id)
        else:
            mimetype = Utils.get_safe_value(f, 'mimeType', '')
            name = Utils.get_safe_value(f, 'name', '')
        if is_media_items:
            name = Utils.get_safe_value(f, 'filename', item_id) 
        item = {
            'id': item_id,
            'name': name,
            'name_extension' : Utils.get_extension(name),
            'parent': Utils.get_safe_value(f, 'parents', ['root'])[0],
            'drive_id' : Utils.get_safe_value(Utils.get_safe_value(f, 'owners', [{}])[0], 'permissionId'),
            'mimetype' : mimetype,
            'last_modified_date' : Utils.get_safe_value(f,'modifiedTime'),
            'size': size,
            'description': Utils.get_safe_value(f, 'description', ''),
            'deleted' : Utils.get_safe_value(f, 'trashed', False)
        }
        if item['mimetype'] == 'application/vnd.google-apps.folder':
            item['folder'] = {
                'child_count' : 0
            }
        if is_media_items:
            item['url'] = f['baseUrl'] + '=d'
            if 'mediaMetadata' in f:
                metadata = f['mediaMetadata']
                item['video'] = {
                    'width' : Utils.get_safe_value(metadata, 'width'),
                    'height' : Utils.get_safe_value(metadata, 'height')
                }
                item['last_modified_date'] = Utils.get_safe_value(metadata, 'creationTime')
        if 'videoMediaMetadata' in f:
            video = f['videoMediaMetadata']
            item['video'] = {
                'width' : Utils.get_safe_value(video, 'width'),
                'height' : Utils.get_safe_value(video, 'height'),
                'duration' : long('%s' % Utils.get_safe_value(video, 'durationMillis', 0)) / 1000
            }
        if 'imageMediaMetadata' in f or 'mediaMetadata' in f:
            item['image'] = {
                'size' : size
            }
        if 'hasThumbnail' in f and f['hasThumbnail']:
            item['thumbnail'] = Utils.get_safe_value(f, 'thumbnailLink')
        if is_album:
            item['thumbnail'] = Utils.get_safe_value(f, 'coverPhotoBaseUrl')
            item['id'] = 'album-' + item['id']
        if include_download_info:
            if is_media_items:
                item['download_info'] =  {
                    'url' : item['url']
                }
            else:
                parameters = {
                    'alt': 'media',
                    'access_token': self.get_access_tokens()['access_token']
                }
                url = self._get_api_url() + '/files/%s' % item['id']
                if 'size' not in f and item['mimetype'] == 'application/vnd.google-apps.document':
                    url += '/export'
                    parameters['mimeType'] = Utils.default(Utils.get_mimetype_by_extension(item['name_extension']), Utils.get_mimetype_by_extension('pdf'))
                item['download_info'] =  {
                    'url' : url + '?%s' % urllib.urlencode(parameters)
                }
        return item
    
    def get_item_by_path(self, path, include_download_info=False):
        parameters = self.prepare_parameters()
        if path[-1:] == '/':
            path = path[:-1]
        Logger.debug(path + ' <- Target')
        key = '%s%s' % (self._driveid, path,)
        Logger.debug('Testing item from cache: %s' % key)
        item = self._items_cache.get(key)
        if not item:
            parameters['fields'] = 'files(%s)' % self._get_field_parameters()
            index = path.rfind('/')
            filename = urllib.unquote(path[index+1:])
            parent = path[0:index]
            if not parent:
                parent = 'root'
            else:
                parent = self.get_item_by_path(parent, include_download_info)['id']
            item = None
            parameters['q'] = '\'%s\' in parents and name = \'%s\'' % (Utils.str(parent), Utils.str(filename).replace("'","\\'"))
            files = self.get('/files', parameters = parameters)
            if (len(files['files']) > 0):
                for f in files['files']:
                    item = self._extract_item(f, include_download_info)
                    break
        else:
            Logger.debug('Found in cache.')
        if not item:
            raise RequestException('Not found by path', HTTPError(path, 404, 'Not found', None, None), 'Request URL: %s' % path, None)
        self._items_cache.set(key, item)
        return item
    
    def get_subtitles(self, parent, name, item_driveid=None, include_download_info=False):
        parameters = self.prepare_parameters()
        item_driveid = Utils.default(item_driveid, self._driveid)
        subtitles = []
        parameters['fields'] = 'files(' + self._get_field_parameters() + ')'
        parameters['q'] = 'name contains \'%s\'' % Utils.str(Utils.remove_extension(name)).replace("'","\\'")
        files = self.get('/files', parameters = parameters)
        for f in files['files']:
            subtitle = self._extract_item(f, include_download_info)
            if subtitle['name_extension'].lower() in ('srt','idx','sub','sbv','ass','ssa'):
                subtitles.append(subtitle)
        return subtitles
    
    def get_item(self, item_driveid=None, item_id=None, path=None, find_subtitles=False, include_download_info=False):
        parameters = self.prepare_parameters()
        item_driveid = Utils.default(item_driveid, self._driveid)
        parameters['fields'] = self._get_field_parameters()
        if not item_id and path == '/':
            item_id = 'root'
        if item_id:
            f = self.get('/files/%s' % item_id, parameters = parameters)
            item = self._extract_item(f, include_download_info)
        else:
            item = self.get_item_by_path(path, include_download_info)
        
        if find_subtitles:
            subtitles = self.get_subtitles(item['parent'], item['name'], item_driveid, include_download_info)
            if subtitles:
                item['subtitles'] = subtitles
        return item
    
    def changes(self):
        change_token = self.get_change_token()
        if not change_token:
            change_token = Utils.get_safe_value(self.get('/changes/startPageToken', parameters = self.prepare_parameters()), 'startPageToken')
        extra_info = {}
        parameters = self.prepare_parameters()
        parameters['pageToken'] = change_token
        parameters['fields'] = 'kind,nextPageToken,newStartPageToken,changes(kind,type,removed,file(%s))' % self._get_field_parameters()
        f = self.get('/changes', parameters = parameters)
        changes = self.process_files(f, parameters, include_download_info=True, extra_info=extra_info)
        self.persist_change_token(Utils.get_safe_value(extra_info, 'change_token'))
        return changes
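
process_files above follows Google Drive's pagination contract: a response may carry nextPageToken, which is sent back as the pageToken request parameter until no further token is returned. The loop below shows that contract in isolation against a canned responder; fetch_page and its fake pages are assumptions for illustration, not the provider's real HTTP layer.

def fetch_page(url, parameters):
    # Stand-in for GoogleDrive.get('/files', ...): one canned page per token.
    pages = {
        None: {'files': [{'name': 'a.mkv'}, {'name': 'b.srt'}], 'nextPageToken': 'page-2'},
        'page-2': {'files': [{'name': 'c.jpg'}]},
    }
    return pages[parameters.get('pageToken')]

def list_all_files(parameters):
    items = []
    while True:
        response = fetch_page('/files', parameters)
        items.extend(response.get('files', []))
        if 'nextPageToken' not in response:
            break
        # The response field is 'nextPageToken'; the request parameter is 'pageToken'.
        parameters['pageToken'] = response['nextPageToken']
    return items

print(list_all_files({'q': "'root' in parents and not trashed"}))
# [{'name': 'a.mkv'}, {'name': 'b.srt'}, {'name': 'c.jpg'}]
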
class GoogleDrive(Provider):
    _default_parameters = {'spaces': 'drive', 'prettyPrint': 'false'}
    _file_fileds = 'id,name,mimeType,description,hasThumbnail,thumbnailLink,modifiedTime,owners(permissionId),parents,size,imageMediaMetadata(width),videoMediaMetadata'
    _extension_map = {
        'html': 'text/html',
        'htm': 'text/html',
        'txt': 'text/plain',
        'rtf': 'application/rtf',
        'odf': 'application/vnd.oasis.opendocument.text',
        'pdf': 'application/pdf',
        'doc': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
        'docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
        'epub': 'application/epub+zip',
        'xls': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        'sxc': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        'csv': 'text/csv',
        'ppt': 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
        'pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
        'sxi': 'application/vnd.oasis.opendocument.presentation',
        'json': 'application/vnd.google-apps.script+json'
    }
    _user = None

    def __init__(self):
        super(GoogleDrive, self).__init__('googledrive')
        self._items_cache = Cache(
            KodiUtils.get_addon_info('id'), 'items',
            datetime.timedelta(minutes=KodiUtils.get_cache_expiration_time()))

    def _get_api_url(self):
        return 'https://www.googleapis.com/drive/v3'

    def _get_request_headers(self):
        return None

    def get_account(self, request_params=None, access_tokens=None):
        me = self.get('/about',
                      parameters={'fields': 'user'},
                      request_params=request_params,
                      access_tokens=access_tokens)
        if not me or 'user' not in me:
            raise Exception('NoAccountInfo')
        self._user = me['user']
        return {
            'id': self._user['permissionId'],
            'name': self._user['displayName']
        }

    def get_drives(self, request_params=None, access_tokens=None):
        drives = [{'id': self._user['permissionId'], 'name': '', 'type': ''}]
        return drives

    def get_folder_items(self,
                         item_driveid=None,
                         item_id=None,
                         path=None,
                         on_items_page_completed=None,
                         include_download_info=False):
        item_driveid = Utils.default(item_driveid, self._driveid)
        is_album = item_id and item_id[:6] == 'album-'

        if is_album:
            Logger.notice(item_id)
            item_id = item_id[6:]
            Logger.notice(item_id)

        parameters = copy.deepcopy(self._default_parameters)
        if item_id:
            parameters['q'] = '\'%s\' in parents' % item_id
        elif path == 'sharedWithMe' or path == 'starred':
            parameters['q'] = path
        elif path != 'photos':
            if path == '/':
                parameters['q'] = '\'root\' in parents'
            elif not is_album:
                item = self.get_item_by_path(path, include_download_info)
                parameters['q'] = '\'%s\' in parents' % item['id']

        parameters['fields'] = 'files(%s),nextPageToken' % self._file_fileds
        if 'q' in parameters:
            parameters['q'] += ' and not trashed'
        if path == 'photos':
            self._photos_provider = GooglePhotos()
            Logger.notice(self._get_api_url())
            self._photos_provider.configure(self._account_manager,
                                            self._driveid)
            files = self._photos_provider.get('/albums')
            files['is_album'] = True
        elif is_album:
            self._photos_provider = GooglePhotos()
            self._photos_provider.configure(self._account_manager,
                                            self._driveid)
            files = self._photos_provider.post('/mediaItems:search',
                                               parameters={'albumId': item_id})
            files['is_media_items'] = True
        else:
            self.configure(self._account_manager, self._driveid)
            files = self.get('/files', parameters=parameters)
            files['is_album'] = False
        if self.cancel_operation():
            return
        return self.process_files(files, parameters, on_items_page_completed,
                                  include_download_info)

    def search(self,
               query,
               item_driveid=None,
               item_id=None,
               on_items_page_completed=None):
        item_driveid = Utils.default(item_driveid, self._driveid)
        parameters = copy.deepcopy(self._default_parameters)
        parameters['fields'] = 'files(%s)' % self._file_fileds
        query = 'fullText contains \'%s\'' % Utils.str(query)
        if item_id:
            query += ' and \'%s\' in parents' % item_id
        parameters['q'] = query + ' and not trashed'
        files = self.get('/files', parameters=parameters)
        if self.cancel_operation():
            return
        return self.process_files(files, parameters, on_items_page_completed)

    def process_files(self,
                      files,
                      parameters,
                      on_items_page_completed=None,
                      include_download_info=False):
        items = []
        if files:
            is_album = Utils.get_safe_value(files, 'is_album', False)
            is_media_items = Utils.get_safe_value(files, 'is_media_items',
                                                  False)
            if is_album:
                collection = 'albums'
            elif is_media_items:
                collection = 'mediaItems'
            else:
                collection = 'files'
            if collection in files:
                for f in files[collection]:
                    f['is_album'] = is_album
                    f['is_media_items'] = is_media_items
                    item = self._extract_item(f, include_download_info)
                    items.append(item)
                if on_items_page_completed:
                    on_items_page_completed(items)
            if 'nextPageToken' in files:
                parameters['pageToken'] = files['nextPageToken']
                next_files = self.get('/files', parameters=parameters)
                if self.cancel_operation():
                    return
                next_files['is_album'] = is_album
                items.extend(
                    self.process_files(next_files, parameters,
                                       on_items_page_completed,
                                       include_download_info))
        return items

    def _extract_item(self, f, include_download_info=False):
        size = long('%s' % Utils.get_safe_value(f, 'size', 0))
        is_album = Utils.get_safe_value(f, 'is_album', False)
        is_media_items = Utils.get_safe_value(f, 'is_media_items', False)
        if is_album:
            mimetype = 'application/vnd.google-apps.folder'
            name = f['title']
        else:
            mimetype = Utils.get_safe_value(f, 'mimeType', '')
            name = Utils.get_safe_value(f, 'name', '')
        if is_media_items:
            name = Utils.get_safe_value(f, 'id', '')
        item = {
            'id': f['id'],
            'name': name,
            'name_extension': Utils.get_extension(name),
            'drive_id': Utils.get_safe_value(Utils.get_safe_value(f, 'owners', [{}])[0], 'permissionId'),
            'mimetype': mimetype,
            'last_modified_date': Utils.get_safe_value(f, 'modifiedTime'),
            'size': size,
            'description': Utils.get_safe_value(f, 'description', '')
        }
        if item['mimetype'] == 'application/vnd.google-apps.folder':
            item['folder'] = {'child_count': 0}
        if is_media_items:
            item['url'] = f['baseUrl']
            if 'mediaMetadata' in f:
                metadata = f['mediaMetadata']
                item['video'] = {
                    'width': Utils.get_safe_value(metadata, 'width'),
                    'height': Utils.get_safe_value(metadata, 'height')
                }
        if 'videoMediaMetadata' in f:
            video = f['videoMediaMetadata']
            item['video'] = {
                'width': Utils.get_safe_value(video, 'width'),
                'height': Utils.get_safe_value(video, 'height'),
                'duration': long('%s' % Utils.get_safe_value(video, 'durationMillis', 0)) / 1000
            }
        if 'imageMediaMetadata' in f or 'mediaMetadata' in f:
            item['image'] = {'size': size}
        if 'hasThumbnail' in f and f['hasThumbnail']:
            item['thumbnail'] = Utils.get_safe_value(f, 'thumbnailLink')
        if is_album:
            item['thumbnail'] = Utils.get_safe_value(f, 'coverPhotoBaseUrl')
            item['id'] = 'album-' + item['id']
        if include_download_info:
            if is_media_items:
                item['download_info'] = {'url': f['baseUrl']}
            else:
                parameters = {
                    'alt': 'media',
                    'access_token': self.get_access_tokens()['access_token']
                }
                url = self._get_api_url() + '/files/%s' % item['id']
                if 'size' not in f and item['mimetype'] == 'application/vnd.google-apps.document':
                    url += '/export'
                    parameters['mimeType'] = self.get_mimetype_by_extension(item['name_extension'])
                item['download_info'] = {
                    'url': url + '?%s' % urllib.urlencode(parameters)
                }
        return item

    def get_mimetype_by_extension(self, extension):
        if extension and extension in self._extension_map:
            return self._extension_map[extension]
        return self._extension_map['pdf']

    def get_item_by_path(self, path, include_download_info=False):
        parameters = copy.deepcopy(self._default_parameters)
        if path[-1:] == '/':
            path = path[:-1]
        Logger.debug(path + ' <- Target')
        key = '%s%s' % (self._driveid, path,)
        Logger.debug('Testing item from cache: %s' % key)
        item = self._items_cache.get(key)
        if not item:
            parameters['fields'] = 'files(%s)' % self._file_fileds
            index = path.rfind('/')
            filename = urllib.unquote(path[index + 1:])
            parent = path[0:index]
            if not parent:
                parent = 'root'
            else:
                parent = self.get_item_by_path(parent, include_download_info)['id']
            item = None
            # Escape single quotes so a name like "it's.mkv" does not break the query.
            parameters['q'] = '\'%s\' in parents and name = \'%s\'' % (Utils.str(parent), Utils.str(filename).replace("'", "\\'"))
            files = self.get('/files', parameters=parameters)
            if (len(files['files']) > 0):
                for f in files['files']:
                    item = self._extract_item(f, include_download_info)
                    break
        else:
            Logger.debug('Found in cache.')
        if not item:
            raise RequestException('Not found by path', HTTPError(path, 404, 'Not found', None, None), 'Request URL: %s' % path, None)
        self._items_cache.set(key, item)
        return item

    def get_item(self,
                 item_driveid=None,
                 item_id=None,
                 path=None,
                 find_subtitles=False,
                 include_download_info=False):
        parameters = copy.deepcopy(self._default_parameters)
        item_driveid = Utils.default(item_driveid, self._driveid)
        parameters['fields'] = self._file_fileds
        if not item_id and path == '/':
            item_id = 'root'
        if item_id:
            f = self.get('/files/%s' % item_id, parameters=parameters)
            item = self._extract_item(f, include_download_info)
        else:
            item = self.get_item_by_path(path, include_download_info)

        if find_subtitles:
            subtitles = []
            parameters['fields'] = 'files(' + self._file_fileds + ')'
            parameters['q'] = 'name contains \'%s\'' % Utils.str(Utils.remove_extension(item['name'])).replace("'", "\\'")
            files = self.get('/files', parameters=parameters)
            for f in files['files']:
                subtitle = self._extract_item(f, include_download_info)
                if subtitle['name_extension'] in ('srt', 'sub', 'sbv'):
                    subtitles.append(subtitle)
            if subtitles:
                item['subtitles'] = subtitles
        return item
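
In both versions of _extract_item, a regular file gets a direct download URL with alt=media, while a native Google document (which reports no size) has to go through /files/{id}/export with an explicit target mimeType, falling back to PDF. Below is a condensed sketch of that decision; the access token value and the small extension map are placeholders standing in for get_access_tokens() and the provider's full mapping.

try:
    from urllib.parse import urlencode   # Python 3
except ImportError:
    from urllib import urlencode          # Python 2, as used in the classes above

API_URL = 'https://www.googleapis.com/drive/v3'
EXPORT_MIMETYPES = {  # illustrative subset of the provider's extension map
    'pdf': 'application/pdf',
    'txt': 'text/plain',
    'csv': 'text/csv',
}

def build_download_url(item, access_token):
    parameters = {'alt': 'media', 'access_token': access_token}
    url = API_URL + '/files/%s' % item['id']
    if item.get('size') is None and item['mimetype'] == 'application/vnd.google-apps.document':
        # Native Google documents cannot be fetched with alt=media; export them,
        # guessing the target type from the file extension and defaulting to PDF.
        url += '/export'
        parameters['mimeType'] = EXPORT_MIMETYPES.get(item.get('name_extension'), EXPORT_MIMETYPES['pdf'])
    return url + '?' + urlencode(parameters)

print(build_download_url({'id': 'abc123', 'mimetype': 'application/vnd.google-apps.document',
                          'name_extension': 'txt'}, 'ya29.example-token'))
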