import ast

def is_token_revoked(decoded_token):
    """
    Check whether the given token has been revoked. Because every token we
    create is added to this store, a token that cannot be found is considered
    revoked, since we do not know where it was created.

    :param decoded_token: decoded JWT payload containing a 'jti' claim
    """
    jti = decoded_token['jti']
    cache = Cache()
    cached = cache.get('token_' + jti)
    if cached is None:
        return True

    # ast.literal_eval is a safer drop-in for eval() here, assuming the cache
    # stores the repr of a plain dict
    token = ast.literal_eval(cached.decode("utf-8"))

    if token is not None:
        return token['revoked']

    # Cached entry holds no token data: fall back to the database
    db = Mysql()
    token = db.execute_select(
        "SELECT * FROM `user_session` WHERE `jti` = %s", (jti, ))

    if len(token) > 0:
        return token[0]['revoked']
    return True
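The decoded_token argument suggests this callback is meant to plug into a JWT framework's revocation hook. A minimal wiring sketch, assuming flask_jwt_extended 3.x (the app setup, secret key and config values below are illustrative, not part of the original):

from flask import Flask
from flask_jwt_extended import JWTManager

app = Flask(__name__)
app.config['JWT_SECRET_KEY'] = 'change-me'   # illustrative value
app.config['JWT_BLACKLIST_ENABLED'] = True   # 3.x-style setting

jwt = JWTManager(app)

# In flask_jwt_extended 4.x the decorator is token_in_blocklist_loader and
# the callback receives (jwt_header, jwt_payload) instead.
@jwt.token_in_blacklist_loader
def check_if_token_revoked(decoded_token):
    return is_token_revoked(decoded_token)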
Example 2
    def test_stock_handler(self):
        identifier = 'AMZN'

        instruments = self.stock_handler.find_instruments(identifier)

        self.assertTrue(identifier in instruments)
        instrument = instruments[identifier]
        self.validate_stock(identifier, instrument)

        self.assertTrue(instrument.url in instruments)
        instrument = instruments[instrument.url]
        self.validate_stock(identifier, instrument)

        # Test that the cache was set correctly
        cached_data = Cache.get(instrument.url)
        self.assertIsNotNone(cached_data)
        instrument_from_cache = Stock(**cached_data)
        self.validate_stock(identifier, instrument_from_cache)

        cached_data = Cache.get(Stock.search_url(symbol=instrument.symbol))
        self.assertIsNotNone(cached_data)
        self.assertTrue('results' in cached_data)
        self.assertTrue(len(cached_data['results']) == 1)
        instrument_from_cache = Stock(**cached_data['results'][0])
        self.validate_stock(identifier, instrument_from_cache)

        # Test that we retrieve from the cache correctly
        start_time = time()

        instruments = self.stock_handler.find_instruments(identifier)
        self.assertTrue(identifier in instruments)
        instrument = instruments[identifier]
        self.validate_stock(identifier, instrument)

        self.assertTrue(time() - start_time < 0.05, "Cached entry was not used")
Example 3
    def test_option_handler(self):
        identifier = 'AAPL250.0C@01/17/20'

        instruments = self.option_handler.find_instruments(identifier)

        self.assertTrue(identifier in instruments)
        instrument = instruments[identifier]
        self.validate_option(identifier, instrument)

        self.assertTrue(instrument.url in instruments)
        instrument = instruments[instrument.url]
        self.validate_option(identifier, instrument)

        # Test that the cache was set correctly
        cached_data = Cache.get(instrument.url)
        self.assertIsNotNone(cached_data)
        instrument_from_cache = Option(**cached_data)
        self.validate_option(identifier, instrument_from_cache)

        cached_data = Cache.get(Option.search_url(
            chain_symbol=instrument.chain_symbol,
            strike_price=instrument.strike_price,
            type=instrument.type,
            expiration_date=instrument.expiration_date
        ))
        self.assertIsNotNone(cached_data)
        self.assertTrue('results' in cached_data)
        self.assertTrue(len(cached_data['results']) > 0)

        option_data = None
        expiration_date_str = instrument.expiration_date.strftime("%Y-%m-%d")
        for d in cached_data['results']:
            if d['expiration_date'] == expiration_date_str:
                option_data = d
        self.assertIsNotNone(option_data, "No option matching expiration {} was cached".format(instrument.expiration_date))

        instrument_from_cache = Option(**option_data)
        self.validate_option(identifier, instrument_from_cache)

        # Test that we retrieve from the cache correctly
        start_time = time()

        instruments = self.option_handler.find_instruments(identifier)
        self.assertTrue(identifier in instruments)
        instrument = instruments[identifier]
        self.validate_option(identifier, instrument)

        self.assertTrue(time() - start_time < 0.05, "Cached entry was not used")
Example 4
    def get_instruments(self, instrument_map, get_params):
        ids_to_retrieve = set()

        for url in get_params:
            # Check if instrument is in the cache before querying Robinhood
            data = Cache.get(url)
            if data:
                instrument = self.instrument_class()(**data)
                self.set_instrument(instrument_map, instrument)
            else:
                instrument_id = get_params[url]
                ids_to_retrieve.add(instrument_id)

        # Perform a batch query for the rest of the ids
        if ids_to_retrieve:
            retrieved_instruments = self.instrument_class().search(
                ids=ids_to_retrieve)
            for instrument in retrieved_instruments:
                self.set_instrument(instrument_map, instrument)

                # Cache results of both a resource get and a search query
                Cache.set(instrument.url, instrument.data)
                search_url = self.build_search_url(
                    self.get_search_params(instrument.identifier()))
                Cache.set(search_url, {'results': [instrument.data]})
Example 5
    def search_instruments(self, instrument_map, search_params):
        search_jobs = {}

        # Initiate a thread pool for these requests.
        # Using a shared thread pool can cause hanging
        # when using a multi-process runner such as uwsgi.
        with thread_pool(10) as pool:
            for identifier in search_params:
                instrument = None
                params = search_params[identifier]
                search_url = self.build_search_url(params)

                data = Cache.get(search_url)
                if data:
                    if 'results' in data:
                        cached_instruments = [
                            self.instrument_class()(**d)
                            for d in data['results']
                        ]
                        if len(cached_instruments) > 1:
                            cached_instruments = self.filter_results(
                                cached_instruments, params)
                        if len(cached_instruments) == 1:
                            instrument = cached_instruments[0]
                    else:
                        instrument = self.instrument_class()(**data)

                if instrument:
                    self.set_instrument(instrument_map, instrument, identifier)
                else:
                    search_jobs[identifier] = pool.call(
                        self.instrument_class().search, **params)

        for identifier in search_jobs:
            params = search_params[identifier]
            search_job = search_jobs[identifier]
            retrieved_instruments = search_job.get()
            search_url = self.build_search_url(params)

            # Cache results for the search query
            Cache.set(search_url,
                      {'results': [i.data for i in retrieved_instruments]})

            matching_instruments = self.filter_results(retrieved_instruments,
                                                       params)

            if len(matching_instruments) == 0:
                raise NotFoundException("No {}s found for {}".format(
                    self.TYPE, identifier))
            elif len(matching_instruments) > 1:
                raise Exception(
                    "Multiple possible {}s found for {}, could not select a unique one"
                    .format(self.TYPE, identifier))

            instrument = matching_instruments[0]
            self.set_instrument(instrument_map, instrument, identifier)

            # Cache results for the resource query
            Cache.set(instrument.url, instrument.data)
Example 6
    def fetch_ornament_for_cart(cls, ornament_id):
        cart_item_byte = Cache.get(key='cart_item')
        cart_item = json.loads(
            cart_item_byte.decode()) if cart_item_byte else {}
        if cart_item.get(ornament_id):
            raise ItemAlreadyExistInCart(message="Item already exists in cart")

        ornament = Ornament.get_ornament_by_id(_id=ornament_id)
        category = Category.get_category_by_id(
            category_id=ornament.category_id)
        ornament_dict = dict(id=ornament.id,
                             name=ornament.name,
                             weight=ornament.weight,
                             wastage=ornament.wastage,
                             making_charge=ornament.making_charge,
                             category_id=ornament.category_id,
                             category_name=category.name,
                             category_material=category.material)
        cart_item[ornament_id] = ornament_dict
        Cache.set(key="cart_item", value=json.dumps(cart_item))
        return ornament_dict
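A hedged companion sketch for reading the cart back out, reusing only the Cache and json calls shown above; the method name fetch_cart_items is hypothetical:

    @classmethod
    def fetch_cart_items(cls):
        """Hypothetical companion: return the whole cart from the cache."""
        cart_item_byte = Cache.get(key='cart_item')
        # Keys come back as strings after the json.dumps/json.loads round
        # trip above, so callers should look items up by str(ornament_id).
        return json.loads(cart_item_byte.decode()) if cart_item_byte else {}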
Example 7
class Dropbox(Strategy):
    """Backup strategy for Dropbox."""
    NAME = 'Dropbox'
    TYPE = 'dropbox'
    cache = None
    dbx = None

    def start_backup(self):
        """
        Start backup.
        """
        self.cache = Cache(self.alias)

        self.dbx = dropbox.Dropbox(self.config.get('token'))

        # Get files recursively
        self.get_children()

    def get_children(self, path=''):
        """
        Get items in directory.

        @param string path (optional)
        """
        res = self.dbx.files_list_folder(path=path)

        for entry in res.entries:
            if self.check_if_excluded(entry.path_display):
                continue
            elif isinstance(entry, dropbox.files.FolderMetadata):
                self.get_children(entry.path_display)
            else:
                destination = os.path.join(self.backup_path,
                                           entry.path_display.strip('/'))
                self.logger.info(entry.path_display)

                content_hash = self.cache.get(entry.path_display)

                if not os.path.isfile(
                        destination) or content_hash != entry.content_hash:
                    self.cache.set(entry.path_display, entry.content_hash)
                    self.download(entry.path_display, destination)

    def download(self, dropbox_path, destination):
        """
        Download file.

        @param string dropbox_path
        @param string destination
        """
        parent = os.path.dirname(destination)

        if not os.path.exists(parent):
            os.makedirs(parent)

        with open(destination, 'wb+') as fout:
            _metadata, res = self.dbx.files_download(path=dropbox_path)
            fout.write(res.content)

    def check_if_excluded(self, path):
        """
        Check if path is excluded.

        @param string path
        @return boolean
        """
        for pattern in self.config.get('exclude'):
            if re.match(pattern, path):
                return True

        return False
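Note that check_if_excluded relies on re.match, which anchors each pattern at the start of the path, so bare prefixes behave like path filters. A small self-contained illustration (the patterns and paths are made up):

import re

exclude_patterns = [r'/Private', r'.*\.tmp$']

for path in ['/Private/notes.txt', '/Work/report.tmp', '/Work/report.pdf']:
    excluded = any(re.match(p, path) for p in exclude_patterns)
    print(path, '->', 'excluded' if excluded else 'kept')

# /Private/notes.txt -> excluded  (prefix pattern, anchored at the start)
# /Work/report.tmp -> excluded
# /Work/report.pdf -> kept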
Example 8
    def request(cls, resource_url, **params):
        request_url = ApiResource.__request_url(resource_url, **params)
        data = None

        if ApiResource.enable_mock and request_url in ApiResource.mock_results:
            # Load the mocked value
            data = ApiResource.mock_results[request_url]
            return data

        if cls.enable_cache:
            # Check if we have a cache hit first
            data = Cache.get(request_url)
            if data:
                return data

        if ApiResource.enable_mock:
            # We have not mocked out a request for this resource, raise an error
            raise NotFoundException(f"Mocking is currently enabled, but Robinhood request has not been mocked: {request_url}")

        headers = {}

        if cls.authenticated:
            access_token = ApiResource.authenticate()

            headers['Authorization'] = 'Bearer ' + access_token

        attempts = 3

        while True:
            attempts -= 1
            try:
                start_time = time()
                #print_req(request_url)
                response = requests.get(request_url, headers=headers)
            except requests.exceptions.ConnectionError:
                # Happens occasionally, retry
                if attempts > 0:
                    logger.warning("Connection error, retrying")
                else:
                    raise ApiInternalErrorException(0, "Repeated connection errors when trying to call Robinhood")
                continue

            if response.status_code == 200:
                data = response.json()
                if cls.enable_cache:
                    # Cache response. Only successful calls are cached.
                    Cache.set(request_url, data, cls.cache_timeout)
                return data
            elif response.status_code == 400:
                message = "{} (request URL: {})".format(response.text, request_url)
                raise ApiBadRequestException(message)
            elif response.status_code == 401:
                if cls.authenticated:
                    if attempts > 0:
                        # Credentials may have expired, try reauthenticating
                        access_token = ApiResource.authenticate(force_reauth=True)
                        headers['Authorization'] = 'Bearer ' + access_token
                        continue
                    else:
                        raise ApiUnauthorizedException("Authentication credentials were not accepted")
                else:
                    raise ApiUnauthorizedException("This API endpoint requires authentication: {}".format(cls.endpoint_path))
            elif response.status_code == 403:
                if attempts > 0:
                    # Credentials may have expired, try reauthenticating
                    access_token = ApiResource.authenticate(force_reauth=True)
                    headers['Authorization'] = 'Bearer ' + access_token
                    continue
                else:
                    raise ApiForbiddenException("Not authorized to access this resource: {}".format(request_url))
            elif response.status_code == 404:
                return None
            elif response.status_code >= 500:
                # Internal server error, retry if possible
                if attempts <= 0:
                    raise ApiInternalErrorException(response.status_code, response.text)
            else:
                raise ApiCallException(response.status_code, response.text)
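The retry loop above folds connection errors, 5xx responses and re-authentication into a single while loop. Stripped down to just the transient-failure handling, the pattern looks roughly like this (get_with_retries and the bare requests.get call are illustrative, not part of the original class):

import requests

def get_with_retries(url, attempts=3):
    """Sketch of the retry pattern: retry transient failures, raise otherwise."""
    while True:
        attempts -= 1
        try:
            response = requests.get(url)
        except requests.exceptions.ConnectionError:
            if attempts > 0:
                continue  # transient network error, try again
            raise
        if response.status_code >= 500 and attempts > 0:
            continue      # server-side error, try again
        response.raise_for_status()
        return response.json()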
Example 9
    def authenticate(force_reauth=False):
        if ApiResource.enable_mock:
            # We are about to make an authentication request, which
            # should not occur when we are in mock mode
            raise Exception("Attempting to make authentication request when mocking enabled")

        if not (ApiResource.username and ApiResource.password and ApiResource.device_token and ApiResource.oauth_client_id):
            raise RobinhoodCredentialsException("Attempting to call authenticated endpoint, but one or more Robinhood credentials are missing for this server.")

        # If authentication has already failed, do not try again
        if ApiResource.permanent_auth_failure:
            raise ApiResource.permanent_auth_failure

        # Use locking to make sure that we are not trying to authenticate
        # from several threads at once
        ApiResource.auth_lock.acquire()
        try:
            # We should check the cache's value before our local instance,
            # as another process may have already reauthenticated and set the value
            access_token = Cache.get('auth_access_token') or ApiResource.auth_access_token
            auth_expiration = Cache.get('auth_expiration') or ApiResource.auth_expiration

            if not force_reauth:
                if access_token and auth_expiration and datetime.now() < auth_expiration:
                    return access_token

            refresh_token = Cache.get('auth_refresh_token') or ApiResource.auth_refresh_token

            attempts = 3

            while True:
                attempts -= 1

                if refresh_token and not force_reauth:
                    data = {
                        'grant_type': 'refresh_token',
                        'refresh_token': refresh_token,
                        'client_id': ApiResource.oauth_client_id,
                        'device_token': ApiResource.device_token
                    }
                else:
                    data = {
                        'grant_type': 'password',
                        'expires_in': AUTH_DURATION.total_seconds(),
                        'username': ApiResource.username,
                        'password': ApiResource.password,
                        'client_id': ApiResource.oauth_client_id,
                        'device_token': ApiResource.device_token,
                        'scope': 'internal'
                    }

                try:
                    response = requests.post(AUTH_INFO['url'], headers=AUTH_INFO['headers'], data=json.dumps(data))
                except requests.exceptions.ConnectionError as e:
                    # Occasional error, retry if possible
                    if attempts > 0:
                        sleep(1)
                        continue

                    raise e

                if response.status_code == 200:
                    data = response.json()

                    access_token = data['access_token']
                    refresh_token = data['refresh_token']

                    auth_refresh_duration = AUTH_DURATION / 2
                    auth_expiration = datetime.now() + auth_refresh_duration

                    # Set the access token to expire early to allow the refresh token to be utilized
                    Cache.set('auth_access_token', access_token, auth_refresh_duration.total_seconds())
                    Cache.set('auth_refresh_token', refresh_token, AUTH_DURATION.total_seconds())
                    Cache.set('auth_expiration', auth_expiration)

                    # Define in local memory (we may not have a cache available to us)
                    ApiResource.auth_expiration = auth_expiration
                    ApiResource.auth_access_token = access_token
                    ApiResource.auth_refresh_token = refresh_token

                    return access_token

                if response.status_code >= 500:
                    if attempts > 0:
                        sleep(1)
                        continue

                    raise ApiInternalErrorException(response.status_code, response.text)

                if response.status_code == 429:
                    raise ApiThrottledException(response.text)

                # If we reach this point we've likely received an authentication error
                # Remove cached credentials and force reauthentication
                force_reauth = True
                Cache.delete('auth_access_token')
                Cache.delete('auth_refresh_token')
                Cache.delete('auth_expiration')
                ApiResource.auth_access_token = None
                ApiResource.auth_refresh_token = None
                ApiResource.auth_expiration = None

                if response.status_code == 401:
                    try:
                        response_data = response.json()
                        if 'error' in response_data and response_data['error'] == 'invalid_grant':
                            # Refresh token is no longer valid
                            # Remove it and re-attempt authentication with username/password
                            refresh_token = None
                            continue
                    except ValueError:
                        # Response is not valid JSON, let remaining error logic handle it
                        pass

                # Error codes other than these are considered to be permanent errors,
                # due to invalid credentials or other issues with user-provided credentials.
                if response.status_code == 403:
                    error = ApiForbiddenException("Authentication is required for this endpoint, but credentials are expired or invalid.")
                else:
                    request_details = "\n\tRequest URL: {}\n\tRequest headers: {}\n\tRequest data: {}".format(
                        AUTH_INFO['url'], AUTH_INFO['headers'], data)
                    error = ApiCallException(response.status_code, response.text + request_details)
                ApiResource.permanent_auth_failure = error
                raise error
        finally:
            ApiResource.auth_lock.release()
Example 10
class GoogleDrive(Strategy):
    """Backup strategy for Google Drive."""
    NAME = 'Google Drive'
    TYPE = 'googledrive'
    API_URL = 'https://www.googleapis.com/drive/v3/files'
    cache = None

    def add(self):
        """Add Google Drive account."""
        self.alias = super().add()

        # Show instructions
        self.show_instructions()

        # Parse credentials
        credentials_str = input('Paste content of credentials file: ')
        self.config.set('credentials', json.loads(
            credentials_str)['installed'])

        # Get access code
        code = self.request_code()
        token = self.request_token(code)

        self.config.set('token', token)

    def start_backup(self):
        """Start backup."""
        # Set cache
        self.cache = Cache(self.alias)

        # Backup
        self.get_children()

        # Cleanup
        self.cleanup()

    def cleanup(self):
        """Delete files that have been removed from Drive."""
        all_cached = self.cache.get()

        for item_id, item in list(all_cached.items()):
            if item['last_seen'] != util.startup_time and 'path' in item:
                # Delete file
                Path(item['path']).unlink(missing_ok=True)

                # Remove item from cache
                self.cache.delete(item_id)

    def build_auth_uri(self):
        """Build auth URI for requesting token.

        @return string
        """
        auth_uri = self.config.get('credentials.auth_uri')
        auth_uri += '?response_type=code'
        auth_uri += '&redirect_uri=' + \
            quote_plus(self.config.get('credentials.redirect_uris.0'))
        auth_uri += '&client_id=' + \
            quote_plus(self.config.get('credentials.client_id'))
        auth_uri += '&scope=https://www.googleapis.com/auth/drive.readonly'
        auth_uri += '&access_type=offline'
        auth_uri += '&approval_prompt=auto'

        return auth_uri

    def request_code(self):
        """Request code from auth URI to obtain token.

        @return string
        """
        # Build auth uri
        auth_uri = self.build_auth_uri()

        # Try opening in browser
        webbrowser.open(auth_uri, new=1)

        print()
        print('If your browser does not open, go to this website:')
        print(auth_uri)
        print()

        # Return code
        return input('Enter code: ')

    def execute_request(self, url, headers=None, params=None, method='GET', is_retry=False):
        """Call Drive API.

        @param string url
        @param dict headers
        @param dict params
        @param string method
        @param bool is_retry
        """
        # Avoid mutable default arguments; a shared default dict would keep
        # the Authorization header around between unrelated calls.
        headers = dict(headers) if headers else {}
        params = dict(params) if params else {}

        if self.config.get('token.access_token'):
            # Set Authorization header
            auth_header = {
                'Authorization': 'Bearer {}'.format(self.config.get('token.access_token'))
            }
            headers.update(auth_header)

        # Execute request
        if method == 'GET':
            res = requests.get(url, headers=headers, params=params)
        else:
            res = requests.post(url, headers=headers, data=params)

        # Permission error
        if res.status_code == 401:
            # Maybe the token is expired
            if not is_retry:
                # Refresh token
                self.config.set('token', self.request_token())

                # Re-try request
                return self.execute_request(url, headers, params, method, True)
            else:
                # This is already a retry, don't try again
                raise Exception('Failed to refresh token')

        return {
            'status': res.status_code,
            'headers': res.headers,
            'body': res.json()
        }

    def request_token(self, code=''):
        """Request access token.

        @param string code (optional)
        @return dict
        """
        headers = {
            'Content-Type': 'application/x-www-form-urlencoded'
        }

        params = {
            'client_id': self.config.get('credentials')['client_id'],
            'client_secret': self.config.get('credentials')['client_secret'],
            'redirect_uri': self.config.get('credentials')['redirect_uris'][0],
        }

        if code:
            params['grant_type'] = 'authorization_code'
            params['code'] = code
        else:
            params['grant_type'] = 'refresh_token'
            params['refresh_token'] = self.config.get('token')['refresh_token']

        res = self.execute_request(self.config.get('credentials')[
                                   'token_uri'], headers, params, 'POST')

        if res['status'] == 200:
            if self.config.get('token'):
                res['body']['refresh_token'] = self.config.get('token')[
                    'refresh_token']

            self.config.set('token', res['body'])
            return res['body']
        else:
            raise Exception('Error getting token: ' + str(res['body']))

    def show_instructions(self):
        """Print instructions on how to set up Google Cloud Project."""
        print()
        print('If you already have an OAuth-Client-ID, download the JSON')
        print('Otherwise, here\'s how to get credentials:')
        print('1. Go to https://console.developers.google.com/')
        print('2. Choose or create a project')
        print('3. Activate Photos API here: https://console.developers.google.com/apis/library/photoslibrary.googleapis.com')
        print('4. Open https://console.developers.google.com/apis/credentials/consent')
        print('5. Choose "External"')
        print('6. Enter a name, support email and contact email')
        print('7. Click "Save and continue"')
        print('8. Click "Add or remove scopes"')
        print('9. Select ".../auth/drive.readonly"')
        print('10. Select ".../auth/photoslibrary.readonly"')
        print('11. Click "Save and continue"')
        print('12. Enter yourself as a test user')
        print('13. Click "Save and continue"')
        print('14. Open the credentials page: https://console.developers.google.com/apis/credentials')
        print('15. Click on "Create Credentials" -> OAuth-Client-ID -> Desktop Application')
        print('16. Download the Client ID JSON')
        print()

    def check_if_excluded(self, path):
        """Check if file is to be excluded from download.

        @param string path
        @return boolean
        """
        for pattern in self.config.get('exclude'):
            if re.match(pattern, path):
                return True

        return False

    def is_folder(self, item):
        """Check if item is a Google Folder.

        @param GoogleDriveFile item
        @return boolean
        """
        return item['mimeType'] == 'application/vnd.google-apps.folder'

    def is_google_doc(self, item):
        """Check if item is a Google Doc.

        @param GoogleDriveFile item
        @return boolean
        """
        return item['mimeType'] == 'application/vnd.google-apps.document'

    def is_google_sheet(self, item):
        """Check if item is a Google Spreadsheet.

        @param GoogleDriveFile item
        @return boolean
        """
        return item['mimeType'] == 'application/vnd.google-apps.spreadsheet'

    def is_google_slides(self, item):
        """Check if item is a Google Slidedeck.

        @param GoogleDriveFile item
        @return boolean
        """
        return item['mimeType'] == 'application/vnd.google-apps.presentation'

    def get_children(self, item_id='root', parents=None, page_token=''):
        """Traverse Drive recursively and initiate file download.

        @param string item_id (optional)
        @param list parents (optional)
        @param string page_token (optional)
        """
        parents = parents or []
        path_server = '/' + '/'.join(parents).strip('/')
        path = os.path.join(self.backup_path, path_server.strip('/'))

        params = {
            'q': "'" + item_id + "' in parents",
            'fields': 'nextPageToken,files(id,name,mimeType,modifiedTime,trashed)',
            'pageSize': '100'
        }

        if page_token:
            params['pageToken'] = page_token

        # Build param-string
        params_str = ''

        for key, param in params.items():
            params_str = params_str + key + '=' + param + '&'

        params_str = params_str[:-1].replace(',', '%2C').replace(' ', '+')

        # Send request
        res = self.execute_request(self.API_URL + '?' + params_str)

        items = res['body']['files'] if res['status'] == 200 else []

        for item in items:
            url = self.API_URL + '/' + item['id'] + '?alt=media'
            path_item = os.path.join(path_server, item['name'])
            filename = item['name']

            # Excluded or trashed
            if self.check_if_excluded(path_item):
                continue

            if item['trashed']:
                continue

            # Folders
            if self.is_folder(item):
                self.get_children(item['id'], parents + [item['name']])
                continue

            # Google Docs
            if self.is_google_doc(item):
                url = self.API_URL + '/' + \
                    item['id'] + '/export?mimeType=application/pdf'
                filename = item['name'] + '_converted.pdf'
            # Google Spreadsheets
            elif self.is_google_sheet(item):
                url = self.API_URL + '/' + \
                    item['id'] + '/export?mimeType=application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
                filename = item['name'] + '.xlsx'
            # Google Slides
            elif self.is_google_slides(item):
                url = self.API_URL + '/' + \
                    item['id'] + '/export?mimeType=application/pdf'
                filename = item['name'] + '_converted.pdf'

            # Remember last seen in cache
            self.cache.set(item['id'] + '.last_seen', util.startup_time)

            # Move if moved
            if self.check_if_moved_and_move(item, path, filename):
                continue

            # Download
            if not self.is_backed_up(item, path, filename):
                try:
                    self.download(url, path, filename)

                    # Add to cache
                    self.cache.set(item['id'] + '.modified',
                                   item['modifiedTime'])
                    self.cache.set(item['id'] + '.path',
                                   os.path.join(path, filename))
                except Exception as e:
                    self.logger.error(e)

        if 'nextPageToken' in res['body']:
            self.get_children(item_id, parents, res['body']['nextPageToken'])

    def check_if_moved_and_move(self, item, path, filename):
        """Check if source was simply moved and move if so.
        To determine whether the item has moved check the modified time.
        We can't use 'md5Checksum' here, because Google Docs don't have one.

        @param GoogleDriveFile item
        @param string path
        @param string filename
        @return boolean
        """
        move_source = self.cache.get(item['id'])

        if move_source and 'modified' in move_source and 'path' in move_source:
            move_target = os.path.join(path, filename)

            if move_source['modified'] == item['modifiedTime'] \
                    and move_source['path'] != move_target:

                # Create folder if not exists
                if not os.path.exists(path):
                    os.makedirs(path)

                self.logger.info('Moving {} to {}'.format(
                    move_source['path'], move_target))
                os.rename(move_source['path'], move_target)

                self.cache.set(item['id'] + '.path', move_target)

                return True

        return False

    def is_backed_up(self, item, path, filename):
        """Check if file exists and is newer than on Drive.

        @param GoogleDriveFile item
        @param string path
        @param string filename
        @return boolean
        """
        if os.path.isfile(os.path.join(path, filename)):
            mtime_ts = os.path.getmtime(os.path.join(path, filename))
            mtime_date = datetime.utcfromtimestamp(mtime_ts).isoformat()

            if item['modifiedTime'] < mtime_date:
                return True

        return False

    def download(self, url, path, filename):
        """Download item.

        @param string url
        @param string path
        @param string filename
        """
        # Create folder if not exists
        if not os.path.exists(path):
            os.makedirs(path)

        headers = {
            'Authorization': 'Bearer {}'.format(self.config.get('token')['access_token'])
        }

        # Download file
        self.logger.info('Downloading {}...'.format(
            os.path.join(path, filename)))

        http = urllib3.PoolManager()
        res = http.request('GET', url, headers=headers, preload_content=False)

        if res.status == 200:
            self.logger.info('Downloaded.')

            with open(os.path.join(path, filename), 'wb') as out:
                while True:
                    data = res.read(128)
                    if not data:
                        break
                    out.write(data)

            res.release_conn()
        else:
            raise Exception(
                'Download failed ({}) -> {}'.format(res.status, str(res.data)))
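As an aside, the manual query-string assembly in get_children (joining keys by hand, then patching commas and spaces with str.replace) could be expressed with the standard library. A short sketch using the same parameters:

from urllib.parse import urlencode

params = {
    'q': "'root' in parents",
    'fields': 'nextPageToken,files(id,name,mimeType,modifiedTime,trashed)',
    'pageSize': '100',
}

# urlencode performs the escaping that get_children does with str.replace()
print(urlencode(params))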