def test_store_auth_token(config):
    """Ensure that the AC authentication token can be stored and re-read from the config."""
    # We start with an empty auth_token.
    config.auth_token = None
    helpers.write_config(config)

    # Which should also be persisted.
    config = helpers.read_config(config)
    assert config.auth_token is None

    # Let's try it with an actual token.
    config.auth_token = json.dumps({'username': 'Jack', 'password': 'Daniels'})
    helpers.write_config(config)

    # Reset the relevant part of the configuration.
    config.auth_token = None

    # We should be able to read the stored auth_token.
    config = helpers.read_config(config)
    assert config.auth_token is not None

    token = json.loads(config.auth_token)
    assert token['username'] == 'Jack'
    assert token['password'] == 'Daniels'
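# The tests in this module receive a ``config`` fixture whose real definition is not part
# of this excerpt. The sketch below is only a hedged illustration of the attributes the
# tests above and below rely on (config_file, auth_token, csps, ...); it is an assumption,
# not the actual fixture.
from unittest import mock

import pytest


@pytest.fixture(name='config')
def config_fixture(tmpdir):
    """Minimal sketch (assumption): a configuration object backed by a temporary file."""
    cfg = mock.Mock()
    cfg.config_file = str(tmpdir.join('config.json'))
    cfg.auth_token = None
    cfg.csps = []
    cfg.policies = []
    cfg.admin_console_csps = []
    return cfg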
def on_storage_directory_deleted(self, _, local_unique_id):
    """Delete the storage from the config and restart the sync graph."""
    logger.info('Detected deletion of storage directory "%s"', local_unique_id)
    config_index, display_name, storage_id = next(
        (index, item['display_name'], item['id'])
        for index, item in enumerate(self.config.csps)
        if item['local_unique_id'] == local_unique_id)
    del self.config.csps[config_index]

    cc.ipc_gui.displayNotification(
        'Storage Provider Deleted',
        description='Your storage provider "{}" has been deleted. Your data is still available'
                    ' on the storage itself'.format(display_name))

    # Write the config and apply it by restarting the graph.
    write_config(self.config)
    self.restart_sync_graph()

    # As a last step, delete the cache directory (otherwise the shutdown of the storage will
    # write it again).
    try:
        storage_cache_dir = get_storage_cache_dir(self.config, storage_id)
        logger.debug('Trying to delete the storage cache "%s"', storage_cache_dir)
        shutil.rmtree(storage_cache_dir)
    except OSError:
        logger.info('Was not able to delete the file (maybe it never existed)', exc_info=True)
def test_delete_storage_provider_credentials(config):
    """Test removing storage provider secrets from the configuration file."""
    # Create a dummy storage provider with no credentials attached.
    config.csps = []
    config.store_credentials_in_config = mock.Mock()

    dummy_credentials = json.dumps({'a': 'b', 'c': 'd'})

    storage = mock.Mock(jars.BasicStorage)
    storage.storage_name = 'Dropbox'
    storage.storage_user_name = 'user1'

    sp_dir = tempfile.mkdtemp()
    auth_data = {
        'storage': storage,
        'credentials': dummy_credentials,
        'identifier': 'db1',
        'display_name': 'Dropbox 1',
        'new_storage_id': 'db.1234',
        'sp_dir': sp_dir
    }

    with mock.patch('cc.synchronization.models.instantiate_storage',
                    return_value=storage):
        helpers.add_csp(config, auth_data)

    helpers.get_storage(config, "db.1234")['credentials'] = dummy_credentials
    assert len(config.csps) == 1
    helpers.write_config(config)

    # Delete the item and ensure that it is no longer stored.
    helpers.delete_credentials_from_config(config, "db.1234")
    assert not helpers.get_credentials_from_config(config, "db.1234")

    # Ensure that we throw an exception if we try to access a non-existing storage.
    with pytest.raises(helpers.ConfigurationStorageNotFound):
        helpers.delete_credentials_from_config(config, "does.not.exist")
def test_read_write_last_known_ac_csps(config):
    """Tests the read and write operations for the last known csps in the config file."""
    csps = ['csp1', 'csp2']
    config.admin_console_csps = []
    config.admin_console_csps.extend(csps)
    helpers.write_config(config)

    config.admin_console_csps = []
    config = helpers.read_config(config)
    assert config.admin_console_csps == csps
def test_encryption_should_work_with_auto_create_while_write(no_configuration_key):
    """Ensure that a key is auto-created by default if not present when calling the write
    operation.
    """
    config, no_configuration_key = no_configuration_key
    config = reset_config(config)
    assert not no_configuration_key

    helpers.write_config(config, config.config_file)

    # Ensure that a key has been written to the keychain.
    assert helpers.get_configuration_key(config, auto_create=False)
def test_fail_encryption_without_configuration_key(mock_get_configuration_key, config, tmpdir):
    """Ensure an exception is thrown when we explicitly don't want a key to be created
    and also do not provide one during the write operation.
    """
    config_file = str(tmpdir.join('tmp.json'))
    with pytest.raises(cc.crypto2.NoKeyError):
        helpers.write_config(config, config_file,
                             auto_create_configuration_key=False)

    mock_get_configuration_key.assert_called_with(
        service=config.APP_NAME, name=config.KEYCHAIN_CONFIGURATION_KEY_NAME)
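# ``mock_get_configuration_key`` is provided as a fixture elsewhere in the suite. A rough,
# assumed sketch of what it could look like: the keychain lookup is patched so no key is
# ever found, which is what forces write_config() above to raise NoKeyError when it is not
# allowed to auto-create one. The patch target and return value below are illustrative
# assumptions, not the real implementation.
from unittest import mock

import pytest


@pytest.fixture(name='mock_get_configuration_key')
def mock_get_configuration_key_fixture():
    """Sketch (assumption): make the keychain report that no configuration key exists."""
    with mock.patch('helpers.get_configuration_key', return_value=None) as patched:
        yield patched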
def on_storage_directory_renamed(self, _, new_name, local_unique_id):
    """Change the config according to the new name."""
    logger.info('Detected rename of storage directory "%s"', new_name)
    config_item = next(item for item in self.config.csps
                       if item['local_unique_id'] == local_unique_id)
    old_name = config_item['display_name']
    config_item['display_name'] = new_name

    cc.ipc_gui.displayNotification(
        'Storage Provider Renamed',
        description='Your storage provider "{}" has been renamed to "{}"'.format(
            old_name, new_name))

    write_config(self.config)
    self.restart_sync_graph()
def test_write_read_csp(config):
    """Test that a csp entry is saved to and read back from the config correctly."""
    testcsp = {
        'id': 'db1',
        'type': 'test',
        'display_name': 'test_1',
        'selected_sync_directories': [['heelo', 'world'], ['foo'], ['bar']]
    }
    config.csps.append(testcsp.copy())
    helpers.write_config(config)

    config.csps = []
    config = helpers.read_config(config)
    assert config.csps[0] == testcsp
def migrate_old_config(self):
    """Migrate the config from the hidden-file strategy to the inode strategy.

    This is done by finding a directory plus a hidden file and saving the inode into the
    local_unique_id field. Afterwards the hidden file is deleted to avoid running this
    procedure again."""
    entry_paths = set()
    logger.debug('Starting migration')
    # First get all directories which also have a hidden file.
    for entry in os.listdir(self.config.sync_root):
        # Every entry is potentially an account.
        entry_path = os.path.join(self.config.sync_root, entry)
        if not os.path.isdir(entry_path):
            continue
        # Try to find an id file in this directory.
        for sub_entry in os.listdir(entry_path):
            sub_entry_path = os.path.join(entry_path, sub_entry)
            if sub_entry.startswith(HIDDEN_FILE_PREFIX):
                splitted_sub_entry = sub_entry.split('_', 1)
                if len(splitted_sub_entry) == 2:
                    logger.info('Found hidden id file, going to migrate')
                    _, storage_id = splitted_sub_entry
                    get_storage(self.config, storage_id)['local_unique_id'] = \
                        os.stat(entry_path).st_ino
                    entry_paths.add(sub_entry_path)
                    break

    if entry_paths:
        write_config(self.config)
        for entry_path in entry_paths:
            logger.info('Deleting old marker entry: %s', entry_path)
            os.unlink(entry_path)
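# For illustration only: a minimal sketch of the marker-file convention migrate_old_config()
# assumes. HIDDEN_FILE_PREFIX and the names here are made-up examples, not values from the
# real product. The marker file is named "<prefix>_<storage_id>", so everything after the
# first underscore is the storage id, and the inode of the containing directory becomes the
# new local_unique_id.
def _marker_convention_sketch():
    """Sketch (assumption) of how a hidden marker file maps to a storage id and an inode."""
    import os

    marker = '.storageid_db.1234'            # e.g. <sync_root>/Dropbox 1/.storageid_db.1234
    _prefix, storage_id = marker.split('_', 1)
    assert storage_id == 'db.1234'
    return os.stat('.').st_ino               # the inode that replaces the marker file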
def test_read_write_policies(config):
    """Tests the read and write operations for policies in the config file."""
    policy = {
        'criteria': 'cad',
        'createdAt': '2016-09-13T11:27:37.534Z',
        'enabled': True,
        '_id': '57d7e2a92107cf1e0375f1d0',
        'name': 'CAD files',
        '__v': 0,
        'type': 'fileextension',
        'updatedAt': '2016-09-13T11:27:37.534Z'
    }
    config.policies = []
    config.policies.append(policy)
    helpers.write_config(config)

    config.policies = []
    config = helpers.read_config(config)
    assert config.policies[0] == policy
def test_read_write_encryption(config):
    """Ensure basic encrypt/decrypt works, that old or invalid tags are tolerated and that
    invalid keys throw exceptions.
    """
    # Fetch the configuration key, creating it if necessary.
    okey = helpers.get_configuration_key(config, auto_create=True)

    # Writing the configuration once should use the existing key and store the current tag.
    helpers.write_config(config)
    old_tag = helpers.get_configuration_tag(config)
    assert old_tag
    assert okey == helpers.get_configuration_key(config, auto_create=False)
    assert okey == helpers.get_configuration_key(config, auto_create=True)

    helpers.write_config(config)
    assert os.path.exists(config.config_file)

    # The tags should be different.
    new_tag = helpers.get_configuration_tag(config)
    assert old_tag != new_tag

    # The keys should stay the same.
    new_key = helpers.get_configuration_key(config, auto_create=True)
    assert new_key == okey

    # Decrypting with the proper key and tag should work.
    assert helpers.read_encrypted_configuration(config, config.config_file, new_key, new_tag)

    # Decrypting with the old tag should still return the configuration.
    read_config = helpers.read_encrypted_configuration(config, config.config_file, new_key,
                                                       old_tag)
    assert 'metadata' in read_config

    # Decrypting with an invalid tag should still return the configuration.
    invalid_tag = b'X' * config.KEYCHAIN_CONFIGURATION_TAG_SIZE
    read_config = helpers.read_encrypted_configuration(config, config.config_file, new_key,
                                                       invalid_tag)
    assert 'metadata' in read_config

    # Decrypting with an invalid key should fail.
    tag = helpers.get_configuration_tag(config)
    with pytest.raises(helpers.ConfigurationError):
        helpers.read_encrypted_configuration(
            config, config.config_file,
            os.urandom(config.KEYCHAIN_CONFIGURATION_KEY_SIZE), tag)
def test_read_write_basic_configuration(no_configuration_key):
    """Tests basic read/write operation using a valid configuration dictionary."""
    config, no_configuration_key = no_configuration_key

    # Ensure that the temporary location is not tainted already.
    assert not os.path.exists(config.config_file)

    # Ensure that there is no configuration key stored in the keychain.
    assert not no_configuration_key

    # Reset the configuration.
    config = reset_config(config)

    # Set up a simple dummy configuration (as stored "in-memory") to check against.
    basic_configuration_with_sp = {
        'general': {
            'sync_root_directory': '/some/local/folder',
            'device_id': '123-456-678-890'
        },
        'dropbox_randomid': {
            'display_name': 'Dropbox',
            'type': 'dropbox',
            'id': 'dropbox_randomid',
            'selected_sync_directories': [{'children': True, 'path': ['test']}],
            'unique_id': 'dbid:2384928349829384'
        }
    }
    config.csps.append(basic_configuration_with_sp.get('dropbox_randomid').copy())

    # General
    config.sync_root = basic_configuration_with_sp['general']['sync_root_directory']
    config.device_id = basic_configuration_with_sp['general']['device_id']

    # Encryption
    config.encryption_enabled = True
    config.encrypt_external_shares = True
    assert config.encrypt_external_shares
    config.encrypt_public_shares = True
    config.encryption_csp_settings = ['dropbox', 'onedrive']

    # Store / Encrypt Configuration
    helpers.write_config(config)
    assert os.path.exists(config.config_file)

    # Reset Configuration
    config = reset_config(config)

    # Re-read / Decrypt Configuration
    config = helpers.read_config(config)

    test_csp = basic_configuration_with_sp.get('dropbox_randomid')
    assert config.csps[0] == test_csp
    assert config.sync_root == basic_configuration_with_sp['general']['sync_root_directory']
    assert config.device_id == basic_configuration_with_sp['general']['device_id']
    assert config.csps[0]['selected_sync_directories']
    assert config.csps[0]['selected_sync_directories'] == test_csp['selected_sync_directories']

    # Ensure encryption settings have all been set to True.
    assert config.encryption_enabled
    assert config.encrypt_public_shares
    assert config.encrypt_external_shares
    assert 'dropbox' in config.encryption_csp_settings
    assert 'cifs' not in config.encryption_csp_settings
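# ``reset_config`` is used by several tests above to clear the in-memory state before a
# re-read, so the assertions prove the values really came from the persisted file. Its real
# definition is not part of this excerpt; the sketch below is an assumption based on the
# attributes the tests reset and check, not the actual helper.
def reset_config(config):
    """Sketch (assumption): wipe the in-memory configuration values before re-reading."""
    config.csps = []
    config.policies = []
    config.admin_console_csps = []
    config.auth_token = None
    config.sync_root = None
    config.device_id = None
    return config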