def test_client_configuration_errors(self):
    with self.assertRaises(ConfigurationException):
        Client(None)
    with self.assertRaises(ConfigurationException):
        Client(PLAYGROUND_KEY, api_url=None)
    with self.assertRaises(ConfigurationException):
        Client(PLAYGROUND_KEY, default_locale=None)
    with self.assertRaises(ConfigurationException):
        Client(PLAYGROUND_KEY, api_version=None)
    with self.assertRaises(ConfigurationException):
        Client(PLAYGROUND_KEY, api_version=-1)
def test_gzip_headers_disabled(self):
    client = Client(PLAYGROUND_SPACE, gzip_encoded=False, raise_errors=False)
    self.assertEqual(client._request_headers()['Accept-Encoding'], 'identity')
def process_import(filepath, space_id, content_type_id, api_key):
    client = Client(api_key)
    space = client.entries(space_id)
    with open(filepath) as import_file:
        records = json.load(import_file)
    for record in tqdm(records):
        entry_id = None  # tell server to auto-generate it
        entry_data = {
            'content_type_id': content_type_id,
            'fields': {key: {'en-US': value} for (key, value) in record.items()}
        }
        try:
            entry = space.create(entry_id, entry_data)
        except Exception:
            print('Creation failed', json.dumps(record))
        else:
            try:
                entry.publish()
            except Exception:
                pass
def process_drop(space_id, content_type_id, api_key):
    client = Client(api_key)
    content_type = client.content_types(space_id).find(content_type_id)
    entries = content_type.entries().all({'limit': 1000})
    for entry in tqdm(entries):
        delete_entry(entry)
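# delete_entry() is called above but not defined in this snippet. A minimal
# sketch (assumption) of what such a helper usually does with the CMA SDK:
# unpublish a published entry first, since published entries cannot be deleted.
def delete_entry(entry):
    if entry.is_published:
        entry.unpublish()
    entry.delete()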
def importDataV2(filename, space, environment, token):
    client = Client(token)
    with open(filename, 'r') as source:
        errors = list()
        reader = json.load(source)
        count = 0
        for entry in reader['entries']:
            displayName = entry['fields']['title']['en-US']
            if 'content_type_id' not in entry:
                entry['content_type_id'] = entry['sys']['contentType']['sys']['id']
            print("Importing " + displayName + '...')
            try:
                new_entry = client.entries(space, environment).create(None, entry)
                new_entry.publish()
                count = count + 1
            except Exception:
                print("\tFailed. Moving on.")
                errors.append(displayName)
    print("Import complete. Successfully imported " + str(count) + "/" +
          str(count + len(errors)) + " items.")
    if len(errors) > 0:
        print("\nThe following items failed to create/publish:")
        for err in errors:
            print("\t" + err)
def test_client_not_raise_errors(self):
    client = Client(PLAYGROUND_KEY, raise_errors=False)
    error = client.entries(PLAYGROUND_SPACE, 'master').find('not_here')
    self.assertEqual(error.status_code, 404)
    self.assertEqual(error.response.status_code, 404)
def test_client_with_additional_headers(self):
    client = Client(PLAYGROUND_KEY, raise_errors=False,
                    additional_headers={'fizz': 'buzz'})
    error = client.entries(PLAYGROUND_SPACE, 'master').find('abc123')
    self.assertIn('fizz', error.response.request.headers)
def test_client_with_application_version_only_does_not_include_integration_in_header(self):
    client = Client(PLAYGROUND_SPACE, raise_errors=False, application_version='0.1.0')
    header = client._contentful_user_agent()
    self.assertFalse('app /0.1.0;' in header)
def test_client_with_application_headers(self):
    client = Client(PLAYGROUND_SPACE, raise_errors=False,
                    application_name='foobar', application_version='0.1.0')
    header = client._contentful_user_agent()
    self.assertTrue('app foobar/0.1.0' in header)
def test_client_with_integration_name_only_headers(self):
    client = Client(PLAYGROUND_SPACE, raise_errors=False, integration_name='foobar')
    header = client._contentful_user_agent()
    self.assertTrue('integration foobar;' in header)
    self.assertFalse('integration foobar/;' in header)
def test_client_default_contentful_user_agent_headers(self):
    client = Client(PLAYGROUND_SPACE, raise_errors=False)

    from contentful_management import __version__
    import platform

    expected = [
        'sdk contentful-management.py/{0};'.format(__version__),
        'platform python/{0};'.format(platform.python_version())
    ]
    header = client._contentful_user_agent()
    for e in expected:
        self.assertTrue(e in header)
    self.assertTrue(re.search(r'os (Windows|macOS|Linux)(\/.*)?;', header))
    self.assertTrue('integration' not in header)
    self.assertTrue('app' not in header)
def poll_asset_url(asset_event: AssetCreateEvent, wait_seconds=3, max_retries=20) -> str:
    asset_url = None
    client = Client(os.environ['CMA_TOKEN'])
    for i in range(max_retries):
        print(f"Retrieving asset url: attempt {i}")
        asset = client.assets(asset_event.space_id, asset_event.environment_id).find(
            asset_event.asset_id)
        asset_url = asset.url()
        if asset_url:
            return f"http:{asset_url}"
        print(f"No asset url available on attempt {i}")
        sleep(wait_seconds)
    raise Exception("Could not get asset url")
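# AssetCreateEvent is not defined in this snippet. A minimal sketch (assumption)
# of the shape poll_asset_url() expects, based on the attributes it reads:
from dataclasses import dataclass

@dataclass
class AssetCreateEvent:
    space_id: str
    environment_id: str
    asset_id: str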
def reindex_all(space_id, environment_id):
    al_client = algoliasearch.Client(os.environ['ALGOLIA_APP'], os.environ['ALGOLIA_KEY'])
    # al_client.delete_index('art-assets')
    client = Client(os.environ['CMA_TOKEN'])
    assets = all_assets(space_id, environment_id)
    index = al_client.init_index('art-assets')
    index.set_settings({
        'minWordSizefor1Typo': 5,
        'minWordSizefor2Typos': 10,
    })
    for asset in assets:
        asset_id = asset['sys']['id']
        asset_url = get_asset_url(asset)
        print(asset_id)
        print(asset_url)
        if not asset_url:
            print(f"Asset has no url, skipping: {asset}")
            continue
        url = f"https:{asset_url}"
        response = requests.get(url)
        if int(response.headers.get('Content-Length')) > 5242880:
            print(f"Asset too large for rekognition: {asset}")
            continue
        labels = recognize_binary(response.content)
        index_asset(
            index,
            asset_id,
            space_id,
            url,
            labels,
        )
        print(f"Indexed asset metadata for asset id {asset_id}")
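# all_assets(), get_asset_url(), recognize_binary() and index_asset() are project
# helpers not shown here. A minimal sketch (assumption) of all_assets(), returning
# raw asset dicts that the loop above indexes into with asset['sys']['id']:
def all_assets(space_id, environment_id):
    client = Client(os.environ['CMA_TOKEN'])
    # Fetch up to 1000 assets in one call; larger spaces would need to page with 'skip'.
    return [asset.raw for asset in
            client.assets(space_id, environment_id).all({'limit': 1000})]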
def test_client_headers(self):
    client = Client(PLAYGROUND_SPACE,
                    raise_errors=False,
                    application_name='foobar_app',
                    application_version='1.1.0',
                    integration_name='foobar integ',
                    integration_version='0.1.0')

    from contentful_management import __version__
    import platform

    expected = [
        'sdk contentful-management.py/{0};'.format(__version__),
        'platform python/{0};'.format(platform.python_version()),
        'app foobar_app/1.1.0;',
        'integration foobar integ/0.1.0;'
    ]
    header = client._request_headers()['X-Contentful-User-Agent']
    for e in expected:
        self.assertTrue(e in header)
    self.assertTrue(re.search(r'os (Windows|macOS|Linux)(\/.*)?;', header))
def run():
    if len(sys.argv) < 4:
        helpText()
        sys.exit()

    contentful_space = sys.argv[1]
    contentful_environment = sys.argv[2]
    management_token = sys.argv[3]

    client = Client(management_token)
    space = client.spaces().find(contentful_space)

    question = ("WARNING: This will delete ALL content in the '" + space.name +
                "' space under the '" + contentful_environment +
                "' environment. Are you sure you wish to continue?")
    if not confirmContinue(question):
        sys.exit()

    # API only fetches 100 entries at a time, so keep looping until we don't find any more.
    # This could cause an infinite loop if any entries fail to delete, but hey, this is just a convenience utility anyway.
    while True:
        try:
            print('Checking for entries...')
            entries = client.entries(contentful_space, contentful_environment).all()
            print('Found ' + str(len(entries)) + ' entries.')
            if len(entries) == 0:
                break
            for entry in entries:
                displayName = entry.title if hasattr(entry, 'title') else entry.name if hasattr(entry, 'name') else entry.sys['id']
                if entry.is_published:
                    print("Unpublishing " + displayName + "...")
                    entry.unpublish()
                print("Deleting " + displayName + "...")
                entry.delete()
        except KeyboardInterrupt:
            print("\nInterrupted by user. Exiting.")
            sys.exit()

    print("Complete.")
import os

from contentful_management import Client

PLAYGROUND_KEY = os.environ.get('CF_TEST_CMA_TOKEN', 'foobar')
PLAYGROUND_SPACE = 'facgnwwgj5fe'
PLAYGROUND_ORG = 'some_org'

CLIENT = Client(PLAYGROUND_KEY, gzip_encoded=False)
def test_gzip_headers_enabled(self):
    client = Client(PLAYGROUND_SPACE, raise_errors=False)
    self.assertEqual(client._request_headers()['Accept-Encoding'], 'gzip')
app.config['GITHUB_CLIENT_SECRET'] = GITHUB_CLIENT_SECRET
app.config["CORS_HEADERS"] = "Content-Type"
app.config["PROPAGATE_EXCEPTIONS"] = True

api = Api(app)
db = SQLAlchemy(app)
guard = Praetorian()
mail = Mail(app)
safe_url = URLSafeTimedSerializer(SECRET_KEY)
ma = Marshmallow()
migrate = Migrate(app, db)
CORS(app, supports_credentials=True, resources={r"/*": {"origins": ["*"]}})
github = GitHub(app)
git = Github(GITHUB_ACCESS_TOKEN)
repo = git.get_repo(GITHUB_REPO)
contentful_client = Client(CONTENT_MANGEMENT_API_KEY)
pusher_client = pusher.Pusher(app_id=PUSHER_APP_ID,
                              key=PUSHER_KEY,
                              secret=PUSHER_SECRET,
                              cluster=PUSHER_CLUSTER,
                              ssl=True)

from backend.models import User

guard.init_app(app, User)

from backend.activities.routes import activities_bp
from backend.activity_progresses.routes import activity_progresses_bp
from backend.authentication.routes import authentication_bp
from backend.badges.routes import badges_bp
from backend.cards.routes import cards_bp
def runimport():
    client = Client(management_token)
    with open(filename, 'r') as f:
        reader = csv.reader(f)
        try:
            for row in reader:
                # skip header row
                if row[0] == '':
                    continue

                category = getFunctionalCategory(row[1])
                if category == "Invalid category":
                    print("Invalid category - '" + row[1] + "'. Will not import row " + row[0])
                    continue

                gc_date = None
                if row[20] != '':
                    gc_date = formatDate(row[20])

                revised_date = None
                if row[21] != '':
                    revised_date = formatDate(row[21])

                entry = {
                    "content_type_id": "recordType",
                    "fields": {
                        "recordType": {"en-US": row[2]},
                        "category": {
                            "en-US": {
                                "sys": {
                                    "type": "Link",
                                    "linkType": "Entry",
                                    "id": category
                                }
                            }
                        },
                        "scheduleId": {"en-US": row[3]},
                        "recordTypeDescription": {"en-US": row[4]},
                        "officialCopy": {"en-US": row[5]},
                        "retention": {"en-US": row[6]},
                        "triggerEvent": {"en-US": row[7]},
                        "disposition": {"en-US": row[8]},
                        "dispositionMethod": {"en-US": row[9]},
                        "referenceCopy": {"en-US": row[10]},
                        "referenceCopyDisposition": {"en-US": row[11]},
                        "referenceCopyDispositionMethod": {"en-US": row[12]},
                        "dataClassification": {"en-US": row[13]},
                        "storageRequirements": {"en-US": row[14]},
                        "legalReference": {"en-US": row[15]},
                        "notes": {"en-US": row[16]},
                        "systemOfRecord": {"en-US": row[17]},
                        "generalCounselNotes": {"en-US": row[18]},
                        "dateApprovedByGeneralCounsel": {"en-US": gc_date},
                        "dateRevised": {"en-US": revised_date}
                    }
                }
                # print(entry)
                client.entries(contentful_space, 'master').create(None, entry)
        except csv.Error as e:
            sys.exit('file %s, line %d: %s' % (filename, reader.line_num, e))
def test_client_has_proxy(self):
    self.assertFalse(CLIENT._has_proxy())

    client = Client(PLAYGROUND_KEY, proxy_host='http://foo.com')
    self.assertTrue(client._has_proxy())
from contentful_management import Client
import os, binascii, json

client = Client(
    'CFPAT-b9d0bb66831b4cee396847c0467eace39cd05611526064d7079b3e57653928d6')

space_id = 'wjuty07n9kzp'
environment_id = 'master'
pages = []

with open('streaming-media.json') as f:
    data = json.load(f)

for page_idx, page in enumerate(data):
    new_object = {'categories': []}
    placeholder = page['content'][0]
    for category_idx, category in enumerate(
            page['content'][1]['items']['categories']):
        new_object['categories'].append({})
        if category['category-title'] == placeholder['items']['categories'][
                category_idx]['category-title']:
            new_object['categories'][category_idx]['category-title'] = {
                'en-US': category['category-title']
            }
        else:
            new_object['categories'][category_idx]['category-title'] = {
                'en-US': placeholder['items']['categories'][category_idx]['category-title'],
                'es-US':
def test_client_raw_mode(self):
    client = Client(PLAYGROUND_KEY, raw_mode=True)
    response = client.entries(PLAYGROUND_SPACE, 'master').all()
    self.assertEqual(response.status_code, 200)