def upload_photo(form_data, user_id, user_token, lang, create=True):
    """Upload a chunked image to the private DAM API and attach it to a photo.

    The chunks of a previously stored MyChunkedUpload are streamed to the
    upload endpoint with Content-Range headers, the upload is finalized with
    an md5 checksum, and the photo metadata is then created or updated.

    :param form_data: photo payload; must contain 'upload_id' (popped here)
    :param user_id: id of the requesting user; set as 'owner' on create
    :param user_token: DRF token used for the Authorization header
    :param lang: Accept-Language header value
    :param create: True -> photos/create, False -> photos/partial_update
    """
    api_url = join(settings.WS_BASE_URL, PRIVATE_API_SCHEMA_URL)
    chunk_size = config.PHOTO_UPLOAD_CHUNK_SIZE

    # request headers
    authorization = {'Authorization': 'Token {}'.format(user_token)}
    headers = dict(authorization)
    headers.update({
        'Accept-Language': lang,
        'content_type': MULTIPART_CONTENT
    })
    transports = HTTPTransport(credentials=authorization, headers=headers)
    client = Client(transports=[transports])

    # get api schema (cached per user)
    schema_cache_key = "{}_{}".format(CACHE_SCHEMA_PREFIX_KEY, user_id)
    schema = cache.get(schema_cache_key)
    if not schema:
        schema = client.get(api_url)
        # Fix: original called `cache_set(...)`; the identical schema-caching
        # pattern in DAMWebService.__init__ uses the Django cache API
        # `cache.set` — confirm no local `cache_set` helper was intended.
        cache.set(schema_cache_key, schema)

    # get image to upload
    upload_id = form_data.pop('upload_id')
    image = MyChunkedUpload.objects.get(upload_id=upload_id)
    image_file = image.file
    filename = image.filename

    # request parameters
    data = {'filename': filename}
    offset, chunks = 0, image_file.chunks(chunk_size)
    request_path = ['photos', 'upload', 'update']
    img_id = 0
    for chunk_file in chunks:
        data.update(**{'file': ContentFile(chunk_file)})
        # Content-Range tells the server where this chunk sits in the file
        client.transports[0].headers._data[
            'Content-Range'] = 'bytes {}-{}/{}'.format(
                offset, offset + len(chunk_file), image_file.size)
        response = client.action(schema, request_path, params=data)
        offset = response['offset']
        img_id = response['id']
        # after the first chunk the server has assigned an upload id;
        # subsequent chunks go through the chunk/update endpoint
        data.update({'id': img_id})
        request_path = ['photos', 'upload', 'chunk', 'update']

    # finalize the upload with the whole-file checksum
    request_path = ['photos', 'upload', 'chunk', 'create']
    data.update({'md5': _checksum_file(image_file, chunk_size)})
    client.action(schema, request_path, params=data)

    # Request is not multipart, so we remove the header, otherwise uwsgi
    # doesn't work
    client.transports[0].headers._data.pop('content_type', None)

    # upload photo information
    form_data['image'] = img_id
    form_data['original_file_name'] = filename
    if create:
        form_data['owner'] = user_id
        client.action(schema, ['photos', 'create'], params=form_data)
    else:
        client.action(schema, ['photos', 'partial_update'], params=form_data)
def generate_tavern_yaml(json_path):
    """Fetch an OpenAPI schema and emit tavern YAML for each route group."""
    schema = Client().get(json_path, format="openapi")
    output_yaml(schema.links)
    for route in schema.data:
        output_yaml(schema.data[route], route)
def __init__(self, page_size=None, base_site=None):
    """Build the OLS API root client and its per-resource sub-clients.

    :param page_size: page size for all subsequent list requests;
        falls back to `def_page_size` when falsy
    :param base_site: optional override of the OLS base URL
    """
    # Init client from base Api URI
    # Hacky page size update for all future request to OlsClient
    # NOTE(review): these assign on the CLASS, so they affect every
    # OlsClient instance, not just this one.
    OlsClient.page_size = page_size or def_page_size
    if base_site:
        OlsClient.site = base_site
    # Fetch the HAL root document once; it seeds every list client below.
    document = Client(decoders=[HALCodec()]).get(self.site)
    logger.debug('OlsClient [%s][%s]', document.url, self.page_size)
    # List Clients
    self.ontologies = ListClientMixin('/'.join([self.site, 'ontologies']),
                                      Ontology, document, self.page_size)
    self.terms = ListClientMixin('/'.join([self.site, 'terms']), Term,
                                 document, self.page_size)
    self.properties = ListClientMixin('/'.join([self.site, 'properties']),
                                      Property, document, self.page_size)
    self.individuals = ListClientMixin(
        '/'.join([self.site, 'individuals']), Individual, document,
        self.page_size)
    # Details client
    self.ontology = DetailClientMixin('/'.join([self.site, 'ontologies']),
                                      Ontology)
    self.term = DetailClientMixin('/'.join([self.site, 'terms']), Term)
    self.property = DetailClientMixin('/'.join([self.site, 'properties']),
                                      Property)
    self.individual = DetailClientMixin(
        '/'.join([self.site, 'individuals']), Individual)
    # Special clients
    self.search = SearchClientMixin('/'.join([self.site, 'search']),
                                    OLSHelper, document, self.page_size)
    self.detail = self.ItemClient(self.site)
def __init__(self, request):
    """Authenticated client for the private DAM API.

    Reads the user's token from the cached profile (when present), builds a
    coreapi client with Authorization / Accept-Language headers, and loads
    the API schema, caching it per user.
    """
    super(DAMWebService, self).__init__()
    self.request = request
    self.user_id = request.user.id
    api_url = join(settings.WS_BASE_URL, PRIVATE_API_SCHEMA_URL)

    # initialize api client with user token (when a profile is cached)
    profile_key = "{}_{}".format(CACHE_USER_PROFILE_PREFIX_KEY, self.user_id)
    user_params = cache.get(profile_key)
    authorization = {}
    if user_params and user_params.get('token'):
        authorization = {
            'Authorization': 'Token {}'.format(user_params.get('token'))
        }
    headers = dict(authorization)
    headers['Accept-Language'] = request.META.get('HTTP_ACCEPT_LANGUAGE',
                                                  request.LANGUAGE_CODE)
    transport = HTTPTransport(
        credentials=authorization, headers=headers,
        response_callback=self._callback_client_transport)
    self.client = Client(transports=[transport])

    # get api schema, cached per user
    schema_cache_key = "{}_{}".format(CACHE_SCHEMA_PREFIX_KEY, self.user_id)
    self.schema = cache.get(schema_cache_key)
    if not self.schema:
        self.schema = self.get_or_logout(api_url)
        cache.set(schema_cache_key, self.schema)
def __init__(self, request):
    """Unauthenticated client for the public DAM API; fetches the schema."""
    super(DAMPublicWebService, self).__init__()
    self.request = request
    self.client = Client()
    schema_url = join(settings.WS_BASE_URL, PUBLIC_API_SCHEMA_URL)
    self.schema = self.client.get(schema_url)
def __init__(self, uri, elem_class):
    """
    Remember the target uri and element class, and build a coreapi
    client configured with this class's decoders.

    :param uri: relative uri to base OLS url
    :param elem_class: helper class expected
    """
    self.uri = uri
    self.elem_class = elem_class
    self.client = Client(decoders=self.decoders)
def post(self, request):
    """Register a new user together with a matching OAuth client record.

    :returns: 201 with the serialized registration data on success,
        400 with the serializer errors on invalid input.
    """
    serializer = RegistrationSerializer(data=request.data)
    if not serializer.is_valid():
        # Fix: the original fell through and implicitly returned None on
        # invalid input; DRF views must return an explicit response.
        return Response(serializer.errors,
                        status=status.HTTP_400_BAD_REQUEST)
    data = serializer.data
    user = User.objects.create(username=data['username'])
    user.set_password(data['password'])
    user.save()
    name = user.username
    # OAuth client mirroring the new user; empty secret, confidential type
    client = Client(user=user, name=name, url='' + name,
                    client_id=name, client_secret='', client_type=1)
    client.save()
    return Response(serializer.data, status=status.HTTP_201_CREATED)
def _action(self, fiware_service, fiware_service_path, keys, params=None,
            validate=True):
    """Run a schema-driven action with the given Fiware tenant headers."""
    fiware_headers = {
        'Fiware-Service': fiware_service,
        'Fiware-ServicePath': fiware_service_path,
    }
    client = Client(transports=[HTTPTransport(headers=fiware_headers)])
    schema = client.get(self.schema_endpoint)
    return client.action(schema, keys, params, validate)
def generate_tavern_yaml(json_path):
    """Fetch an OpenAPI schema and scaffold a tavern test package per route.

    For each route group in the schema, a directory (named after the last
    word of the route key) is created with an empty ``__init__.py`` and the
    route's YAML is emitted into it.

    :param json_path: URL or path of the OpenAPI schema document
    """
    client = Client()
    schema = client.get(json_path, format="openapi")
    output_yaml(schema.links)
    for routes in schema.data.keys():
        test_dir = routes.split()[-1]
        # Fix: replaces `os.mkdir(d) if not os.path.isdir(d) else ...`
        # (a ternary abused for side effects, and check-then-create racy);
        # makedirs with exist_ok is idempotent.
        os.makedirs(test_dir, exist_ok=True)
        pathlib.Path(test_dir, "__init__.py").touch()
        output_yaml(schema.data[routes], test_dir)
def makeClient(self, username, password, *args, **kwargs):
    '''
    Build a coreapi client that authenticates via WSSE.

    :param username: username to authenticate as
    :type username: str
    :param password: password to authenticate with
    :type password: str
    :return: client that uses WSSE username/password authentication
    :rtype: coreapi.Client
    '''
    transport = WSSEAuthenticatedHTTPTransport(
        username, password, *args, **kwargs)
    return Client(transports=[transport])
def get_ville_similaire(self):
    """Look up communes matching the typed text against the geo API.

    :returns: a JSON list of candidate cities (id, value, population,
        latitude, longitude), or '' when there is no input or the lookup
        fails (best-effort).
    """
    from itertools import islice  # local import keeps the fix self-contained

    if self.__saisi == '':
        return ''
    try:
        client = Client()  # self.__class__.decoders
        # query the API for communes whose name matches the input
        document = client.get(self.__class__.__api_de_base +
                              'communes?nom={nom}'
                              '&fields=population,centre,departement'
                              '&boost=population'.format(
                                  nom=self.__saisi))
        # Fix: a hand-rolled generator re-implemented itertools.islice;
        # keep at most __limite results.
        raw_cities = list(islice(document, self.__class__.__limite))
        # reshape the API payload into our own schema (id, value, etc.)
        mes_villes = [{
            'id': myid,
            'value': value['nom'],
            'population': value['population'],
            'latitude': value['centre']['coordinates'][1],
            'longitude': value['centre']['coordinates'][0]
        } for myid, value in enumerate(raw_cities)]
        # remember the last list (module-level holder created at load time)
        je_garde.set_last_list(mes_villes)
        return json.dumps(mes_villes)
    except Exception:
        # deliberate best-effort: any API/parsing failure -> "no results"
        return ''
def __init__(self, host, port, user, token, params_callback=None,
             message_callback=None):
    """Build a token-authenticated HTTPS command client and load its schema."""
    self.base_url = self.get_service_url(host, port)
    token_auth = auth.CommandClientTokenAuthentication(
        user=user, token=token, scheme='Token', domain='*')
    # https only
    transport = transports.CommandHTTPSTransport(
        auth=token_auth,
        params_callback=params_callback,
        message_callback=message_callback)
    decoder_list = [
        codecs.CoreJSONCodec(),  # application/vnd.coreapi+json
        codecs.JSONCodec()       # application/json
    ]
    self.client = Client(decoders=decoder_list, transports=[transport])
    self.schema = self.client.get(self.base_url)
def __init__(self, base_url) -> None:
    """Remember the API root and build a client with this class's decoders."""
    super().__init__()
    self.base_url = base_url
    self.client = Client(decoders=self.decoders)
document = Document(title='new', content={'new': 123}) elif link.action in ('put', 'post'): if params is None: params = {} document = Document(title='new', content={ 'new': 123, 'foo': params.get('foo') }) else: document = None return _handle_inplace_replacements(document, link, link_ancestors) client = Client(transports=[MockTransport()]) @pytest.fixture def doc(): return Document(title='original', content={ 'nested': Document( content={ 'follow': Link(url='mock://example.com', action='get'), 'action': Link(url='mock://example.com', action='post', transform='inplace',
from Bio import SeqIO import sys import os from collections import OrderedDict from requests.exceptions import ConnectionError try: inputfile = open(sys.argv[1], 'r') # to get the last argument on command line filename = sys.argv[-1] #outputfile name to store the results outputfilename = os.path.splitext(filename)[0] + '.txt' outputfilename1 = os.path.splitext(filename)[0] + 'finaltable' + '.txt' try: #Connecting to JASPAR client = Client() document = client.get('http://jaspar.genereg.net/api/v1/docs/') except ConnectionError as e: print e print "Database is not responding. Try again later. " print "Profile Inference Search Started....." for record in SeqIO.parse(inputfile, "fasta"): recordseq = record.seq action = ["infer", "read"] params = { "sequence": '%s' % recordseq, } result = client.action(document, action, params=params) data1 = record.id + "#" + str(result)
def test_keys_should_access_a_link(doc):
    # A key path resolving to a plain value (not a Link) must raise.
    with pytest.raises(LinkLookupError):
        Client().action(doc, 'dict')
def test_keys_should_be_valid_indexes(doc):
    # A key that does not exist in the document must raise.
    with pytest.raises(LinkLookupError):
        Client().action(doc, 'dummy')
def test_keys_should_be_a_list_of_strings_or_ints(doc):
    # Key-path elements must be strings or ints; a dict is rejected.
    with pytest.raises(TypeError):
        Client().action(doc, ['nested', {}])
def auth_token(self, app_key):
    """Rebuild the client using 'Token'-scheme authentication with app_key."""
    token_auth = auth.TokenAuthentication(token=app_key, scheme="Token")
    self.client = Client(auth=token_auth, decoders=self.decoders)
import requests
from coreapi import Client
from coreapi.auth import TokenAuthentication

# Equivalent shell call:
#   http post http://localhost:9000/timeside/api-token-auth/ username=admin password=admin

# Obtain a DRF auth token for the demo account.
url = 'http://localhost:9000/timeside/api-token-auth/'
credentials = {'username': '******', 'password': '******'}
response = requests.post(url, data=credentials)
token = response.json()['token']

# coreapi client configured with the right token
auth = TokenAuthentication(
    scheme='Token',
    token=token
)
client = Client(auth=auth)

# testing several requests to the TimeSide core API
schema = client.get('http://localhost:9000/timeside/api/schema/')

keys = ['api', 'items', 'create']
params = {'title': 'fooTest'}
client.action(schema, keys, params)

keys = ['api', 'items', 'list']
data = client.action(schema, keys)
for item in data:
    print(item['title'] + ' ' + item['uuid'])
def test_keys_should_be_a_list_or_string(doc):
    # The key path itself must be a list or a string; a bool is rejected.
    with pytest.raises(TypeError):
        Client().action(doc, True)