def data(request, id):
    """Django view: display the extracted CSV data for one of the user's batches.

    Looks up the batch by primary key (scoped to the requesting user), lazily
    fetches and caches the Captricity CSV results in ApiBatchData on first
    access, then renders the parsed rows.

    NOTE: the ``id`` parameter shadows the builtin, but the name is part of
    the view's keyword interface (Django URL conf), so it is kept.
    """
    batch = None
    batch_data = None
    try:
        batch = ApiBatch.objects.get(user=request.user, id=int(id))
        if not batch.success:
            raise Exception("Batch was not successfull")
        try:
            batch_data = ApiBatchData.objects.get(batch=batch)
        except ApiBatchData.DoesNotExist:
            batch_data = None
        if not batch_data:
            # No cached results yet: pull the CSV from Captricity and store it.
            client = Client(CAPTRICITY_API_TOKEN)
            related_job_id = ast.literal_eval(batch.submit)['related_job_id']
            csv_out = client.read_job_results_csv(related_job_id)
            batch_data = ApiBatchData.objects.create(batch=batch, text=csv_out)
            batch_data.save()
    except Exception:
        # Bug fix: the original handler unconditionally did batch.status = ...,
        # which raised AttributeError when the ApiBatch lookup itself failed
        # (batch still None). Only record the failure if the batch was found.
        if batch is not None:
            batch.status = "Error while extracting data"
            batch.save()
        messages.error(request, "There was an error in the batch")
        return HttpResponseRedirect("/home/")
    # Naive comma split (no quoting support) -- kept for behavioral parity
    # with the original; switch to the csv module if quoted fields appear.
    rows = [line.split(",") for line in batch_data.text.splitlines()]
    messages.success(request, "The data was extracted successfully")
    return render(request, "api/data.html", {"data": rows, "batch": batch})
def api_proxy(request):
    """Proxy a JSON-described call to the Captricity client.

    Expects a JSON request body of the form {"method": "...", "args": [...]},
    invokes the named method on the Client with the given positional args,
    and returns the JSON-encoded result.

    SECURITY NOTE(review): this dispatches an arbitrary, client-supplied
    attribute name via getattr -- consider whitelisting allowed method names.
    """
    client = Client(settings.CAPTRICITY_APPLICATION_TOKEN)
    req = json.loads(request.body)
    method = getattr(client, req['method'], None)
    if method is None or not callable(method):
        # Bug fix: the original called a possibly-None attribute, producing
        # an opaque TypeError / 500. Return an explicit 400 instead.
        return HttpResponse(json.dumps({'error': 'unknown method'}),
                            content_type='application/json', status=400)
    resp = method(*req.get('args', []))
    return HttpResponse(json.dumps(resp), content_type='application/json')
def create_fancy_csv_from_job(job_id, name): # Initialize Captricity Python Client (installation instructions in README # at https://github.com/Captricity/captools) start = time.time() client = Client(api_token=CAP_API_TOKEN) # Read all the Instance Sets associated with this job isets = client.read_instance_sets(job_id) # For each Instance Set, we will pull down all the Shreds and record the # transcribed value and generate a link to the Shred image. all_iset_data = [] fields = {} fields['0'] = 'Form Name' fields['0.5'] = 'Form Name Image Link' for iset in isets: shreds = client.read_instance_set_shreds(iset['id']) iset_data = {} iset_data['0'] = iset['name'] for shred in shreds: if '0.5' not in iset_data: iset_data['0.5'] = 'https://shreddr.captricity.com/api/v1/instance/%s/aligned-image' % shred['instance_id'] # Key on Field id because Field name can be duplicated field_id = shred['field']['id'] iset_data[str(field_id)] = shred['best_estimate'].encode('utf-8') if shred['best_estimate'] else None iset_data[str(field_id + 0.5)] = 'https://shreddr.captricity.com/api/v1/shred/%s/image' % shred['id'] # We'll order export by Field ID, links will be (field_id + 0.5) so they will be next to the Field in CSV fields[str(field_id)] = shred['field']['name'] fields[str(field_id + 0.5)] = shred['field']['name'] + ' Image Link' all_iset_data.append(iset_data) if len(all_iset_data) % 5 == 0: print 'Done with %s Instance Sets from Job %s in %s sec, %s to go' % (len(all_iset_data), job_id, time.time() - start, len(isets) - len(all_iset_data)) # Export all the data as CSV data_out = [fields] + all_iset_data header = sorted(fields.keys()) if job_id in [3968, 4606]: # No depivot for cover page or addenda buffer = open('%s.csv' % name, 'w') else: buffer = StringIO() csv_writer = csv.DictWriter(buffer, header, restval=u'--no data--') csv_writer.writerows(data_out) if job_id in [3968, 4606]: buffer.close() else: buffer.seek(0) depivot_data(csv.reader(buffer), '%s.csv' % name)
def addall(request):
    """Django view: upload all of the requesting user's active images to a
    new Captricity batch and record the outcome in an ApiBatch row.

    Per-image upload failures are best-effort (skipped); the batch-level
    status/submit/success are recorded either way.
    """
    try:
        images = HomeImages.objects.filter(user=request.user, is_active=True)
    except Exception:
        messages.error(request, "Error. Did not find the image object")
        return HttpResponseRedirect("/home/")
    client = Client(CAPTRICITY_API_TOKEN_SUBMIT)
    BATCH_NAME = get_random_batch_name(STORED_BATCH_NAME)
    batch_id = create_or_get_batch(client, BATCH_NAME)
    documents = client.read_documents()
    document_id = filter(lambda x: x['name'] == CAPTRICITY_TEMPLATE_NAME,
                         documents).pop()['id']
    client.update_batch(batch_id, {'documents': document_id,
                                   'name': BATCH_NAME})
    batchObject = ApiBatch.objects.create(name=BATCH_NAME, user=request.user)
    batchObject.save()
    for image in images:
        # NOTE(review): opens image.image.url directly, while the cron
        # variant prefixes PROJECT_PATH -- confirm which path is correct.
        try:
            # Bug fix: the original never closed the file handle (leak).
            with open(image.image.url, 'rb') as f:
                client.create_batch_files(batch_id, {'uploaded_file': f})
            imageObject = ApiBatchImage.objects.create(batch=batchObject,
                                                       image=image)
            imageObject.save()
        except Exception:
            # Best effort per image: one bad upload must not abort the batch.
            pass
    try:
        batchObject.status = status(client, batch_id)
        batchObject.submit = submit(client, batch_id)
        batchObject.success = True
        messages.success(
            request, "All images were successfully added to batch "
            + batchObject.name + " total images " + str(images.count()))
    except Exception as e:
        batchObject.status = str(e)
        batchObject.submit = ""
        batchObject.success = False
        messages.success(
            request, "There was an Error in the batch "
            + batchObject.name + " " + str(e))
    # Bug fix: status/submit/success were never persisted in the original
    # (the sibling cron function saves; this view did not).
    batchObject.save()
    # Bug fix: a Django view must return an HttpResponse; the original
    # returned None, which raises a ValueError in Django.
    return HttpResponseRedirect("/home/")
def uploadAllImages(): from captools.api import Client from home.models import HomeImages from django.contrib.auth.models import User from Captricity.settings import CAPTRICITY_API_TOKEN, BATCH_NAME as STORED_BATCH_NAME, CAPTRICITY_TEMPLATE_NAME from api.views import get_random_batch_name,create_or_get_batch, status, submit from api.models import ApiBatch, ApiBatchImage users = User.objects.all() for user in users: images= HomeImages.objects.filter(user=user,is_active = True) client = Client(CAPTRICITY_API_TOKEN) BATCH_NAME = get_random_batch_name(STORED_BATCH_NAME) batch_id = create_or_get_batch(client,BATCH_NAME) documents = client.read_documents() document_id = filter(lambda x: x['name'] == CAPTRICITY_TEMPLATE_NAME, documents).pop()['id'] client.update_batch(batch_id, { 'documents': document_id, 'name': BATCH_NAME }) batchObject = ApiBatch.objects.create(name=BATCH_NAME,user = user) batchObject.save() for image in images: f = open(PROJECT_PATH+"/../"+image.image.url, 'rb') try: batch_file = client.create_batch_files(batch_id, {'uploaded_file': f}) imageObject = ApiBatchImage.objects.create(batch = batchObject, image = image) imageObject.save() except: pass try: batchObject.status = status(client,batch_id) batchObject.submit = submit(client,batch_id) batchObject.success = True print "Image saved" except Exception, e: batchObject.status = str(e) batchObject.submit = "" batchObject.success = False print "Big werror" batchObject.save()
def mailUser():
    """Notify each user whose batch succeeded and has no cached data yet.

    For every successful ApiBatch lacking an ApiBatchData row, fetch the
    CSV results from Captricity, cache them, and email the user a link to
    the data page. Batches that already have cached data are skipped.
    """
    import ast
    from api.models import ApiBatch, ApiBatchData
    from captools.api import Client
    from Captricity.settings import CAPTRICITY_API_TOKEN
    from django.core.mail import send_mail
    for batch in ApiBatch.objects.filter(success=True):
        cached = ApiBatchData.objects.filter(batch=batch)
        if cached.count() != 0:
            # Data already cached for this batch -- nothing to do.
            continue
        client = Client(CAPTRICITY_API_TOKEN)
        job_id = ast.literal_eval(batch.submit)['related_job_id']
        csv_text = client.read_job_results_csv(job_id)
        record = ApiBatchData.objects.create(batch=batch, text=csv_text)
        record.save()
        body = ('Your data is completed. The link for the data is '
                'http://localhost:8000/api/data/' + str(batch.id) + "/")
        send_mail('Captricity data completed.', body,
                  '*****@*****.**', [batch.user.email],
                  fail_silently=False)
import sys from captools.api import Client FORM_PAGE_0 = 'assets/example_page1.png' FORM_PAGE_1 = 'assets/example_page2.png' if __name__ == '__main__': if len(sys.argv) < 2: print 'You must pass in a job id' sys.exit(0) from api_token import API_TOKEN client = Client(API_TOKEN) job_id = sys.argv[1] if client.read_document(client.read_job(job_id)['document_id'] )['name'] != 'Example School Survey Template': print 'You must choose a job that is using the example school survey template' sys.exit(0) iset = client.create_instance_sets(job_id, {'name': 'New Iset'}) instances = client.read_instance_set_instances(iset['id']) assert len(instances) == 0 instance1 = client.create_instance_set_instances( iset['id'], { 'page_number': '0', 'image_file': open(FORM_PAGE_0, 'rb') }) instance2 = client.create_instance_set_instances(