def file(request, id):
    """
    Import the JSON contents of an uploaded Read (pk=id) into a new or
    existing silo, or display the table-selection form on GET / on form
    errors.
    """
    if request.method == 'POST':
        form = UploadForm(request.POST)
        if form.is_valid():
            read_obj = Read.objects.get(pk=id)
            user = User.objects.get(username__exact=request.user)
            if request.POST.get("new_silo", None):
                silo = Silo(name=request.POST['new_silo'], owner=user,
                            public=False, create_date=timezone.now())
                silo.save()
            else:
                silo = Silo.objects.get(id=request.POST["silo_id"])
            silo.reads.add(read_obj)
            silo_id = silo.id
            # TODO: a malformed JSON file currently propagates a ValueError;
            # consider catching it and redirecting with a friendly message.
            data = json.load(read_obj.file_data)
            saveDataToSilo(silo, data, read_obj)
            return HttpResponseRedirect('/silo_detail/%s/' % silo_id)
        else:
            # BUG FIX: "str" + form.errors raised TypeError (ErrorDict is
            # not a string); interpolate it into the message instead.
            messages.error(
                request,
                "There was a problem with reading the contents of your file"
                " %s" % form.errors)

    user = User.objects.get(username__exact=request.user)
    # Get all of the silo info to pass to the form.
    get_silo = Silo.objects.filter(owner=user)

    # Display the form for the user to choose a table or enter a new table
    # name to import data into.
    return render(request, 'read/file.html', {
        'read_id': id,
        'form_action': reverse_lazy("fileupload", kwargs={"id": id}),
        'get_silo': get_silo,
    })
def handle(self, *args, **options):
    """
    Fetch each requested read from ONA and save its data into the given
    silo.

    Raises CommandError when the silo does not exist or a read id does not
    belong to the given user.
    """
    silo_id = options['silo_id']
    username = options['username']
    user = User.objects.get(username__exact=username)
    reads = Read.objects.filter(owner=user)

    try:
        silo = Silo.objects.get(pk=silo_id)
    except Silo.DoesNotExist:
        raise CommandError('Silo "%s" does not exist' % silo_id)

    for read_id in options['read_ids']:
        try:
            # BUG FIX: the original used reads.filter(pk=read_id)[0], which
            # raises IndexError (not Read.DoesNotExist) when nothing matches,
            # so the CommandError branch below could never fire.
            read = reads.get(pk=read_id)
        except Read.DoesNotExist:
            raise CommandError('Read "%s" does not exist for user, %s'
                               % (read_id, user.username))

        # Fetch the data from ONA.
        ona_token = ThirdPartyTokens.objects.get(user=user.pk, name="ONA")
        response = requests.get(
            read.read_url,
            headers={'Authorization': 'Token %s' % ona_token.token})
        data = json.loads(response.content)
        saveDataToSilo(silo, data, read, user)
        self.stdout.write(
            'Successfully fetched the READ_ID, "%s", from database' % read_id)
def storeCommCareData(conf, data):
    """
    Clean fetched CommCare rows (numeric coercion per the silo's column
    types, then key cleaning) and write them into the label_value_store
    Mongo collection according to conf['download_type'] / conf['update'].
    """
    data_refined = []
    try:
        fieldToType = getColToTypeDict(Silo.objects.get(pk=conf['silo_id']))
    except Silo.DoesNotExist:
        fieldToType = {}

    for row in data:
        # BUG FIX: iterate over a snapshot of the keys; popping/inserting
        # keys while iterating the dict itself raises RuntimeError.
        for column in list(row.keys()):
            if fieldToType.get(column, 'string') == 'int':
                try:
                    row[column] = int(row[column])
                except ValueError:
                    # skip this one
                    # add message that this is skipped
                    continue
            if fieldToType.get(column, 'string') == 'double':
                try:
                    row[column] = float(row[column])
                except ValueError:
                    # skip this one
                    # add message that this is skipped
                    continue
            row[cleanKey(column)] = row.pop(column)
        data_refined.append(row)

    client = MongoClient(settings.MONGODB_URI)
    db = client.get_database(settings.TOLATABLES_MONGODB_NAME)

    if conf['download_type'] == 'commcare_form':
        for row in data_refined:
            row['edit_date'] = timezone.now()
            row['silo_id'] = conf['silo_id']
            row['read_id'] = conf['read_id']
        db.label_value_store.insert_many(data_refined)
    elif conf['update']:
        if conf['download_type'] == 'case':
            for row in data_refined:
                row['edit_date'] = timezone.now()
                db.label_value_store.update(
                    {'silo_id': conf['silo_id'], 'case_id': row['case_id']},
                    {"$set": row}, upsert=True)
        elif conf['download_type'] == 'commcare_report':
            silo = Silo.objects.get(pk=conf['silo_id'])
            read = Read.objects.get(pk=conf['read_id'])
            # Reports are replaced wholesale: clear then re-save.
            db.label_value_store.delete_many({'silo_id': conf['silo_id']})
            saveDataToSilo(silo, data_refined, read)
    else:
        for row in data_refined:
            row["create_date"] = timezone.now()
            row["silo_id"] = conf['silo_id']
            row["read_id"] = conf['read_id']
        # CONSISTENCY: insert() is deprecated in PyMongo; use insert_many()
        # as the commcare_form branch above already does.
        db.label_value_store.insert_many(data_refined)
def test_export_csv(self):
    """Create a silo via CustomFormViewSet, load rows, and verify the CSV
    export contains the header and the data."""
    self.tola_user.user.is_staff = True
    self.tola_user.user.is_superuser = True
    self.tola_user.user.save()

    # Build the silo that will hold the data.
    program = factories.WorkflowLevel1(
        organization=self.tola_user.organization)
    field_defs = [
        {'name': 'color', 'type': 'text'},
        {'name': 'type', 'type': 'text'},
    ]
    payload = {
        'name': 'Export Test',
        'description': 'This is a test.',
        'fields': json.dumps(field_defs),
        'level1_uuid': program.level1_uuid,
        'tola_user_uuid': self.tola_user.tola_user_uuid,
    }
    create_request = self.factory.post('', data=payload)
    create_request.user = self.tola_user.user
    create_view = CustomFormViewSet.as_view({'post': 'create'})
    create_response = create_view(create_request)
    self.assertEqual(create_response.status_code, 201)

    # Keep references around for the tearDown.
    silo_id = create_response.data['id']
    silo = Silo.objects.get(id=silo_id)
    read = silo.reads.all()[0]

    # Load three rows into the silo.
    rows = [
        {'color': 'black', 'type': 'primary'},
        {'color': 'white', 'type': 'primary'},
        {'color': 'red', 'type': 'primary'},
    ]
    util.saveDataToSilo(silo, rows, read)

    # Export the silo to CSV and inspect the payload.
    export_request = self.factory.get('')
    export_request.user = self.tola_user.user
    export_response = views.export_silo(export_request, silo_id)
    self.assertEqual(export_response.status_code, 200)
    self.assertIn('color,type', export_response.content)
    self.assertIn('black,primary', export_response.content)
def handle(self, *args, **options):
    """
    Autopull ONA form data into every silo whose reads are configured with
    the given frequency ('daily' or 'weekly'), skipping (and logging) any
    read whose token lookup, fetch, or save fails.
    """
    frequency = options['frequency']
    if frequency != "daily" and frequency != "weekly":
        return self.stdout.write(
            "Frequency argument can either be 'daily' or 'weekly'")

    silos = Silo.objects.filter(
        unique_fields__isnull=False,
        reads__autopull_frequency__isnull=False,
        reads__autopull_frequency=frequency).distinct()
    read_type = ReadType.objects.get(read_type="ONA")

    for silo in silos:
        reads = silo.reads.filter(type=read_type.pk)
        for read in reads:
            try:
                ona_token = ThirdPartyTokens.objects.get(
                    user=silo.owner.pk, name="ONA")
            # ROBUSTNESS: a missing token previously crashed the entire
            # command run; skip this read just like the duplicate-token case.
            except (ThirdPartyTokens.DoesNotExist,
                    MultipleObjectsReturned) as e:
                self.logger.error(
                    "get_all_ona_forms token error: silo_id=%s, read_id=%s"
                    % (silo.pk, read.pk))
                self.logger.error(e)
                continue
            try:
                response = requests.get(read.read_url, headers={
                    'Authorization': 'Token %s' % ona_token.token
                }, timeout=10)
            except Timeout:
                self.logger.error(
                    "get_all_ona_forms timeout error: silo_id=%s, read_id=%s"
                    % (silo.pk, read.pk))
                continue
            data = json.loads(response.content)
            try:
                saveDataToSilo(silo, data, read, silo.owner.pk)
                self.stdout.write(
                    'Successfully fetched the READ_ID, "%s", from ONA'
                    % read.pk)
            except TypeError as e:
                self.logger.error(
                    "get_all_ona_forms type error: silo_id=%s, read_id=%s"
                    % (silo.pk, read.pk))
                self.logger.error(e)
            except UnicodeEncodeError as e:
                self.logger.error(
                    "get_all_ona_forms unicode error: silo_id=%s, read_id=%s"
                    % (silo.pk, read.pk))
                self.logger.error(e)
def handle(self, *args, **options):
    """
    Autopull ONA form data into every silo whose reads are configured with
    the given frequency ('daily' or 'weekly').
    """
    frequency = options['frequency']
    if frequency != "daily" and frequency != "weekly":
        return self.stdout.write(
            "Frequency argument can either be 'daily' or 'weekly'")

    silos = Silo.objects.filter(
        unique_fields__isnull=False,
        reads__autopull_frequency__isnull=False,
        reads__autopull_frequency=frequency).distinct()
    read_type = ReadType.objects.get(read_type="ONA")

    for silo in silos:
        reads = silo.reads.filter(type=read_type.pk)
        for read in reads:
            ona_token = ThirdPartyTokens.objects.get(
                user=silo.owner.pk, name="ONA")
            response = requests.get(
                read.read_url,
                headers={'Authorization': 'Token %s' % ona_token.token})
            data = json.loads(response.content)
            # BUG FIX: pass the read so saved rows carry a read_id,
            # matching every other saveDataToSilo call site.
            saveDataToSilo(silo, data, read)
            self.stdout.write(
                'Successfully fetched the READ_ID, "%s", from ONA' % read.pk)
def process_silo(self, silo_id, read_id):
    """
    Import a CSV read into a silo, tracking progress through the read's
    CeleryTask record (IN_PROGRESS -> FINISHED/FAILED).
    """
    silo = Silo.objects.get(id=silo_id)
    read_obj = Read.objects.get(pk=read_id)
    ctype = ContentType.objects.get_for_model(Read)
    task = CeleryTask.objects.get(content_type=ctype, object_id=read_obj.id)
    task.task_status = CeleryTask.TASK_IN_PROGRESS
    task.save()
    try:
        reader = CustomDictReader(read_obj.file_data)
        saveDataToSilo(silo, reader, read_obj)
        task.task_status = CeleryTask.TASK_FINISHED
    # Python 3 compatible except syntax (was "except TypeError, e").
    except TypeError as e:
        logger.error(e)
        task.task_status = CeleryTask.TASK_FAILED
    # BUG FIX: persist the final status -- it was assigned but never saved,
    # so tasks stayed stuck at IN_PROGRESS in the database.
    task.save()
def save_data(self, request):
    """
    Persist user input data into an existing silo.

    Returns 400 when the payload is empty or incomplete, 404 when the silo
    is unknown or holds no rows, 200 on success.
    """
    if not request.data:
        return Response({'detail': 'No data sent.'},
                        status=status.HTTP_400_BAD_REQUEST)

    if 'silo_id' in request.data and 'data' in request.data:
        silo_id = request.data['silo_id']
        data = request.data['data']
    else:
        return Response({'detail': 'Missing data.'},
                        status=status.HTTP_400_BAD_REQUEST)

    # BUG FIX: .get() raises Silo.DoesNotExist for an unknown id, so the
    # original "not silo" check could never run; catch it and return 404.
    try:
        silo = Silo.objects.get(pk=silo_id)
    except Silo.DoesNotExist:
        return Response({'detail': 'Not found.'},
                        status=status.HTTP_404_NOT_FOUND)

    lvs = LabelValueStore.objects(silo_id=silo_id).count()
    if not lvs:
        return Response({'detail': 'Not found.'},
                        status=status.HTTP_404_NOT_FOUND)

    saveDataToSilo(silo, [data], silo.reads.first())
    return Response(status=status.HTTP_200_OK)
def save_data(self, request):
    """Persist one row of user-entered data into an existing silo."""
    if not request.data:
        return Response({'detail': 'No data sent.'},
                        status=status.HTTP_400_BAD_REQUEST)

    # Both keys must be present before we touch the database.
    if 'silo_id' not in request.data or 'data' not in request.data:
        return Response({'detail': 'Missing data.'},
                        status=status.HTTP_400_BAD_REQUEST)

    silo_id = request.data['silo_id']
    row = request.data['data']

    try:
        target_silo = Silo.objects.get(pk=silo_id)
    except Silo.DoesNotExist:
        return Response({'detail': 'Not found.'},
                        status=status.HTTP_404_NOT_FOUND)

    saveDataToSilo(target_silo, [row], target_silo.reads.first())
    return Response({'detail': 'It was successfully saved.'},
                    status=status.HTTP_200_OK)
def handle(self, *args, **options):
    """
    Autopull ONA form data into every silo whose reads are configured with
    the given frequency ('daily' or 'weekly').
    """
    frequency = options['frequency']
    if frequency != "daily" and frequency != "weekly":
        return self.stdout.write(
            "Frequency argument can either be 'daily' or 'weekly'")

    silos = Silo.objects.filter(
        unique_fields__isnull=False,
        reads__autopull_frequency__isnull=False,
        reads__autopull_frequency=frequency).distinct()
    read_type = ReadType.objects.get(read_type="ONA")

    for silo in silos:
        reads = silo.reads.filter(type=read_type.pk)
        for read in reads:
            ona_token = ThirdPartyTokens.objects.get(user=silo.owner.pk,
                                                     name="ONA")
            # ROBUSTNESS: bound the request and skip a read that times out
            # instead of letting one slow endpoint hang the whole run.
            try:
                response = requests.get(
                    read.read_url,
                    headers={'Authorization': 'Token %s' % ona_token.token},
                    timeout=10)
            except requests.exceptions.Timeout:
                self.stderr.write(
                    'Timed out fetching READ_ID, "%s", from ONA' % read.pk)
                continue
            data = json.loads(response.content)
            saveDataToSilo(silo, data, read, silo.owner.pk)
            self.stdout.write(
                'Successfully fetched the READ_ID, "%s", from ONA' % read.pk)
def importDataFromReads(request, silo, reads):
    """
    Re-import data into *silo* from each of its reads.

    ONA reads are fetched and saved in place; CSV, JSON and GSheet reads
    return early with a (level, message) tuple or a message list for the
    caller to surface.
    """
    for read in reads:
        if read.type.read_type == "ONA":
            ona_token = ThirdPartyTokens.objects.get(
                user=silo.owner.pk, name="ONA")
            response = requests.get(
                read.read_url,
                headers={'Authorization': 'Token %s' % ona_token.token})
            data = json.loads(response.content)
            # BUG FIX: pass the read so rows keep their read_id, consistent
            # with the other saveDataToSilo call sites (return value unused).
            saveDataToSilo(silo, data, read)
        elif read.type.read_type == "CSV":
            return (messages.INFO,
                    "When updating data in a table, its CSV source is ignored.")
        elif read.type.read_type == "JSON":
            result = importJSON(read, request.user, None, None, silo.pk, None)
            return (result[0], result[1])
        elif read.type.read_type == "GSheet Import":
            msgs = import_from_gsheet_helper(request.user, silo.id, None,
                                             read.resource_id)
            return msgs
def uploadFile(request, id):
    """
    Upload a CSV file (Read pk=id) and save its data into a new or
    existing silo, or display the table-selection form.
    """
    if request.method == 'POST':
        form = UploadForm(request.POST)
        if form.is_valid():
            read_obj = Read.objects.get(pk=id)
            # str(date.today()) already yields 'YYYY-MM-DD'; the original
            # also called strftime() and discarded the result (dead code).
            today = str(datetime.date.today())
            user = User.objects.get(username__exact=request.user)
            if request.POST.get("new_silo", None):
                silo = Silo(name=request.POST['new_silo'], owner=user,
                            public=False, create_date=today)
                silo.save()
            else:
                silo = Silo.objects.get(id=request.POST["silo_id"])
            silo.reads.add(read_obj)
            silo_id = silo.id
            reader = CustomDictReader(read_obj.file_data)
            # BUG FIX: pass read_obj so rows are tagged with a read_id,
            # matching the other saveDataToSilo call sites.
            saveDataToSilo(silo, reader, read_obj)
            return HttpResponseRedirect('/silo_detail/' + str(silo_id) + '/')
        else:
            # BUG FIX: "str" + form.errors raised TypeError (ErrorDict is
            # not a string); interpolate it into the message instead.
            messages.error(
                request,
                "There was a problem with reading the contents of your file"
                " %s" % form.errors)

    user = User.objects.get(username__exact=request.user)
    # Get all of the silo info to pass to the form.
    get_silo = Silo.objects.filter(owner=user)

    # Display the form for the user to choose a table or enter a new table
    # name to import data into.
    return render(request, 'read/file.html', {
        'read_id': id,
        'form_action': reverse_lazy("uploadFile", kwargs={"id": id}),
        'get_silo': get_silo,
    })
def saveAndImportRead(request):
    """
    Save an ONA read if not already in the db and then import its data
    into a new or existing silo.
    """
    if request.method != 'POST':
        return HttpResponseBadRequest(
            "HTTP method, %s, is not supported" % request.method)

    read_type = ReadType.objects.get(read_type="ONA")
    name = request.POST.get('read_name', None)
    url = request.POST.get('read_url', None)
    owner = request.user
    description = request.POST.get('description', None)
    provider = "ONA"

    # Fetch the data from ONA.
    ona_token = ThirdPartyTokens.objects.get(user=request.user, name=provider)
    response = requests.get(
        url, headers={'Authorization': 'Token %s' % ona_token.token})
    data = json.loads(response.content)
    if len(data) == 0:
        # BUG FIX: corrected the "There is not data" typo.
        return HttpResponse("There is no data for the selected form, %s"
                            % name)

    # silo_id of 0 (or a non-integer) means "create a new silo".
    try:
        silo_id = int(request.POST.get("silo_id", None))
        if silo_id == 0:
            silo_id = None
    except Exception:
        return HttpResponse("Silo ID can only be an integer")

    try:
        # get_or_create already persists a newly created row; the original
        # redundant read.save() has been dropped.
        read, read_created = Read.objects.get_or_create(
            read_name=name, owner=owner,
            defaults={'read_url': url, 'type': read_type,
                      'description': description})
    except Exception:
        return HttpResponse("Invalid name and/or URL")

    silo, silo_created = Silo.objects.get_or_create(
        id=silo_id, defaults={"name": name, "public": False, "owner": owner})
    # Attach the read unless it is already attached (covers both the
    # newly-created and the pre-existing silo cases).
    if read not in silo.reads.all():
        silo.reads.add(read)

    # TODO: column-mapping support (show the merge-column form when the
    # fetched data has columns the silo lacks) will be added once the
    # saveDataToSilo refactoring is complete.

    # Import data into this silo; BUG FIX: pass the read so rows are
    # tagged with a read_id, matching the other call sites.
    saveDataToSilo(silo, data, read)
    return HttpResponse(
        "View table data at <a href='/silo_detail/%s' target='_blank'>"
        "See your data</a>" % silo.pk)