def upload_annotated_file(request):
    """Handle the upload of an already-annotated file.

    Saves the uploaded file, imports it via ``import_textfile`` and stores
    the result in the session.  Returns a ``JsonResponse`` with
    ``success: 1`` on success, or ``success: 0`` (optionally with an
    ``error_meta`` message) on failure.  The uploaded file is always
    removed from ``upload_location`` afterwards.
    """
    handle_uploaded_file(request.FILES['file_to_analyze'])
    filename = str(request.FILES['file_to_analyze'])
    # Use a context manager so the file handle is closed deterministically
    # (the original leaked an open file object).
    with open(upload_location + filename) as f:
        normalized = check_if_normalized(f.readlines())
    try:
        # Files that are already annotated are assumed to be eligible
        t = import_textfile(upload_location + filename, True, normalized)
        # import_textfile signals a metadata error by returning a string.
        if isinstance(t, str):
            return JsonResponse({'success': 0, 'error_meta': t})
        md5 = get_md5(t)
        try:
            # If this file was uploaded before, reuse its stored
            # normalization flag.  Missing rows are expected and ignored.
            existing = UploadedFile.objects.get(pk=md5)
            t.normalized = existing.normalized
        except Exception:
            # NOTE(review): narrowed from a bare except; ideally this would
            # catch UploadedFile.DoesNotExist only — confirm no other
            # exception type is expected here.
            pass
    finally:
        # Best-effort cleanup of the uploaded file in all code paths.
        os.remove(upload_location + filename)
    if not t:
        return JsonResponse({'success': 0})
    request = set_session(request, t)
    return JsonResponse({'success': 1})
def save_plugin_data(self, request=None):
    """Persist the plugin's uploaded image and move the file into place.

    When the cleaned form data contains an image, the file is handed to
    ``handle_uploaded_file`` and the stored value is replaced with the
    saved result.  Does nothing when no image was provided.
    """
    uploaded = self.cleaned_data.get('image')
    if not uploaded:
        return
    self.cleaned_data['image'] = handle_uploaded_file(uploaded)
def handle(self, username):
    """Process a validated upload form for *username*.

    Returns a dict describing either an update to an existing asset
    (``non_existed: False``) or a newly created one (``non_existed: True``).
    Returns None implicitly when the form is not valid.

    NOTE(review): the source was collapsed onto one line; this indentation
    is a reconstruction.  The existing-asset dict recomputes the owner
    comparison as ``is_existed_owner``, which implies it is returned even
    when the requester is NOT the owner (only the temporary-file save is
    owner-gated) — confirm against callers.
    """
    if self.is_valid():
        info = self.cleaned_data
        # existed asset
        if 'asset' in info:
            if info['asset'].uid.user.username == username:
                # Store temporary file — only the owner may stage a
                # replacement file (is_final=False keeps it provisional).
                handle_uploaded_file(info['file'], info['asset'],
                                     is_final=False)
            return {'non_existed': False,
                    'aid': info['asset'].aid,
                    'owner': info['asset'].uid.user.username,
                    'new_mime_type': info['new_mime_type'],
                    'new_nice_type': info['new_nice_type'],
                    # True when the requester owns the existing asset.
                    'is_existed_owner':
                        info['asset'].uid.user.username == username,
                    'new_keywords': info['tags']}
        # success adding new asset
        asset = Asset()
        asset.populate(username, info)
        # is_final=True: the file is stored permanently for the new asset.
        handle_uploaded_file(info['file'], asset, is_final=True)
        return {'non_existed': True, 'asset': asset, 'aid': asset.aid}
def annotate_uploaded_file(request):
    """Annotate an uploaded (or pasted) text through the NLP pipeline.

    Reads options from ``request.POST`` (paste mode, pasted text,
    normalization checkbox), runs the language-appropriate pipeline on the
    file, imports the annotated result and stores it in the session.
    Returns a ``JsonResponse`` with ``success: 1`` on success, or
    ``success: 0`` (optionally with ``error_meta``) on failure.
    Temporary files and the pipeline work directory are always cleaned up.
    """
    tmp_dir = tempfile.mkdtemp() + "/"  # Used for the pipeline
    normalized = False
    use_paste = False
    pasted_text = None
    for key in request.POST:
        if key == 'use_paste':
            if checkbox_to_bool(request.POST[key]):
                use_paste = True
        elif key == 'pasted_text':
            pasted_text = request.POST[key]
        elif key == 'checkNormalization':
            normalized = checkbox_to_bool(request.POST[key])

    if use_paste:
        # Create a named temp file, then close the OS-level handle before
        # re-opening it by name (the original leaked the descriptor and
        # re-opening an open NamedTemporaryFile fails on some platforms).
        tf = tempfile.NamedTemporaryFile(delete=False)
        tf.close()
        # NOTE(review): pasted_text may still be None here if the client
        # sent use_paste without pasted_text — confirm the form guarantees
        # both fields together.
        with codecs.open(tf.name, 'wb', encoding='utf-8') as f:
            f.write(pasted_text)
        filename = 'paste ' + datetime.now().strftime("%H:%M:%S") + '.txt'
        shutil.move(tf.name, upload_location + filename)
        options = get_optparse(request, upload_location + filename, tmp_dir,
                               custom_filename=filename)
    else:
        handle_uploaded_file(request.FILES['file_to_annotate'])
        filename = str(request.FILES['file_to_annotate'])
        original_filename = filename
        # Non-.txt uploads are converted to UTF-8 text (production only,
        # where unoconv/iconv are available).
        if settings.PRODUCTION and os.path.splitext(filename)[1] != ".txt":
            try:
                base = upload_location + os.path.splitext(filename)[0]
                subprocess.call(
                    ['unoconv', '--format=txt', upload_location + filename])
                # unoconv emits latin-1; re-encode to UTF-8 via iconv into a
                # .txt2 sidecar, then swap it into place.  The with-block
                # closes the output handle (previously leaked).
                with open(base + ".txt2", "w") as stdout_file:
                    subprocess.call(
                        ['iconv', '-f', 'iso-8859-1', '-t', 'utf-8',
                         base + ".txt"],
                        stdout=stdout_file)
                shutil.move(base + ".txt2", base + ".txt")
                filename = os.path.splitext(filename)[0] + ".txt"
            finally:
                # Always drop the original (non-.txt) upload.
                os.remove(upload_location + original_filename)
        options = get_optparse(request, upload_location + filename, tmp_dir)

    # If the user has removed some column, the text can't be used for analysis
    text_eligible = False
    if len(options.columns.split(",")) == 13:
        text_eligible = True

    try:
        if request.session['language'] == 'en':
            annotated_file_path = pipeline_en.run(options)
        else:
            annotated_file_path = pipeline.run(options)
    finally:
        # The pipeline work directory is always discarded.
        shutil.rmtree(tmp_dir)

    @timing
    def import_t(request, annotated_file_path, text_eligible, normalized):
        # The second arg here is whether to use metadata or not
        return import_textfile(request, annotated_file_path,
                               text_eligible, normalized)

    try:
        t = import_t(request, annotated_file_path, text_eligible, normalized)
    finally:
        # Best-effort removal of the annotated intermediate file; it may
        # already be gone, so only filesystem errors are tolerated.
        try:
            os.remove(annotated_file_path)
        except OSError:
            pass

    if not t:
        return JsonResponse({'success': 0})
    # import_textfile signals a metadata error by returning a string.
    if isinstance(t, str):
        return JsonResponse({'success': 0, 'error_meta': t})
    request = set_session(request, t)
    return JsonResponse({'success': 1})