def migrate_response_attachments_to_blobstorage(context):
    """Upgrade step: convert every PoiIssue response attachment to a
    NamedBlobFile.

    Walks all PoiIssue objects via an unrestricted catalog search, and for
    each non-empty response attachment builds a blob with the same data,
    content type and filename.  Already-converted attachments (instances of
    NamedBlobFile) are counted and skipped.  Totals are logged at the end.
    """
    logger.info('Migrating response attachments to blob storage.')
    catalog = getToolByName(context, 'portal_catalog')
    already_migrated = 0
    migrated = 0
    for brain in catalog.unrestrictedSearchResults(portal_type='PoiIssue'):
        path = brain.getPath()
        try:
            issue = brain.getObject()
        except (AttributeError, ValueError, TypeError):
            # Stale catalog entry: log and move on rather than abort the run.
            logger.warn('Error getting object from catalog for path %s', path)
            continue
        folder = IResponseContainer(issue)
        for id, response in enumerate(folder):
            if response is None:
                # Has been removed.
                continue
            attachment = response.attachment
            if attachment is None:
                continue
            if isinstance(attachment, NamedBlobFile):
                # Already migrated
                logger.debug('Response %d already migrated, at %s.', id, path)
                already_migrated += 1
                continue
            # Old-style attachment: pull metadata with fallbacks, since not
            # all legacy objects expose the same attributes.
            content_type = getattr(attachment, 'content_type', '')
            filename = getattr(attachment, 'filename', '')
            if not filename and hasattr(attachment, 'getId'):
                filename = attachment.getId()
            data = attachment.data
            # Data can be 'nested' in OFS.Image.Pdata.
            if base_hasattr(data, 'data'):
                data = data.data
            filename = safe_unicode(filename)
            try:
                blob = NamedBlobFile(data, contentType=content_type, filename=filename)
            except ConstraintNotSatisfied:
                # Found in live data: a filename that includes a newline...
                logger.info('Trying to normalize filename %s', filename)
                filename = normalize_filename(filename, context.REQUEST)
                logger.info('Normalize to %s', filename)
                blob = NamedBlobFile(data, contentType=content_type, filename=filename)
            response.attachment = blob
            logger.debug('Response %d migrated, at %s.', id, path)
            migrated += 1
    logger.info(
        'Migrated %d response attachments to blobs. '
        '%d already migrated.', migrated, already_migrated)
def test_second_step_import_encoding_form(self):
    """The second-step form renders after importing non-ascii csv data."""
    step = importform.ImportFormSecondStep(self.container,
                                           self.layer["request"])
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = True
    stored["separator"] = u";"
    rows = [
        [u"猫".encode("utf8"), u"èè".encode("utf8"), u"ùù".encode("utf8")],
        ["", u"kéy1".encode("utf8"), u"Kèy 1".encode("utf8")],
        [u"kéy1".encode("utf8"), u"kéy1.1".encode("utf8"),
         u"猫".encode("utf8")],
    ]
    buf = StringIO()
    buf.write("\n".join(";".join(row) for row in rows) + "\n")
    buf.seek(0)
    stored["source"] = NamedBlobFile(
        data=buf.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step.update()
    caught = None
    try:
        rendered = step.render()
    except UnicodeDecodeError as err:
        caught = err
    self.assertIsNone(caught)
    self.assertTrue(u"Column {0}".format(u"猫") in rendered)
def createFile(self, path, title=None):
    """Create a File object at *path* whose payload is the test pdf data."""
    obj = self._createObject('File', path, title=title)
    obj.file = NamedBlobFile(pdf_data,
                             filename=u'test.pdf',
                             contentType='application/pdf')
    obj.reindexObject()
def test_second_step_import_single_column(self):
    """Importing with only the identifier column mapped."""
    step = importform.ImportFormSecondStep(self.container,
                                           self.layer["request"])
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u";"
    rows = [
        ["", "key1", "Key 1"],
        ["", "key2", "Key 2"],
    ]
    buf = StringIO()
    buf.write("\n".join(";".join(row) for row in rows) + "\n")
    buf.seek(0)
    stored["source"] = NamedBlobFile(
        data=buf.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step._import({
        "column_0": None,
        "column_1": "identifier",
        "column_2": None,
        "decimal_import": False,
        "allow_empty": False,
    })
    self.assertEqual(2, len(self.container))
    identifiers = sorted([e.identifier for e in self.container.values()])
    titles = sorted([e.title for e in self.container.values()])
    self.assertEqual(["key1", "key2"], identifiers)
    self.assertEqual(["key1", "key2"], titles)
def fetch_data(self, field_obj, field_value, _obj, row_buffer):
    """Download the blob referenced by *field_value* into *_obj*.

    Only acts on NamedBlobFile/NamedBlobImage fields; returns True on
    success, False on download failure (appending a translated error to
    row_buffer[2]), and None for any other field type.
    """
    name = field_obj[0]
    field = field_obj[1]
    field_type = field.__class__.__name__
    field_title = field.title
    if field_type not in ('NamedBlobImage', 'NamedBlobFile'):
        return
    request = urllib2.Request(url=field_value)
    try:
        response = urllib2.urlopen(request)
        filename = safe_unicode(os.path.basename(response.url))
        payload = response.read()
        if field_type == 'NamedBlobFile':
            blob_file = NamedBlobFile(data=payload, filename=filename)
        elif field_type == 'NamedBlobImage':
            blob_file = NamedBlobImage(data=payload, filename=filename)
        setattr(_obj, name, blob_file)
        return True
    except Exception:
        # Best-effort: record the failure for this row instead of raising.
        field_title = translate(_(field_title), context=self.request)
        error_doc = translate(
            _("""The URL you have provided could not be reached."""),
            context=self.request)
        row_buffer[2] += u'{0}: {1}; '.format(error_doc, field_title)
        return False
def test_second_step_basic_delimiter(self):
    """Quoted fields containing the delimiter must not break the form."""
    step = importform.ImportFormSecondStep(self.container,
                                           self.layer["request"])
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u","
    rows = [
        ["", "key1", "Key 1"],
        ["key1", "key1.1", '"Key 1,1"'],
        ["key1.1", "key1.1.1", '"Key 1.1.1"'],
    ]
    buf = StringIO()
    buf.write("\n".join(",".join(row) for row in rows) + "\n")
    buf.seek(0)
    stored["source"] = NamedBlobFile(
        data=buf.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    caught = None
    try:
        step.update()
    except Exception as err:
        caught = err
    self.assertIsNone(caught)
def test_second_step_optional_columns_data_ok(self):
    """Optional column mappings must validate without errors."""
    request = self.layer["request"]
    request.form = {
        "form.buttons.import": u"Importer",
        "form.widgets.column_0": u"parent_identifier",
        "form.widgets.column_1": u"identifier",
        "form.widgets.column_2": u"title",
        "form.widgets.column_3": u"informations",
        "form.widgets.decimal_import": u"False",
        "form.widgets.allow_empty": u"False",
    }
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u";"
    rows = [
        ["", "key1", "Key 1", "infos"],
        ["key1", "key1.1", "Key 1.1", ""],
        ["key1.1", "key1.1.1", "Key 1.1.1", ""],
    ]
    buf = StringIO()
    buf.write("\n".join(";".join(row) for row in rows) + "\n")
    buf.seek(0)
    stored["source"] = NamedBlobFile(
        data=buf.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step = importform.ImportFormSecondStep(self.container, request)
    step.updateFieldsFromSchemata()
    step.updateWidgets()
    extracted, errors = step.extractData()
    self.assertEqual(0, len(errors))
def test_second_step_basic_encoding(self):
    """The form can be displayed even with special characters in the csv."""
    step = importform.ImportFormSecondStep(self.container,
                                           self.layer["request"])
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u";"
    rows = [
        ["null", "key1", "key1.1", "Key 1.1", "informations"],
        [
            "null",
            "",
            u"key1 éà$€".encode("utf8"),
            u"Key 1 éà$€".encode("utf8"),
            u"informations éà$€".encode("utf8"),
        ],
    ]
    buf = StringIO()
    buf.write("\n".join(";".join(row) for row in rows) + "\n")
    buf.seek(0)
    stored["source"] = NamedBlobFile(
        data=buf.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    caught = None
    try:
        step.update()
    except UnicodeDecodeError as err:
        caught = err
    self.assertIsNone(caught)
def _add_dashboard_pod_template_export_users_groups(self):
    """Add the export users and groups DashboardPODTemplate in the contacts
    directory.

    Idempotent: does nothing (and records the fact) when the template id
    already exists in the contacts folder.
    """
    logger.info("Adding 'Export users and groups' to 'contacts' directory...")
    pod_template_id = 'export-users-groups'
    contacts = self.portal.contacts
    if pod_template_id in contacts.objectIds():
        self._already_migrated()
        return
    profile_path = self.ps._getImportContext(self.profile_name)._profile_path
    ods_path = profile_path + '/../examples_fr/templates/users-groups-export.ods'
    # Context manager guarantees the file handle is closed even on error
    # (the original open/read/close sequence leaked it on failure).
    with open(ods_path, 'rb') as ods_file:
        ods_binary = ods_file.read()
    # NOTE(review): contentType is opendocument.text but the file is an .ods
    # spreadsheet — kept as-is to preserve behavior; confirm intended.
    data = {'title': 'Export utilisateurs et groupes',
            'pod_formats': ['ods', 'xls'],
            'dashboard_collections': contacts.get('orgs-searches').all_orgs.UID(),
            'odt_file': NamedBlobFile(
                data=ods_binary,
                contentType='application/vnd.oasis.opendocument.text',
                filename=u'users-groups-export.ods'),
            'use_objects': False,
            }
    pod_template = api.content.create(
        id=pod_template_id,
        type='DashboardPODTemplate',
        container=contacts,
        **data)
    pod_template.reindexObject()
    logger.info('Done.')
def test_second_step_required_columns_nok(self):
    """Validation fails when no column is mapped to 'identifier'."""
    request = self.layer["request"]
    request.form = {
        "form.buttons.import": u"Importer",
        "form.widgets.column_0": u"--NOVALUE--",
        "form.widgets.column_1": u"--NOVALUE--",
        "form.widgets.column_2": u"--NOVALUE--",
        "form.widgets.decimal_import": u"False",
        "form.widgets.allow_empty": u"False",
    }
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u";"
    stored["source"] = NamedBlobFile(
        data=self._csv.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step = importform.ImportFormSecondStep(self.container, request)
    step.updateFieldsFromSchemata()
    step.updateWidgets()
    extracted, errors = step.extractData()
    self.assertEqual(1, len(errors))
    self.assertEqual(
        "The following required columns are missing: identifier",
        translate(errors[0].error.message),
    )
def _getBlob(_type='audio', _format='mp3'):
    """Return a NamedBlobFile built from the bundled ``test.<_format>``
    fixture, read through a scratch copy on disk.

    ``_type`` is kept for backward compatibility; both branches of the
    original if/else produced the identical filename, so it no longer
    affects the result.
    """
    filename = u'test.%s' % _format
    newpath = mktemp()
    origpath = os.path.join(test_file_dir, filename)
    copyfile(origpath, newpath)
    try:
        # Context manager closes the handle even if NamedBlobFile raises
        # (the original leaked the handle on error).
        with open(newpath, 'rb') as fi:
            blob = NamedBlobFile(fi, filename=filename)
    finally:
        # Remove the scratch copy — the original left it behind.
        os.remove(newpath)
    return blob
def setUp(self):
    """Create one content object with text and an attached file."""
    self.portal = self.layer['portal']
    self.request = self.layer['request']
    setRoles(self.portal, TEST_USER_ID, TEST_USER_ROLES + ['Manager'])
    attachment = NamedBlobFile(filename=u'object_1_file.txt',
                               data='Object 1 Data')
    self.portal.invokeFactory(
        type_name=TEST_CONTENT_TYPE_ID,
        id='obj1',
        title=u'Object 1 Title',
        description=u'Description of obect number 1',
        text=u'Object 1 some footext.',
        file=attachment,
    )
    self.obj1 = self.portal['obj1']
def __exit__(self, exc_type, exc_value, traceback):
    """Write changed files back into their pod templates, then clean up."""
    # push the result back into the site pod templates
    for path in self.changed_files:
        template = self.templates_by_filename[path]["obj"]
        with open(path, "rb") as fh:
            template.odt_file = NamedBlobFile(
                data=fh.read(),
                contentType=mimetypes.guess_type(path)[0],
                filename=template.odt_file.filename,
            )
    # clean tmp file
    for path in self.templates_by_filename.keys():
        if os.path.isfile(path):
            os.remove(path)
def test_query_filter_by_friendly_type(self):
    """Search results can be filtered by friendly_type_name."""
    img_path = resource_filename('ploneintranet',
                                 'userprofile/tests/test_avatar.jpg')
    with open(img_path, 'rb') as fp:
        blob = NamedBlobFile(
            data=fp.read(),
            contentType='image/jpeg',
            filename=fp.name.decode('utf-8'),
        )
    self.image1 = self._create_content(
        type='Image',
        container=self.layer['portal'],
        title=u'A Test image',
        description=u'Info about this image',
        file=blob)
    transaction.commit()
    util = self._make_utility()
    # single-type filter: only the image
    response = util.query(u'Test', filters={'friendly_type_name': ['Image']})
    self.assertEqual(response.total_results, 1)
    self.assertEqual(next(iter(response)).title, self.image1.title)
    # pages only
    response = util.query(u'Test', filters={'friendly_type_name': ['Page']})
    self.assertEqual(response.total_results, 3)
    self.assertEqual(set([x.friendly_type_name for x in response]), {'Page'})
    # both types combined
    response = util.query(
        u'Test', filters={'friendly_type_name': ['Image', 'Page']})
    self.assertEqual(response.total_results, 4)
    self.assertEqual(
        set([x.friendly_type_name for x in response]),
        {'Page', 'Image'},
    )
def add_failing_template(self):
    """Create the ``failing_template`` ConfigurablePODTemplate in the
    podtemplates folder from the bundled failing_template.odt fixture."""
    current_path = os.path.dirname(__file__)
    template_path = os.path.join(current_path, "failing_template.odt")
    # Binary mode: .odt files are zip archives and must not be read as text
    # (original used mode "r"); `with` also closes the leaked handle.
    with open(template_path, "rb") as odt:
        failing_template_data = odt.read()
    self.failing_template = api.content.create(
        type="ConfigurablePODTemplate",
        id="failing_template",
        title=_(u"Failing template"),
        odt_file=NamedBlobFile(
            data=failing_template_data,
            contentType="application/vnd.oasis.opendocument.text",
            filename=u"modele_general.odt",
        ),
        pod_formats=["odt"],
        container=self.portal.podtemplates,
        exclude_from_nav=True,
    )
    self.failing_template.reindexObject()
def setUp(self):
    """Create a document and a pdf file to search against."""
    super(TestFriendlyName, self).setUp()
    self.portal = api.portal.get()
    self.catalog = api.portal.get_tool(name='portal_catalog')
    self.doc1 = self._create_content(
        type='Document',
        title='Test Doc',
        container=self.portal,
    )
    pdf = NamedBlobFile(
        data='blah blah',
        filename=u'test-file.pdf',
        contentType='application/pdf',
    )
    self.file1 = self._create_content(
        type='File',
        title='Test File',
        container=self.portal,
        file=pdf)
    transaction.commit()
def pop_temporary_attachment(workspace, file_field, token):
    """
    Replace a temporary attachment on the workspace with the uploaded data
    """
    storage = IAttachmentStorage(workspace)
    normalizer = getUtility(IURLNormalizer)
    temp_id = normalizer.normalize(u'{0}-{1}'.format(
        token, safe_unicode(file_field.filename)))
    if temp_id not in storage.keys():
        return None
    attachment = aq_base(storage.get(temp_id))
    attachment.id = file_field.filename
    attachment.file = NamedBlobFile(
        data=file_field.read(),
        filename=file_field.filename.decode('utf-8'),
    )
    storage.remove(temp_id)
    return attachment
def test_second_step_import_encoding(self):
    """Importing csv data whose header and rows contain non-ascii chars."""
    step = importform.ImportFormSecondStep(self.container,
                                           self.layer["request"])
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = True
    stored["separator"] = u";"
    rows = [
        [u"猫".encode("utf8"), u"èè".encode("utf8"), u"ùù".encode("utf8")],
        ["", u"kéy1".encode("utf8"), u"Kèy 1".encode("utf8")],
        [u"kéy1".encode("utf8"), u"kéy1.1".encode("utf8"),
         u"猫".encode("utf8")],
    ]
    buf = StringIO()
    buf.write("\n".join(";".join(row) for row in rows) + "\n")
    buf.seek(0)
    stored["source"] = NamedBlobFile(
        data=buf.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step._import({
        "column_0": "parent_identifier",
        "column_1": "identifier",
        "column_2": "title",
        "decimal_import": False,
        "allow_empty": False,
    })
    self.assertEqual(1, len(self.container))
    self.assertEqual([u"kéy1"],
                     [e.identifier for e in self.container.values()])
    key1 = self.container.get_by("identifier", u"kéy1")
    self.assertEqual(1, len(key1))
    self.assertEqual([u"kéy1.1"], [e.identifier for e in key1.values()])
    key1_1 = key1.get_by("identifier", u"kéy1.1")
    self.assertEqual(u"猫", key1_1.title)
def _delete_attachments(self, attachments):
    """Strip the given attachments from the stored message and notify."""
    if not attachments:
        return
    names = []
    positions = []
    for attachment in attachments:
        names.append(
            attachment.get('filename', '[no filename]').decode('utf-8'))
        positions.append(attachment['position'])
    # Flag the `message` attribute as having changed
    desc = Attributes(IAttachmentsDeletedEvent, "message")
    notify(AttachmentsDeleted(self, names, desc))
    # set the new message file
    stripped = remove_attachments(self.msg, positions)
    self.message = NamedBlobFile(data=stripped.as_string(),
                                 contentType=self.message.contentType,
                                 filename=self.message.filename)
def test_second_step_import_basic(self):
    """Importing csv data builds the expected three-level hierarchy."""
    step = importform.ImportFormSecondStep(self.container,
                                           self.layer["request"])
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u";"
    stored["source"] = NamedBlobFile(
        data=self._csv.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step._import({
        "column_0": "parent_identifier",
        "column_1": "identifier",
        "column_2": "title",
        "decimal_import": False,
        "allow_empty": False,
    })

    def children(node):
        # Sorted identifiers of a node's direct children.
        return sorted([e.identifier for e in node.values()])

    self.assertEqual(2, len(self.container))
    self.assertEqual(["key1", "key2"], children(self.container))
    key1 = self.container.get_by("identifier", "key1")
    self.assertEqual(3, len(key1))
    self.assertEqual(["key1.1", "key1.2", "key1.3"], children(key1))
    key1_1 = key1.get_by("identifier", "key1.1")
    self.assertEqual(1, len(key1_1))
    self.assertEqual(["key1.1.1"], children(key1_1))
    key2 = self.container.get_by("identifier", "key2")
    self.assertEqual(1, len(key2))
    self.assertEqual(["key2.1"], children(key2))
def create_attachment(filename, data):
    """ Set up a contextless dexterity file to hold the attachment data """
    if not isinstance(filename, unicode):
        filename = filename.decode('utf-8')
    namedfile = NamedBlobFile(data=data, filename=filename)
    is_image = namedfile.contentType.startswith('image')
    fti = queryUtility(IDexterityFTI, name='Image' if is_image else 'File')
    if is_image:
        thefile = createObject(fti.factory, id=filename, image=namedfile)
    else:
        thefile = createObject(fti.factory, id=filename, file=namedfile)
    # assign a uuid, needed for c.documentviewer
    generator = queryUtility(IUUIDGenerator)
    setattr(thefile, ATTRIBUTE_NAME, generator())
    return thefile
def test_file_content_matches(self):
    """Full-text search finds text inside an uploaded PDF."""
    path = resource_filename('ploneintranet.search.tests',
                             'fixtures/lorum-ipsum.pdf')
    with open(path, 'rb') as fp:
        blob = NamedBlobFile(
            data=fp.read(),
            contentType='application/pdf',
            filename=fp.name.decode('utf-8'))
    self._create_content(
        type='File',
        container=self.layer['portal'],
        title=u'Test File 1',
        description=(u'This is a test file. '),
        safe_id=False,
        file=blob)
    transaction.commit()
    util = self._make_utility()
    results = list(util.query(u'Maecenas urna elit'))
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].title, u'Test File 1')
def test_second_step_columns_data_format_nok(self):
    """Decimal import must reject values that are not decimal codes."""
    request = self.layer["request"]
    request.form = {
        "form.buttons.import": u"Importer",
        "form.widgets.column_0": u"identifier",
        "form.widgets.column_1": u"title",
        "form.widgets.decimal_import": u"selected",
        "form.widgets.allow_empty": u"False",
    }
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u";"
    rows = [
        ["-1", "key1"],
        [".10", "key2"],
        ["-1.1", "key3"],
        ["-1 11", "Key4"],
    ]
    buf = StringIO()
    buf.write("\n".join(";".join(row) for row in rows) + "\n")
    buf.seek(0)
    stored["source"] = NamedBlobFile(
        data=buf.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step = importform.ImportFormSecondStep(self.container, request)
    step.updateFieldsFromSchemata()
    step.updateWidgets()
    extracted, errors = step.extractData()
    self.assertEqual(1, len(errors))
    self.assertEqual(
        "Bad format values: Line 4, col 1: '-1 11'",
        translate(errors[0].error.message),
    )
def consumeMessage(message, event):
    """Consume message: render the minutes to PDF and store it."""
    uuid = message.header_frame.correlation_id
    minutes = uuidToObject(uuid)
    site = getUtility(ISiteRoot)
    membership = getToolByName(site, "portal_membership")
    creator = membership.getMemberById(minutes.creators[0])
    modified = minutes.modified()
    kwargs = {
        "date": "%s.%s.%s" % (modified.day(), modified.month(),
                              modified.year()),
        "title": minutes.title,
        "author": creator.getProperty("fullname"),
        "email": creator.getProperty("email"),
        "body": minutes.body.output,
        "site": site.Title()
    }
    # Read receipt template from disk and render it into HTML
    template = PageTemplateFile(
        os.path.join(os.path.dirname(__file__), "sfs2487.pt"))
    html = template(**kwargs)
    # Create a PDF from HTML
    pdf_buffer = StringIO()
    pisa.CreatePDF(html, pdf_buffer)
    pdf_buffer.seek(0)
    # Save the PDF
    blob = NamedBlobFile(pdf_buffer.read(), filename=u"minutes.pdf")
    IMinutes["deliverable"].bind(minutes).set(minutes, blob)
    message.ack()
def test_first_step_set_data(self):
    """_set_data must persist form values into container annotations."""
    step = importform.ImportFormFirstStep(self.container,
                                          self.layer["request"])
    values = {
        "source": NamedBlobFile(
            data=self._csv.read(),
            contentType=u"text/csv",
            filename=u"test.csv",
        ),
        "separator": u";",
        "has_header": False,
    }
    step._set_data(values)
    container_annotations = IAnnotations(self.container)
    self.assertTrue(importform.ANNOTATION_KEY in container_annotations)
    stored = container_annotations[importform.ANNOTATION_KEY]
    for key in ("separator", "source", "has_header"):
        self.assertEqual(values[key], stored[key])
def test_second_step_import_basic(self):
    """Importing csv data into an initially empty folders container."""
    step = importform.ImportFormSecondStep(self.folders,
                                           self.layer["request"])
    step.data = []
    folders_annotations = IAnnotations(self.folders)
    stored = folders_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["separator"] = u";"
    stored["has_header"] = False
    stored["source"] = NamedBlobFile(
        data=self._csv.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    mapping = {
        "treating_groups": None,
        "column_1": "classification_categories",
        "column_3": "parent_identifier",
        "column_4": "internal_reference_no",
        "column_5": "title",
    }
    self.assertEqual(0, len(self.folders))
    step.update()
    step._import(mapping)
def test_second_step_required_columns_data_ok(self):
    """Required column mappings must validate without errors."""
    request = self.layer["request"]
    request.form = {
        "form.buttons.import": u"Importer",
        "form.widgets.column_0": u"parent_identifier",
        "form.widgets.column_1": u"identifier",
        "form.widgets.column_2": u"title",
        "form.widgets.decimal_import": u"False",
    }
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u";"
    stored["source"] = NamedBlobFile(
        data=self._csv.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step = importform.ImportFormSecondStep(self.container, request)
    step.updateFieldsFromSchemata()
    step.updateWidgets()
    extracted, errors = step.extractData()
    self.assertEqual(0, len(errors))
def _convertFormat(context):
    """Convert ``context.video_file`` into the configured video formats and
    grab a thumbnail frame.

    Works on a temporary on-disk copy of the blob; each conversion is stored
    on ``context`` under ``video_file_<ext>`` (or the source is reused when
    it already has the target format and conversion is not forced), and a
    screen grab is stored in ``context.image``.
    """
    # reset these...
    context.video_file_ogv = None
    context.video_file_webm = None
    video = context.video_file
    context.video_converted = True
    try:
        opened = openBlob(video._blob)
        bfilepath = opened.name
        opened.close()
    except IOError:
        logger.warn('error opening blob file')
        return
    tmpdir = mkdtemp()
    try:
        tmpfilepath = os.path.join(tmpdir, video.filename)
        copyfile(bfilepath, tmpfilepath)
        try:
            metadata = avprobe.info(tmpfilepath)
        except Exception:
            # was a bare except: which also swallowed SystemExit/KeyboardInterrupt
            logger.warn('not a valid video format')
            return
        context.metadata = metadata
        conversion_types = {
            'mp4': 'video_file'
        }
        portal = getToolByName(context, 'portal_url').getPortalObject()
        settings = GlobalSettings(portal)
        for type_ in settings.additional_video_formats:
            format = getFormat(type_)
            if format:
                conversion_types[format.extension] = 'video_file_%s' % (
                    format.extension
                )
        # sometimes force full video conversion
        force = settings.force
        for video_type, fieldname in conversion_types.items():
            if video_type == video.contentType.split('/')[-1] and not force:
                setattr(context, fieldname, video)
            else:
                output_filepath = os.path.join(tmpdir, 'output.' + video_type)
                try:
                    avconv.convert(tmpfilepath, output_filepath, video_type,
                                   context)
                except Exception:
                    logger.warn('error converting to %s' % video_type)
                    continue
                if os.path.exists(output_filepath):
                    # binary mode: the converted video must not be read as text
                    with open(output_filepath, 'rb') as fi:
                        namedblob = NamedBlobFile(
                            fi,
                            filename=switchFileExt(video.filename, video_type))
                    setattr(context, fieldname, namedblob)
        # try and grab one from video
        output_filepath = os.path.join(tmpdir, u'screengrab.png')
        try:
            avconv.grab_frame(tmpfilepath, output_filepath)
            if os.path.exists(output_filepath):
                with open(output_filepath, 'rb') as fi:
                    data = fi.read()
                context.image = NamedBlobImage(data,
                                               filename=u'screengrab.png')
        except Exception:
            logger.warn('error getting thumbnail from video')
    finally:
        # always remove the scratch directory (the original leaked it when
        # avprobe rejected the file)
        rmtree(tmpdir)
def _convertFormat(context):
    """Convert ``context.video_file`` into each configured additional format
    (quality-aware variant) and grab a thumbnail frame.

    Stores avprobe metadata and the recovered duration on ``context``; each
    conversion is stored under the configured format type attribute.
    """
    # reset these...
    context.video_file_webm = None
    video = context.video_file
    context.video_converted = True
    try:
        opened = openBlob(video._blob)
        bfilepath = opened.name
        opened.close()
    except IOError:
        logger.warn('error opening blob file')
        return
    tmpdir = mkdtemp()
    try:
        tmpfilepath = os.path.join(tmpdir, video.filename)
        copyfile(bfilepath, tmpfilepath)
        try:
            metadata = avprobe.info(tmpfilepath)
        except Exception:
            # was a bare except: which also swallowed SystemExit/KeyboardInterrupt
            logger.warn('NOT a valid video format')
            return
        context.metadata = metadata
        logger.info('Valid video format')
        try:
            duration = _get_duration(tmpfilepath)
        except Exception:
            logger.warn('cannot recover duration from file')
            return
        context.duration = duration
        conversion_types = {}
        portal = getToolByName(context, 'portal_url').getPortalObject()
        settings = GlobalSettings(portal)
        for type_ in settings.additional_video_formats:
            format = getFormat(type_)
            if format:
                conversion_types[format.type_] = '%s' % (format.quality)
        for video_type, quality in conversion_types.items():
            vt = video_type.split('_')[0]
            if video_type == video.contentType.split('/')[-1]:
                setattr(context, vt, video)
            else:
                output_filepath = os.path.join(
                    tmpdir, 'output_' + video_type + '.' + vt)
                try:
                    avconv.convert(tmpfilepath, output_filepath, vt, quality,
                                   context)
                except Exception:
                    logger.warn('error converting to %s' % vt)
                    continue
                if os.path.exists(output_filepath):
                    # binary mode: the converted video must not be read as text
                    with open(output_filepath, 'rb') as fi:
                        namedblob = NamedBlobFile(
                            fi, filename=switchFileExt(video.filename, vt))
                    setattr(context, video_type, namedblob)
                    # commit each finished conversion so progress survives;
                    # NOTE(review): placement inferred — original indentation
                    # was lost, confirm commit granularity.
                    import transaction
                    transaction.commit()
        # try and grab one from video
        output_filepath = os.path.join(tmpdir, u'screengrab.png')
        try:
            avconv.grab_frame(tmpfilepath, output_filepath)
            if os.path.exists(output_filepath):
                with open(output_filepath, 'rb') as fi:
                    data = fi.read()
                context.image = NamedBlobImage(data,
                                               filename=u'screengrab.png')
        except Exception:
            logger.warn('error getting thumbnail from video')
        logger.warn('CONVERSIONS FINISHED')
    finally:
        # always remove the scratch directory (the original leaked it on the
        # early-return error paths)
        rmtree(tmpdir)
def test_second_step_import_decimal_basic(self):
    """Importing csv data with decimal codes builds the digit hierarchy."""
    step = importform.ImportFormSecondStep(self.container,
                                           self.layer["request"])
    container_annotations = IAnnotations(self.container)
    stored = container_annotations[importform.ANNOTATION_KEY] = PersistentDict()
    stored["has_header"] = False
    stored["separator"] = u";"
    rows = [
        ["100", "Key 1"],
        ["100.1", "Key 1.1"],
        ["100.2", "Key 1.2"],
        ["200", "Key 2"],
        ["200.1", "Key 2.1"],
        ["200.10", "Key 2.10"],
    ]
    buf = StringIO()
    buf.write("\n".join(";".join(row) for row in rows) + "\n")
    buf.seek(0)
    stored["source"] = NamedBlobFile(
        data=buf.read(),
        contentType=u"text/csv",
        filename=u"test.csv",
    )
    step._import({
        "column_0": "identifier",
        "column_1": "title",
        "decimal_import": True,
        "allow_empty": False,
    })

    def check(node, title, child_identifiers):
        # Assert a node's title and the sorted identifiers of its children.
        self.assertEqual(title, node.title)
        self.assertEqual(len(child_identifiers), len(node))
        self.assertEqual(
            child_identifiers,
            sorted([e.identifier for e in node.values()]),
        )

    self.assertEqual(2, len(self.container))
    self.assertEqual(
        ["1", "2"],
        sorted([e.identifier for e in self.container.values()]))
    code_1 = self.container.get_by("identifier", "1")
    check(code_1, "1", ["10"])
    code_10 = code_1.get_by("identifier", "10")
    check(code_10, "10", ["100"])
    code_100 = code_10.get_by("identifier", "100")
    check(code_100, "Key 1", ["100.1", "100.2"])
    self.assertEqual(
        ["Key 1.1", "Key 1.2"],
        sorted([e.title for e in code_100.values()]),
    )
    code_2 = self.container.get_by("identifier", "2")
    check(code_2, "2", ["20"])
    code_20 = code_2.get_by("identifier", "20")
    check(code_20, "20", ["200"])
    code_200 = code_20.get_by("identifier", "200")
    check(code_200, "Key 2", ["200.1"])
    code_2001 = code_200.get_by("identifier", "200.1")
    check(code_2001, "Key 2.1", ["200.10"])
    self.assertEqual(["Key 2.10"],
                     [e.title for e in code_2001.values()])