def _upload_request(self, req, handler):
    self.log.debug('Handling file upload for "%s"', req.authname)

    # Retrieve uploaded file
    upload_file = req.args['bsop_upload_file']

    # Retrieve filename, normalize, use to check a file was uploaded
    # Filename checks adapted from trac/attachment.py
    filename = getattr(upload_file, 'filename', '')
    filename = unicodedata.normalize('NFC', unicode(filename, 'utf-8'))
    filename = filename.replace('\\', '/').replace(':', '/')
    filename = posixpath.basename(filename)
    if not filename:
        raise TracError('No file uploaded')

    # Check size of uploaded file, accepting 0 to max_upload_size bytes
    file_data = upload_file.value  # Alternatively .file for file object
    file_size = len(file_data)
    if self.max_upload_size > 0 and file_size > self.max_upload_size:
        raise TracError('Uploaded file is too large, '
                        'maximum upload size: %s'
                        % pretty_size(self.max_upload_size))
    self.log.debug('Received file %s with %i bytes', filename, file_size)

    commit_msg = req.args.get('bsop_upload_commit')

    self.log.debug('Opening repository for file upload')
    reponame, repos, path = _get_repository(self.env, req)
    try:
        repos_path = repos.normalize_path('/'.join([path, filename]))
        self.log.debug('Writing file %s to %s in %s',
                       filename, repos_path, reponame)
        svn_writer = SubversionWriter(self.env, repos, req.authname)
        rev = svn_writer.put_content(repos_path, file_data, commit_msg)
        add_notice(req, _("Uploaded %s, creating revision %s.")
                        % (filename, rev))
    except Exception, e:
        self.log.exception("Failed when uploading file %s" % filename)
        add_warning(req, _("Failed to upload file: %s") % e)
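# A minimal, standalone sketch (not part of the plugin) of the filename
# normalization above, adapted from trac/attachment.py, shown in isolation.
# The helper name sanitize_filename and the sample values are illustrative only.
import posixpath
import unicodedata

def sanitize_filename(raw):
    # Normalize to NFC, convert Windows separators, keep only the basename
    name = unicodedata.normalize('NFC', unicode(raw, 'utf-8'))
    name = name.replace('\\', '/').replace(':', '/')
    return posixpath.basename(name)

# e.g. sanitize_filename('C:\\Users\\me\\report.pdf') -> u'report.pdf'
#      sanitize_filename('') -> u'' (rejected above as "No file uploaded")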
def test_add_changeset(self):
    sw = SubversionWriter(self.env, self.repos, 'kalle')
    new_rev = sw.put_content('/trunk/foo.txt', content='Foo Bar',
                             commit_msg='A comment')
    RepositoryManager(self.env).notify('changeset_added', '', [new_rev])

    # Node
    so = self._get_so()
    self.assertEquals('trac:source:trunk/foo.txt', so.doc_id)
    self.assertEquals('source', so.realm)
    self.assertEquals('trunk/foo.txt', so.id)
    self.assertEquals('trunk/foo.txt', so.title)
    self.assertEquals('kalle', so.author)
    self.assertEquals('Foo Bar', so.body.read())
    self.assertTrue('A comment' in so.comments)

    # Changeset
    so = self._get_so(-2)
    self.assertEquals('trac:changeset:%i' % new_rev, so.doc_id)
    self.assertEquals('changeset', so.realm)
    self.assertEquals('%i' % new_rev, so.id)
    self.assertTrue(so.title.startswith('[%i]: A comment' % new_rev))
    self.assertEquals('kalle', so.author)
    self.assertEquals('A comment', so.body)
def _do_execute_transformation(self, transformation, transformation_id=None,
                               store=True, return_bytes_handle=False,
                               changecwd=False, listall=False,
                               parameters=None):
    tempdir = tempfile.mkdtemp()
    if changecwd:
        os.chdir(tempdir)
    os.mkdir(os.path.join(tempdir, "svn"))
    write_simple_jndi_properties(self.env, tempdir)

    # execute transform
    transform = self._list_transformation_files(listall)[transformation]

    if parameters:
        for parameter in parameters:
            if parameter not in transform['parameters']:
                raise KeyError("%s is not a valid parameter" % parameter)
    else:
        parameters = {}
    parameters['DefineInternal.Project.ShortName'] = os.path.split(self.env.path)[1]

    scriptfilename = {'transformation': 'pan.sh',
                      'job': 'kitchen.sh'}[transform['type']]
    executable = os.path.join(
        resource_filename(__name__, 'pentaho-data-integration'), scriptfilename)
    args = [
        "/bin/sh",
        executable,
        "-file", transform['full_path'],
        "-level", "Detailed",
    ]
    for k, v in parameters.items():
        if "=" in k:
            raise ValueError("Unable to support = symbol in parameter key named %s" % k)
        args.append("-param:%s=%s" % (k.encode('utf-8'), v.encode('utf-8')))

    self.log.debug("Running %s with %s", executable, args)

    if transformation_id:
        # See https://d4.define.logica.com/ticket/4375#comment:7
        db = self.env.get_read_db()

        @self.env.with_transaction()
        def do_insert(db):
            cursor = db.cursor()
            self.env.log.debug("Updating running_transformations - "
                               "inserting new row for %s", transformation_id)
            cursor.execute("""INSERT INTO running_transformations
                                  (transformation_id, status, started)
                              VALUES (%s, %s, %s)""",
                           (transformation_id, "running",
                            to_utimestamp(datetime.now(utc))))

    # this bit of Python isn't so good :-( I'll just merge the stdout and
    # stderr streams...
    # http://stackoverflow.com/questions/6809590/merging-a-python-scripts-subprocess-stdout-and-stderr-while-keeping-them-disti
    # http://codereview.stackexchange.com/questions/6567/how-to-redirect-a-subprocesses-output-stdout-and-stderr-to-logging-module
    script = subprocess.Popen(
        args,
        executable="/bin/sh",
        cwd=os.path.join(tempdir, "svn"),
        env={'PENTAHO_DI_JAVA_OPTIONS':
                 "-Dfile.encoding=utf8 -Dnet.sf.ehcache.skipUpdateCheck=true "
                 "-Djava.awt.headless=true -Dorg.osjava.sj.root=%s"
                 % os.path.join(tempdir, "simple-jndi"),
             'LANG': "en_US.UTF-8",
             'KETTLE_HOME': os.path.join(tempdir, "kettle")},
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    while script.poll() is None:
        # this can go to the database later (natively, by pdi)
        # keeping here, as info level for now.
self.log.info("Script output: %s", script.stdout.readline()) self.log.info("Script returned %s", script.returncode) if script.returncode: # transform has failed to complete - update running_transformations table if transformation_id: @self.env.with_transaction() def do_insert(db): cursor = db.cursor() self.env.log.debug("Updating running_transformations - %s failed to complete", transformation_id) cursor.execute("""UPDATE running_transformations SET transformation_id=%s, status=%s, ended=%s WHERE transformation_id=%s""", (transformation_id, "error", to_utimestamp(datetime.now(utc)), transformation_id)) raise RuntimeError("Business Intelligence subprocess script failed") # We know assume that the transform has finished successfully # so we update the running_transformations table to represent this if transformation_id: @self.env.with_transaction() def do_insert(db): cursor = db.cursor() self.env.log.debug("Updating running_transformations - %s completed", transformation_id) cursor.execute("""UPDATE running_transformations SET transformation_id=%s, status=%s, ended=%s WHERE transformation_id=%s""", (transformation_id, "success", to_utimestamp(datetime.now(utc)), transformation_id)) if store: reponame, repos, path = RepositoryManager(self.env).get_repository_by_path('') svn_writer = SubversionWriter(self.env, repos, "reporting") revs = [] for filename_encoded in os.listdir(os.path.join(tempdir, "svn")): filename = filename_encoded.decode('utf-8') # we wrote the filename out ourselves self.log.info("Uploading %s", filename) writer = SubversionWriter(self.env, repos, "reporting") file_data = open(os.path.join(os.path.join(tempdir, "svn"), filename)).read() for path in ["define-reports", "define-reports/%s" % transformation]: try: repos.sync() repos.get_node(path) except NoSuchNode, e: self.log.warning("Creating %s for the first time", path) writer.make_dir(path, "Generated by reporting framework") repos.sync() properties = {'define:generated-by-transformation': transformation} for k, v in parameters.items(): if not k.startswith("DefineInternal"): properties[u"define:parameter:%s" % k] = v rev = writer.put_content([("define-reports/%s/%s" % (transformation, filename), file_data)], "Generated by reporting framework transformation", properties=properties, clearproperties=True) revs.append(rev)