def test_load_job(self):
    """Loading a job file exposes its module-level bindings as attributes."""
    expected = 'world dominance'
    with NamedTemporaryFile() as tf:
        write(tf, 'job = "%s"' % expected)
        write(tf, os.linesep)
        tf.flush()
        loaded = util.load_job(tf.name)
        self.assertEqual(loaded.job, expected)
def test_read(self):
    """A Read filter must yield the file's contents verbatim."""
    expected = "'tis a test string"
    with NamedTemporaryFile() as tmp:
        write(tmp, expected)
        tmp.flush()
        read_filter = Read('utf8')
        read_filter.run(paths=[tmp.name])
        self.assertListEqual(read_filter.out['data'], [expected])
def test_sha1sum(self):
    """util.sha1sum must agree with a hashlib.sha1 digest of the same text."""
    text = 'sha1 checksum test'
    hasher = hashlib.sha1()
    with NamedTemporaryFile(delete=False) as tmp:
        write(tmp, text)
        tmp.flush()
        expected = update_hasher(hasher, text).hexdigest()
        self.assertEqual(util.sha1sum(tmp.name), expected)
        # file outlives the with-block (delete=False); clean up in tearDown
        self.paths_to_delete.append(tmp.name)
def make_bootstrap_client():
    """
    Return an open :class:`NamedTemporaryFile` containing the source of the
    bootstrap client.

    The open file object (not its name) is returned so the file stays
    alive: a :class:`NamedTemporaryFile` is removed from disk as soon as
    it is closed or garbage collected. Use ``.name`` on the result to get
    the path.

    (The previous docstring wrongly claimed a filename was returned.)

    :returns: open temporary file holding the bootstrap client source
    """
    tf = NamedTemporaryFile()
    source = inspect.getsource(bootstrap)
    write(tf, source)
    tf.flush()
    return tf
def write_to_tempfiles(data):
    """
    Write each datum to its own temporary file and rewind it.

    The files are intentionally left open here; they are closed in
    tearDown.

    :param data: iterable of payloads to write, one per file
    :returns: list of open temporary files, each rewound to offset 0
    """
    files = []
    for payload in data:
        tmp = NamedTemporaryFile(prefix='penchy')
        write(tmp, payload)
        tmp.seek(0)
        files.append(tmp)
    return files
def test_load_config(self):
    """load_config registers a 'config' module and raises IOError for missing files."""
    value = 5
    self.assertNotIn('config', sys.modules)
    with NamedTemporaryFile() as tf:
        # remember the name so we can probe it after the file is gone;
        # this assures the later load hits a nonexistent path
        fname = tf.name
        write(tf, 'foo = %s' % value)
        write(tf, os.linesep)
        tf.flush()
        config = util.load_config(tf.name)
        self.assertEqual(config.foo, value)
        self.assertIn('config', sys.modules)
    with self.assertRaises(IOError):
        util.load_config(fname)
def setUp(self):
    """Create a temporary Maven settings.xml holding one server entry."""
    settings_xml = """
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
                              http://maven.apache.org/xsd/settings-1.0.0.xsd">
  <servers>
    <server>
      <id>server001</id>
      <username>my_login</username>
      <password>my_password</password>
    </server>
  </servers>
</settings>
"""
    self.tf = NamedTemporaryFile()
    write(self.tf, settings_xml)
    self.tf.flush()
def test_copy(self):
    """BackupFile copies the source file and leaves the original untouched."""
    content = "'tis a test string"
    with NamedTemporaryFile(delete=False) as tmp:
        source_path = tmp.name
        write(tmp, content)
    self.assertTrue(os.path.exists(source_path))
    backup_path = '/tmp/penchy-backup-test'
    backup = BackupFile(backup_path)
    backup.run(filename=source_path, **{':environment:' : {}})
    # did backup?
    with open(backup_path) as fd:
        self.assertEqual(fd.read(), content)
    # did not modify backuped file?
    with open(source_path) as fd:
        self.assertEqual(fd.read(), content)
    os.remove(source_path)
    os.remove(backup_path)
def test_relative_copy(self):
    """
    BackupFile with a relative target resolves it against the current
    composition's node path.
    """
    content = "'tis a test string"
    comp = make_system_composition()
    comp.node_setting.path = '/tmp'
    with NamedTemporaryFile(delete=False) as tmp:
        source_path = tmp.name
        write(tmp, content)
    self.assertTrue(os.path.exists(source_path))
    backup_file = 'penchy-backup-test'
    backup_path = os.path.join(comp.node_setting.path, backup_file)
    b = BackupFile(backup_file)
    b.run(filename=source_path,
          **{':environment:' : {'current_composition' : comp}})
    # did backup?
    with open(backup_path) as fd:
        self.assertEqual(fd.read(), content)
    # did not modify backuped file?
    with open(source_path) as fd:
        self.assertEqual(fd.read(), content)
    os.remove(source_path)
    # FIX: the old cleanup re-joined node_setting.path with the already
    # absolute backup_path; os.path.join discards the first argument when
    # the second is absolute, so it only worked by accident.
    os.remove(backup_path)
def visualize(self, format='png', dot='dot'):
    """
    Visualize job via graphviz.

    .. note::

        The ``dot`` executable has to be in the path or passed as
        ``dot`` parameter.

    :param format: output format (has to be supported by dot)
    :type format: str
    :param dot: path to dot executable
    :type dot: str
    :returns: the path to the generated file
    :rtype: str
    """
    def edges_of_flow(flow, flow_id=0):
        """
        :param flow: flow to visualize
        :type flow: list of :class:`~penchy.jobs.dependency.Edge`
        :param flow_id: id to differentiate between multiple flows with
                        same elements
        :type flow_id: int
        :returns: edges in graphviz format
        :rtype: list of str
        """
        # One dot snippet per edge: declare both endpoint nodes (names are
        # suffixed with flow_id so identical elements in different flows
        # stay distinct) and the edge itself, labeled with the edge's
        # attribute mapping ("src -> dst" pairs, collapsed to one name
        # when source and sink attribute are equal).
        return ["""
                node{source_id}{id} [label = "{source}"];
                node{sink_id}{id} [label = "{sink}"];
                node{source_id}{id} -> node{sink_id}{id} [label = "{decoration}"];
                """
                .format(source=e.source, source_id=id(e.source),
                        sink=e.sink, sink_id=id(e.sink),
                        id=flow_id,
                        decoration=', '.join('{0} -> {1}'
                                             .format(m[0], m[1])
                                             if m[0] != m[1] else m[0]
                                             for m in e.map_)
                        if e.map_ else '')
                for e in flow]

    # One subgraph (cluster) per client composition, labeled with its name.
    clients = ["""
               subgraph cluster_client%d {
                   label = "%s";
                   %s
               }
               """ % (i, c.name, '\n'.join(edges_of_flow(c.flow, i)))
               for i, c in enumerate(self.compositions)]

    server_edges = edges_of_flow(self.server_flow)
    # Assemble the full digraph: server flow in a blue cluster, all client
    # clusters nested inside a "Clients" cluster.
    s = """
    digraph G {
        rankdir = LR

        subgraph cluster_server {
            color = blue;
            %s
            label = "Server";
        }

        subgraph cluster_client {
            color = black;
            label = "Clients";
            %s
        }
    }
    """ % ('\n'.join(server_edges), '\n'.join(clients))
    # Dump the dot source to a temp file (closed, hence flushed, before dot
    # runs), render it with "-O" (output next to input), then drop the source.
    with NamedTemporaryFile(delete=False) as f:
        fname = f.name
        write(f, s)
    subprocess.call([dot, '-T', format, '-O', fname])
    os.remove(fname)
    # "-O" appends the format as extension to the input filename.
    return '{0}.{1}'.format(fname, format)