Code Example #1
def dispatch(prog,
             args,
             dirpath=None,
             infile=None,
             outfile=None,
             errfile=None):
    '''Dispatch program prog with arguments args.

    Optional parameters:
    dirpath: Path to working directory.
    infile:  File with inputs to the program. Use subprocess.PIPE for input stream.
    outfile: File for program output. Use subprocess.PIPE for output stream.
    errfile: File for program error output. Use subprocess.PIPE for the error stream.

    Returns the Popen() process.

    Note that if all outputs are piped, the system can cause the program to crash
    since the pipes might not be properly closed between calls.
    '''

    if infile is None:
        infile = tmp()
    if outfile is None:
        outfile = tmp()
    if errfile is None:
        errfile = tmp()

    if args == []:
        progargs = prog
    else:
        progargs = [prog]

        for a in args:
            progargs.append(a)

    if dirpath is None:
        dirpath = "./"

    dbg.debug(
        "<<<< Dispatching program: " + str(progargs) + " from " + dirpath,
        dbg.verb_modes["chatty"])

    process = Popen(progargs,
                    cwd=dirpath,
                    close_fds=True,
                    stdout=outfile,
                    stderr=errfile,
                    stdin=infile,
                    universal_newlines=True)

    dbg.debug(" with pid=" + str(process.pid) + " >>>>\n",
              dbg.verb_modes["chatty"])
    dbg.flush()
    return process
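A minimal usage sketch for dispatch(), stated as an assumption: in this module tmp is presumably tempfile.TemporaryFile and dbg is the project's own debug/logging helper, neither of which is shown above.

from subprocess import PIPE

# Hypothetical call: run "ls -l" in /tmp, capture stdout through a pipe while
# stdin/stderr fall back to the tmp() temporary files created by dispatch().
proc = dispatch("ls", ["-l"], dirpath="/tmp", outfile=PIPE)
stdout, _ = proc.communicate()  # wait for exit and collect the piped output
print(stdout)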
Code Example #2
File: utils.py  Project: anentropic/ebaysuds
def clear_cache():
    """
    Clear the suds object / wsdl cache

    http://pythonaut.blogspot.co.uk/2011/10/how-to-clear-suds-cache.html
    """
    shutil.rmtree(os.path.join(tmp(), 'suds'), True)
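Here tmp is most likely tempfile.gettempdir, so the call removes the suds cache directory under the system temp directory. A standalone equivalent, under that assumption:

import os
import shutil
from tempfile import gettempdir as tmp

# Delete <tempdir>/suds; ignore_errors=True (the positional True above)
# means a missing cache directory is not treated as an error.
shutil.rmtree(os.path.join(tmp(), 'suds'), ignore_errors=True)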
Code Example #3
def record(anim, k):
    if not hasattr(anim, '_encoded_video'):
        with tmp(suffix='.mp4') as f:
            newname = "/Users/fuentes/workspace/" + k + '_' + f.name.split("/")[-1]
            anim.save(newname,
                      fps=20,
                      extra_args=['-vcodec', 'libx264', '-pix_fmt', 'yuv420p'])
    return None
Code Example #4
 def store_file(self, context, column, root_key): 
     content = getattr(context, column.key, None)
     if content:
         bfile = tmp(delete=False)
         bfile.write(content)
         bfile.flush()
         bfile.close()
         file_name = os.path.basename(bfile.name)
         if not self._saved_files.has_key(root_key):
             self._saved_files[root_key] = []
         self._saved_files[root_key].append(bfile.name)
         return file_name
Code Example #5
File: serialize.py  Project: mohalfaki/bungeni-portal
 def store_file(self, context, column, root_key):
     content = getattr(context, column.key, None)
     if content:
         bfile = tmp(delete=False)
         bfile.write(content)
         bfile.flush()
         bfile.close()
         file_name = os.path.basename(bfile.name)
         if not self._saved_files.has_key(root_key):
             self._saved_files[root_key] = []
         self._saved_files[root_key].append(bfile.name)
         return file_name
Code Example #6
def tar_to_zip(*file_names: Path, zippath="."):
    zip_path = Path(zippath) / "output.zip"

    # Open the zip once: re-opening it with mode "w" inside the loop would
    # overwrite the archive for every tar file after the first.
    with ZipFile(zip_path, "w") as zip_file:
        for file_name in file_names:
            try:
                with tmp() as tmp_dir:
                    tarfile.open(file_name).extractall(tmp_dir)

                    for compress_path in Path(tmp_dir).glob("**/*.*"):
                        zip_file.write(compress_path, compress_path.name)
            except tarfile.ReadError:
                print(f"Read error for {file_name}")
Code Example #7
File: cache.py  Project: tic-ull/defensatfc-proto
 def __init__(self, location=None, **duration):
     """
     @param location: The directory for the cached files.
     @type location: str
     @param duration: The cached file duration which defines how
         long the file will be cached.  A duration=0 means forever.
         The duration may be: (months|weeks|days|hours|minutes|seconds).
     @type duration: {unit:value}
     """
     if location is None:
         location = os.path.join(tmp(), 'suds')
     self.location = location
     self.duration = (None, 0)
     self.setduration(**duration)
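A hedged construction sketch for this suds-style file cache; the class name FileCache is an assumption (it is not shown in the snippet), and the keyword arguments follow the units listed in the docstring.

# Hypothetical: cache under the default <tempdir>/suds location, expiring
# entries after one day; passing no duration keyword at all means "forever".
cache = FileCache(days=1)

# Or with an explicit directory and a shorter lifetime:
cache = FileCache(location="/var/cache/myapp-suds", hours=12)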
Code Example #8
 def __init__(self, location=None, **duration):
     """
     @param location: The directory for the cached files.
     @type location: str
     @param duration: The cached file duration which defines how
         long the file will be cached.  A duration=0 means forever.
         The duration may be: (months|weeks|days|hours|minutes|seconds).
     @type duration: {unit:value}
     """
     if location is None:
         location = os.path.join(tmp(), 'suds')
     self.location = location
     self.duration = (None, 0)
     self.setduration(**duration)
Code Example #9
File: cpubind.py  Project: NicolasDenoyelle/starbind
    def __init__(self, resource_list, num_procs=None, env=None, knobs=None):
        # Avoid mutable default arguments: knobs is appended to below, so a
        # shared default list would grow across instances.
        env = {} if env is None else env
        knobs = [] if knobs is None else knobs

        # Write rankfile (tmp here is presumably tempfile.mkstemp: it returns
        # an open file descriptor and a path)
        f, self.rankfile = tmp(dir=os.getcwd(), text=True)
        file = os.fdopen(f, 'w')
        for l in OpenMPI._rankfile_(resource_list):
            file.write(l + '\n')
        file.close()

        # Set knobs
        knobs.append(OpenMPI._bindto_knob_(resource_list))
        launcher = 'mpirun {} -rf {}'.format(' '.join(knobs), self.rankfile)

        MPI.__init__(self, resource_list, num_procs, env, launcher=launcher)
Code Example #10
File: serialize.py  Project: bungeni-org/bungeni.main
 def store_file(self, context, parent, key, root_key):
     """Store file.
     Skipped if file is serialized elsewhere (other serializable context).
     This ensures attachments are not serialized multiple times in document
     hierarchies.
     """
     if not(parent and interfaces.ISerializable.providedBy(context)):
         content = getattr(context, key, None)
         if content:
             bfile = tmp(delete=False)
             bfile.write(content)
             bfile.flush()
             bfile.close()
             file_name = os.path.basename(bfile.name)
             if not self._saved_files.has_key(root_key):
                 self._saved_files[root_key] = []
             self._saved_files[root_key].append(bfile.name)
             return file_name
Code Example #11
 def __init__(self, system=None, filename=None):
     from simtk.openmm.app.pdbfile import PDBFile
     from simtk.openmm import app
     from BigDFT.IO import read_pdb, write_pdb
     from tempfile import NamedTemporaryFile as tmp
     if filename is not None:
         pdb = PDBFile(open(filename))
         sys = read_pdb(open(filename))
     elif system is not None:
         sys = system
         ofile = tmp('w+')
         write_pdb(system=system, ofile=ofile)
         ofilename = ofile.name
         pdb = PDBFile(open(ofilename))
         ofile.close()
     System.__init__(self, **sys.dict())
     self.pdb = pdb
     self.modeller = app.Modeller(pdb.topology, pdb.positions)
Code Example #12
 def run(self):
     """Safely remove stale observations and append to target file."""
     if not os.path.exists(self.output_file):
         open(self.output_file, "w").close()
     while True:
         with open(self.output_file) as old, tmp(delete=False) as new:
             reader = csv.reader(old)
             writer = csv.writer(new)
             for line in reader:
                 ts = datetime.strptime(line[0], "%Y-%m-%d %H:%M:%S")
                 now = datetime.now()
                 diff = now - ts
                 if diff.total_seconds() < 60 * 60 * 2:  # .seconds alone ignores whole days
                     writer.writerow(line)
             try:
                 current_time = CLOCK.get_value()
                 observation = self.sensor.get_value()
                 writer.writerow((current_time, observation))
             except IOError: pass
         os.chmod(new.name, 0644)
         shutil.move(new.name, self.output_file)
         time.sleep(self.period)
Code Example #13
 def run(self):
     """Safely remove stale observations and append to target file."""
     if not os.path.exists(self.output_file):
         open(self.output_file, "w").close()
     while True:
         with open(self.output_file) as old, tmp(delete=False) as new:
             reader = csv.reader(old)
             writer = csv.writer(new)
             for line in reader:
                 ts = datetime.strptime(line[0], "%Y-%m-%d %H:%M:%S")
                 now = datetime.now()
                 diff = now - ts
                 if diff.total_seconds() < 60 * 60 * 2:  # .seconds alone ignores whole days
                     writer.writerow(line)
             try:
                 current_time = CLOCK.get_value()
                 observation = self.sensor.get_value()
                 writer.writerow((current_time, observation))
             except IOError:
                 pass
         os.chmod(new.name, 0644)
         shutil.move(new.name, self.output_file)
         time.sleep(self.period)
Code Example #14
File: ra8.py  Project: 5l1v3r1/stonix
#!/usr/bin/env python3

from subprocess import Popen, PIPE
from tempfile import SpooledTemporaryFile as tmp

f = tmp(mode='w+')  # text mode: under Python 3 the default 'w+b' would reject the str write below

f.write('\nP@SSW0rd\n')

f.seek(0)

pipe = Popen('/usr/bin/su dcsadmin -c "/usr/bin/id"',
             stdout=PIPE,
             stdin=f,
             bufsize=1,
             shell=True)
pipe.stdout.read()
pipe.stdout.read()
pipe.stdout.read()
output = pipe.stdout.readlines()
f.close()

print(output)
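For reference, a small sketch of the SpooledTemporaryFile behaviour the script relies on; the 1 KB threshold is an arbitrary illustration, not something the script sets.

from tempfile import SpooledTemporaryFile

buf = SpooledTemporaryFile(max_size=1024, mode="w+")  # kept in memory until 1 KB
buf.write("\nP@SSW0rd\n")
buf.seek(0)        # rewind so a child process can read it as its stdin
print(buf.read())  # -> "\nP@SSW0rd\n"
buf.close()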
Code Example #15
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files. 
    """

    #create a fake interaction to ensure items requiring a participation
    #are serialized 
    #!+SERIALIZATION(mb, Jan-2013) review this approach
    try:
        zope.security.management.getInteraction()
    except zope.security.interfaces.NoInteraction:
        principal = zope.security.testing.Principal('user', 'manager', ())
        zope.security.management.newInteraction(create_participation(principal))

    include = []
    # list of files to zip
    files = []
    # data dict to be published
    data = {}
    
    context = zope.security.proxy.removeSecurityProxy(context)
    
    if interfaces.IFeatureVersion.providedBy(context):
        include.append("versions")
    if interfaces.IFeatureAudit.providedBy(context):
        include.append("event")
    
    exclude = ["data", "event", "attachments", "changes"]
    
    # exclude binary fields from the XML and add them to the zip of files for this object
    for column in class_mapper(context.__class__).columns:
        if column.type.__class__ == Binary:
            exclude.append(column.key)
            content = getattr(context, column.key, None)
            if content:
                bfile = tmp(delete=False)
                bfile.write(content)
                files.append(bfile.name)
                data[column.key] = dict(
                    saved_file=os.path.basename(bfile.name)
                )
                bfile.close()
    data.update(
        obj2dict(context, 1, 
            parent=None,
            include=include,
            exclude=exclude
        )
    )
    obj_type = IWorkflow(context).name    
    tags = IStateController(context).get_state().tags
    if tags:
        data["tags"] = tags
    permissions = get_object_state_rpm(context).permissions
    data["permissions"] = get_permissions_dict(permissions)
    data["changes"] = []
    for change in getattr(context, "changes", []):
        change_dict = obj2dict(change, 0, parent=context)
        change_permissions = get_head_object_state_rpm(change).permissions
        change_dict["permissions"] = get_permissions_dict(change_permissions)
        data["changes"].append(change_dict)
    
    # setup path to save serialized data 
    path = os.path.join(setupStorageDirectory(), obj_type)
    if not os.path.exists(path):
        os.makedirs(path)
    
    # xml file path
    file_path = os.path.join(path, stringKey(context)) 
    
    if interfaces.IFeatureAttachment.providedBy(context):
        attachments = getattr(context, "attachments", None)
        if attachments:
            data["attachments"] = []
            for attachment in attachments:
                # serializing attachment
                attachment_dict = obj2dict(attachment, 1,
                    parent=context,
                    exclude=["data", "event", "versions"])
                permissions = get_object_state_rpm(attachment).permissions
                attachment_dict["permissions"] = \
                    get_permissions_dict(permissions)
                # saving attachment to tmp
                attached_file = tmp(delete=False)
                attached_file.write(attachment.data)
                attached_file.flush()
                attached_file.close()
                files.append(attached_file.name)
                attachment_dict["saved_file"] = os.path.basename(
                    attached_file.name
                )
                data["attachments"].append(attachment_dict)

    
    # zipping xml, attached files plus any binary fields
    # also remove the temporary files
    if files:
        #generate temporary xml file
        temp_xml = tmp(delete=False)
        temp_xml.write(serialize(data, name=obj_type))
        temp_xml.close()
        #write attachments/binary fields to zip
        zip_file = ZipFile("%s.zip" % (file_path), "w")
        for f in files:
            zip_file.write(f, os.path.basename(f))
            os.remove(f)
        #write the xml
        zip_file.write(temp_xml.name, "%s.xml" % os.path.basename(file_path))
        zip_file.close()
        #placed remove after zip_file.close !+ZIP_FILE_CRC_FAILURE
        os.remove(temp_xml.name) 

    else:
        # save serialized xml to file
        with open("%s.xml" % (file_path), "w") as xml_file:
            xml_file.write(serialize(data, name=obj_type))
            xml_file.close()

    #publish to rabbitmq outputs queue
    connection = get_mq_connection()
    if not connection:
        return
    channel = connection.channel()
    publish_file_path = "%s.%s" %(file_path, ("zip" if files else "xml"))
    channel.basic_publish(
        exchange=SERIALIZE_OUTPUT_EXCHANGE,
        routing_key=SERIALIZE_OUTPUT_ROUTING_KEY,
        body=simplejson.dumps({"type": "file", "location": publish_file_path }),
        properties=pika.BasicProperties(content_type="text/plain",
            delivery_mode=2
        )
    )
    
    #clean up - remove any files if zip was created
    if files:
        prev_xml_file = "%s.%s" %(file_path, "xml")
        if os.path.exists(prev_xml_file):
            os.remove(prev_xml_file)
Code Example #16
File: serialize.py  Project: bungeni-org/bungeni.main
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files. 
    """
    context = zope.security.proxy.removeSecurityProxy(context)
    obj_type = IWorkflow(context).name
    #locking
    random_name_sfx = generate_random_filename()
    context_file_name = "%s-%s" % (stringKey(context), random_name_sfx)
    #lock_name = "%s-%s" %(obj_type, context_file_name)
    #!+LOCKING(AH, 25-01-2014) disabling file locking
    #! locking was required when the serializer used a constant file name
    #! for an object. Now serialized file names are unique and not repeated
    #with LockStore.get_lock(lock_name):
    #    
    #root key (used to cache files to zip)
    root_key = make_key()
    # create a fake interaction to ensure items requiring a participation
    # are serialized 
    #!+SERIALIZATION(mb, Jan-2013) review this approach
    try:
        zope.security.management.getInteraction()
    except zope.security.interfaces.NoInteraction:
        principal = zope.security.testing.Principal("user", "manager", ())
        zope.security.management.newInteraction(create_participation(principal))
    include = []
    # data dict to be published
    data = {}
    if IFeatureVersion.providedBy(context):
        include.append("versions")
    if IFeatureEvent.providedBy(context):
        include.append("event")
    
    exclude = ["data", "event", "attachments"]
    updated_dict = obj2dict(context, 1,
        parent=None,
        include=include,
        exclude=exclude,
        root_key=root_key
    )
    data.update(
        updated_dict
    )

    tags = IStateController(context).get_state().tags
    if tags:
        data["tags"] = tags
    permissions = get_object_state_rpm(context).permissions
    data["permissions"] = get_permissions_dict(permissions)

    # setup path to save serialized data
    path = os.path.join(setupStorageDirectory(), obj_type)
    log.info("Setting up path to write to : %s", path)
    if not os.path.exists(path):
        #
        # !+THREADSAFE(AH, 2014-09-24) making makedirs threadsafe, 
        # sometimes between checking for existence and execution 
        # of makedirs() the folder has already been created by 
        # another thread
        try:
            os.makedirs(path)
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(path):
                log.info("Error Folder : %s already exists, ignoring exception ", path)
            else:
                raise

    # xml file path
    file_path = os.path.join(path, context_file_name) 
    # files to zip
    files = []

    if IFeatureAttachment.providedBy(context):
        attachments = getattr(context, "attachments", None)
        if attachments:
	    data["attachments"] = []
	    for attachment in attachments:
	        # serializing attachment
	        attachment_dict = obj2dict(attachment, 1,
	            parent=context,
	            exclude=["data", "event", "versions"])
	        # saving attachment to tmp
	        attached_file = tmp(delete=False)
	        attached_file.write(attachment.data)
	        attached_file.flush()
	        attached_file.close()
	        files.append(attached_file.name)
	        attachment_dict["saved_file"] = os.path.basename(
	            attached_file.name
	        )
	        data["attachments"].append(attachment_dict)

    # add explicit origin chamber for this object (used to partition data
    # if more than one chamber exists)
    
    if obj_type == "Legislature":
        data["origin_chamber"] = None
    else:
        data["origin_chamber"] = get_origin_chamber(context)

    # add any additional files to file list
    files = files + PersistFiles.get_files(root_key)
    # zipping xml, attached files plus any binary fields
    # also remove the temporary files
    if files:
        # generate temporary xml file
        temp_xml = tmp(delete=False)
        temp_xml.write(serialize(data, name=obj_type))
        temp_xml.close()
        # write attachments/binary fields to zip
        with ZipFile("%s.zip" % (file_path), "w") as zip_file:
            for f in files:
                zip_file.write(f, os.path.basename(f))
            # write the xml
            zip_file.write(temp_xml.name, "%s.xml" % os.path.basename(file_path))
        files.append(temp_xml.name)
    else:
        # save serialized xml to file
        with open("%s.xml" % (file_path), "w") as xml_file:
            xml_file.write(serialize(data, name=obj_type))
            xml_file.close()
    # publish to rabbitmq outputs queue
    connection = bungeni.core.notifications.get_mq_connection()
    if not connection:
        return
    channel = connection.channel()
    #channel.confirm_delivery()
    publish_file_path = "%s.%s" %(file_path, ("zip" if files else "xml"))
    #channel_delivery = 
    channel.basic_publish(
        exchange=SERIALIZE_OUTPUT_EXCHANGE,
        routing_key=SERIALIZE_OUTPUT_ROUTING_KEY,
        body=simplejson.dumps({"type": "file", "location": publish_file_path }),
        properties=pika.BasicProperties(content_type="text/plain",
            delivery_mode=2
        )
    )
    #if channel_delivery:
    #    log.info("Message published to exchange %s with key %s for %s" % 
    #        (SERIALIZE_OUTPUT_EXCHANGE, SERIALIZE_OUTPUT_ROUTING_KEY, publish_file_path))
    #else:
    #    log.error("Message publication failed for %r", publish_file_path)
        

    #clean up - remove any files if zip was/was not created
    if files:
        files.append("%s.%s" %(file_path, "xml"))
    else:
        files.append("%s.%s" %(file_path, "zip"))
    remove_files(files)

    # clear the cache
    PersistFiles.clear_files(root_key)
Code Example #17
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files. 
    """
    include = []

    context = removeSecurityProxy(context)

    if IVersionable.implementedBy(context.__class__):
        include.append("versions")
    if IAuditable.implementedBy(context.__class__):
        include.append("event")

    data = obj2dict(context,
                    1,
                    parent=None,
                    include=include,
                    exclude=[
                        "file_data", "image", "logo_data", "event",
                        "attached_files", "changes"
                    ])

    type = IWorkflow(context).name

    tags = IStateController(context).get_state().tags
    if tags:
        data["tags"] = tags

    permissions = get_object_state_rpm(context).permissions
    data["permissions"] = get_permissions_dict(permissions)

    data["changes"] = []
    for change in getattr(context, "changes", []):
        change_dict = obj2dict(change, 0, parent=context)
        change_permissions = get_head_object_state_rpm(change).permissions
        change_dict["permissions"] = get_permissions_dict(change_permissions)
        data["changes"].append(change_dict)

    # list of files to zip
    files = []
    # setup path to save serialized data
    path = os.path.join(setupStorageDirectory(), type)
    if not os.path.exists(path):
        os.makedirs(path)

    # xml file path
    file_path = os.path.join(path, stringKey(context))

    has_attachments = False
    if IAttachmentable.implementedBy(context.__class__):
        attached_files = getattr(context, "attached_files", None)
        if attached_files:
            has_attachments = True
            # add xml file to list of files to zip
            files.append("%s.xml" % (file_path))
            data["attached_files"] = []
            for attachment in attached_files:
                # serializing attachment
                attachment_dict = obj2dict(
                    attachment,
                    1,
                    parent=context,
                    exclude=["file_data", "event", "versions", "changes"])
                permissions = get_object_state_rpm(attachment).permissions
                attachment_dict["permissions"] = \
                    get_permissions_dict(permissions)
                # saving attachment to tmp
                with tmp(delete=False) as f:
                    f.write(attachment.file_data)
                    files.append(f.name)
                    attachment_dict["saved_file"] = \
                        os.path.split(f.name)[-1]
                data["attached_files"].append(attachment_dict)

    # saving xml file
    with open("%s.xml" % (file_path), "w") as file:
        file.write(serialize(data, name=type))

    # zipping xml and attached files
    # unzipped files are removed
    if has_attachments:
        zip = ZipFile("%s.zip" % (file_path), "w")
        for f in files:
            zip.write(f, os.path.split(f)[-1])
            os.remove(f)
        zip.close()
Code Example #18
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files. 
    """
    include = []
    
    context = removeSecurityProxy(context)
    
    if interfaces.IFeatureVersion.providedBy(context):
        include.append("versions")
    if interfaces.IFeatureAudit.providedBy(context):
        include.append("event")
    
    data = obj2dict(context, 1, 
        parent=None,
        include=include,
        exclude=["data", "image", "logo_data", "event", "attachments", 
            "changes"]
    )
    
    # !+please do not use python builtin names as variable names
    type = IWorkflow(context).name
    
    # !+IWorkflow(context).get_state(context.status).tags
    tags = IStateController(context).get_state().tags
    if tags:
        data["tags"] = tags
    
    permissions = get_object_state_rpm(context).permissions
    data["permissions"] = get_permissions_dict(permissions)
    
    data["changes"] = []
    for change in getattr(context, "changes", []):
        change_dict = obj2dict(change, 0, parent=context)
        change_permissions = get_head_object_state_rpm(change).permissions
        change_dict["permissions"] = get_permissions_dict(change_permissions)
        data["changes"].append(change_dict)
    
    # list of files to zip
    files = []
    # setup path to save serialized data 
    path = os.path.join(setupStorageDirectory(), type)
    if not os.path.exists(path):
        os.makedirs(path)
    
    # xml file path
    file_path = os.path.join(path, stringKey(context)) 
    
    has_attachments = False
    if interfaces.IFeatureAttachment.providedBy(context):
        attachments = getattr(context, "attachments", None)
        if attachments:
            has_attachments = True
            # add xml file to list of files to zip
            files.append("%s.xml" % (file_path))
            data["attachments"] = []
            for attachment in attachments:
                # serializing attachment
                attachment_dict = obj2dict(attachment, 1,
                    parent=context,
                    exclude=["data", "event", "versions"])
                permissions = get_object_state_rpm(attachment).permissions
                attachment_dict["permissions"] = \
                    get_permissions_dict(permissions)
                # saving attachment to tmp
                with tmp(delete=False) as f:
                    f.write(attachment.data)
                    files.append(f.name)
                    attachment_dict["saved_file"] = \
                        os.path.split(f.name)[-1]  
                data["attachments"].append(attachment_dict)
    
    # saving xml file
    with open("%s.xml" % (file_path), "w") as file:
        file.write(serialize(data, name=type))
    
    # zipping xml and attached files 
    # unzipped files are removed
    if has_attachments:
        zip = ZipFile("%s.zip" % (file_path), "w")
        for f in files:
            zip.write(f, os.path.split(f)[-1])
            os.remove(f)
        zip.close()
Code Example #19
File: serialize.py  Project: mohalfaki/bungeni-portal
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files. 
    """

    #create a fake interaction to ensure items requiring a participation
    #are serialized
    #!+SERIALIZATION(mb, Jan-2013) review this approach
    try:
        zope.security.management.getInteraction()
    except zope.security.interfaces.NoInteraction:
        principal = zope.security.testing.Principal('user', 'manager', ())
        zope.security.management.newInteraction(
            create_participation(principal))

    include = []
    # list of files to zip
    files = []
    # data dict to be published
    data = {}

    context = zope.security.proxy.removeSecurityProxy(context)

    if interfaces.IFeatureVersion.providedBy(context):
        include.append("versions")
    if interfaces.IFeatureAudit.providedBy(context):
        include.append("event")

    exclude = ["data", "event", "attachments", "changes"]

    # exclude binary fields from the XML and add them to the zip of files for this object
    for column in class_mapper(context.__class__).columns:
        if column.type.__class__ == Binary:
            exclude.append(column.key)
            content = getattr(context, column.key, None)
            if content:
                bfile = tmp(delete=False)
                bfile.write(content)
                files.append(bfile.name)
                data[column.key] = dict(
                    saved_file=os.path.basename(bfile.name))
                bfile.close()
    data.update(
        obj2dict(context, 1, parent=None, include=include, exclude=exclude))
    obj_type = IWorkflow(context).name
    tags = IStateController(context).get_state().tags
    if tags:
        data["tags"] = tags
    permissions = get_object_state_rpm(context).permissions
    data["permissions"] = get_permissions_dict(permissions)
    data["changes"] = []
    for change in getattr(context, "changes", []):
        change_dict = obj2dict(change, 0, parent=context)
        change_permissions = get_head_object_state_rpm(change).permissions
        change_dict["permissions"] = get_permissions_dict(change_permissions)
        data["changes"].append(change_dict)

    # setup path to save serialized data
    path = os.path.join(setupStorageDirectory(), obj_type)
    if not os.path.exists(path):
        os.makedirs(path)

    # xml file path
    file_path = os.path.join(path, stringKey(context))

    if interfaces.IFeatureAttachment.providedBy(context):
        attachments = getattr(context, "attachments", None)
        if attachments:
            data["attachments"] = []
            for attachment in attachments:
                # serializing attachment
                attachment_dict = obj2dict(
                    attachment,
                    1,
                    parent=context,
                    exclude=["data", "event", "versions"])
                permissions = get_object_state_rpm(attachment).permissions
                attachment_dict["permissions"] = \
                    get_permissions_dict(permissions)
                # saving attachment to tmp
                attached_file = tmp(delete=False)
                attached_file.write(attachment.data)
                attached_file.flush()
                attached_file.close()
                files.append(attached_file.name)
                attachment_dict["saved_file"] = os.path.basename(
                    attached_file.name)
                data["attachments"].append(attachment_dict)

    # zipping xml, attached files plus any binary fields
    # also remove the temporary files
    if files:
        #generate temporary xml file
        temp_xml = tmp(delete=False)
        temp_xml.write(serialize(data, name=obj_type))
        temp_xml.close()
        #write attachments/binary fields to zip
        zip_file = ZipFile("%s.zip" % (file_path), "w")
        for f in files:
            zip_file.write(f, os.path.basename(f))
            os.remove(f)
        #write the xml
        zip_file.write(temp_xml.name, "%s.xml" % os.path.basename(file_path))
        zip_file.close()
        #placed remove after zip_file.close !+ZIP_FILE_CRC_FAILURE
        os.remove(temp_xml.name)

    else:
        # save serialized xml to file
        with open("%s.xml" % (file_path), "w") as xml_file:
            xml_file.write(serialize(data, name=obj_type))
            xml_file.close()

    #publish to rabbitmq outputs queue
    connection = get_mq_connection()
    if not connection:
        return
    channel = connection.channel()
    publish_file_path = "%s.%s" % (file_path, ("zip" if files else "xml"))
    channel.basic_publish(exchange=SERIALIZE_OUTPUT_EXCHANGE,
                          routing_key=SERIALIZE_OUTPUT_ROUTING_KEY,
                          body=simplejson.dumps({
                              "type": "file",
                              "location": publish_file_path
                          }),
                          properties=pika.BasicProperties(
                              content_type="text/plain", delivery_mode=2))

    #clean up - remove any files if zip was created
    if files:
        prev_xml_file = "%s.%s" % (file_path, "xml")
        if os.path.exists(prev_xml_file):
            os.remove(prev_xml_file)
Code Example #20
File: ra8.py  Project: CSD-Public/stonix
#!/usr/bin/python

from subprocess import Popen, PIPE
from tempfile import SpooledTemporaryFile as tmp

f = tmp()

f.write('\nP@SSW0rd\n')

f.seek(0)

pipe = Popen('/usr/bin/su dcsadmin -c "/usr/bin/id"', stdout=PIPE, stdin=f, bufsize=1, shell=True)
pipe.stdout.read()
pipe.stdout.read()
pipe.stdout.read()
output = pipe.stdout.readlines()
f.close()

print output

Code Example #21
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files. 
    """


    context = zope.security.proxy.removeSecurityProxy(context)
    obj_type = IWorkflow(context).name
    
    #locking
    lock_name = "%s-%s" %(obj_type, stringKey(context))
    with LockStore.get_lock(lock_name):
        #root key (used to cache files to zip)
        root_key = make_key()

        #create a fake interaction to ensure items requiring a participation
        #are serialized 
        #!+SERIALIZATION(mb, Jan-2013) review this approach
        try:
            zope.security.management.getInteraction()
        except zope.security.interfaces.NoInteraction:
            principal = zope.security.testing.Principal('user', 'manager', ())
            zope.security.management.newInteraction(create_participation(principal))
        include = []
        # data dict to be published
        data = {}

        if interfaces.IFeatureVersion.providedBy(context):
            include.append("versions")
        if interfaces.IFeatureAudit.providedBy(context):
            include.append("event")
        
        exclude = ["data", "event", "attachments"]
        
        data.update(
            obj2dict(context, 1, 
                parent=None,
                include=include,
                exclude=exclude,
                root_key=root_key
            )
        )
        tags = IStateController(context).get_state().tags
        if tags:
            data["tags"] = tags
        permissions = get_object_state_rpm(context).permissions
        data["permissions"] = get_permissions_dict(permissions)
        
        # setup path to save serialized data 
        path = os.path.join(setupStorageDirectory(), obj_type)
        if not os.path.exists(path):
            os.makedirs(path)
        
        # xml file path
        file_path = os.path.join(path, stringKey(context)) 
        
        #files to zip
        files = []
        
        if interfaces.IFeatureAttachment.providedBy(context):
            attachments = getattr(context, "attachments", None)
            if attachments:
                data["attachments"] = []
                for attachment in attachments:
                    # serializing attachment
                    attachment_dict = obj2dict(attachment, 1,
                        parent=context,
                        exclude=["data", "event", "versions"])
                    # saving attachment to tmp
                    attached_file = tmp(delete=False)
                    attached_file.write(attachment.data)
                    attached_file.flush()
                    attached_file.close()
                    files.append(attached_file.name)
                    attachment_dict["saved_file"] = os.path.basename(
                        attached_file.name
                    )
                    data["attachments"].append(attachment_dict)

        #add explicit origin chamber for this object (used to partition data
        #if more than one parliament exists)
        data["origin_parliament"] = get_origin_parliament(context)
        
        #add any additional files to file list
        files = files + PersistFiles.get_files(root_key)
        # zipping xml, attached files plus any binary fields
        # also remove the temporary files
        if files:
            #generate temporary xml file
            temp_xml = tmp(delete=False)
            temp_xml.write(serialize(data, name=obj_type))
            temp_xml.close()
            #write attachments/binary fields to zip
            with ZipFile("%s.zip" % (file_path), "w") as zip_file:
                for f in files:
                    zip_file.write(f, os.path.basename(f))
                # write the xml
                zip_file.write(temp_xml.name, "%s.xml" % os.path.basename(file_path))
            files.append(temp_xml.name)

        else:
            # save serialized xml to file
            with open("%s.xml" % (file_path), "w") as xml_file:
                xml_file.write(serialize(data, name=obj_type))
                xml_file.close()

        # publish to rabbitmq outputs queue
        connection = bungeni.core.notifications.get_mq_connection()
        if not connection:
            return
        channel = connection.channel()
        publish_file_path = "%s.%s" %(file_path, ("zip" if files else "xml"))
        channel.basic_publish(
            exchange=SERIALIZE_OUTPUT_EXCHANGE,
            routing_key=SERIALIZE_OUTPUT_ROUTING_KEY,
            body=simplejson.dumps({"type": "file", "location": publish_file_path }),
            properties=pika.BasicProperties(content_type="text/plain",
                delivery_mode=2
            )
        )
        
        #clean up - remove any files if zip was/was not created
        if files:
            files.append("%s.%s" %(file_path, "xml"))
        else:
            files.append("%s.%s" %(file_path, "zip"))
        remove_files(files)

        #clear the cache
        PersistFiles.clear_files(root_key)
Code Example #22
File: serialize.py  Project: mohalfaki/bungeni-portal
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files. 
    """

    context = zope.security.proxy.removeSecurityProxy(context)
    obj_type = IWorkflow(context).name

    #locking
    lock_name = "%s-%s" % (obj_type, stringKey(context))
    with LockStore.get_lock(lock_name):
        #root key (used to cache files to zip)
        root_key = make_key()

        #create a fake interaction to ensure items requiring a participation
        #are serialized
        #!+SERIALIZATION(mb, Jan-2013) review this approach
        try:
            zope.security.management.getInteraction()
        except zope.security.interfaces.NoInteraction:
            principal = zope.security.testing.Principal('user', 'manager', ())
            zope.security.management.newInteraction(
                create_participation(principal))
        include = []
        # data dict to be published
        data = {}

        if interfaces.IFeatureVersion.providedBy(context):
            include.append("versions")
        if interfaces.IFeatureAudit.providedBy(context):
            include.append("event")

        exclude = ["data", "event", "attachments"]

        data.update(
            obj2dict(context,
                     1,
                     parent=None,
                     include=include,
                     exclude=exclude,
                     root_key=root_key))
        tags = IStateController(context).get_state().tags
        if tags:
            data["tags"] = tags
        permissions = get_object_state_rpm(context).permissions
        data["permissions"] = get_permissions_dict(permissions)

        # setup path to save serialized data
        path = os.path.join(setupStorageDirectory(), obj_type)
        if not os.path.exists(path):
            os.makedirs(path)

        # xml file path
        file_path = os.path.join(path, stringKey(context))

        #files to zip
        files = []

        if interfaces.IFeatureAttachment.providedBy(context):
            attachments = getattr(context, "attachments", None)
            if attachments:
                data["attachments"] = []
                for attachment in attachments:
                    # serializing attachment
                    attachment_dict = obj2dict(
                        attachment,
                        1,
                        parent=context,
                        exclude=["data", "event", "versions"])
                    # saving attachment to tmp
                    attached_file = tmp(delete=False)
                    attached_file.write(attachment.data)
                    attached_file.flush()
                    attached_file.close()
                    files.append(attached_file.name)
                    attachment_dict["saved_file"] = os.path.basename(
                        attached_file.name)
                    data["attachments"].append(attachment_dict)

        #add explicit origin chamber for this object (used to partition data
        #if more than one parliament exists)
        data["origin_parliament"] = get_origin_parliament(context)

        #add any additional files to file list
        files = files + PersistFiles.get_files(root_key)
        # zipping xml, attached files plus any binary fields
        # also remove the temporary files
        if files:
            #generate temporary xml file
            temp_xml = tmp(delete=False)
            temp_xml.write(serialize(data, name=obj_type))
            temp_xml.close()
            #write attachments/binary fields to zip
            with ZipFile("%s.zip" % (file_path), "w") as zip_file:
                for f in files:
                    zip_file.write(f, os.path.basename(f))
                # write the xml
                zip_file.write(temp_xml.name,
                               "%s.xml" % os.path.basename(file_path))
            files.append(temp_xml.name)

        else:
            # save serialized xml to file
            with open("%s.xml" % (file_path), "w") as xml_file:
                xml_file.write(serialize(data, name=obj_type))
                xml_file.close()

        # publish to rabbitmq outputs queue
        connection = bungeni.core.notifications.get_mq_connection()
        if not connection:
            return
        channel = connection.channel()
        publish_file_path = "%s.%s" % (file_path, ("zip" if files else "xml"))
        channel.basic_publish(exchange=SERIALIZE_OUTPUT_EXCHANGE,
                              routing_key=SERIALIZE_OUTPUT_ROUTING_KEY,
                              body=simplejson.dumps({
                                  "type":
                                  "file",
                                  "location":
                                  publish_file_path
                              }),
                              properties=pika.BasicProperties(
                                  content_type="text/plain", delivery_mode=2))

        #clean up - remove any files if zip was/was not created
        if files:
            files.append("%s.%s" % (file_path, "xml"))
        else:
            files.append("%s.%s" % (file_path, "zip"))
        remove_files(files)

        #clear the cache
        PersistFiles.clear_files(root_key)