Example No. 1
    def exportgroup_remove_volumes_by_uri(
        self, exportgroup_uri, volumeIdList, sync=False, tenantname=None, projectname=None, snapshots=None, cg=None
    ):
        # if snapshots are given, resolve them to snapshot URIs so they are what gets removed from the export group
        volume_snapshots = volumeIdList
        if snapshots:
            resuri = None
            if cg:
                blockTypeName = "consistency-groups"
                from consistencygroup import ConsistencyGroup

                cgObject = ConsistencyGroup(self.__ipAddr, self.__port)
                resuri = cgObject.consistencygroup_query(cg, projectname, tenantname)
            else:
                blockTypeName = "volumes"
                if len(volumeIdList) > 0:
                    resuri = volumeIdList[0]
            volume_snapshots = []
            snapshotObject = Snapshot(self.__ipAddr, self.__port)
            for snapshot in snapshots:
                volume_snapshots.append(snapshotObject.snapshot_query("block", blockTypeName, resuri, snapshot))

        parms = {}

        parms["volume_changes"] = self._remove_list(volume_snapshots)
        o = self.send_json_request(exportgroup_uri, parms)
        return self.check_for_sync(o, sync)

Example No. 2
    def _initialize_netcdf(self):
        """
        Initialize the netCDF file for storage.
        
        """

        # Open NetCDF file for writing
        ncfile = netcdf.Dataset(self.fn_storage, "w")  # for netCDF4

        # Store netcdf file handle.
        self.ncfile = ncfile

        # Set global attributes.
        setattr(ncfile, "title", "Multi-State-Transition-Interface-Sampling")
        setattr(ncfile, "application", "Host-Guest-System")
        setattr(ncfile, "program", "run.py")
        setattr(ncfile, "programVersion", __version__)
        setattr(ncfile, "Conventions", "Multi-State Transition Interface TPS")
        setattr(ncfile, "ConventionVersion", "0.1")

        # initialize arrays used for snapshots
        Snapshot._init_netcdf(self)

        # initialize arrays used for trajectories
        Trajectory._init_netcdf(self)

        # Force sync to disk to avoid data loss.
        ncfile.sync()

        return
Example No. 3
 def test_snapshot_insert(self):
     record = ['idhere', 1, '*']
     s = Snapshot(record)
     record = ['another id', 2, '*']
     s.insert(record)
     self.assertEqual(s.acceptor.record_list[1], record)
     self.assertEqual(s.blocks.last_node, s.alives_entries[record[0]])
Example No. 4
    def _get_resource_lun_tuple(self, resources, resType, baseResUri, tenantname, projectname, blockTypeName):
        copyEntries = []
        snapshotObject = Snapshot(self.__ipAddr, self.__port)
        volumeObject = Volume(self.__ipAddr, self.__port)
        for copy in resources:
            copyParam = []
            try:
                copyParam = copy.split(":")
            except Exception as e:
                raise SOSError(
                    SOSError.CMD_LINE_ERR, "Please provide valid format volume: lun for parameter " + resType
                )
            copy = dict()
            if not len(copyParam):
                raise SOSError(SOSError.CMD_LINE_ERR, "Please provide atleast volume for parameter " + resType)
            if resType == "volumes":
                fullvolname = tenantname + "/" + projectname + "/"
                fullvolname += copyParam[0]
                copy["id"] = volumeObject.volume_query(fullvolname)
            if resType == "snapshots":
                copy["id"] = snapshotObject.snapshot_query("block", blockTypeName, baseResUri, copyParam[0])
            if len(copyParam) > 1:
                copy["lun"] = copyParam[1]
            copyEntries.append(copy)
        return copyEntries

        """
 def __init__(self, fpga, comb, f_start, f_stop, logger=logging.getLogger(__name__)):
     """ f_start and f_stop must be in Hz
     """
     self.logger = logger
     snap_name = "snap_{a}x{b}".format(a=comb[0], b=comb[1])
     self.snapshot0 = Snapshot(fpga,
                              "{name}_0".format(name = snap_name),
                              dtype='>i8',
                              cvalue=True,
                              logger=self.logger.getChild("{name}_0".format(name = snap_name)))
     self.snapshot1 = Snapshot(fpga,
                              "{name}_1".format(name = snap_name),
                              dtype='>i8',
                              cvalue=True,
                              logger=self.logger.getChild("{name}_1".format(name = snap_name)))
     self.f_start = np.uint64(f_start)
     self.f_stop = np.uint64(f_stop)
     # this will change from None to an array of phase offsets for each frequency bin 
     # if calibration gets applied at a later stage.
     # this is an array of phases introduced by the system. So if a value is positive, 
     # it means that the system is introducing a phase shift between comb[0] and comb[1]
     # in other words comb1 is artificially delayed. 
     self.calibration_phase_offsets = None
     self.calibration_cable_length_offsets = None
     self.arm()
     self.fetch_signal()
     self.frequency_bins = np.linspace(
         start = self.f_start,
         stop = self.f_stop,
         num = len(self.signal),
         endpoint = False)
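
A minimal usage sketch for the constructor above. The already-connected `fpga` client object and the 0–400 MHz band are assumptions for illustration; only parameters shown in the listing are used (the `Correlation` name also appears in Example No. 11).

# Hypothetical usage; `fpga` is assumed to be a connected FPGA client object
# and (0, 1) the antenna pair to cross-correlate over 0..400 MHz.
correlation = Correlation(fpga, comb=(0, 1), f_start=0, f_stop=400e6)
# The constructor has already armed and fetched a signal, so the frequency
# axis is available immediately.
print(correlation.frequency_bins[:5])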
Example No. 6
 def make_snapshot(self):
     # self.st = SnapshotThread()
     # self.st.start()
     s = Snapshot()
     commit = s.compare_to(self.snapshots_list[-1])
     self.commits_list.append(commit)
     self.commitsModel.setStringList(self.commitsModel.stringList() + [commit.name])
Example No. 7
    def create_snapshot(self, file):

        if not file:
            self.logger.info("Please enter a valid filename.")
            return None

        s = Snapshot(file)
        s.create()
Example No. 8
    def restore_snapshot(self, file, version=None):

        if not file:
            self.logger.info("Please enter a valid filename.")
            return None

        s = Snapshot(file)
        s.restore(version)
Example No. 9
	def onSnapshot(self, evt):
		s = Snapshot(self, self.settings)
		if s.wasSuccessful():
			s.Show()
		else:
			dlg = wx.MessageDialog(self, "Error Taking Picture",
					'Camera Error', wx.OK | wx.ICON_ERROR)
			dlg.ShowModal()
			dlg.Destroy()
Example No. 10
    def snapshot(self, options):
        gp = GitParser()
        sn = Snapshot()

        image_path = sn.snapshot()
        im = ImageManipulator(image_path)
        im.add_text(gp.get_message('-1'), ImageManipulator.POSITION_BOTTOMLEFT,
                self.font_file, self.font_size)
        im.add_text(gp.get_hash('-1')[:10], ImageManipulator.POSITION_TOPRIGHT,
                self.font_file, self.font_size)
        im.save(self._get_snapshot_destination(options.destination))
Example No. 11
    def __init__(self,
                 ip_addr='localhost',
                 num_channels=4,
                 fs=800e6,
                 logger=logging.getLogger(__name__)):
        """The interface to a ROACH cross correlator

        Keyword arguments:
        ip_addr -- IP address (or hostname) of the ROACH. (default: localhost)
        num_channels -- antennas in the correlator. (default: 4)
        fs -- sample frequency of antennas. (default 800e6; 800 MHz)
        logger -- logger to use. (default: new default logger)
        """
        self.logger = logger
        self.fpga = corr.katcp_wrapper.FpgaClient(ip_addr)
        time.sleep(0.1)
        self.num_channels = num_channels
        self.fs = np.float64(fs)
        self.cross_combinations = list(
            itertools.combinations(
                range(num_channels),
                2))  # [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
        self.control_register = ControlRegister(
            self.fpga, self.logger.getChild('control_reg'))
        self.set_accumulation_len(100)
        self.re_sync()
        self.control_register.allow_trigger(
        )  # necessary as Correlations auto fetch signal
        # only 0x0 has been implemented
        #self.auto_combinations = [(x, x) for x in range(num_channels)] # [(0, 0), (1, 1), (2, 2), (3, 3)]
        self.auto_combinations = [(0, 0)]
        self.frequency_correlations = {}
        for comb in (self.cross_combinations + self.auto_combinations):
            self.frequency_correlations[comb] = Correlation(
                fpga=self.fpga,
                comb=comb,
                f_start=0,
                f_stop=fs / 2,
                logger=self.logger.getChild("{a}x{b}".format(a=comb[0],
                                                             b=comb[1])))
        self.time_domain_snap = Snapshot(
            fpga=self.fpga,
            name='dram_snapshot',
            dtype=np.int8,
            cvalue=False,
            logger=self.logger.getChild('time_domain_snap'))
        self.upsample_factor = 100
        self.subsignal_length_max = 2**17
        self.time_domain_padding = 100
        self.time_domain_calibration_values = None
        self.time_domain_calibration_cable_values = None
        self.control_register.block_trigger()
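
A usage sketch for the interface above. The enclosing class name is not shown in the listing, so `Correlator` is a hypothetical stand-in; the keyword arguments are the ones described in the docstring.

# Hypothetical instantiation; `Correlator` stands in for the class that
# defines the __init__ above, and the ROACH hostname is illustrative.
correlator = Correlator(ip_addr='roach-hostname', num_channels=4, fs=800e6)
# Cross- and auto-correlation pairs are precomputed at construction time.
print(correlator.cross_combinations)  # [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
print(correlator.auto_combinations)   # [(0, 0)]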
Example No. 12
    def snapshot(self, options):
        self._load_repository_settings()
        gp = GitParser()
        sn = Snapshot(int(options.delay), options.device,
                      int(options.skip_frames))

        image_path = sn.snapshot()
        im = ImageManipulator(image_path)
        im.resize(options.image_size)
        im.add_text(gp.get_message('-1'), ImageManipulator.POSITION_BOTTOMLEFT,
                self.font_file, self.font_size)
        im.add_text(gp.get_hash('-1')[:10], ImageManipulator.POSITION_TOPRIGHT,
                self.font_file, self.font_size)
        im.save(self._get_snapshot_destination(options.destination))
Example No. 13
    def tabulate_snapshots(self, file):

        if not file:
            self.logger.info("Please enter a valid filename.")
            return None

        s = Snapshot(file)

        table = s.tabulate()

        if table:
            self.logger.info("Snapshot Information for file: {}".format(file))
            self.logger.info(s.tabulate())
        else:
            self.logger.info("No snapshot Information found for file: {}".format(file))
Example No. 14
def exportgroup_list(args):
    obj = ExportGroup(args.ip, args.port)
    try:
        uris = obj.exportgroup_list(args.project, args.tenant)
        output = []
        if len(uris) > 0:
            for uri in uris:
                eg = obj.exportgroup_show(uri, args.project, args.tenant)
                # The following code is to get volume/snapshot name part of
                # export group list.
                if eg:
                    if "project" in eg and "name" in eg["project"]:
                        del eg["project"]["name"]
                    volumeuris = common.get_node_value(eg, "volumes")
                    volobj = Volume(args.ip, args.port)
                    snapobj = Snapshot(args.ip, args.port)
                    volnames = []
                    strvol = ""
                    for volumeuri in volumeuris:
                        strvol = str(volumeuri["id"])
                        if strvol.find("urn:storageos:Volume") >= 0:
                            vol = volobj.show_by_uri(strvol)
                            if vol:
                                volnames.append(vol["name"])
                        elif strvol.find("urn:storageos:BlockSnapshot") >= 0:
                            snapshot = snapobj.snapshot_show_uri("block", None, strvol)
                            if snapshot:
                                volnames.append(snapshot["name"])
                    eg["volumes_snapshots"] = volnames
                    output.append(eg)

            if args.verbose:
                return common.format_json_object(output)
            if len(output) > 0:
                if args.long:
                    from common import TableGenerator

                    TableGenerator(
                        output, ["module/name", "volumes_snapshots", "initiator_node", "initiator_port", "tags"]
                    ).printTable()

                else:
                    from common import TableGenerator

                    TableGenerator(output, ["module/name"]).printTable()

    except SOSError as e:
        raise common.format_err_msg_and_raise("list", "exportgroup", e.err_text, e.err_code)
Example No. 15
 def test_snapshot_init(self):
     record = ['idhere', 1, '*']
     s = Snapshot(record)
     self.assertIsInstance(s, Snapshot)
     self.assertIsInstance(s.blocks, Linked_list)
     self.assertIsInstance(s.blocks.last_node.block, block)
     self.assertIsInstance(s.acceptor, block)
Example No. 16
    def getSnapshots(self, sessionId):
        """
        Get a list of Snapshot objects.

        Parameters
        ----------
        sessionId: string
            An identifier for a user session.

        Returns
        -------
        list
            A list of Snapshot objects.
        """
        snapshots = self.con.cmdTagList("getSnapshotObjects",
                                        sessionId=sessionId)
        snapshotObjects = []
        if (snapshots[0] != ""):
            for snapshot in snapshots:
                j = json.loads(snapshot)
                snapObj = Snapshot(sessionId, j['type'], j['index'],
                                   j['Snapshot'], j['layout'],
                                   j['preferences'], j['data'],
                                   j['description'], j['dateCreated'],
                                   self.con)
                snapshotObjects.append(snapObj)
        return snapshotObjects
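
A brief usage sketch, assuming `client` is an instance of the class that defines getSnapshots() and that a valid session identifier is available; both names are illustrative placeholders.

# Hypothetical call; `client` and the session id are placeholders.
session_id = "example-session-id"
for snap in client.getSnapshots(session_id):
    # each element is a Snapshot built from the JSON returned by the server
    print(snap)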
Example No. 17
    def _test_create(my):
        search = Search("unittest/person")
        persons = search.get_sobjects()
        person = persons[0]

        snapshot_type = "file"
        snapshot = Snapshot.create(person,
                                   context="publish",
                                   snapshot_type=snapshot_type)

        version = snapshot.get_value("version")
        my.assertEquals(1, version)

        search_type = snapshot.get_value("search_type")
        my.assertEquals(search_type, person.get_search_type())
        search_code = snapshot.get_value("search_code")
        my.assertEquals(search_code, person.get_value("code"))

        # also check search_id
        if SearchType.column_exists("sthpw/snapshot", "search_id"):
            search_code = snapshot.get_value("search_id")
            my.assertEquals(search_code, person.get_value("id"))

        test_person = snapshot.get_sobject()
        my.assertEquals(test_person.get_code(), person.get_code())
Example No. 18
    def get_web_dir(my, snapshot=None):
        """go through the stored snapshot_code to get the actual path"""
        code = my.get_value("snapshot_code")
        from snapshot import Snapshot

        snapshot = Snapshot.get_by_code(code)
        return snapshot.get_web_dir()
Example No. 19
def empty_snapshot_from_openmm_topology(topology, simple_topology=False):
    """
    Return an empty snapshot from an openmm.Topology object

    Velocities will be set to zero.

    Parameters
    ----------
    topology : openmm.Topology
        the topology representing the structure and number of atoms
    simple_topology : bool
        if `True` only a simple topology with n_atoms will be created.
        This cannot be used with complex CVs but loads and stores very fast

    Returns
    -------
    openpathsampling.engines.Snapshot
        the complete snapshot with zero coordinates and velocities

    """
    n_atoms = topology.n_atoms

    if simple_topology:
        topology = Topology(n_atoms, 3)
    else:
        topology = MDTrajTopology(md.Topology.from_openmm(topology))

    snapshot = Snapshot.construct(
        coordinates=u.Quantity(np.zeros((n_atoms, 3)), u.nanometers),
        box_vectors=u.Quantity(topology.setUnitCellDimensions(), u.nanometers),
        velocities=u.Quantity(np.zeros((n_atoms, 3)),
                              u.nanometers / u.picoseconds),
        engine=TopologyEngine(topology))

    return snapshot
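
A sketch of how the helper above might be called, assuming an OpenMM topology loaded from a PDB file; the file name is a placeholder and, depending on the OpenMM version, the import may instead come from `simtk.openmm.app`.

# Hypothetical usage: build a zero-coordinate, zero-velocity snapshot from an
# OpenMM Topology ('system.pdb' is a placeholder path).
from openmm.app import PDBFile

pdb = PDBFile('system.pdb')
snapshot = empty_snapshot_from_openmm_topology(pdb.topology)
print(snapshot)  # complete snapshot with zeroed coordinates and velocities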
Example No. 20
    def get_parent_dir(my, search_type=None, context=None, sobject=None):
        from project import Project

        if not sobject:
            sobject = my.sobject
        if search_type:
            parent = sobject.get_parent(search_type)
        else:
            search_type = sobject.get_value("search_type")
            search_id = sobject.get_value("search_id")
            parent = Search.get_by_id(search_type, search_id)

        if not parent:
            raise TacticException("No parent exists for '%s', possibly a result of Shot rename or removal." % sobject.get_code())

        # just use the latest of the context desired
        if context:
            search_id = parent.get_id()
            search_type = parent.get_search_type()
            snapshot = Snapshot.get_latest(search_type, search_id, context)
        else:
            # basically this means that without a parent context, there is
            # no way to know the directory this is in.
            snapshot = None
        dirs = Project._get_dir( my.protocol,parent,snapshot,None )
        dirs = dirs.split("/")
        return dirs
Example No. 21
def snapshot_from_pdb(pdb_file, simple_topology=False):
    """
    Construct a Snapshot from the first frame in a pdb file without velocities

    Parameters
    ----------
    pdb_file : str
        The filename of the .pdb file to be used
    simple_topology : bool
        if `True` only a simple topology with n_atoms will be created.
        This cannot be used with complex CVs but loads and stores very fast

    Returns
    -------
    :class:`openpathsampling.engines.Snapshot`
        the constructed Snapshot

    """
    pdb = md.load(pdb_file)
    velocities = np.zeros(pdb.xyz[0].shape)

    if simple_topology:
        topology = Topology(*pdb.xyz[0].shape)
    else:
        topology = MDTrajTopology(pdb.topology)

    snapshot = Snapshot.construct(
        coordinates=u.Quantity(pdb.xyz[0], u.nanometers),
        box_vectors=u.Quantity(pdb.unitcell_vectors[0], u.nanometers),
        velocities=u.Quantity(velocities, u.nanometers / u.picoseconds),
        engine=FileEngine(topology, pdb_file))

    return snapshot
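
A usage sketch for the function above; the PDB path is a placeholder, and the result is a single snapshot carrying the first frame's coordinates and zero velocities.

# Hypothetical call; 'protein.pdb' is a placeholder path.
snapshot = snapshot_from_pdb('protein.pdb')
print(snapshot)  # Snapshot with PDB coordinates and zero velocities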
Example No. 22
    def get_parent_dir(self, search_type=None, context=None, sobject=None):
        from project import Project

        if not sobject:
            sobject = self.sobject
        if search_type:
            parent = sobject.get_parent(search_type)
        else:
            search_type = sobject.get_value("search_type")
            search_id = sobject.get_value("search_id")
            parent = Search.get_by_id(search_type, search_id)

        if not parent:
            raise TacticException("No parent exists for '%s', possibly a result of Shot rename or removal." % sobject.get_code())

        # just use the latest of the context desired
        if context:
            search_id = parent.get_id()
            search_type = parent.get_search_type()
            snapshot = Snapshot.get_latest(search_type, search_id, context)
        else:
            # basically this means that without a parent context, there is
            # no way to know the directory this is in.
            snapshot = None
        dirs = Project._get_dir( self.protocol,parent,snapshot,None )
        dirs = dirs.split("/")
        return dirs
Example No. 23
def snapshot_from_pdb(pdb_file, simple_topology=False):
    """
    Construct a Snapshot from the first frame in a pdb file without velocities

    Parameters
    ----------
    pdb_file : str
        The filename of the .pdb file to be used
    simple_topology : bool
        if `True` only a simple topology with n_atoms will be created.
        This cannot be used with complex CVs but loads and stores very fast

    Returns
    -------
    :class:`openpathsampling.engines.Snapshot`
        the constructed Snapshot

    """
    pdb = md.load(pdb_file)
    velocities = np.zeros(pdb.xyz[0].shape)

    if simple_topology:
        topology = Topology(*pdb.xyz[0].shape)
    else:
        topology = MDTrajTopology(pdb.topology)

    snapshot = Snapshot.construct(
        coordinates=u.Quantity(pdb.xyz[0], u.nanometers),
        box_vectors=u.Quantity(pdb.unitcell_vectors[0], u.nanometers),
        velocities=u.Quantity(velocities, u.nanometers / u.picoseconds),
        engine=FileEngine(topology, pdb_file)
    )

    return snapshot
Example No. 24
	def __init__(self, sdir, snap_lims, cosmological=0, periodic_bound_fix=False, dust_depl=False, statistic='average'):
		self.sdir = sdir
		self.stat = statistic
		self.snap_lims = snap_lims
		self.num_snaps = (snap_lims[1]+1)-snap_lims[0]
		self.cosmological = cosmological
		self.time = np.zeros(self.num_snaps)
		if self.cosmological:
			self.redshift = np.zeros(self.num_snaps)


		self.Flag_DustDepl = dust_depl
		# In case the sim was non-cosmological and used periodic BC which causes
		# galaxy to be split between the 4 corners of the box
		self.pb_fix = False
		if periodic_bound_fix and cosmological==0:
			self.pb_fix=True
		# Determines if you want to look at the Snapshot/Halo/Disk
		self.setHalo=False
		self.setDisk=False


		# Load the first snapshot to check needed array sizes
		sp = Snapshot(self.sdir, self.snap_lims[0], cosmological=self.cosmological, periodic_bound_fix=self.pb_fix)
		self.dust_impl = sp.dust_impl
		self.m = np.zeros(self.num_snaps)
		self.z = np.zeros(self.num_snaps)
		self.dz = np.zeros(self.num_snaps)
		self.spec = np.zeros([self.num_snaps, sp.Flag_DustSpecies])
		self.source = np.zeros([self.num_snaps, 4])

		return
Example No. 25
    def _test_naming_util(my):
       
        #my.clear_naming()
        naming_util = NamingUtil()
        # these should evaluate to be the same
        file_naming_expr1 = ['{$PROJECT}__{context[0]}__hi_{$BASEFILE}.{$EXT}','{project.code}__{context[0]}__hi_{basefile}.{ext}']
        dir_naming_expr2 = ['{$PROJECT}/{context[1]}/somedir/{@GET(.name_first)}','{project.code}/{snapshot.context[1]}/somedir/{sobject.name_first}']

        process= 'light'
        context = 'light/special'
        type = 'ma'
        version = 2

        virtual_snapshot = Snapshot.create_new()
        virtual_snapshot_xml = '<snapshot process=\'%s\'><file type=\'%s\'/></snapshot>' % (process, type)
        virtual_snapshot.set_value("snapshot", virtual_snapshot_xml)
        virtual_snapshot.set_value("process", process)
        virtual_snapshot.set_value("context", context)
        virtual_snapshot.set_value("snapshot_type", 'file')

        virtual_snapshot.set_sobject(my.person)
        virtual_snapshot.set_value("version", version)

        file_name = "abc.txt"
        file_obj = File(File.SEARCH_TYPE)
        file_obj.set_value("file_name", file_name)
        
        for naming_expr in file_naming_expr1:
            file_name = naming_util.naming_to_file(naming_expr, my.person, virtual_snapshot, file=file_obj, file_type="main")
            my.assertEquals(file_name,'unittest__light__hi_abc.txt')

        for naming_expr in dir_naming_expr2:
            dir_name = naming_util.naming_to_dir(naming_expr, my.person, virtual_snapshot, file=file_obj, file_type="main")
            my.assertEquals(dir_name,'unittest/special/somedir/Philip')
Example No. 26
def trajectory_from_mdtraj(mdtrajectory, simple_topology=False):
    """
    Construct a Trajectory object from an mdtraj.Trajectory object

    Parameters
    ----------
    mdtrajectory : mdtraj.Trajectory
        Input mdtraj.Trajectory
    simple_topology : bool
        if `True` only a simple topology with n_atoms will be created.
        This cannot be used with complex CVs but loads and stores very fast

    Returns
    -------
    openpathsampling.engines.Trajectory
        the constructed Trajectory instance
    """

    trajectory = Trajectory()
    empty_kinetics = Snapshot.KineticContainer(velocities=u.Quantity(
        np.zeros(mdtrajectory.xyz[0].shape), u.nanometer / u.picosecond))
    if simple_topology:
        topology = Topology(*mdtrajectory.xyz[0].shape)
    else:
        topology = MDTrajTopology(mdtrajectory.topology)

    engine = TopologyEngine(topology)

    for frame_num in range(len(mdtrajectory)):
        # mdtraj trajectories only have coordinates and box_vectors
        coord = u.Quantity(mdtrajectory.xyz[frame_num], u.nanometers)
        if mdtrajectory.unitcell_vectors is not None:
            box_v = u.Quantity(mdtrajectory.unitcell_vectors[frame_num],
                               u.nanometers)
        else:
            box_v = None

        statics = Snapshot.StaticContainer(coordinates=coord,
                                           box_vectors=box_v)

        snap = Snapshot(statics=statics,
                        kinetics=empty_kinetics,
                        engine=engine)
        trajectory.append(snap)

    return trajectory
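
A usage sketch assuming mdtraj is importable and a trajectory file with a matching topology is at hand; both file names are placeholders.

# Hypothetical usage; 'traj.dcd' and 'top.pdb' are placeholder file names.
import mdtraj as md

mdtrj = md.load('traj.dcd', top='top.pdb')
traj = trajectory_from_mdtraj(mdtrj)
print(len(traj))  # one Snapshot per mdtraj frame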
Example No. 27
    def _restore_netcdf(self):
        """
        Restore the storage from the netCDF file
        """
        # Open NetCDF file for appending
        ncfile = netcdf.Dataset(self.fn_storage, "a")

        # Store netcdf file handle.
        self.ncfile = ncfile

        # initialize arrays used for snapshots
        Snapshot._restore_netcdf(self)

        # initialize arrays used for trajectories
        Trajectory._restore_netcdf(self)

        return
Example No. 28
	def load_total(self):
		for i, snum in enumerate(np.arange(self.snap_lims[0],self.snap_lims[1]+1)):
			sp = Snapshot(self.sdir, snum, cosmological=self.cosmological, periodic_bound_fix=self.pb_fix)
			self.time[i] = sp.time
			if self.cosmological: self.redshift[i] = sp.redshift
			if self.setHalo:
				gal = sp.loadhalo(**self.kwargs)
			if self.setDisk:
				gal = sp.loaddisk(**self.kwargs)
			gas = gal.loadpart(0)

			self.z[i] = np.nansum(gas.z[:,0]*gas.m)/np.nansum(gas.m)
			self.dz[i] = np.nansum(gas.dz[:,0]*gas.m)/np.nansum(gas.z[:,0]*gas.m)
			self.spec[i] = np.nansum(gas.spec*gas.m[:,np.newaxis], axis=0)/np.nansum(gas.dz[:,0]*gas.m)
			self.source[i] = np.nansum(gas.dzs*gas.dz[:,0]*gas.m[:,np.newaxis], axis=0)/np.nansum(gas.dz[:,0]*gas.m)

		return
Example No. 29
File: idxd.py Project: kats/idx
def run():
    log.info('start')
    print 'Start serving idxd...'
    idx = PfrIndex(config.IDX_FILENAME)
    snapshot_manager = Snapshot()
    if not idx.validate() and \
            not snapshot_manager.restore(PfrIndex) and \
            not idx.create():
        print "Please, close all connections to DB and try again."
        return -1

    Events = namedtuple("Events", "stop endupdate")
    events = Events(Event(), Event())
    __serve_forever( \
            __start_search_daemon(config.IDX_WEBSERVER_PORT, events), \
            __start_update_daemon((config.KANSO_FILENAME, config.KANSO_FILENAME2), events), \
            events)
Example No. 30
def observations_from_log(observables, log_filename):
    snapshots = [Snapshot.from_data_dict(json.loads(line)['snapshot']) \
                 for line in file(log_filename).readlines()]

    snapshot_log = sorted(snapshots, key=lambda snapshot: snapshot.timestamp)
    observations = compute_observations(observables,
                                        flatten_by_timestamp(snapshot_log))

    return observations
Example No. 31
    def __init__(self, verification: Instruction = None):
        self.enable = Signal(reset=1)
        self.addr = Signal(16)
        self.din = Signal(8)
        self.dout = Signal(8)
        self.RWB = Signal(reset=1)  # 1 = read, 0 = write

        # registers
        self.reg = Registers()
        self.tmp = Signal(8)  # temp signal when reading 16 bits

        # internal exec state
        self.opcode = Signal(8)
        self.cycle = Signal(4, reset=1)

        # formal verification
        self.verification = verification
        self.snapshot = Snapshot()
Example No. 32
def exportgroup_list(args):
    obj = ExportGroup(args.ip, args.port)
    try:
        uris = obj.exportgroup_list(args.project, args.tenant)
        output = []
        if(len(uris) > 0):
            for uri in uris:
                eg = obj.exportgroup_show(uri, args.project, args.tenant)
                # The following code is to get volume/snapshot name part of export group list.
                if(eg):
                    if("project" in eg and "name" in eg["project"]):
                        del eg["project"]["name"]
                    volumeuris = common.get_node_value(eg, "volumes")
                    volobj = Volume(args.ip, args.port)
                    snapobj = Snapshot(args.ip, args.port)
                    volnames = []
                    strvol = ""
                    for volumeuri in volumeuris:
                        strvol = str(volumeuri['id'])
                        if(strvol.find('urn:storageos:Volume') >= 0):
                            vol = volobj.show_by_uri(strvol)
                            if(vol):
                                volnames.append(vol['name'])
                        elif(strvol.find('urn:storageos:BlockSnapshot')>= 0):
                            snapshot = snapobj.snapshot_show_uri('block', None, strvol)
                            if(snapshot):
                                volnames.append(snapshot['name'])
                    eg['volumes_snapshots']=volnames
                    output.append(eg)
            
            if(args.verbose == True):
                return common.format_json_object(output)
            if(len(output) > 0):
                if(args.long == True):
                    from common import TableGenerator
                    TableGenerator(output, ['name', 'volumes_snapshots','initiator_node', 'initiator_port']).printTable()

                else:
                    from common import TableGenerator
                    TableGenerator(output, ['name']).printTable()

    except SOSError as e:
        raise common.format_err_msg_and_raise("list", "exportgroup", e.err_text, e.err_code)
Example No. 33
	def load_average(self):
		for i, snum in enumerate(np.arange(self.snap_lims[0],self.snap_lims[1]+1)):
			sp = Snapshot(self.sdir, snum, cosmological=self.cosmological, periodic_bound_fix=self.pb_fix)
			self.time[i] = sp.time
			if self.cosmological: self.redshift[i] = sp.redshift
			if self.setHalo:
				gal = sp.loadhalo(**self.kwargs)
			if self.setDisk:
				gal = sp.loaddisk(**self.kwargs)
			gas = gal.loadpart(0)

			self.z[i] = weighted_percentile(gas.z[:,0], percentiles=[50], weights=gas.m, ingore_invalid=True)
			self.dz[i] = weighted_percentile(gas.dz[:,0]/gas.z[:,0], percentiles=[50], weights=gas.m, ingore_invalid=True)
			for j in range(sp.Flag_DustSpecies):
				self.spec[i,j] = weighted_percentile(gas.spec[:,j]/gas.dz[:,0], percentiles=[50], weights=gas.m, ingore_invalid=True)
			for j in range(4):
				self.source[i,j] = weighted_percentile(gas.dzs[:,j], percentiles=[50], weights=gas.m, ingore_invalid=True)

		return
Example No. 34
    def get_balance(self, args):
        if not args.markets:
            logging.error("You must use --markets argument to specify markets")
            sys.exit(2)
        pmarkets = args.markets.split(",")
        pmarketsi = []
        for pmarket in pmarkets:
            exec('import brokers.' + pmarket.lower())
            market = eval('brokers.' + pmarket.lower() + '.Broker' + pmarket +
                          '()')
            pmarketsi.append(market)

        snapshot = Snapshot()

        while True:
            total_btc = 0.
            total_bch = 0.
            for market in pmarketsi:
                market.get_balances()
                print(market)
                total_btc += market.btc_balance
                total_bch += market.bch_balance
                snapshot.snapshot_balance(market.name[7:], market.btc_balance,
                                          market.bch_balance)

            snapshot.snapshot_balance('ALL', total_btc, total_bch)

            time.sleep(60 * 10)
Example No. 35
 def __init__(self,
              fpga,
              comb,
              f_start,
              f_stop,
              logger=logging.getLogger(__name__)):
     """ f_start and f_stop must be in Hz
     """
     self.logger = logger
     snap_name = "snap_{a}x{b}".format(a=comb[0], b=comb[1])
     self.snapshot0 = Snapshot(fpga,
                               "{name}_0".format(name=snap_name),
                               dtype='>i8',
                               cvalue=True,
                               logger=self.logger.getChild(
                                   "{name}_0".format(name=snap_name)))
     self.snapshot1 = Snapshot(fpga,
                               "{name}_1".format(name=snap_name),
                               dtype='>i8',
                               cvalue=True,
                               logger=self.logger.getChild(
                                   "{name}_1".format(name=snap_name)))
     self.f_start = np.uint64(f_start)
     self.f_stop = np.uint64(f_stop)
     # this will change from None to an array of phase offsets for each frequency bin
     # if calibration gets applied at a later stage.
     # this is an array of phases introduced by the system. So if a value is positive,
     # it means that the system is introducing a phase shift between comb[0] and comb[1]
     # in other words comb1 is artificially delayed.
     self.calibration_phase_offsets = None
     self.calibration_cable_length_offsets = None
     self.arm()
     self.fetch_signal()
     self.frequency_bins = np.linspace(start=self.f_start,
                                       stop=self.f_stop,
                                       num=len(self.signal),
                                       endpoint=False)
Example No. 36
def flatten_by_timestamp(snapshots_log):
    by_timestamp = collections.defaultdict(dict)

    for snapshot in snapshots_log:
        timestamp = snapshot.timestamp
        measurements = snapshot.measurements

        for measurement_name, measurement_result in measurements.iteritems():
            by_timestamp[timestamp][measurement_name] = measurement_result

    timestamps_in_order = sorted(by_timestamp.keys())
    return [
        Snapshot(timestamp, by_timestamp[timestamp])
        for timestamp in timestamps_in_order
    ]
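
A tiny worked example of the flattening above. The two-field Snapshot stand-in mirrors the `Snapshot(timestamp, measurements)` constructor the function uses and is an assumption for illustration; note the listing itself uses Python 2 idioms (`iteritems`).

# Illustrative stand-in with the two fields the function reads and constructs.
from collections import namedtuple

Snapshot = namedtuple('Snapshot', ['timestamp', 'measurements'])

log = [
    Snapshot(10, {'followers': 5}),
    Snapshot(10, {'stars': 2}),
    Snapshot(20, {'followers': 7}),
]
# Merges same-timestamp measurements and returns them in timestamp order:
# [Snapshot(10, {'followers': 5, 'stars': 2}), Snapshot(20, {'followers': 7})]
print(flatten_by_timestamp(log))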
Example No. 37
    def _build_current_snapshot(self):
        # TODO: Add caching for this and mark if changed

        state = self.simulation.context.getState(getPositions=True,
                                                 getVelocities=True,
                                                 getEnergy=True)

        snapshot = Snapshot.construct(
            coordinates=state.getPositions(asNumpy=True),
            box_vectors=state.getPeriodicBoxVectors(asNumpy=True),
            velocities=state.getVelocities(asNumpy=True),
            engine=self
        )

        return snapshot
Example No. 38
    def __init__(self):
        self._benchmark = None
        self._benchmark_history = None
        self._benchmark_return = None
        self._portfolio_history = None
        self._portfolio_return = None
        self._trading_days = None

        self._positions = Positions()
        self._auto_fill_positions()
        self._load_benchmark()
        if not os.path.isfile(HISTORY_IMG_FILE):
            self._draw_history_timeline()

        self._snapshot = Snapshot(self._benchmark, self._positions.get_current_position())
Example No. 39
    def init_vipr_cli_components(self):
        import common as vipr_utils
        vipr_utils.COOKIE = None

        from exportgroup import ExportGroup
        from host import Host
        from hostinitiators import HostInitiator
        from snapshot import Snapshot
        from virtualarray import VirtualArray
        from volume import Volume

        # instantiate a few vipr cli objects for later use
        self.volume_obj = Volume(self.configuration.vipr_hostname,
                                 self.configuration.vipr_port)
        self.exportgroup_obj = ExportGroup(self.configuration.vipr_hostname,
                                           self.configuration.vipr_port)
        self.host_obj = Host(self.configuration.vipr_hostname,
                             self.configuration.vipr_port)
        self.hostinitiator_obj = HostInitiator(
            self.configuration.vipr_hostname, self.configuration.vipr_port)
        self.varray_obj = VirtualArray(self.configuration.vipr_hostname,
                                       self.configuration.vipr_port)
        self.snapshot_obj = Snapshot(self.configuration.vipr_hostname,
                                     self.configuration.vipr_port)
Example No. 40
def process_files(fnames):
    for fname in fnames:
        snap = Snapshot.read_snapshot(fname)

        count = snap.get_bin_statistic("n")
        percent = snap.get_species_statistic("percentage")
        count[count == 0] = 1.0
        energy_scale = 6.24150974e18 / count

        make_histogram(snap, "temperature")
        make_histogram(snap, "max_temperature")
        make_histogram(snap, "ionization")
        make_histogram(snap, "kinetic_energy", scalar=energy_scale)
        make_histogram(snap,
                       "percentage",
                       scalar=snap.get_bin_statistic("density"))
Example No. 41
    def _snapshotList(self, snapshots, location):

        if not snapshots:
            return []

        snap_list = []

        for snapshot in snapshots:
            location = location + '/'
            snap = Snapshot(snapshot, location)

            snap_list.append(snap)
            snap_list = snap_list + self._snapshotList(
                snapshot.childSnapshotList, location + snapshot.name)

        return snap_list
Example No. 42
def main():
    fname = 'snapshot_test.txt'
    print('Start reading and indexing file...')
    start = time.time()
    with open(fname, 'r') as f:
        line = f.readline().split()
        Sindex = Snapshot([line[1], int(line[0]), '*'],
                          cap=CAPACITY,
                          ut=UTILIZE)
        for l in f:
            line = l.split()
            op = line[2]
            if (op == 'b'):
                Sindex.insert([line[1], int(line[0]), '*'],
                              cap=CAPACITY,
                              ut=UTILIZE)
            elif (op == 'd'):
                Sindex.delete(line[1], int(line[0]))
    duration = time.time() - start
    print('')
    print('Index done in: {0:.2f}s '.format(duration))
    print('')
    print('Enter the query type (ts or tr) and time')
    print('timeslice query format: ts time_instance')
    print('timerange query format: tr min_time max_time')
    print('Enter anything else to quit')
    print('')
    for line in sys.stdin:
        qtype, *t = line.split()
        if (qtype == 'ts'):
            start = time.time()
            result = Sindex.tsquery(int(t[0]))
            duration = time.time() - start
        elif (qtype == 'tr'):
            start = time.time()
            result = Sindex.trquery(int(t[0]), int(t[1]))
            duration = time.time() - start
        else:
            quit()
        print('result: ', result)
        print('')
        print('Get time result in {0:.5f}s'.format(duration))
Example No. 43
async def main():
    snapshot = Snapshot()

    web.start_http_server_in_thread(port=8000)
    while True:
        print('Getting snapshot...')
        await snapshot.update()
        dBFS = scan()
        VU_METER.set(dBFS)
        an = analysis()
        misc_bitrate = 0
        for pid, data in an['pids'].items():
            if pid in [564, 768]:
                continue
            misc_bitrate += int(data['bitrate'])
        MISC_BITRATE.set(misc_bitrate)
        VIDEO_BITRATE.set(an['pids'][564]['bitrate'])
        AUDIO_BITRATE.set(an['pids'][768]['bitrate'])
        await asyncio.sleep(5)
Example No. 44
    def newSnapshot(self, sessionId, saveName, saveLayout, savePreferences,
                    saveData, description):
        """
        Create a new Snapshot object that can be saved.

        Parameters
        ----------
        sessionId: string
            An identifier for a user session.

        saveName: string
            The name of the snapshot.

        saveLayout: boolean
            True if the layout is to be saved; false otherwise.

        savePreferences: boolean
            True if the preferences are to be saved; false otherwise.

        saveData: boolean
            True if the data is to be saved; false otherwise.

        description: string
            Descriptive information about the snapshot.

        Returns
        -------
        Snapshot
            A new instance of the Snapshot class.
        """
        snapshot = Snapshot(sessionId=sessionId,
                            snapshotType='',
                            index=0,
                            saveName=saveName,
                            layout=saveLayout,
                            preferences=savePreferences,
                            data=saveData,
                            description=description,
                            dateCreated='',
                            connection=self.con)
        return snapshot
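
A usage sketch; `client` stands in for an instance of the class that defines newSnapshot(), and the argument values are illustrative.

# Hypothetical call creating an unsaved, layout-only snapshot.
snap = client.newSnapshot(sessionId="example-session-id",
                          saveName="my-layout",
                          saveLayout=True,
                          savePreferences=False,
                          saveData=False,
                          description="layout only, no data")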
Example No. 45
def snapshot_from_testsystem(testsystem, simple_topology=False):
    """
    Construct a Snapshot from openmm topology and state objects

    Parameters
    ----------
    testsystem : openmmtools testsystem
        the testsystem whose topology, positions and default box vectors are used
    simple_topology : bool
        if `True` only a simple topology with n_atoms will be created.
        This cannot be used with complex CVs but loads and stores very fast

    Returns
    -------
    :class:`openpathsampling.engines.Snapshot`
        the constructed Snapshot

    """

    velocities = u.Quantity(
        np.zeros(testsystem.positions.shape), u.nanometers / u.picoseconds)

    if simple_topology:
        topology = Topology(*testsystem.positions.shape)
    else:
        topology = MDTrajTopology(md.Topology.from_openmm(testsystem.topology))

    box_vectors = \
        np.array([
            v / u.nanometers for v in
            testsystem.system.getDefaultPeriodicBoxVectors()]) * u.nanometers

    snapshot = Snapshot.construct(
        coordinates=testsystem.positions,
        box_vectors=box_vectors,
        velocities=velocities,
        engine=OpenMMToolsTestsystemEngine(topology, testsystem.name)
    )

    return snapshot
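
A sketch of a call using one of the standard openmmtools test systems; AlanineDipeptideVacuum is chosen purely for illustration and assumes openmmtools is installed.

# Hypothetical usage with a standard openmmtools test system.
from openmmtools import testsystems

testsystem = testsystems.AlanineDipeptideVacuum()
snapshot = snapshot_from_testsystem(testsystem)
print(snapshot)  # Snapshot with the testsystem positions and zero velocities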
Example No. 46
    def __init__(self, ip_addr='localhost', num_channels=4, fs=800e6, logger=logging.getLogger(__name__)):
        """The interface to a ROACH cross correlator

        Keyword arguments:
        ip_addr -- IP address (or hostname) of the ROACH. (default: localhost)
        num_channels -- antennas in the correlator. (default: 4)
        fs -- sample frequency of antennas. (default 800e6; 800 MHz)
        logger -- logger to use. (default: new default logger)
        """
        self.logger = logger
        self.fpga = corr.katcp_wrapper.FpgaClient(ip_addr)
        time.sleep(0.1)
        self.num_channels = num_channels
        self.fs = np.float64(fs)
        self.cross_combinations = list(itertools.combinations(range(num_channels), 2))  # [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
        self.control_register = ControlRegister(self.fpga, self.logger.getChild('control_reg'))
        self.set_accumulation_len(100)
        self.re_sync()
        self.control_register.allow_trigger() # necessary as Correlations auto fetch signal
        # only 0x0 has been implemented
        #self.auto_combinations = [(x, x) for x in range(num_channels)] # [(0, 0), (1, 1), (2, 2), (3, 3)]
        self.auto_combinations = [(0, 0)]
        self.frequency_correlations = {}
        for comb in (self.cross_combinations + self.auto_combinations):
            self.frequency_correlations[comb] = Correlation(fpga = self.fpga,
                                                  comb = comb,
                                                  f_start = 0,
                                                  f_stop = fs/2,
                                                  logger = self.logger.getChild("{a}x{b}".format(a = comb[0], b = comb[1])) )
        self.time_domain_snap = Snapshot(fpga = self.fpga, 
                                         name = 'dram_snapshot',
                                         dtype = np.int8,
                                         cvalue = False,
                                         logger = self.logger.getChild('time_domain_snap'))
        self.upsample_factor = 100
        self.subsignal_length_max = 2**17
        self.time_domain_padding = 100
        self.time_domain_calibration_values = None
        self.time_domain_calibration_cable_values = None
        self.control_register.block_trigger()
Example No. 47
def measure(measurements_to_extract, as_of, download_dir, measurements_log):
    measurement_results = {}
    for measurement in measurements_to_extract:
        logging.info({
            'message': 'Attempting measurement',
            'as-of': timestamp_utils.to_string(as_of),
            'measurement.name': measurement.name,
            'page.name': measurement.page.name,
            'page.url': measurement.page.url
        })

        page_directory = page_download_directory(download_dir,
                                                 measurement.page.name)

        page_filename = timestamped_filename(page_directory, as_of)

        if not os.path.exists(page_filename):
            logging.error({
                'message': "Couldn't find downloaded content",
                'filename': page_filename
            })
            continue

        content = file(page_filename).read()
        try:
            result = measurement.parse(content)

            logging.info({
                'message': 'Recording measured result',
                'as-of': timestamp_utils.to_string(as_of),
                'measurement.name': measurement.name,
                'measurement-result': result
            })

            measurement_results[measurement.name] = result
        except BaseException as e:
            logging.info({'message': 'Measurement failed', 'exception': e})

    append_to_log(initialize_measurement_log(measurements_log),
                  Snapshot(as_of, measurement_results))
Example No. 48
def differences_of(snapshots):
    def diff_measurement_name(measurement_name):
        return measurement_name + '.diff'

    measurements_to_diff = all_measurement_names(snapshots)

    answer = []
    for row_before, row_after in zip(snapshots, snapshots[1:]):
        diff = {}
        for measurement in measurements_to_diff:
            before = row_before.measurements[measurement]
            after = row_after.measurements[measurement]

            if (after is not None) and (before is not None):
                diff_measurement = after - before
            else:
                diff_measurement = None
            diff[diff_measurement_name(measurement)] = diff_measurement

        answer.append(Snapshot(row_after.timestamp, diff))

    return answer
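
A small worked example of the differencing above. The Snapshot stand-in and the all_measurement_names() helper (which the listing references but does not show) are assumptions for illustration.

# Illustrative stand-ins for the record type and the helper used above.
from collections import namedtuple

Snapshot = namedtuple('Snapshot', ['timestamp', 'measurements'])

def all_measurement_names(snapshots):
    # assumed behaviour: union of measurement keys across all snapshots
    names = set()
    for snap in snapshots:
        names.update(snap.measurements.keys())
    return names

log = [
    Snapshot(10, {'followers': 5}),
    Snapshot(20, {'followers': 8}),
    Snapshot(30, {'followers': None}),
]
# Consecutive differences, keyed as '<name>.diff'; None where either side is missing:
# [Snapshot(20, {'followers.diff': 3}), Snapshot(30, {'followers.diff': None})]
print(differences_of(log))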
Example No. 49
def snapshot_from_testsystem(testsystem, simple_topology=False):
    """
    Construct a Snapshot from openmm topology and state objects

    Parameters
    ----------
    testsystem : openmmtools testsystem
        the testsystem whose topology, positions and default box vectors are used
    simple_topology : bool
        if `True` only a simple topology with n_atoms will be created.
        This cannot be used with complex CVs but loads and stores very fast

    Returns
    -------
    :class:`openpathsampling.engines.Snapshot`
        the constructed Snapshot

    """

    velocities = u.Quantity(np.zeros(testsystem.positions.shape),
                            u.nanometers / u.picoseconds)

    if simple_topology:
        topology = Topology(*testsystem.positions.shape)
    else:
        topology = MDTrajTopology(md.Topology.from_openmm(testsystem.topology))

    box_vectors = \
        np.array([
            v / u.nanometers for v in
            testsystem.system.getDefaultPeriodicBoxVectors()]) * u.nanometers

    snapshot = Snapshot.construct(coordinates=testsystem.positions,
                                  box_vectors=box_vectors,
                                  velocities=velocities,
                                  engine=OpenMMToolsTestsystemEngine(
                                      topology, testsystem.name))

    return snapshot
Example No. 50
 def test_snapshot_delete(self):
     record = ['idhere', 1, '*']
     s = Snapshot(record, cap=2, ut=0.6)
     record = ['another id', 2, '*']
     s.insert(record)
     s.delete('another id', 5)
     self.assertEqual(
         s.blocks.last_node.previous_node.Pce_node.block.record_list[1][2],
         5)
     self.assertEqual(
         s.blocks.last_node.previous_node.Pce_node.block.time_interval,
         [1, 5])
     self.assertTrue(s.blocks.last_node.previous_node.Pce_node.block.isfull)
     self.assertTrue(
         s.blocks.last_node.previous_node.Pce_node.block.isunderflow)
     self.assertEqual(s.blocks.last_node.block.usage, 1)
     self.assertFalse('another id' in s.alives_entries)
Example No. 51
    def _test_create(my):
        search = Search("unittest/person")
        persons = search.get_sobjects()
        person = persons[0]

        snapshot_type = "file"
        snapshot = Snapshot.create(person, context="publish", snapshot_type=snapshot_type)

        version = snapshot.get_value("version")
        my.assertEquals( 1, version )

        search_type = snapshot.get_value("search_type")
        my.assertEquals( search_type, person.get_search_type() )
        search_code = snapshot.get_value("search_code")
        my.assertEquals( search_code, person.get_value("code") )

        # also check search_id
        if SearchType.column_exists("sthpw/snapshot", "search_id"):
            search_code = snapshot.get_value("search_id")
            my.assertEquals( search_code, person.get_value("id") )


        test_person = snapshot.get_sobject()
        my.assertEquals(test_person.get_code(), person.get_code())
Example No. 52
def empty_snapshot_from_openmm_topology(topology, simple_topology=False):
    """
    Return an empty snapshot from an openmm.Topology object

    Velocities will be set to zero.

    Parameters
    ----------
    topology : openmm.Topology
        the topology representing the structure and number of atoms
    simple_topology : bool
        if `True` only a simple topology with n_atoms will be created.
        This cannot be used with complex CVs but loads and stores very fast

    Returns
    -------
    openpathsampling.engines.Snapshot
        the complete snapshot with zero coordinates and velocities

    """
    n_atoms = topology.n_atoms

    if simple_topology:
        topology = Topology(n_atoms, 3)
    else:
        topology = MDTrajTopology(md.Topology.from_openmm(topology))

    snapshot = Snapshot.construct(
        coordinates=u.Quantity(np.zeros((n_atoms, 3)), u.nanometers),
        box_vectors=u.Quantity(topology.setUnitCellDimensions(), u.nanometers),
        velocities=u.Quantity(
            np.zeros((n_atoms, 3)), u.nanometers / u.picoseconds),
        engine=TopologyEngine(topology)
    )

    return snapshot
Example No. 53
import sys

from snapshot import Snapshot
from make_histogram import make_histogram

for fname in sys.argv[1:]:
    snap = Snapshot.read_snapshot(fname)
    make_histogram(snap, "temperature", fname)
    make_histogram(snap, "max_temperature", fname)
    make_histogram(snap, "ionization", fname)
    make_histogram(snap, "kinetic_energy", fname, scalar=6.24150974e18)

Example No. 54
 def get_web_dir(self,snapshot=None):
     '''go through the stored snapshot_code to get the actual path'''
     code = self.get_value("snapshot_code")
     from snapshot import Snapshot
     snapshot = Snapshot.get_by_code(code)
     return snapshot.get_web_dir()
Example No. 55
from matplotlib import rc

rc('text', usetex=True)

import sys
import numpy as np
import os
import matplotlib.pyplot as plt
from pylab import *
from snapshot import Snapshot
from make_histogram import make_histograms
import string

def getnum(fname):
    return "".join([i for i in fname if not str.isalpha(i)])[:-1]


fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)

datas = [ Snapshot.read_snapshot(fname) for fname in sys.argv[1:] ]

make_histograms(datas, "temperature", map(getnum, sys.argv[1:]))

print datas
Example No. 56
    def add_default_ending(my, parts, auto_version=True, is_sequence=True):

        context = my.snapshot.get_value("context")
        filename = my.file_object.get_full_file_name()

        # make sure that the version in the file name does not yet exist
        version = my.get_version_from_file_name(filename)
        if not auto_version and version:

            # if the file version is not the same as the snapshot version
            # then check to see if the snapshot already exists
            if version != my.snapshot.get_value("version"):
                existing_snap = Snapshot.get_by_version(my.snapshot.get_value("search_type"),\
                    my.snapshot.get_value("search_id"), context, version)
                if existing_snap:
                    raise TacticException('A snapshot with context "%s" and version "%s" already exists.' % (context, version) )


            my.snapshot.set_value("version", version)
            my.snapshot.commit()
        else:
            version = my.snapshot.get_value("version")


        if version == 0:
            version = "CURRENT"
        elif version == -1:
            version = "LATEST"
        else:

            if version == "":
                version = 1


            # pad the version by the global setting
            padding = Config.get_value("checkin", "version_padding")
            if not padding:
                padding = 3
            else:
                padding = int(padding)
            expr = "v%%0.%sd" % padding
            version = expr % version

        revision = my.snapshot.get_value("revision", no_exception=True)
        if revision:
            revision = "r%0.2d" % revision

        ext = my.get_ext()

        # by default publish is not put into the file name
        if context != "publish":
            parts.append(context.replace("/", "_"))


        # add the server location
        #value = ProdSetting.get_value_by_key("naming/add_server")
        server = Config.get_value("install", "server")
        if server:
            parts.append(server)


        if my.is_tactic_repo():
            parts.append(version)
            if revision:
                parts.append(revision)

        from pyasm.prod.biz import ProdSetting
        value = ProdSetting.get_value_by_key("naming/add_initials")
        if value == "false":
            project = Project.get()
            initials = Project.get().get_initials()
            parts.append(initials)

        filename = "_".join(parts)
        if is_sequence:
            filename = "%s.####.%s" % (filename, ext)
        elif ext:  # dirs don't need an extension
            filename = "%s%s" % (filename, ext)

        return filename
Example No. 57
    def get_from_db_naming(my, search_type):
        project_code = Project.get_project_code()
        if project_code in ["admin", "sthpw"]:
            return ""

        file_type = my.get_file_type()
        filename = my.file_object.get_full_file_name()

        naming = Naming.get(my.sobject, my.snapshot, file_path=filename)

        if not naming:
            return None

        if naming and my.checkin_type:
            checkin_type = naming.get_value('checkin_type')
            if checkin_type and my.checkin_type != checkin_type:
                print "mismatched checkin_type!"
                naming = None
                return None

        naming_util = NamingUtil()

        # Provide a mechanism for a custom class
        naming_class = naming.get_value("class_name", no_exception=True)
        if naming_class:
            kwargs = {
                'sobject': my.sobject,
                'snapshot': my.snapshot,
                'file_object': my.file_object,
                'ext': my.get_ext(),
                'mode': 'file'
            }
            naming = Common.create_from_class_path(naming_class, kwargs)
            filename = naming.get_file()
            if filename:
                return filename


        # provide a mechanism for a custom client side script
        script_path = naming.get_value("script_path", no_exception=True)
        if script_path:
            project_code = my.sobject.get_project_code()
            input = {
                'sobject': my.sobject,
                'snapshot': my.snapshot,
                'file_object': my.file_object,
                'ext': my.get_ext(),
                'mode': 'file',
                'project': project_code
            }
            from tactic.command import PythonCmd

            cmd = PythonCmd(script_path=script_path, input=input)
            results = cmd.execute()
            if results:
                return results




        naming_value = naming.get_value("file_naming")

        if not naming_value:
            is_versionless = naming.get_value("latest_versionless") or naming.get_value("current_versionless")
            if not is_versionless:
                return ""

            # FIXME:
            # if this is a versionless naming, then empty uses a default
            # This is put here because the check-in type is determined by the
            # naming here.  Normally, this is passed through with "naming_expr"
            # but in snapshot.py, it is not yet known that this is an "auto"
            # checkin_type because it is defined in the naming and not the
            # process

            server = Config.get_value("install", "server")
            if server:
                naming_value= "{basefile}_{snapshot.process}_%s.{ext}" % server
            else:
                naming_value = "{basefile}_{snapshot.process}.{ext}"

        
        # check for manual_version
        manual_version = naming.get_value('manual_version')
        if manual_version == True:
            # if the file version is not the same as the snapshot version
            # then check to see if the snapshot already exists
            filename = my.file_object.get_full_file_name()
            version = my.get_version_from_file_name(filename)
            context = my.snapshot.get_context()
            if version > 0 and version != my.snapshot.get_value("version"):
                existing_snap = Snapshot.get_snapshot(\
                    my.snapshot.get_value("search_type"),\
                    my.snapshot.get_value("search_id"), context=context, \
                    version=version, show_retired=True)
                if existing_snap:
                    raise TacticException('You have chosen manual version in Naming for this SObject. A snapshot with context "%s" and version "%s" already exists.' % (context, version) )


                my.snapshot.set_value("version", version)
                my.snapshot.commit()
        
       
        file_type = my.get_file_type()

        return naming_util.naming_to_file(naming_value, my.sobject,my.snapshot,my.file_object,ext=my.get_ext(),file_type=file_type)