Example #1
    def create_minimal(cls, uris, configloc, schemas, config_password,
                       pidfile=None):

        # Make sure configloc is a writable directory, creating it if it does not exist.
        if _os.path.isdir(configloc) and _os.access(configloc, _os.W_OK):
            pass
        elif not _os.path.exists(configloc):
            try:
                _os.mkdir(configloc, 0o700)
            except OSError as exc:
                raise RuntimeError('cannot create config dir {!r}: {}'
                                   .format(configloc, str(exc)))
        else:
            raise ValueError('invalid config dir {!r}; expecting a writable'
                             ' directory path'.format(configloc))

        if cls._PASSWORD_WITH_SCHEME_RE.match(config_password):
            config_password_configvalue = config_password
        else:
            config_password_salt = _os.urandom(4)
            config_password_configvalue = \
                '{SSHA}' + _b64encode(_sha1(config_password
                                            + config_password_salt)
                                       .digest()
                                      + config_password_salt)

        if not pidfile:
            if _os.path.isdir(cls._PIDFILE_STD_DIR) \
                   and _os.access(cls._PIDFILE_STD_DIR, _os.W_OK):
                pidfile_dir = cls._PIDFILE_STD_DIR
            else:
                pidfile_dir = None
            pidfile_tmp = _NamedTemporaryFile(dir=pidfile_dir, prefix='slapd-',
                                              suffix='.pid')
            with pidfile_tmp:
                pass
            pidfile = pidfile_tmp.name

        configfile = _NamedTemporaryFile(delete=False)
        with configfile:
            for schema in schemas:
                configfile.write('include {}\n'.format(schema))
            configfile.write('pidfile {}\n'.format(pidfile))
            configfile.write('database config\n')
            configfile.write('rootpw {}\n'.format(config_password_configvalue))

        service = cls.create_from_configfile(uris=uris,
                                             configfile=configfile.name,
                                             configdir=configloc)
        _os.remove(configfile.name)
        return service
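The pattern in Example #1 is worth calling out: the config file is created with delete=False, closed, handed to another consumer by name, and only removed afterwards with _os.remove(). A minimal sketch of that life cycle using only the standard library (file name and contents are illustrative):

import os
from tempfile import NamedTemporaryFile

# Create without auto-delete so the file survives the with-block and can be
# opened by name by another consumer (e.g. a spawned service).
tmp = NamedTemporaryFile(mode='w', suffix='.conf', delete=False)
with tmp:
    tmp.write('pidfile /tmp/example.pid\n')

# ... hand tmp.name to the consumer here ...

os.remove(tmp.name)  # explicit cleanup, mirroring the _os.remove() call above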
Example #2
    def visualize(self, layout_dir='LR', display=False):
        """
        Create and optionally display an image of the net structure.

        :param layout_dir: string in ['LR', 'TB', 'BT'].
          Short string for graph layout direction.

        :param display: bool.
          If set to ``True``, displays the graphic in a window. Press enter
          to close it.

        :returns: 3D numpy array.
          Graphic of the visualization as (H, W, C) image in BGR format.
        """
        if _draw is None or _cv2 is None:  # pragma: no cover
            raise Exception('Drawing is not available!')
        else:  # pragma: no cover
            with _NamedTemporaryFile(mode='w+b', suffix='.png') as tmpfile:
                _draw.draw_net_to_file(self.to_pbuf_message(),
                                       tmpfile.name,
                                       rankdir=layout_dir)
                result_image = _cv2.imread(tmpfile.name)
                assert result_image is not None
            if display:  # pragma: no cover
                _cv2.imshow(self.name, result_image)
                _cv2.waitKey(0)
                _cv2.destroyWindow(self.name)
            return result_image
Example #3
    def visualize(self,
                  layout_dir='LR',
                  display=False):
        """
        Create and optionally display an image of the net structure.

        :param layout_dir: string in ['LR', 'TB', 'BT'].
          Short string for graph layout direction.

        :param display: bool.
          If set to ``True``, displays the graphic in a window. Press enter
          to close it.

        :returns: 3D numpy array.
          Graphic of the visualization as (H, W, C) image in BGR format.
        """
        if _draw is None or _cv2 is None:
            raise Exception('Drawing is not available!')
        with _NamedTemporaryFile(mode='w+b', suffix='.png') as tmpfile:
            _draw.draw_net_to_file(self.to_pbuf_message(),
                                   tmpfile.name,
                                   rankdir=layout_dir)
            result_image = _cv2.imread(tmpfile.name)
            assert result_image is not None
        if display:
            _cv2.imshow(self.name, result_image)
            _cv2.waitKey(0)
            _cv2.destroyWindow(self.name)
        return result_image
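Examples #2 and #3 depend on the temporary file staying reopenable by name while it is still held open: the drawing routine writes to tmpfile.name and cv2.imread() reads it back before the with-block deletes it. A minimal sketch of that write-then-read-by-name pattern (this works on POSIX systems; on Windows an open NamedTemporaryFile generally cannot be reopened by name):

from tempfile import NamedTemporaryFile

with NamedTemporaryFile(mode='w', suffix='.txt') as tmpfile:
    tmpfile.write('hello')
    tmpfile.flush()                      # push buffered data to disk
    with open(tmpfile.name) as reader:   # reopen by name while still open
        data = reader.read()
# the temporary file is removed here
print(data)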
Example #4
 def NamedTemporaryFile(self, *args, **kwargs):
     f = tempfile._NamedTemporaryFile(*args, **kwargs)
     try:
         log.debug(("Opened tempfile %s with NamedTemporaryFile:\n" % f.name) + "".join(traceback.format_stack()))
     except AttributeError:
         pass
     return f
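Examples #4 and #5 look like a debugging shim: tempfile.NamedTemporaryFile has presumably been stashed away as tempfile._NamedTemporaryFile, and the wrapper logs a stack trace whenever a temporary file is opened, which helps track down leaked temp files. A hedged sketch of how such a shim could be installed (logger setup and module placement are assumptions):

import logging
import tempfile
import traceback

log = logging.getLogger(__name__)

# Keep a reference to the real factory, then swap in a logging wrapper.
tempfile._NamedTemporaryFile = tempfile.NamedTemporaryFile

def _logging_named_temporary_file(*args, **kwargs):
    f = tempfile._NamedTemporaryFile(*args, **kwargs)
    log.debug("Opened tempfile %s:\n%s", f.name, "".join(traceback.format_stack()))
    return f

tempfile.NamedTemporaryFile = _logging_named_temporary_file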
Example #5
 def NamedTemporaryFile(self, *args, **kwargs):
     f = tempfile._NamedTemporaryFile(*args, **kwargs)
     try:
         log.debug(("Opened tempfile %s with NamedTemporaryFile:\n" %
                    f.name) + "".join(traceback.format_stack()))
     except AttributeError:
         pass
     return f
Example #6
    def _save_state_to_s3(self):
        # Dump immutable state data to a config
        state = _ConfigParser(allow_no_value=True)
        state.optionxform = str
        state.add_section(PredictiveService._SERVICE_INFO_SECTION_NAME)
        state.set(PredictiveService._SERVICE_INFO_SECTION_NAME, 'Name', self.name)
        state.set(PredictiveService._SERVICE_INFO_SECTION_NAME, 'Description', self._description)
        state.set(PredictiveService._SERVICE_INFO_SECTION_NAME, 'API Key', self._api_key)

        # Save environment, if we have one
        if self._environment:
            state.add_section(PredictiveService._ENVIRONMENT_SECTION_NAME)
            for (key, value) in self._environment._get_state().iteritems():
                state.set(PredictiveService._ENVIRONMENT_SECTION_NAME, key, value)

        # Save deployment version data to config
        state.add_section(PredictiveService._DEPLOYMENT_SECTION_NAME)
        current_predictive_objects = _copy(self._all_predictive_objects)
        for (model_name, info) in current_predictive_objects.iteritems():
            state.set(PredictiveService._DEPLOYMENT_SECTION_NAME, model_name, info['version'])

        state.add_section(PredictiveService._PREDICTIVE_OBJECT_DOCSTRING)
        for (model_name, info) in current_predictive_objects.iteritems():
            state.set(PredictiveService._PREDICTIVE_OBJECT_DOCSTRING, model_name, info['docstring'].encode('string_escape'))

        if self._has_state_changed_on_s3():
            raise IOError("Can not save changes. The Predictive Service has changed on S3. Please "
                          "reload from S3.")

        # Save any new predictive objects to S3.
        for predictive_object_name in self._local_changes:
            (predictive_object, po_info) = self._local_changes[predictive_object_name]
            if predictive_object:         # if this is not a model deletion:
                save_path = self._get_predictive_object_save_path(predictive_object_name, po_info['version'])
                dependency_path = self._get_dependency_save_path(predictive_object_name, po_info['version'])
                predictive_object.save(save_path, dependency_path, self.aws_credentials)

        # Update the revision number after we have successfully written all predictive objects
        self._revision_number += 1
        state.add_section(self._META_SECTION_NAME)
        state.set(self._META_SECTION_NAME, 'Revision Number', self._revision_number)
        state.set(self._META_SECTION_NAME, 'Schema Version', self._schema_version)

        # Write state file to S3
        with _NamedTemporaryFile() as temp_file:
            state.write(temp_file)
            temp_file.flush()
            conn = _connect_s3(**self.aws_credentials)
            bucket = conn.get_bucket(self._s3_bucket_name, validate=False)
            key = _s3_key(bucket)
            key.key = self._s3_state_key
            key.set_contents_from_filename(temp_file.name)
            temp_file.close()  # deletes temp file

        # Update our state
        self._local_changes = {}
        self._predictive_objects = dict(zip(current_predictive_objects.keys(),
            [{'version':info['version'], 'docstring': info['docstring']} for info in current_predictive_objects.values()]))
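The final step of Example #6 uses the temporary file purely as a staging area: the ConfigParser state is written to it, flushed, and then uploaded by file name with boto's set_contents_from_filename(). A stripped-down sketch of just that step, assuming boto 2 and Python 2 (as the iteritems() calls suggest); bucket and key names are illustrative and credentials are taken from the environment:

from tempfile import NamedTemporaryFile
from ConfigParser import ConfigParser   # Python 2 module name, as in the example
import boto

state = ConfigParser(allow_no_value=True)
state.add_section('Service Info')
state.set('Service Info', 'Name', 'my-service')

with NamedTemporaryFile() as temp_file:
    state.write(temp_file)
    temp_file.flush()                            # make sure the bytes are on disk
    conn = boto.connect_s3()
    bucket = conn.get_bucket('my-bucket', validate=False)
    key = bucket.new_key('state.ini')
    key.set_contents_from_filename(temp_file.name)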
Example #7
 def instantiate(self):
     """Create an instantiated net with the current object configuration."""
     # Write spec to a temporary file.
     with _NamedTemporaryFile(mode="w", suffix=".prototxt") as tmpfile:
         tmpfile.write(self.to_prototxt())
         tmpfile.flush()
         _specification = self.copy()
         net = _Net(tmpfile.name, specification=_specification)
     return net
Example #8
def _fill_and_save(file_from, file_to, as_root=False):
    with _NamedTemporaryFile() as ff:
        _fill_constants_from_settings(file_from, ff.name)
        command = 'cp {} {}'.format(ff.name, file_to)
        if as_root:
            local('sudo ' + command)
        else:
            local(command)
        _message_ok('File {} generated from {}'.format(file_to, file_from))
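Example #8 only needs the temporary file long enough to render a template into it and copy the result to its destination through the shell (Fabric's local()). A hedged sketch of the same flow without Fabric, with the fill step passed in as a callable because _fill_constants_from_settings is specific to that project:

import subprocess
from tempfile import NamedTemporaryFile

def fill_and_save(fill_func, file_from, file_to, as_root=False):
    """Render file_from into a temp file via fill_func, then copy it to file_to."""
    with NamedTemporaryFile() as ff:
        fill_func(file_from, ff.name)
        command = ['cp', ff.name, file_to]
        if as_root:
            command = ['sudo'] + command
        subprocess.check_call(command)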
Example #9
File: pdb.py  Project: gph82/mdciao
def pdb2traj(code,
             filename=None,
             verbose=True,
             url="https://files.rcsb.org/download/",
             ):
    r""" Return a :obj:`~mdtraj.Trajectory` from a four-letter PDB code via RSCB PBB lookup

    Thinly wraps around :obj:`mdtraj.load_pdb`, printing the corresponding citation.
    Will return None if lookup fails

    Parameters
    ----------
    code : str
        four-letter code, e.g. 3SN6
    filename : str, default is None
        if str, save to this file,
        eventually overwriting
    verbose : bool, default is True
    url : str, default is 'https://files.rcsb.org/download'
        base URL for lookups

    Returns
    -------
    traj : :obj:`~mdtraj.Trajectory` or None

    """
    url1 = "%s/%s.pdb" % (url.strip("/"),code.strip("/"))

    #TODO use print_v elsewhere
    print_v = lambda s, **kwargs: [print(s, **kwargs) if verbose else None][0]
    print_v("Checking %s" % url1, end=" ...", flush=True)
    geom = None
    try:
        a = _requests.get(url1)
        if a.ok:
            with _NamedTemporaryFile(mode="w", suffix=".pdb") as f:
                f.write(a.text)
                f.flush()  # ensure the PDB text is on disk before loading it back by name
                print_v("done")
                geom = _load_pdb(f.name)  # TODO use StringIO
                if filename is not None:
                    print_v("Saving to %s..." % filename, end="", flush=True)
                    if filename.lower().endswith(".pdb"):
                        _copy(f.name, filename)
                    else:
                        geom.save(filename)
                    print_v(filename)
        else:
            raise _URLError(a.text,filename=url1)


    except (_HTTPError, _URLError) as e:
        print(url1, ":", e)

    if geom is not None:
        pdb2ref(code)

    return geom
Example #10
def NamedTemporaryFile():
    global temp_files
    if sys.platform == "linux" or sys.platform == "linux2":
        OPT_DELETE = True
    elif sys.platform == "darwin":
        OPT_DELETE = True
    elif sys.platform == "win32":
        OPT_DELETE = False
    tempFile = _NamedTemporaryFile(delete=OPT_DELETE)
    temp_files.append(tempFile.name)
    return tempFile
Example #11
def NamedTemporaryFile():
    global temp_files
    if sys.platform == "linux" or sys.platform == "linux2":
        OPT_DELETE = True
    elif sys.platform == "darwin":
        OPT_DELETE = True
    elif sys.platform == "win32":
        OPT_DELETE = False
    tempFile = _NamedTemporaryFile(delete=OPT_DELETE)
    temp_files.append(tempFile.name)
    return tempFile
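Examples #10 and #11 disable auto-delete on Windows because an open NamedTemporaryFile cannot be reopened by name there; the created names are collected in a global temp_files list, presumably for later cleanup. A small sketch that pairs the same idea with an atexit hook (the global list comes from the examples, the cleanup hook is an assumption):

import atexit
import os
import sys
from tempfile import NamedTemporaryFile

temp_files = []

def named_temporary_file():
    # Auto-delete everywhere except Windows, where the file must stay reopenable by name.
    delete = sys.platform != 'win32'
    tmp = NamedTemporaryFile(delete=delete)
    temp_files.append(tmp.name)
    return tmp

@atexit.register
def _cleanup_temp_files():
    for name in temp_files:
        if os.path.exists(name):
            os.remove(name)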
Example #12
 def instantiate(self):
     """Create an instantiated net with the current object configuration."""
     # Write spec to a temporary file.
     with _NamedTemporaryFile(mode='w', suffix='.prototxt') as tmpfile:
         tmpfile.write(self.to_prototxt())
         tmpfile.flush()
         _specification = self.copy()
         net = _Net(tmpfile.name,
                    mode=self.phase,
                    specification=_specification)
     return net
Example #13
 def Get_caffe_solver_instance(cls, solver_parameter_dict, net):
     """Get a caffe solver object."""
     # now we actually create a instance of the solver
     solver_message = _caffe_pb2.SolverParameter(**solver_parameter_dict)
     messagestr = _gprototext.MessageToString(solver_message)
     with _NamedTemporaryFile(mode='w+b', suffix='.prototxt') as tmpfile:
         tmpfile.write(bytes(messagestr.encode('utf-8')))
         tmpfile.flush()
         return cls.Get_caffe_solver_class(
             solver_parameter_dict['solver_type'])._caffe_solver_class(
                 tmpfile.name, net)
     raise Exception('could not initialize solver class')
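Examples #13 through #17 all funnel a SolverParameter protobuf message through a temporary .prototxt file because the underlying caffe solver constructor only takes a file name. A hedged sketch of that serialization step, assuming pycaffe and its generated caffe_pb2 module are available (the parameter value is illustrative):

from tempfile import NamedTemporaryFile
from caffe.proto import caffe_pb2           # assumes a pycaffe installation
from google.protobuf import text_format

with NamedTemporaryFile(mode='w+b', suffix='.prototxt') as tmpfile:
    message = caffe_pb2.SolverParameter(base_lr=0.01)
    tmpfile.write(text_format.MessageToString(message).encode('utf-8'))
    tmpfile.flush()                          # the solver reads tmpfile.name from disk
    # a solver could now be built from tmpfile.name, e.g. via caffe.get_solver(tmpfile.name)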
Example #14
 def Get_caffe_solver_instance(cls, solver_parameter_dict, net):
     """Get a caffe solver object."""
     # now we actually create a instance of the solver
     solver_message = _caffe_pb2.SolverParameter(**solver_parameter_dict)
     messagestr = _gprototext.MessageToString(solver_message)
     with _NamedTemporaryFile(mode='w+b', suffix='.prototxt') as tmpfile:
         tmpfile.write(bytes(messagestr.encode('utf-8')))
         tmpfile.flush()
         return cls.Get_caffe_solver_class(
             solver_parameter_dict['solver_type'])._caffe_solver_class(
                 tmpfile.name, net)
     raise Exception('could not initialize solver class')
Example #15
    def _get_s3_state_config(s3_bucket_name, s3_key_name, credentials):
        conn = _connect_s3(**credentials)
        bucket = conn.get_bucket(s3_bucket_name, validate=False)
        key = bucket.get_key(s3_key_name)

        if not key:
            raise IOError("No Predictive Service at the specified location.")

        with _NamedTemporaryFile() as temp_file:
            key.get_contents_to_file(temp_file)
            temp_file.flush()
            config = _ConfigParser(allow_no_value=True)
            config.optionxform = str
            config.read(temp_file.name)
            temp_file.close()  # deletes temp file

        return config
Example #16
 def Get_caffe_solver_instance(cls, solver_parameter_dict, net):
     """Get a caffe solver object."""
     # now we actually create a instance of the solver
     solver_message = _caffe_pb2.SolverParameter(**solver_parameter_dict)
     messagestr = _gprototext.MessageToString(solver_message)
     with _NamedTemporaryFile(mode='w+b', suffix='.prototxt') as tmpfile:
         tmpfile.write(bytes(messagestr.encode('utf-8')))
         tmpfile.flush()
         try:
             # Newer version of caffe with full solver init support.
             return cls.Get_caffe_solver_class(
                 solver_parameter_dict['solver_type'])._caffe_solver_class(
                     tmpfile.name, net, _caffe._caffe.NetVec(), True)
         except TypeError:
             # Fallback for older, patched versions.
             return cls.Get_caffe_solver_class(
                 solver_parameter_dict['solver_type'])._caffe_solver_class(
                     tmpfile.name, net)
     raise Exception('could not initialize solver class')
Example #17
 def Get_caffe_solver_instance(cls, solver_parameter_dict, net):
     """Get a caffe solver object."""
     # now we actually create a instance of the solver
     solver_message = _caffe_pb2.SolverParameter(**solver_parameter_dict)
     messagestr = _gprototext.MessageToString(solver_message)
     with _NamedTemporaryFile(mode='w+b', suffix='.prototxt') as tmpfile:
         tmpfile.write(bytes(messagestr.encode('utf-8')))
         tmpfile.flush()
         try:
             # Newer version of caffe with full solver init support.
             return cls.Get_caffe_solver_class(
                 solver_parameter_dict['solver_type'])._caffe_solver_class(
                     tmpfile.name, net, _caffe._caffe.NetVec(), True)
         except TypeError:
             # Fallback for older, patched versions.
             return cls.Get_caffe_solver_class(
                 solver_parameter_dict['solver_type'])._caffe_solver_class(
                     tmpfile.name, net)
     raise Exception('could not initialize solver class')
Example #18
def buildNet(fname_prototxt, fname_caffemodel, deconv_as_resamp=True, forward=False):
    fname = fname_prototxt.split(".prototxt")[0]
    if os.path.isfile(fname_caffemodel):
        n_instance = caffe.Net(fname_prototxt, fname_caffemodel, caffe.TEST)
    else:
        n_instance = caffe.Net(fname_prototxt, caffe.TEST)
        n_instance.save(fname+".caffemodel")
    if forward:
        n_instance.forward()
    upgrade_bin = os.path.join(CAFFE_BIN_FOLDER, "upgrade_net_proto_text.bin")
    with _NamedTemporaryFile(mode='r', suffix='.prototxt') as tmpfile:
        _subprocess.check_call([upgrade_bin,
                                fname + ".prototxt",
                                tmpfile.name])
        text = tmpfile.read()
    n = _caffe_pb2.NetParameter()
    _gprototext.Merge(text, n)
    layers = n.ListFields()[-1][1]
    net = AbstractNet(fname.split("/")[-1])
    net.buffer_names_to_replace = {}
    for i in range(0, len(layers)):
        l = layers[i]
        log(l.type + " " + l.name)
        if l.type == "Input" or l.type == "Python" or l.type == "Data":
            createInput(l, net, n_instance)
        elif l.type == "Interp":
            createInterp(l, net, n_instance)
        elif l.type == "LRN":
            createLRN(l, net, n_instance)
        elif l.type == "Convolution":
            createConv(l, net, n_instance)
        elif l.type == "Pooling":
            createPool(l, net, n_instance)
        elif l.type == "Deconvolution":
            createDeconv(l, net, n_instance, deconv_as_resamp)
        elif l.type == "ReLU":
            createRelu(l, net, n_instance)
        elif l.type == "Concat":
            createMerge(l, net, n_instance)
        elif l.type == "Flatten":
            createFlatten(l, net, n_instance)
        elif l.type == "Softmax":
            createSoftmax(l, net, n_instance)
        elif l.type == "Sigmoid":
            createSigmoid(l, net, n_instance)
        elif l.type == "AbsVal":
            createAbsVal(l, net, n_instance)
        elif l.type == "TanH":
            createTanH(l, net, n_instance)
        elif l.type == "Eltwise":
            createEltwise(l, net, n_instance)
        elif l.type == "BatchNorm":
            createBatchNorm(l, net, n_instance)
        elif l.type == "Scale":
            createScale(l, net, n_instance)
        elif l.type == "Power":
            createPower(l, net, n_instance)
        elif l.type == "Dropout":
            log("Dropout ignored")
        elif l.type == "InnerProduct":
            createInnerProduct(l, net, n_instance)
        elif l.type == "BNLL":
            createBNLL(l, net, n_instance)
        elif l.type == "Reshape":
            createReshape(l, net, n_instance)
        elif l.type == "ArgMax":
            createArgmax(l, net, n_instance)
        elif l.type == "Crop":
            net.buffer_names_to_replace[l.top[0]] = l.bottom[0]
        else:
            log("============= NOT IMPLEMENTED YET ==============")
            log(l.type)
            log(l.argmax_param)
    read_heatmaps_data(net, n_instance)
    read_weights(net, n_instance)
    replace_buffer_names(net)
    return net
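The long if/elif chain in buildNet() is a natural candidate for a dispatch table: each layer type maps to its builder, and unknown types fall through to the log message. A hedged sketch of that refactoring, written so the create* helpers from the example are passed in rather than assumed to be importable (special cases such as Deconvolution's extra flag and Crop's buffer renaming would still need explicit handling):

def build_layers(layers, net, n_instance, builders, log=print):
    """Dispatch each caffe layer to a builder keyed on its type string.

    `builders` maps type names (e.g. 'Convolution') to callables with the
    same (layer, net, n_instance) signature as the create* helpers above.
    """
    for l in layers:
        log("%s %s" % (l.type, l.name))
        builder = builders.get(l.type)
        if builder is None:
            log("============= NOT IMPLEMENTED YET ==============")
            log(l.type)
            continue
        builder(l, net, n_instance)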
Example #19
def _make_temp(data):
    tmpfile = _NamedTemporaryFile(delete=False)
    tmpfile.write(data)
    tmpfile.close()
    return tmpfile
Example #20
    def from_prototxt(text=None, filename=None):
        r"""
        Create an :py:class:`NetSpecification` object from a text spec.

        Either ``text`` or ``filename`` may be set, and is accordingly used.
        Files may be of any caffe prototxt version.
        """
        # Check if the user erroneously specified a filename as text.
        if text is not None:
            if _os.linesep not in text:
                if _os.path.exists(text):
                    _LOGGER.warn('You probably mistakenly specified a filename '
                                 'as text: "%s"! Trying to recover...', text)
                    filename = text
                    text = None
        if filename is not None:
            assert text is None
            # Do a conversion if necessary.
            with _NamedTemporaryFile(mode='r', suffix='.prototxt') as tmpfile:
                net_upgrader_exec = _os.path.join(_CAFFE_BIN_FOLDER,
                                                  'upgrade_net_proto_text')
                assert _os.path.exists(net_upgrader_exec),\
                    ("The executable 'upgrade_net_proto_text' was not found "
                     "in your _CAFFE_BIN_FOLDER! Please set it from the "
                     "module `barrista.config`. The current folder is set "
                     "to: " + _CAFFE_BIN_FOLDER + ".")
                _subprocess.check_call([net_upgrader_exec,
                                        filename,
                                        tmpfile.name])
                text = tmpfile.read()
        message = _caffe_pb2.NetParameter()
        _gprototext.Merge(text, message)
        # Check for completeness of the parsing process.
        fields = message.ListFields()
        for fielddesc in map(_itemgetter(0), fields):  # pylint: disable=W0141
            if fielddesc.name not in ['name',
                                      'input_shape',
                                      'debug_info',
                                      'input',
                                      'input_dim',
                                      'layer',
                                      'force_backward',
                                      'state']:
                _LOGGER.warn('Parsed net prototxt contained unknown field ' +
                             fielddesc.name + '. Ignored.')
        if len(message.input_dim) > 0:
            _LOGGER.warn('The loaded prototxt contains `input_dim` fields. '
                         'They are deprecated! Use `input_shape` instead.')
            if _HAS_BLOB_SHAPE:
                assert len(message.input_shape) == 0
            assert len(message.input_dim) % 4 == 0
            input_shape = _copy.deepcopy(list(_chunks(message.input_dim, 4)))
        else:
            input_shape = _copy.deepcopy([bshape.dim for
                                          bshape in message.input_shape])
        inputs = _copy.deepcopy(message.input)
        layerspecs = [LayerSpecification.from_pbuf_message(layer)
                      for layer in message.layer]
        pbforcebw = message.force_backward
        phase = message.state.phase
        level = message.state.level
        stages = _copy.deepcopy(message.state.stage)
        debug_info = message.debug_info
        name = message.name
        spec = NetSpecification(input_shape,
                                inputs,
                                layerspecs,
                                pbforcebw,
                                phase,
                                level,
                                stages,
                                debug_info,
                                name)
        return spec
Example #21
    def from_prototxt(text=None, filename=None):
        r"""
        Create an :py:class:`NetSpecification` object from a text spec.

        Either ``text`` or ``filename`` may be set, and is accordingly used.
        Files may be of any caffe prototxt version.
        """
        # Check if the user erroneously specified a filename as text.
        if text is not None:
            if _os.linesep not in text:
                if _os.path.exists(text):  # pragma: no cover
                    _LOGGER.warn(
                        'You probably mistakenly specified a filename '
                        'as text: "%s"! Trying to recover...', text)
                    filename = text
                    text = None
        if filename is not None:
            assert text is None
            # Do a conversion if necessary.
            with _NamedTemporaryFile(mode='r', suffix='.prototxt') as tmpfile:
                net_upgrader_exec = _os.path.join(_CAFFE_BIN_FOLDER,
                                                  'upgrade_net_proto_text')
                assert _os.path.exists(net_upgrader_exec),\
                    ("The executable 'upgrade_net_proto_text' was not found "
                     "in your _CAFFE_BIN_FOLDER! Please set it from the "
                     "module `barrista.config`. The current folder is set "
                     "to: " + _CAFFE_BIN_FOLDER + ".")
                _subprocess.check_call(
                    [net_upgrader_exec, filename, tmpfile.name])
                text = tmpfile.read()
        message = _caffe_pb2.NetParameter()
        _gprototext.Merge(text, message)
        # Check for completeness of the parsing process.
        fields = message.ListFields()
        for fielddesc in map(_itemgetter(0), fields):  # pylint: disable=W0141
            if fielddesc.name not in [
                    'name', 'input_shape', 'debug_info', 'input', 'input_dim',
                    'layer', 'force_backward', 'state'
            ]:
                _LOGGER.warn('Parsed net prototxt contained unknown field ' +
                             fielddesc.name + '. Ignored.')
        if len(message.input_dim) > 0:
            _LOGGER.warn('The loaded prototxt contains `input_dim` fields. '
                         'They are deprecated! Use `input_shape` instead.')
            if _HAS_BLOB_SHAPE:
                assert len(message.input_shape) == 0
            assert len(message.input_dim) % 4 == 0
            input_shape = _copy.deepcopy(list(_chunks(message.input_dim, 4)))
        else:  # pragma: no cover
            input_shape = _copy.deepcopy(
                [bshape.dim for bshape in message.input_shape])
        inputs = _copy.deepcopy(message.input)
        layerspecs = [
            LayerSpecification.from_pbuf_message(layer)
            for layer in message.layer
        ]
        pbforcebw = message.force_backward
        phase = message.state.phase
        level = message.state.level
        stages = _copy.deepcopy(message.state.stage)
        debug_info = message.debug_info
        name = message.name
        spec = NetSpecification(input_shape, inputs, layerspecs, pbforcebw,
                                phase, level, stages, debug_info, name)
        return spec
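Examples #20 and #21 (like buildNet in Example #18) share one more variant of the pattern: a read-mode NamedTemporaryFile serves purely as a named output slot for an external converter, and its contents are read back once the subprocess returns. A minimal sketch with the converter path left as a placeholder:

import subprocess
from tempfile import NamedTemporaryFile

def convert_and_read(converter_exec, input_path):
    """Run a converter that writes its result to a named file and return that text."""
    with NamedTemporaryFile(mode='r', suffix='.prototxt') as tmpfile:
        # converter_exec stands in for e.g. the upgrade_net_proto_text binary.
        subprocess.check_call([converter_exec, input_path, tmpfile.name])
        return tmpfile.read()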