Example #1
def api_shop_operation(operation):
    def set_token(inner_who):
        token = hashlib.md5('%s%s' % (inner_who, utc_8_now())).hexdigest()
        key = key_shop_token.format(content=token)
        # Store the token in Redis with a 7-day TTL: get shop:token:fe11ad907e2fa779ed2f363ef589d3f9 => 7740959
        redis_client.setex(key, inner_who, 7 * 24 * 3600)
        return token

    operation = str(operation).upper().strip()
    kw = ctx.request.input()
    # ==> Merchant registration
    if operation == OPERATIONS.REGISTER:
        kw = Schema({
            'tel': schema_unicode,
            'sms_code': schema_unicode,
            'password': schema_unicode,
            Optional('name'): schema_unicode,
            'contact': {
                Optional('name', default=''): schema_unicode_empty,
                Optional('tel', default=''): schema_unicode_empty
            },
            Optional("loc"): {
                "address": schema_unicode,
                "longitude": schema_float,
                "latitude": schema_float,
                Optional(object): object
            },
            Optional('recommended_by'): {
                'tel': schema_unicode_empty
            },
        }).validate(ctx.request.input())
        tel = kw['tel']
        sms_code = kw['sms_code']
        password = kw['password']
        # Registration via song.123feng.com/APP: phone number + SMS code
        if tel:
            sms_key = key_sms_code.format(tel=tel)
            # Validation succeeds when the code matches: get sms:code:15901739717 => 123456
            sms_code_in_redis = redis_client.get(sms_key)
            if sms_code_in_redis == sms_code:
                shop = Shop.objects(tel=tel).first()
                if not shop:
                    # Create the merchant record in MongoDB if none exists: status=STATUS_VALID / STATUS_INFO_YES
                    if 'recommended_by' in kw:
                        kw['recommended_by']['time'] = TimeZone.utc_now()
                    kw_filtered = ShopLogic.filter_shop(kw)
                    shop = Shop(**kw_filtered).save()
                    # ===== Register the Fengxin (风信) account =====
                    http_client = HTTPClient()
                    http_client.fetch(account.req_create(
                        account_type=conf.ACCOUNT_TYPE_SHOP,
                        account_id=str(shop.pk)),
                                      raise_error=False)
                else:
                    # Re-registration is allowed; just update the password
                    shop.password = password
                    shop.save()
                shop.reload()
                shop_id = str(shop.pk)
                content = set_token(shop_id)
                s_packed = ShopLogic.pack_shop(shop)
                s_packed.update(dict(token=content))
                return s_packed
            # Validation failed
            else:
                logging.info(
                    "SMS code validation for [%s]=[%s] failed, expected[%s]." %
                    (tel, sms_code, sms_code_in_redis))
                raise ValueError("SMS code validation failed.")
    # ==> Merchant events
    elif operation in ShopFSM.FE_INSIDE_EVENTS or operation in ShopFSM.OUTSIDE_EVENTS:
        # For the complete-info event, additionally record a timestamp
        if operation == ShopFSM.EVENT_COMPLETE_INFO and 'recommended_by' in kw:
            kw['recommended_by']['time'] = TimeZone.utc_now()
        # Special handling for the alter-info (pricing) event: the shop can be looked up by tel
        if operation == ShopFSM.EVENT_ALTER_INFO:
            shop = Shop.objects(tel=kw.tel).first()
        # All other events look the shop up by shop_id
        else:
            shop = Shop.objects(id=kw.shop_id).first()
        if shop:
            operator_type = kw.operator_type
            kw.pop('operator_type')
            modified_shop = ShopFSM.update_status(operator_type, shop,
                                                  operation, **kw)
            if not modified_shop:
                raise ValueError(
                    "State transfer for shop[%s][%s][%s] using [%s] failed." %
                    (kw.shop_id, shop.name, shop.status, operation))
            return ShopLogic.pack_shop(modified_shop)

    else:
        pass
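The nested "loc" block in the registration schema above relies on `Optional(object): object` as a catch-all key so that extra location fields do not fail validation. A minimal standalone sketch of that pattern (the `poi_id` field is made up for illustration):

from schema import Schema, Optional

# Catch-all pattern from the "loc" sub-schema: Optional(object) matches any extra
# key, and the object value schema accepts whatever value it carries.
loc_schema = Schema({
    "address": str,
    "longitude": float,
    "latitude": float,
    Optional(object): object,
})

print(loc_schema.validate({
    "address": "1 Example Rd",
    "longitude": 121.47,
    "latitude": 31.23,
    "poi_id": "abc123",  # hypothetical extra field, accepted by the catch-all
}))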
Example #2
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

import os
from schema import Schema, And, Use, Optional, Regex, Or

common_schema = {
    'authorName':
    str,
    'experimentName':
    str,
    Optional('description'):
    str,
    'trialConcurrency':
    And(int, lambda n: 1 <= n <= 999999),
    Optional('maxExecDuration'):
    Regex(r'^[1-9][0-9]*[s|m|h|d]$'),
    Optional('maxTrialNum'):
    And(int, lambda x: 1 <= x <= 99999),
    'trainingServicePlatform':
    And(str, lambda x: x in ['remote', 'local', 'pai', 'kubeflow']),
    Optional('searchSpacePath'):
    os.path.exists,
    Optional('multiPhase'):
    bool,
    Optional('multiThread'):
    bool,
}
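A hedged usage sketch for the config schema above, assuming the dict is closed as shown; the values are made up, but each satisfies its validator (the `trialConcurrency` bounds, the `maxExecDuration` regex, and the allowed `trainingServicePlatform` values):

example_config = {
    'authorName': 'alice',               # illustrative values only
    'experimentName': 'mnist',
    'trialConcurrency': 4,               # 1 <= n <= 999999
    'maxExecDuration': '2h',             # matches ^[1-9][0-9]*[s|m|h|d]$
    'trainingServicePlatform': 'local',  # one of remote/local/pai/kubeflow
    'multiPhase': False,
}
print(Schema(common_schema).validate(example_config))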
Example #3
class LoonDeptDetailView(LoonBaseView):
    patch_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        Optional('parent_dept_id'):
        int,
        Optional('leader'):
        str,
        Optional('approver'):
        str,
        Optional('label'):
        str,
    })

    @manage_permission_check('admin')
    def delete(self, request, *args, **kwargs):
        """
        Delete a department
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        operator = request.user.username
        dept_id = kwargs.get('dept_id')
        flag, result = account_base_service_ins.delete_dept(dept_id)
        if flag is False:
            return api_response(-1, result, {})
        return api_response(0, '', {})

    @manage_permission_check('admin')
    def patch(self, request, *args, **kwargs):
        """
        Update a department
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        dept_id = kwargs.get('dept_id')
        json_str = request.body.decode('utf-8')
        request_data_dict = json.loads(json_str)
        name = request_data_dict.get('name')
        parent_dept_id = request_data_dict.get('parent_dept_id')
        leader_id = request_data_dict.get('leader')
        approver_str_list = request_data_dict.get('approver')
        approvers = approver_str_list.split(',')
        approver_id_list = [int(approver_str) for approver_str in approvers]
        label = request_data_dict.get('label')

        if leader_id:
            ok, result = account_base_service_ins.get_user_by_user_id(
                int(leader_id))
            if not ok:
                return api_response(-1, result, {})
            leader = result.username
        else:
            leader = None

        flag, result = account_base_service_ins.get_user_name_list_by_id_list(
            approver_id_list)
        if flag is False:
            return api_response(-1, result, {})
        approver_username_list = result.get('username_list')
        approver_username_str = ','.join(approver_username_list)

        flag, result = account_base_service_ins.update_dept(
            dept_id, name, parent_dept_id, leader, approver_username_str,
            label)
        if flag is False:
            return api_response(-1, result, {})
        return api_response(0, '', {})
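Side note on the `error=` arguments in `patch_schema` above: when a predicate inside `And` fails, the raised `SchemaError` carries the custom message instead of the auto-generated one. A minimal sketch of the 'name' rule in isolation:

from schema import Schema, And, SchemaError

name_schema = Schema(And(str, lambda n: n != '', error='name is needed'))

try:
    name_schema.validate('')
except SchemaError as exc:
    print(exc)  # -> name is needed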
Example #4
class Config(object):  # pylint: disable=too-many-instance-attributes
    """Class that manages configuration files for a dvc repo.

    Args:
        dvc_dir (str): optional path to the `.dvc` directory, used to
            access repo-specific configs like .dvc/config and
            .dvc/config.local.
        validate (bool): optional flag to tell dvc if it should validate the
            config or just load it as is. 'True' by default.


    Raises:
        ConfigError: thrown when config has an invalid format.
    """

    APPNAME = "dvc"
    APPAUTHOR = "iterative"

    # NOTE: used internally in RemoteLOCAL to know the config
    # location that urls should be resolved relative to.
    PRIVATE_CWD = "_cwd"

    CONFIG = "config"
    CONFIG_LOCAL = "config.local"

    SECTION_CORE = "core"
    SECTION_CORE_LOGLEVEL = "loglevel"
    SECTION_CORE_LOGLEVEL_SCHEMA = And(Use(str.lower), supported_loglevel)
    SECTION_CORE_REMOTE = "remote"
    SECTION_CORE_INTERACTIVE_SCHEMA = And(str, is_bool, Use(to_bool))
    SECTION_CORE_INTERACTIVE = "interactive"
    SECTION_CORE_ANALYTICS = "analytics"
    SECTION_CORE_ANALYTICS_SCHEMA = And(str, is_bool, Use(to_bool))

    SECTION_CACHE = "cache"
    SECTION_CACHE_DIR = "dir"
    SECTION_CACHE_TYPE = "type"
    SECTION_CACHE_TYPE_SCHEMA = supported_cache_type
    SECTION_CACHE_PROTECTED = "protected"
    SECTION_CACHE_LOCAL = "local"
    SECTION_CACHE_S3 = "s3"
    SECTION_CACHE_GS = "gs"
    SECTION_CACHE_SSH = "ssh"
    SECTION_CACHE_HDFS = "hdfs"
    SECTION_CACHE_AZURE = "azure"
    SECTION_CACHE_SLOW_LINK_WARNING = "slow_link_warning"
    SECTION_CACHE_SCHEMA = {
        Optional(SECTION_CACHE_LOCAL):
        str,
        Optional(SECTION_CACHE_S3):
        str,
        Optional(SECTION_CACHE_GS):
        str,
        Optional(SECTION_CACHE_HDFS):
        str,
        Optional(SECTION_CACHE_SSH):
        str,
        Optional(SECTION_CACHE_AZURE):
        str,
        Optional(SECTION_CACHE_DIR):
        str,
        Optional(SECTION_CACHE_TYPE, default=None):
        SECTION_CACHE_TYPE_SCHEMA,
        Optional(SECTION_CACHE_PROTECTED, default=False):
        And(str, is_bool, Use(to_bool)),
        Optional(PRIVATE_CWD):
        str,
        Optional(SECTION_CACHE_SLOW_LINK_WARNING, default=True):
        And(str, is_bool, Use(to_bool)),
    }

    # backward compatibility
    SECTION_CORE_CLOUD = "cloud"
    SECTION_CORE_CLOUD_SCHEMA = And(Use(str.lower), supported_cloud)
    SECTION_CORE_STORAGEPATH = "storagepath"

    SECTION_CORE_SCHEMA = {
        Optional(SECTION_CORE_LOGLEVEL):
        And(str, Use(str.lower), SECTION_CORE_LOGLEVEL_SCHEMA),
        Optional(SECTION_CORE_REMOTE, default=""):
        And(str, Use(str.lower)),
        Optional(SECTION_CORE_INTERACTIVE, default=False):
        SECTION_CORE_INTERACTIVE_SCHEMA,
        Optional(SECTION_CORE_ANALYTICS, default=True):
        SECTION_CORE_ANALYTICS_SCHEMA,
        # backward compatibility
        Optional(SECTION_CORE_CLOUD, default=""):
        SECTION_CORE_CLOUD_SCHEMA,
        Optional(SECTION_CORE_STORAGEPATH, default=""):
        str,
    }

    # backward compatibility
    SECTION_AWS = "aws"
    SECTION_AWS_STORAGEPATH = "storagepath"
    SECTION_AWS_CREDENTIALPATH = "credentialpath"
    SECTION_AWS_ENDPOINT_URL = "endpointurl"
    SECTION_AWS_LIST_OBJECTS = "listobjects"
    SECTION_AWS_REGION = "region"
    SECTION_AWS_PROFILE = "profile"
    SECTION_AWS_USE_SSL = "use_ssl"
    SECTION_AWS_SCHEMA = {
        SECTION_AWS_STORAGEPATH:
        str,
        Optional(SECTION_AWS_REGION):
        str,
        Optional(SECTION_AWS_PROFILE):
        str,
        Optional(SECTION_AWS_CREDENTIALPATH):
        str,
        Optional(SECTION_AWS_ENDPOINT_URL):
        str,
        Optional(SECTION_AWS_LIST_OBJECTS, default=False):
        And(str, is_bool, Use(to_bool)),
        Optional(SECTION_AWS_USE_SSL, default=True):
        And(str, is_bool, Use(to_bool)),
    }

    # backward compatibility
    SECTION_GCP = "gcp"
    SECTION_GCP_STORAGEPATH = SECTION_AWS_STORAGEPATH
    SECTION_GCP_CREDENTIALPATH = SECTION_AWS_CREDENTIALPATH
    SECTION_GCP_PROJECTNAME = "projectname"
    SECTION_GCP_SCHEMA = {
        SECTION_GCP_STORAGEPATH: str,
        Optional(SECTION_GCP_PROJECTNAME): str,
    }

    # backward compatibility
    SECTION_LOCAL = "local"
    SECTION_LOCAL_STORAGEPATH = SECTION_AWS_STORAGEPATH
    SECTION_LOCAL_SCHEMA = {SECTION_LOCAL_STORAGEPATH: str}

    SECTION_AZURE_CONNECTION_STRING = "connection_string"

    SECTION_REMOTE_REGEX = r'^\s*remote\s*"(?P<name>.*)"\s*$'
    SECTION_REMOTE_FMT = 'remote "{}"'
    SECTION_REMOTE_URL = "url"
    SECTION_REMOTE_USER = "user"
    SECTION_REMOTE_PORT = "port"
    SECTION_REMOTE_KEY_FILE = "keyfile"
    SECTION_REMOTE_TIMEOUT = "timeout"
    SECTION_REMOTE_PASSWORD = "password"
    SECTION_REMOTE_ASK_PASSWORD = "ask_password"
    SECTION_REMOTE_SCHEMA = {
        SECTION_REMOTE_URL:
        str,
        Optional(SECTION_AWS_REGION):
        str,
        Optional(SECTION_AWS_PROFILE):
        str,
        Optional(SECTION_AWS_CREDENTIALPATH):
        str,
        Optional(SECTION_AWS_ENDPOINT_URL):
        str,
        Optional(SECTION_AWS_LIST_OBJECTS, default=False):
        And(str, is_bool, Use(to_bool)),
        Optional(SECTION_AWS_USE_SSL, default=True):
        And(str, is_bool, Use(to_bool)),
        Optional(SECTION_GCP_PROJECTNAME):
        str,
        Optional(SECTION_CACHE_TYPE):
        SECTION_CACHE_TYPE_SCHEMA,
        Optional(SECTION_CACHE_PROTECTED, default=False):
        And(str, is_bool, Use(to_bool)),
        Optional(SECTION_REMOTE_USER):
        str,
        Optional(SECTION_REMOTE_PORT):
        Use(int),
        Optional(SECTION_REMOTE_KEY_FILE):
        str,
        Optional(SECTION_REMOTE_TIMEOUT):
        Use(int),
        Optional(SECTION_REMOTE_PASSWORD):
        str,
        Optional(SECTION_REMOTE_ASK_PASSWORD):
        And(str, is_bool, Use(to_bool)),
        Optional(SECTION_AZURE_CONNECTION_STRING):
        str,
        Optional(PRIVATE_CWD):
        str,
    }

    SECTION_STATE = "state"
    SECTION_STATE_ROW_LIMIT = "row_limit"
    SECTION_STATE_ROW_CLEANUP_QUOTA = "row_cleanup_quota"
    SECTION_STATE_SCHEMA = {
        Optional(SECTION_STATE_ROW_LIMIT): And(Use(int), is_whole),
        Optional(SECTION_STATE_ROW_CLEANUP_QUOTA): And(Use(int), is_percent),
    }

    SCHEMA = {
        Optional(SECTION_CORE, default={}): SECTION_CORE_SCHEMA,
        Optional(Regex(SECTION_REMOTE_REGEX)): SECTION_REMOTE_SCHEMA,
        Optional(SECTION_CACHE, default={}): SECTION_CACHE_SCHEMA,
        Optional(SECTION_STATE, default={}): SECTION_STATE_SCHEMA,
        # backward compatibility
        Optional(SECTION_AWS, default={}): SECTION_AWS_SCHEMA,
        Optional(SECTION_GCP, default={}): SECTION_GCP_SCHEMA,
        Optional(SECTION_LOCAL, default={}): SECTION_LOCAL_SCHEMA,
    }

    def __init__(self, dvc_dir=None, validate=True):
        self.system_config_file = os.path.join(self.get_system_config_dir(),
                                               self.CONFIG)
        self.global_config_file = os.path.join(self.get_global_config_dir(),
                                               self.CONFIG)

        if dvc_dir is not None:
            self.dvc_dir = os.path.abspath(os.path.realpath(dvc_dir))
            self.config_file = os.path.join(dvc_dir, self.CONFIG)
            self.config_local_file = os.path.join(dvc_dir, self.CONFIG_LOCAL)
        else:
            self.dvc_dir = None
            self.config_file = None
            self.config_local_file = None

        self._system_config = None
        self._global_config = None
        self._repo_config = None
        self._local_config = None

        self.config = None

        self.load(validate=validate)

    @staticmethod
    def get_global_config_dir():
        """Returns global config location. E.g. ~/.config/dvc/config.

        Returns:
            str: path to the global config directory.
        """
        from appdirs import user_config_dir

        return user_config_dir(appname=Config.APPNAME,
                               appauthor=Config.APPAUTHOR)

    @staticmethod
    def get_system_config_dir():
        """Returns system config location. E.g. /etc/dvc.conf.

        Returns:
            str: path to the system config directory.
        """
        from appdirs import site_config_dir

        return site_config_dir(appname=Config.APPNAME,
                               appauthor=Config.APPAUTHOR)

    @staticmethod
    def init(dvc_dir):
        """Initializes dvc config.

        Args:
            dvc_dir (str): path to .dvc directory.

        Returns:
            dvc.config.Config: config object.
        """
        config_file = os.path.join(dvc_dir, Config.CONFIG)
        open(config_file, "w+").close()
        return Config(dvc_dir)

    def _load(self):
        self._system_config = configobj.ConfigObj(self.system_config_file)
        self._global_config = configobj.ConfigObj(self.global_config_file)

        if self.config_file is not None:
            self._repo_config = configobj.ConfigObj(self.config_file)
        else:
            self._repo_config = configobj.ConfigObj()

        if self.config_local_file is not None:
            self._local_config = configobj.ConfigObj(self.config_local_file)
        else:
            self._local_config = configobj.ConfigObj()

        self.config = None

    def _load_config(self, path):
        config = configobj.ConfigObj(path)
        config = self._lower(config)
        self._resolve_paths(config, path)
        return config

    @staticmethod
    def _resolve_path(path, config_file):
        assert os.path.isabs(config_file)
        config_dir = os.path.dirname(config_file)
        return os.path.abspath(os.path.join(config_dir, path))

    def _resolve_cache_path(self, config, fname):
        cache = config.get(self.SECTION_CACHE)
        if cache is None:
            return

        cache_dir = cache.get(self.SECTION_CACHE_DIR)
        if cache_dir is None:
            return

        cache[self.PRIVATE_CWD] = os.path.dirname(fname)

    def _resolve_paths(self, config, fname):
        if fname is None:
            return

        self._resolve_cache_path(config, fname)
        for section in config.values():
            if self.SECTION_REMOTE_URL not in section.keys():
                continue

            section[self.PRIVATE_CWD] = os.path.dirname(fname)

    def load(self, validate=True):
        """Loads config from all the config files.

        Args:
            validate (bool): optional flag to tell dvc if it should validate
                the config or just load it as is. 'True' by default.


        Raises:
            dvc.config.ConfigError: thrown if config has invalid format.
        """
        self._load()
        try:
            self.config = self._load_config(self.system_config_file)
            user = self._load_config(self.global_config_file)
            config = self._load_config(self.config_file)
            local = self._load_config(self.config_local_file)

            # NOTE: schema doesn't support ConfigObj.Section validation, so we
            # need to convert our config to dict before passing it to schema.
            for conf in [user, config, local]:
                self.config = self._merge(self.config, conf)

            if validate:
                self.config = Schema(self.SCHEMA).validate(self.config)

            # NOTE: now converting back to ConfigObj
            self.config = configobj.ConfigObj(self.config,
                                              write_empty_values=True)
            self.config.filename = self.config_file
            self._resolve_paths(self.config, self.config_file)
        except Exception as ex:
            raise ConfigError(ex)

    @staticmethod
    def _get_key(conf, name, add=False):
        for k in conf.keys():
            if k.lower() == name.lower():
                return k

        if add:
            conf[name] = {}
            return name

        return None

    def save(self, config=None):
        """Saves config to config files.

        Args:
            config (configobj.ConfigObj): optional config object to save.

        Raises:
            dvc.config.ConfigError: thrown if failed to write config file.
        """
        if config is not None:
            clist = [config]
        else:
            clist = [
                self._system_config,
                self._global_config,
                self._repo_config,
                self._local_config,
            ]

        for conf in clist:
            if conf.filename is None:
                continue

            try:
                logger.debug("Writing '{}'.".format(conf.filename))
                dname = os.path.dirname(os.path.abspath(conf.filename))
                try:
                    os.makedirs(dname)
                except OSError as exc:
                    if exc.errno != errno.EEXIST:
                        raise
                conf.write()
            except Exception as exc:
                msg = "failed to write config '{}'".format(conf.filename)
                raise ConfigError(msg, exc)

    def get_remote_settings(self, name):
        """
        Args:
            name (str): The name of the remote that we want to retrieve

        Returns:
            dict: The content beneath the given remote name.

        Example:
            >>> config = {'remote "server"': {'url': 'ssh://localhost/'}}
            >>> get_remote_settings("server")
            {'url': 'ssh://localhost/'}
        """
        import posixpath

        settings = self.config[self.SECTION_REMOTE_FMT.format(name)]
        parsed = urlparse(settings["url"])

        # Support for cross referenced remotes.
        # This will merge the settings, giving priority to the outer reference.
        # For example, having:
        #
        #       dvc remote add server ssh://localhost
        #       dvc remote modify server user root
        #       dvc remote modify server ask_password true
        #
        #       dvc remote add images remote://server/tmp/pictures
        #       dvc remote modify images user alice
        #       dvc remote modify images ask_password false
        #       dvc remote modify images password asdf1234
        #
        # Results on a config dictionary like:
        #
        #       {
        #           "url": "ssh://localhost/tmp/pictures",
        #           "user": "******",
        #           "password": "******",
        #           "ask_password": False,
        #       }
        #
        if parsed.scheme == "remote":
            reference = self.get_remote_settings(parsed.netloc)
            url = posixpath.join(reference["url"], parsed.path.lstrip("/"))
            merged = reference.copy()
            merged.update(settings)
            merged["url"] = url
            return merged

        return settings

    @staticmethod
    def unset(config, section, opt=None):
        """Unsets specified option and/or section in the config.

        Args:
            config (configobj.ConfigObj): config to work on.
            section (str): section name.
            opt (str): optional option name.
        """
        if section not in config.keys():
            raise ConfigError("section '{}' doesn't exist".format(section))

        if opt is None:
            del config[section]
            return

        if opt not in config[section].keys():
            raise ConfigError("option '{}.{}' doesn't exist".format(
                section, opt))
        del config[section][opt]

        if not config[section]:
            del config[section]

    @staticmethod
    def set(config, section, opt, value):
        """Sets specified option in the config.

        Args:
            config (configobj.ConfigObj): config to work on.
            section (str): section name.
            opt (str): option name.
            value: value to set option to.
        """
        if section not in config.keys():
            config[section] = {}

        config[section][opt] = value

    @staticmethod
    def show(config, section, opt):
        """Prints option value from the config.

        Args:
            config (configobj.ConfigObj): config to work on.
            section (str): section name.
            opt (str): option name.
        """
        if section not in config.keys():
            raise ConfigError("section '{}' doesn't exist".format(section))

        if opt not in config[section].keys():
            raise ConfigError("option '{}.{}' doesn't exist".format(
                section, opt))

        logger.info(config[section][opt])

    @staticmethod
    def _merge(first, second):
        res = {}
        sections = list(first.keys()) + list(second.keys())
        for section in sections:
            first_copy = first.get(section, {}).copy()
            second_copy = second.get(section, {}).copy()
            first_copy.update(second_copy)
            res[section] = first_copy
        return res

    @staticmethod
    def _lower(config):
        new_config = {}
        for s_key, s_value in config.items():
            new_s = {}
            for key, value in s_value.items():
                new_s[key.lower()] = str(value)
            new_config[s_key.lower()] = new_s
        return new_config
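The Config schemas above repeatedly chain `And(str, is_bool, Use(to_bool))` to turn the strings configobj reads from disk into real booleans. `is_bool` and `to_bool` are dvc helpers not shown here, so the sketch below defines simple stand-ins (assumed behaviour: accept "true"/"false" case-insensitively) purely to illustrate the And/Use chaining:

from schema import And, Schema, Use

def is_bool(value):
    # stand-in for dvc's helper: only "true"/"false" (any case) is acceptable
    return value.lower() in ("true", "false")

def to_bool(value):
    # stand-in for dvc's helper: convert the validated string to a bool
    return value.lower() == "true"

bool_option = Schema(And(str, is_bool, Use(to_bool)))
print(bool_option.validate("True"))   # -> True
print(bool_option.validate("false"))  # -> False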
Example #5
from schema import Schema, Optional, SchemaError

# try:
#     validated = schema.validate(data)
#     print(validated)
# except Exception as e:
#     print(type(e))
#     print(e.__dict__)
#
#     if len(e.errors)==4:
#         print(f"{e.autos[1]} {e}")
#     else:
#         print(e)

schema = Schema({
    'name': str,
    'version': str,
    Optional('arch'): str,
    Optional('dependencies'): [str]
})

data = {
    'name': 'test',
    'version': '1.0.0',
    'arch': 'arm-m0',
    'dependencies': [
        'coucou',
    ]
}

try:
    validated = schema.validate(data)
    print(validated)
except SchemaError as e:
    print(e)
Example #6
class LoonUserView(LoonBaseView):
    post_schema = Schema({
        'username':
        And(str, lambda n: n != '', error='username is needed'),
        'alias':
        And(str, lambda n: n != '', error='alias is needed'),
        'email':
        And(str, lambda n: n != '', error='email is needed'),
        Optional('password'):
        str,
        'phone':
        str,
        'dept_id':
        And(int, lambda n: n > 0),
        'is_active':
        Use(bool),
        'is_admin':
        Use(bool),
        'is_workflow_admin':
        Use(bool),
    })

    @manage_permission_check('workflow_admin')
    def get(self, request, *args, **kwargs):
        """
        Get the user list
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        request_data = request.GET
        search_value = request_data.get('search_value', '')
        per_page = int(request_data.get('per_page', 10))
        page = int(request_data.get('page', 1))

        flag, result = account_base_service_ins.get_user_list(
            search_value, page, per_page)
        if flag is not False:
            data = dict(value=result.get('user_result_object_format_list'),
                        per_page=result.get('paginator_info').get('per_page'),
                        page=result.get('paginator_info').get('page'),
                        total=result.get('paginator_info').get('total'))
            code, msg, = 0, ''
        else:
            code, msg, data = -1, result, ''
        return api_response(code, msg, data)

    @manage_permission_check('admin')
    def post(self, request, *args, **kwargs):
        """
        add user
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'POST body is empty', {})
        request_data_dict = json.loads(json_str)
        username = request_data_dict.get('username')
        alias = request_data_dict.get('alias')
        email = request_data_dict.get('email')
        password = request_data_dict.get('password')
        phone = request_data_dict.get('phone')
        dept_id = int(request_data_dict.get('dept_id'))
        is_active = request_data_dict.get('is_active')
        is_admin = request_data_dict.get('is_admin')
        is_workflow_admin = request_data_dict.get('is_workflow_admin')
        creator = request.user.username
        flag, result = account_base_service_ins.add_user(
            username, alias, email, phone, dept_id, is_active, is_admin,
            is_workflow_admin, creator, password)
        if flag is False:
            code, msg, data = -1, result, {}
        else:
            code, msg, data = 0, '', result
        return api_response(code, msg, data)
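One caveat about the `Use(bool)` entries in `post_schema` above: `Use` simply calls the given callable on the value, so coercion follows Python truthiness rather than any parsing. A short illustration:

from schema import Schema, Use

flag = Schema(Use(bool))
print(flag.validate(0))        # -> False
print(flag.validate(1))        # -> True
print(flag.validate("false"))  # -> True, since any non-empty string is truthy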
Example #7
class WorkflowStateView(LoonBaseView):
    post_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        'order_id':
        And(int, error='order_id is needed'),
        'type_id':
        And(int, error='type_id is needed'),
        'participant_type_id':
        int,
        'distribute_type_id':
        And(int, lambda n: n != 0, error='distribute_type_id is needed'),
        Optional('remember_last_man_enable'):
        int,
        Optional('state_field_str'):
        str,
        Optional('label'):
        str,
        str:
        object
    })

    def get(self, request, *args, **kwargs):
        """
        Get the list of states owned by a workflow
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        workflow_id = kwargs.get('workflow_id')
        request_data = request.GET
        # username = request_data.get('username', '')  # permission checks based on username may be added later
        username = request.META.get('HTTP_USERNAME')
        search_value = request_data.get('search_value', '')
        per_page = int(request_data.get('per_page', 10)) if request_data.get(
            'per_page', 10) else 10
        page = int(request_data.get('page', 1)) if request_data.get('page',
                                                                    1) else 1
        # if not username:
        #     return api_response(-1, 'please provide username', '')
        flag, result = workflow_state_service_ins.get_workflow_states_serialize(
            workflow_id, per_page, page, search_value)

        if flag is not False:
            paginator_info = result.get('paginator_info')
            data = dict(value=result.get('workflow_states_restful_list'),
                        per_page=paginator_info.get('per_page'),
                        page=paginator_info.get('page'),
                        total=paginator_info.get('total'))
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, result
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def post(self, request, *args, **kwargs):
        """
        Add a state
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'POST body is empty', {})
        request_data_dict = json.loads(json_str)
        workflow_data = {}
        app_name = request.META.get('HTTP_APPNAME')
        username = request.META.get('HTTP_USERNAME')
        name = request_data_dict.get('name', '')
        is_hidden = request_data_dict.get('is_hidden', 0)
        order_id = int(request_data_dict.get('order_id', 0))
        type_id = int(request_data_dict.get('type_id', 0))
        remember_last_man_enable = int(
            request_data_dict.get('remember_last_man_enable', 0))
        enable_retreat = int(request_data_dict.get('enable_retreat', 0))
        participant_type_id = int(
            request_data_dict.get('participant_type_id', 0))

        participant = request_data_dict.get('participant', '')
        distribute_type_id = int(request_data_dict.get('distribute_type_id',
                                                       1))
        state_field_str = request_data_dict.get('state_field_str', '')
        label = request_data_dict.get('label', '')
        workflow_id = kwargs.get('workflow_id')

        flag, result = workflow_state_service_ins.add_workflow_state(
            workflow_id, name, is_hidden, order_id, type_id,
            remember_last_man_enable, participant_type_id, participant,
            distribute_type_id, state_field_str, label, username,
            enable_retreat)
        if flag is False:
            code, msg, data = -1, result, {}
        else:
            code, msg, data = 0, '', {
                'state_id': result.get('workflow_state_id')
            }
        return api_response(code, msg, data)
Example #8
class Config(object):
    CONFIG = 'config'
    CONFIG_LOCAL = 'config.local'

    SECTION_CORE = 'core'
    SECTION_CORE_LOGLEVEL = 'loglevel'
    SECTION_CORE_LOGLEVEL_SCHEMA = And(Use(str.lower), supported_loglevel)
    SECTION_CORE_REMOTE = 'remote'

    SECTION_CACHE = 'cache'
    SECTION_CACHE_DIR = 'dir'
    SECTION_CACHE_TYPE = 'type'
    SECTION_CACHE_TYPE_SCHEMA = supported_cache_type
    SECTION_CACHE_LOCAL = 'local'
    SECTION_CACHE_S3 = 's3'
    SECTION_CACHE_GS = 'gs'
    SECTION_CACHE_SSH = 'ssh'
    SECTION_CACHE_HDFS = 'hdfs'
    SECTION_CACHE_AZURE = 'azure'
    SECTION_CACHE_SCHEMA = {
        Optional(SECTION_CACHE_LOCAL): str,
        Optional(SECTION_CACHE_S3): str,
        Optional(SECTION_CACHE_GS): str,
        Optional(SECTION_CACHE_HDFS): str,
        Optional(SECTION_CACHE_SSH): str,
        Optional(SECTION_CACHE_AZURE): str,

        # backward compatibility
        Optional(SECTION_CACHE_DIR, default='cache'): str,
        Optional(SECTION_CACHE_TYPE, default=None): SECTION_CACHE_TYPE_SCHEMA,
    }

    # backward compatibility
    SECTION_CORE_CLOUD = 'cloud'
    SECTION_CORE_CLOUD_SCHEMA = And(Use(str.lower), supported_cloud)
    SECTION_CORE_STORAGEPATH = 'storagepath'

    SECTION_CORE_SCHEMA = {
        Optional(SECTION_CORE_LOGLEVEL, default='info'):
        And(str, Use(str.lower), SECTION_CORE_LOGLEVEL_SCHEMA),
        Optional(SECTION_CORE_REMOTE, default=''):
        And(str, Use(str.lower)),

        # backward compatibility
        Optional(SECTION_CORE_CLOUD, default=''):
        SECTION_CORE_CLOUD_SCHEMA,
        Optional(SECTION_CORE_STORAGEPATH, default=''):
        str,
    }

    # backward compatibility
    SECTION_AWS = 'aws'
    SECTION_AWS_STORAGEPATH = 'storagepath'
    SECTION_AWS_CREDENTIALPATH = 'credentialpath'
    SECTION_AWS_ENDPOINT_URL = 'endpointurl'
    SECTION_AWS_REGION = 'region'
    SECTION_AWS_PROFILE = 'profile'
    SECTION_AWS_SCHEMA = {
        SECTION_AWS_STORAGEPATH: str,
        Optional(SECTION_AWS_REGION): str,
        Optional(SECTION_AWS_PROFILE, default='default'): str,
        Optional(SECTION_AWS_CREDENTIALPATH, default=''): str,
        Optional(SECTION_AWS_ENDPOINT_URL, default=None): str,
    }

    # backward compatibility
    SECTION_GCP = 'gcp'
    SECTION_GCP_STORAGEPATH = SECTION_AWS_STORAGEPATH
    SECTION_GCP_PROJECTNAME = 'projectname'
    SECTION_GCP_SCHEMA = {
        SECTION_GCP_STORAGEPATH: str,
        Optional(SECTION_GCP_PROJECTNAME): str,
    }

    # backward compatibility
    SECTION_LOCAL = 'local'
    SECTION_LOCAL_STORAGEPATH = SECTION_AWS_STORAGEPATH
    SECTION_LOCAL_SCHEMA = {
        SECTION_LOCAL_STORAGEPATH: str,
    }

    SECTION_REMOTE_REGEX = r'^\s*remote\s*"(?P<name>.*)"\s*$'
    SECTION_REMOTE_FMT = 'remote "{}"'
    SECTION_REMOTE_URL = 'url'
    SECTION_REMOTE_USER = 'user'
    SECTION_REMOTE_SCHEMA = {
        SECTION_REMOTE_URL: supported_url,
        Optional(SECTION_AWS_REGION): str,
        Optional(SECTION_AWS_PROFILE, default='default'): str,
        Optional(SECTION_AWS_CREDENTIALPATH, default=''): str,
        Optional(SECTION_AWS_ENDPOINT_URL, default=None): str,
        Optional(SECTION_GCP_PROJECTNAME): str,
        Optional(SECTION_CACHE_TYPE): SECTION_CACHE_TYPE_SCHEMA,
        Optional(SECTION_REMOTE_USER): str,
    }

    SCHEMA = {
        Optional(SECTION_CORE, default={}): SECTION_CORE_SCHEMA,
        Optional(Regex(SECTION_REMOTE_REGEX)): SECTION_REMOTE_SCHEMA,
        Optional(SECTION_CACHE, default={}): SECTION_CACHE_SCHEMA,

        # backward compatibility
        Optional(SECTION_AWS, default={}): SECTION_AWS_SCHEMA,
        Optional(SECTION_GCP, default={}): SECTION_GCP_SCHEMA,
        Optional(SECTION_LOCAL, default={}): SECTION_LOCAL_SCHEMA,
    }

    def __init__(self, dvc_dir):
        self.dvc_dir = os.path.abspath(os.path.realpath(dvc_dir))
        self.config_file = os.path.join(dvc_dir, self.CONFIG)
        self.config_local_file = os.path.join(dvc_dir, self.CONFIG_LOCAL)

        try:
            self._config = configobj.ConfigObj(self.config_file)
            local = configobj.ConfigObj(self.config_local_file)

            # NOTE: schema doesn't support ConfigObj.Section validation, so we
            # need to convert our config to dict before passing it to schema.
            self._config = self._lower(self._config)
            local = self._lower(local)
            self._config.update(local)

            self._config = Schema(self.SCHEMA).validate(self._config)

            # NOTE: now converting back to ConfigObj
            self._config = configobj.ConfigObj(self._config,
                                               write_empty_values=True)
            self._config.filename = self.config_file
        except Exception as ex:
            raise ConfigError(ex)

    @staticmethod
    def _lower(config):
        new_config = {}
        for s_key, s_value in config.items():
            new_s = {}
            for key, value in s_value.items():
                new_s[key.lower()] = value
            new_config[s_key.lower()] = new_s
        return new_config

    @staticmethod
    def init(dvc_dir):
        config_file = os.path.join(dvc_dir, Config.CONFIG)
        open(config_file, 'w+').close()
        return Config(dvc_dir)
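Both Config classes above register `Optional(Regex(SECTION_REMOTE_REGEX))` as a dict key, which is how any number of `remote "<name>"` sections validate against one shared sub-schema. A minimal standalone sketch of that key pattern (not dvc itself; the URL is made up):

from schema import Schema, Optional, Regex

remote_regex = r'^\s*remote\s*"(?P<name>.*)"\s*$'
sketch_schema = Schema({
    Optional("core", default={}): {Optional("remote", default=""): str},
    Optional(Regex(remote_regex)): {"url": str},
})

# Any section whose name matches the regex is validated with the remote sub-schema.
print(sketch_schema.validate({'remote "origin"': {"url": "s3://bucket/path"}}))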
Example #9
class WorkflowTransitionView(LoonBaseView):
    post_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        'transition_type_id':
        And(int, error='transition_type_id is needed'),
        'source_state_id':
        And(int, lambda n: n != 0, error='source_state_id is needed'),
        'attribute_type_id':
        And(int, lambda n: n != 0, error='attribute_type_id is needed'),
        Optional('alert_enable'):
        int,
        Optional('field_require_check'):
        int,
        Optional('alert_text'):
        str,
        Optional('destination_state_id'):
        int,
        Optional('timer'):
        int,
        Optional('condition_expression'):
        str,
    })

    @manage_permission_check('workflow_admin')
    def get(self, request, *args, **kwargs):
        """
        Get transitions
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        workflow_id = kwargs.get('workflow_id')
        request_data = request.GET
        per_page = int(request_data.get('per_page', 10)) if request_data.get(
            'per_page', 10) else 10
        page = int(request_data.get('page', 1)) if request_data.get('page',
                                                                    1) else 1
        query_value = request_data.get('search_value', '')
        # if not username:
        #     return api_response(-1, 'please provide username', '')
        flag, result = workflow_transition_service_ins.get_transitions_serialize_by_workflow_id(
            workflow_id, per_page, page, query_value)

        if flag is not False:
            paginator_info = result.get('paginator_info')
            data = dict(value=result.get('workflow_transitions_restful_list'),
                        per_page=paginator_info.get('per_page'),
                        page=paginator_info.get('page'),
                        total=paginator_info.get('total'))
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, result
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def post(self, request, *args, **kwargs):
        """
        Add a transition
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'POST body is empty', {})
        request_data_dict = json.loads(json_str)
        workflow_id = kwargs.get('workflow_id')
        username = request.user.username
        name = request_data_dict.get('name', '')
        transition_type_id = int(request_data_dict.get('transition_type_id',
                                                       0))
        timer = int(request_data_dict.get('timer', 0))
        source_state_id = int(request_data_dict.get('source_state_id', 0))
        destination_state_id = int(
            request_data_dict.get('destination_state_id', 0))
        condition_expression = request_data_dict.get('condition_expression',
                                                     '')
        attribute_type_id = int(request_data_dict.get('attribute_type_id', 0))
        field_require_check = int(
            request_data_dict.get('field_require_check', 0))
        alert_enable = int(request_data_dict.get('alert_enable', 0))
        alert_text = request_data_dict.get('alert_text', '')
        flag, result = workflow_transition_service_ins.add_workflow_transition(
            workflow_id, name, transition_type_id, timer, source_state_id,
            destination_state_id, condition_expression, attribute_type_id,
            field_require_check, alert_enable, alert_text, username)
        if flag is not False:
            data = dict(value=dict(transition_id=result.get('transition_id')))
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, result
        return api_response(code, msg, data)
Example #10
class WorkflowTransitionDetailView(LoonBaseView):
    patch_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        'transition_type_id':
        And(int, error='transition_type_id is needed'),
        'source_state_id':
        And(int, lambda n: n != 0, error='source_state_id is needed'),
        'attribute_type_id':
        And(int, lambda n: n != 0, error='attribute_type_id is needed'),
        Optional('alert_enable'):
        int,
        Optional('field_require_check'):
        int,
        Optional('alert_text'):
        str,
        Optional('destination_state_id'):
        int,
        Optional('timer'):
        int,
        Optional('condition_expression'):
        str,
    })

    @manage_permission_check('workflow_admin')
    def patch(self, request, *args, **kwargs):
        """
        Edit a transition
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'POST body is empty', {})
        request_data_dict = json.loads(json_str)
        workflow_id = kwargs.get('workflow_id')
        app_name = request.META.get('HTTP_APPNAME')
        username = request.user.username
        name = request_data_dict.get('name', '')
        transition_type_id = int(request_data_dict.get('transition_type_id',
                                                       0))
        timer = int(request_data_dict.get('timer', 0))
        source_state_id = int(request_data_dict.get('source_state_id', 0))
        destination_state_id = int(
            request_data_dict.get('destination_state_id', 0))
        condition_expression = request_data_dict.get('condition_expression',
                                                     '')
        attribute_type_id = int(request_data_dict.get('attribute_type_id', 0))
        field_require_check = int(
            request_data_dict.get('field_require_check', 0))
        alert_enable = int(request_data_dict.get('alert_enable', 0))
        alert_text = request_data_dict.get('alert_text', '')
        transition_id = kwargs.get('transition_id')
        flag, result = workflow_transition_service_ins.edit_workflow_transition(
            transition_id, workflow_id, name, transition_type_id, timer,
            source_state_id, destination_state_id, condition_expression,
            attribute_type_id, field_require_check, alert_enable, alert_text)
        if flag is not False:
            data = {}
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, ''
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def delete(self, request, *args, **kwargs):
        """
        Delete a transition
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        transition_id = kwargs.get('transition_id')
        flag, result = workflow_transition_service_ins.del_workflow_transition(
            transition_id)
        if flag is not False:
            data = {}
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, ''
        return api_response(code, msg, data)
Example #11
class WorkflowView(LoonBaseView):
    post_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        Optional('description'):
        str,
        str:
        object
    })

    def get(self, request, *args, **kwargs):
        """
        Get the workflow list
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        request_data = request.GET
        search_value = request_data.get('search_value', '')
        per_page = int(request_data.get('per_page', 10))
        page = int(request_data.get('page', 1))
        from_admin = int(request_data.get('from_admin', 0))  # get workflows the user can administer
        username = request.META.get('HTTP_USERNAME')
        app_name = request.META.get('HTTP_APPNAME')

        flag, result = account_base_service_ins.app_workflow_permission_list(
            app_name)

        if not flag:
            return api_response(-1, result, {})
        if not result.get('workflow_id_list'):
            data = dict(value=[], per_page=per_page, page=page, total=0)
            code, msg, = 0, ''
            return api_response(code, msg, data)
        permission_workflow_id_list = result.get('workflow_id_list')

        flag, result = workflow_base_service_ins.get_workflow_list(
            search_value, page, per_page, permission_workflow_id_list,
            username, from_admin)
        if flag is not False:
            paginator_info = result.get('paginator_info')
            data = dict(value=result.get('workflow_result_restful_list'),
                        per_page=paginator_info.get('per_page'),
                        page=paginator_info.get('page'),
                        total=paginator_info.get('total'))
            code, msg, = 0, ''
        else:
            code, data, msg = -1, '', result
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def post(self, request, *args, **kwargs):
        """
        Add a workflow
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'POST body is empty', {})
        request_data_dict = json.loads(json_str)
        name = request_data_dict.get('name', '')
        description = request_data_dict.get('description', '')
        notices = request_data_dict.get('notices', '')
        view_permission_check = request_data_dict.get('view_permission_check',
                                                      1)
        limit_expression = request_data_dict.get('limit_expression', '')
        display_form_str = request_data_dict.get('display_form_str', '')
        workflow_admin = request_data_dict.get('workflow_admin', '')
        title_template = request_data_dict.get('title_template', '')
        content_template = request_data_dict.get('content_template', '')
        creator = request.META.get('HTTP_USERNAME', '')
        flag, result = workflow_base_service_ins.add_workflow(
            name, description, notices, view_permission_check,
            limit_expression, display_form_str, creator, workflow_admin,
            title_template, content_template)
        if flag is False:
            code, msg, data = -1, result, {}
        else:
            code, msg, data = 0, '', {'workflow_id': result.get('workflow_id')}
        return api_response(code, msg, data)
Example #12
class WorkflowDetailView(LoonBaseView):
    patch_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        Optional('description'):
        str,
        str:
        object
    })

    @manage_permission_check('workflow_admin')
    def get(self, request, *args, **kwargs):
        """
        Get workflow details
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        workflow_id = kwargs.get('workflow_id')
        app_name = request.META.get('HTTP_APPNAME')
        # check whether the app has permission for this workflow
        app_permission, msg = account_base_service_ins.app_workflow_permission_check(
            app_name, workflow_id)
        if not app_permission:
            return api_response(
                -1,
                'APP:{} have no permission to get this workflow info'.format(
                    app_name), '')
        flag, workflow_result = workflow_base_service_ins.get_by_id(
            workflow_id)
        if flag is False:
            code, msg, data = -1, workflow_result, {}
        else:
            data = dict(
                name=workflow_result.name,
                description=workflow_result.description,
                notices=workflow_result.notices,
                view_permission_check=workflow_result.view_permission_check,
                limit_expression=workflow_result.limit_expression,
                display_form_str=workflow_result.display_form_str,
                creator=workflow_result.creator,
                gmt_created=str(workflow_result.gmt_created)[:19])
            code = 0
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def patch(self, request, *args, **kwargs):
        """
        Update a workflow
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'POST body is empty', {})
        request_data_dict = json.loads(json_str)
        app_name = request.META.get('HTTP_APPNAME')
        workflow_id = kwargs.get('workflow_id')
        from service.account.account_base_service import AccountBaseService
        # check whether the app has permission for this workflow
        app_permission, msg = AccountBaseService.app_workflow_permission_check(
            app_name, workflow_id)
        if not app_permission:
            return api_response(
                -1,
                'APP:{} have no permission to get this workflow info'.format(
                    app_name), '')
        name = request_data_dict.get('name', '')
        description = request_data_dict.get('description', '')
        notices = request_data_dict.get('notices', '')
        view_permission_check = request_data_dict.get('view_permission_check',
                                                      1)
        limit_expression = request_data_dict.get('limit_expression', '')
        display_form_str = request_data_dict.get('display_form_str', '')
        workflow_admin = request_data_dict.get('workflow_admin', '')
        title_template = request_data_dict.get('title_template', '')
        content_template = request_data_dict.get('content_template', '')

        flag, result = workflow_base_service_ins.edit_workflow(
            workflow_id, name, description, notices, view_permission_check,
            limit_expression, display_form_str, workflow_admin, title_template,
            content_template)
        if flag is False:
            code, msg, data = -1, result, {}
        else:
            code, msg, data = 0, '', {}
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def delete(self, request, *args, **kwargs):
        """
        Delete a workflow
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        app_name = request.META.get('HTTP_APPNAME')
        workflow_id = kwargs.get('workflow_id')
        # check whether the app has permission for this workflow
        app_permission, msg = account_base_service_ins.app_workflow_permission_check(
            app_name, workflow_id)
        if not app_permission:
            return api_response(
                -1,
                'APP:{} have no permission to get this workflow info'.format(
                    app_name), '')
        flag, result = workflow_base_service_ins.delete_workflow(workflow_id)
        if flag is False:
            code, msg, data = -1, result, {}
        else:
            code, msg, data = 0, '', {}
        return api_response(code, msg, data)
Example #13
class WorkflowCustomFieldDetailView(LoonBaseView):
    patch_schema = Schema({
        'field_key':
        And(str, lambda n: n != '', error='field_key is needed'),
        'field_name':
        And(str, lambda n: n != '', error='field_name is needed'),
        'field_type_id':
        And(int,
            lambda n: n != 0,
            error='field_type_id is needed and should be a number'),
        'order_id':
        And(int, error='order_id is needed and should be a number'),
        Optional('description'):
        str,
        Optional('label'):
        str,
        Optional('field_template'):
        str,
        Optional('default_value'):
        str,
        Optional('boolean_field_display'):
        str,
        Optional('field_choice'):
        str,
    })

    @manage_permission_check('workflow_admin')
    def patch(self, request, *args, **kwargs):
        """
        Update a custom field
        :param request: 
        :param args: 
        :param kwargs: 
        :return: 
        """
        custom_field_id = kwargs.get('custom_field_id')
        app_name = request.META.get('HTTP_APPNAME')
        username = request.META.get('HTTP_USERNAME')
        workflow_id = kwargs.get('workflow_id')
        # check whether the app has permission for this workflow
        app_permission, msg = account_base_service_ins.app_workflow_permission_check(
            app_name, workflow_id)
        if not app_permission:
            return api_response(
                -1,
                'APP:{} have no permission to get this workflow info'.format(
                    app_name), '')
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'POST body is empty', {})
        request_data_dict = json.loads(json_str)
        field_key = request_data_dict.get('field_key', '')
        field_name = request_data_dict.get('field_name', '')
        field_type_id = request_data_dict.get('field_type_id', '')
        order_id = int(request_data_dict.get('order_id', 0))
        label = request_data_dict.get('label', '')
        description = request_data_dict.get('description', '')
        field_template = request_data_dict.get('field_template', '')
        default_value = request_data_dict.get('default_value', '')
        boolean_field_display = request_data_dict.get('boolean_field_display',
                                                      '')
        field_choice = request_data_dict.get('field_choice', '')
        result, msg = workflow_custom_field_service_ins.edit_record(
            custom_field_id, workflow_id, field_type_id, field_key, field_name,
            order_id, default_value, description, field_template,
            boolean_field_display, field_choice, label)

        if result is not False:
            code, msg, data = 0, '', {}
        else:
            code, data = -1, ''
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def delete(self, request, *args, **kwargs):
        """删除记录"""
        app_name = request.META.get('HTTP_APPNAME')
        username = request.META.get('HTTP_USERNAME')
        workflow_id = kwargs.get('workflow_id')
        custom_field_id = kwargs.get('custom_field_id')
        # check whether the app has permission for this workflow
        app_permission, msg = account_base_service_ins.app_workflow_permission_check(
            app_name, workflow_id)
        if not app_permission:
            return api_response(
                -1,
                'APP:{} have no permission to get this workflow info'.format(
                    app_name), '')
        flag, result = workflow_custom_field_service_ins.delete_record(
            custom_field_id)
        if flag is not False:
            data = dict(value={'custom_field_id': result})
            code, msg, = 0, ''
        else:
            code, data = -1, ''
        return api_response(code, msg, data)
Example #14
class OutputBase(object):
    IS_DEPENDENCY = False

    REMOTE = RemoteBASE

    PARAM_PATH = "path"
    PARAM_CACHE = "cache"
    PARAM_METRIC = "metric"
    PARAM_METRIC_TYPE = "type"
    PARAM_METRIC_XPATH = "xpath"
    PARAM_PERSIST = "persist"

    METRIC_SCHEMA = Or(
        None,
        bool,
        {
            Optional(PARAM_METRIC_TYPE): Or(str, None),
            Optional(PARAM_METRIC_XPATH): Or(str, None),
        },
    )

    PARAM_TAGS = "tags"

    DoesNotExistError = OutputDoesNotExistError
    IsNotFileOrDirError = OutputIsNotFileOrDirError
    IsStageFileError = OutputIsStageFileError

    sep = "/"

    def __init__(
        self,
        stage,
        path,
        info=None,
        remote=None,
        cache=True,
        metric=False,
        persist=False,
        tags=None,
    ):
        self._validate_output_path(path)
        # This output (and dependency) objects have too many paths/urls
        # here is a list and comments:
        #
        #   .def_path - path from definition in stage file
        #   .path_info - PathInfo/URLInfo structured resolved path
        #   .fspath - local only, resolved
        #   .__str__ - for presentation purposes, def_path/relpath
        #
        # A resolved path contains the actual location; it should be
        # absolute and must not contain remote:// refs.
        self.stage = stage
        self.repo = stage.repo if stage else None
        self.def_path = path
        self.info = info
        self.remote = remote or self.REMOTE(self.repo, {})
        self.use_cache = False if self.IS_DEPENDENCY else cache
        self.metric = False if self.IS_DEPENDENCY else metric
        self.persist = persist
        self.tags = None if self.IS_DEPENDENCY else (tags or {})

        if self.use_cache and self.cache is None:
            raise DvcException(
                "no cache location setup for '{}' outputs.".format(
                    self.REMOTE.scheme))

        self.path_info = self._parse_path(remote, path)

    def _parse_path(self, remote, path):
        if remote:
            parsed = urlparse(path)
            return remote.path_info / parsed.path.lstrip("/")
        return self.REMOTE.path_cls(path)

    def __repr__(self):
        return "{class_name}: '{def_path}'".format(
            class_name=type(self).__name__, def_path=self.def_path)

    def __str__(self):
        return self.def_path

    @property
    def scheme(self):
        return self.REMOTE.scheme

    @property
    def is_in_repo(self):
        return False

    @property
    def use_scm_ignore(self):
        if not self.is_in_repo:
            return False

        return self.use_cache or self.stage.is_repo_import

    @property
    def cache(self):
        return getattr(self.repo.cache, self.scheme)

    @property
    def dir_cache(self):
        return self.cache.get_dir_cache(self.checksum)

    @classmethod
    def supported(cls, url):
        return cls.REMOTE.supported(url)

    @property
    def cache_path(self):
        return self.cache.checksum_to_path_info(self.checksum).url

    @property
    def checksum(self):
        return self.info.get(self.remote.PARAM_CHECKSUM)

    @property
    def is_dir_checksum(self):
        return self.remote.is_dir_checksum(self.checksum)

    @property
    def exists(self):
        return self.remote.exists(self.path_info)

    def changed_checksum(self):
        return (self.checksum != self.remote.save_info(
            self.path_info)[self.remote.PARAM_CHECKSUM])

    def changed_cache(self):
        if not self.use_cache or not self.checksum:
            return True

        return self.cache.changed_cache(self.checksum)

    def status(self):
        if self.checksum and self.use_cache and self.changed_cache():
            return {str(self): "not in cache"}

        if not self.exists:
            return {str(self): "deleted"}

        if self.changed_checksum():
            return {str(self): "modified"}

        if not self.checksum:
            return {str(self): "new"}

        return {}

    def changed(self):
        status = self.status()
        logger.debug(str(status))
        return bool(status)

    @property
    def is_empty(self):
        return self.remote.is_empty(self.path_info)

    def isdir(self):
        return self.remote.isdir(self.path_info)

    def isfile(self):
        return self.remote.isfile(self.path_info)

    def save(self):
        if not self.exists:
            raise self.DoesNotExistError(self)

        if not self.isfile() and not self.isdir():
            raise self.IsNotFileOrDirError(self)

        if self.is_empty:
            logger.warning("'{}' is empty.".format(self))

        if self.use_scm_ignore:
            if self.repo.scm.is_tracked(self.fspath):
                raise OutputAlreadyTrackedError(self)

            self.repo.scm.ignore(self.fspath)

        if not self.use_cache:
            self.info = self.remote.save_info(self.path_info)
            if self.metric:
                self.verify_metric()
            if not self.IS_DEPENDENCY:
                logger.info(
                    "Output '{}' doesn't use cache. Skipping saving.".format(
                        self))
            return

        assert not self.IS_DEPENDENCY

        if not self.changed():
            logger.info(
                "Output '{}' didn't change. Skipping saving.".format(self))
            return

        self.info = self.remote.save_info(self.path_info)

    def commit(self):
        if self.use_cache:
            self.cache.save(self.path_info, self.info)

    def dumpd(self):
        ret = copy(self.info)
        ret[self.PARAM_PATH] = self.def_path

        if self.IS_DEPENDENCY:
            return ret

        ret[self.PARAM_CACHE] = self.use_cache

        if isinstance(self.metric, dict):
            if (self.PARAM_METRIC_XPATH in self.metric
                    and not self.metric[self.PARAM_METRIC_XPATH]):
                del self.metric[self.PARAM_METRIC_XPATH]

        ret[self.PARAM_METRIC] = self.metric
        ret[self.PARAM_PERSIST] = self.persist

        if self.tags:
            ret[self.PARAM_TAGS] = self.tags

        return ret

    def verify_metric(self):
        raise DvcException("verify metric is not supported for {}".format(
            self.scheme))

    def download(self, to):
        self.remote.download(self.path_info, to.path_info)

    def checkout(self, force=False, progress_callback=None, tag=None):
        if not self.use_cache:
            if progress_callback:
                progress_callback(str(self.path_info),
                                  self.get_files_number())
            return None

        if tag:
            info = self.tags[tag]
        else:
            info = self.info

        return self.cache.checkout(
            self.path_info,
            info,
            force=force,
            progress_callback=progress_callback,
        )

    def remove(self, ignore_remove=False):
        self.remote.remove(self.path_info)
        if self.scheme != "local":
            return

        if ignore_remove and self.use_scm_ignore:
            self.repo.scm.ignore_remove(self.fspath)

    def move(self, out):
        if self.scheme == "local" and self.use_scm_ignore:
            self.repo.scm.ignore_remove(self.fspath)

        self.remote.move(self.path_info, out.path_info)
        self.def_path = out.def_path
        self.path_info = out.path_info
        self.save()
        self.commit()

        if self.scheme == "local" and self.use_scm_ignore:
            self.repo.scm.ignore(self.fspath)

    def get_files_number(self):
        if not self.use_cache:
            return 0

        return self.cache.get_files_number(self.checksum)

    def unprotect(self):
        if self.exists:
            self.remote.unprotect(self.path_info)

    def _collect_used_dir_cache(self, remote=None, force=False, jobs=None):
        """Get a NamedCache of `info` entries related to the given directory.

        - Pull the directory entry from the remote cache if it was changed.

        Example:

            Given the following commands:

            $ echo "foo" > directory/foo
            $ echo "bar" > directory/bar
            $ dvc add directory

            It will return a NamedCache like:

            nc = NamedCache()
            nc.add(self.scheme, 'c157a79031e1', 'directory/foo')
            nc.add(self.scheme, 'd3b07384d113', 'directory/bar')
        """

        ret = NamedCache()

        if self.cache.changed_cache_file(self.checksum):
            try:
                self.repo.cloud.pull(
                    NamedCache.make("local", self.checksum, str(self)),
                    jobs=jobs,
                    remote=remote,
                    show_checksums=False,
                )
            except DvcException:
                logger.debug("failed to pull cache for '{}'".format(self))

        if self.cache.changed_cache_file(self.checksum):
            msg = ("Missing cache for directory '{}'. "
                   "Cache for files inside will be lost. "
                   "Would you like to continue? Use '-f' to force.")
            if not force and not prompt.confirm(msg.format(self.path_info)):
                raise DvcException(
                    "unable to fully collect used cache"
                    " without cache for directory '{}'".format(self))
            else:
                return ret

        cache = NamedCache()
        for entry in self.dir_cache:
            checksum = entry[self.remote.PARAM_CHECKSUM]
            path_info = self.path_info / entry[self.remote.PARAM_RELPATH]
            cache.add(self.scheme, checksum, str(path_info))

        return cache

    def get_used_cache(self, **kwargs):
        """Return a NamedCache describing this output.

        The `used_cache` of an output is no more than its `info`.

        In case the given output is a directory, the `info` of its
        files is included as well.
        """

        if not self.use_cache:
            return NamedCache()

        if not self.info:
            logger.warning(
                "Output '{}'({}) is missing version info. Cache for it will "
                "not be collected. Use dvc repro to get your pipeline up to "
                "date.".format(self, self.stage))
            return NamedCache()

        ret = NamedCache.make(self.scheme, self.checksum, str(self))

        if not self.is_dir_checksum:
            return ret

        ret.update(self._collect_used_dir_cache(**kwargs))

        return ret

    @classmethod
    def _validate_output_path(cls, path):
        from dvc.stage import Stage

        if Stage.is_valid_filename(path):
            raise cls.IsStageFileError(path)
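
A minimal, self-contained sketch of the METRIC_SCHEMA pattern above, where
Or(None, bool, {...}) accepts None, a plain bool, or a nested dict of optional
keys. Only the `schema` package is assumed; the sample values are illustrative
and not taken from DVC itself.

from schema import Optional, Or, Schema

# Illustrative stand-in for OutputBase.METRIC_SCHEMA; not DVC's own object.
metric_schema = Schema(
    Or(
        None,
        bool,
        {
            Optional("type"): Or(str, None),
            Optional("xpath"): Or(str, None),
        },
    ))

assert metric_schema.validate(None) is None
assert metric_schema.validate(True) is True
assert metric_schema.validate({"type": "json"}) == {"type": "json"}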
Exemple #15
0
class WorkflowStateDetailView(LoonBaseView):
    patch_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        'order_id':
        And(int, error='order_id is needed'),
        'type_id':
        And(int, error='type_id is needed'),
        'participant_type_id':
        int,
        'distribute_type_id':
        And(int, lambda n: n != 0, error='distribute_type_id is needed'),
        Optional('remember_last_man_enable'):
        int,
        Optional('state_field_str'):
        str,
        Optional('label'):
        str,
        str:
        object
    })

    @manage_permission_check('workflow_admin')
    def patch(self, request, *args, **kwargs):
        """
        Edit a workflow state
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'post参数为空', {})
        request_data_dict = json.loads(json_str)
        workflow_data = {}
        app_name = request.META.get('HTTP_APPNAME')
        username = request.META.get('HTTP_USERNAME')
        name = request_data_dict.get('name', '')
        is_hidden = request_data_dict.get('is_hidden', 0)
        order_id = int(request_data_dict.get('order_id', 0))
        type_id = int(request_data_dict.get('type_id', 0))
        remember_last_man_enable = int(
            request_data_dict.get('remember_last_man_enable', 0))
        enable_retreat = int(request_data_dict.get('enable_retreat', 0))
        participant_type_id = int(
            request_data_dict.get('participant_type_id', 0))

        participant = request_data_dict.get('participant', '')
        distribute_type_id = int(request_data_dict.get('distribute_type_id',
                                                       1))
        state_field_str = request_data_dict.get('state_field_str', '')
        label = request_data_dict.get('label', '')
        workflow_id = kwargs.get('workflow_id')
        state_id = kwargs.get('state_id')

        flag, result = workflow_state_service_ins.edit_workflow_state(
            state_id, workflow_id, name, is_hidden, order_id, type_id,
            remember_last_man_enable, participant_type_id, participant,
            distribute_type_id, state_field_str, label, enable_retreat)
        if flag is False:
            code, msg, data = -1, result, {}
        else:
            code, msg, data = 0, '', {}
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def delete(self, request, *args, **kwargs):
        """
        Delete a workflow state
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        app_name = request.META.get('HTTP_APPNAME')
        state_id = kwargs.get('state_id')
        flag, result = workflow_state_service_ins.del_workflow_state(state_id)
        if flag is False:
            code, msg, data = -1, result, {}
        else:
            code, msg, data = 0, '', {}
        return api_response(code, msg, data)
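
A hedged sketch (sample payloads invented for illustration) of two details in
the patch_schema above: the error= keyword turns a failed And() into a
readable message, and the bare `str: object` entry lets additional string keys
such as 'is_hidden' pass through.

from schema import And, Optional, Schema, SchemaError

patch_like_schema = Schema({
    'name': And(str, lambda n: n != '', error='name is needed'),
    'order_id': And(int, error='order_id is needed'),
    Optional('label'): str,
    str: object,  # catch-all for the remaining request fields
})

# Extra keys are accepted thanks to the `str: object` entry.
print(patch_like_schema.validate({'name': 'end', 'order_id': 3, 'is_hidden': 0}))

try:
    patch_like_schema.validate({'name': '', 'order_id': 3, 'is_hidden': 0})
except SchemaError as exc:
    print(exc)  # the custom message: name is needed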
Exemple #16
0
    def __init__(self, name, default_value=None):
        Optional.__init__(self, name)
        self.default_value = default_value
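
A hedged sketch of one way the fragment above could be completed; the class
name DefaultOptional and the usage are assumptions for illustration, not part
of the original project. Recent `schema` releases cover the same need with
Optional('key', default=...).

from schema import Optional


class DefaultOptional(Optional):
    """Optional key that carries a default value for the caller to apply."""

    def __init__(self, name, default_value=None):
        Optional.__init__(self, name)
        self.default_value = default_value


key = DefaultOptional('retries', default_value=3)
print(key.default_value)  # -> 3, available when post-processing validated data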
Exemple #17
0
class WorkflowCustomNoticeView(LoonBaseView):
    post_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        'hook_url':
        And(str, lambda n: n != '', error='hook_url is needed'),
        'hook_token':
        And(str, lambda n: n != '', error='hook_token is needed'),
        Optional('description'):
        str,
    })

    @manage_permission_check('admin')
    def get(self, request, *args, **kwargs):
        """
        Get the workflow custom notice list
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        request_data = request.GET
        # username = request_data.get('username', '')  # permission checks based on username will be added later
        username = request.META.get('HTTP_USERNAME')
        if not username:
            username = request.user.username
        search_value = request_data.get('search_value', '')
        per_page = int(request_data.get('per_page', 10)) if request_data.get(
            'per_page', 10) else 10
        page = int(request_data.get('page', 1)) if request_data.get('page',
                                                                    1) else 1
        if not username:
            return api_response(-1, 'please provide username', '')
        result, msg = workflow_custom_notice_service_ins.get_notice_list(
            search_value, page, per_page)

        if result is not False:
            data = dict(value=result,
                        per_page=msg['per_page'],
                        page=msg['page'],
                        total=msg['total'])
            code, msg, = 0, ''
        else:
            code, data = -1, ''
        return api_response(code, msg, data)

    @manage_permission_check('admin')
    def post(self, request, *args, **kwargs):
        """
        Add a notice record
        :param request:
        :param args:
        :param kwargs:
        :return:
        """

        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'post parameters are empty', {})
        request_data_dict = json.loads(json_str)

        name = request_data_dict.get('name', '')
        description = request_data_dict.get('description', '')
        hook_url = request_data_dict.get('hook_url', '')
        hook_token = request_data_dict.get('hook_token', '')
        creator = request.user.username

        flag, result = account_base_service_ins.admin_permission_check(creator)
        if flag is False:
            return api_response(-1, result, {})

        result, msg = workflow_custom_notice_service_ins.add_custom_notice(
            name, description, hook_url, hook_token, creator)
        if result is not False:
            data = {}
            code, msg, = 0, ''
        else:
            code, data = -1, {}
        return api_response(code, msg, data)
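
A small standalone sketch (request payloads invented for illustration) of how
the post_schema above behaves once the JSON body has been decoded.

import json

from schema import And, Optional, Schema, SchemaError

notice_schema = Schema({
    'name': And(str, lambda n: n != '', error='name is needed'),
    'hook_url': And(str, lambda n: n != '', error='hook_url is needed'),
    'hook_token': And(str, lambda n: n != '', error='hook_token is needed'),
    Optional('description'): str,
})

body = json.dumps({'name': 'dingtalk', 'hook_url': 'https://example.org/hook',
                   'hook_token': 'secret'})
print(notice_schema.validate(json.loads(body)))  # the validated dict

try:
    notice_schema.validate({'name': 'dingtalk', 'hook_url': '', 'hook_token': 'x'})
except SchemaError as exc:
    print(exc)  # hook_url is needed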
Exemple #18
0
    x.update(d2)

    return x


#: Schema to validate the CP2K general settings
schema_cp2k_general_settings = Schema({

    # "Basis set to carry out the quantum chemistry simulation"
    "basis": str,

    # "Pseudo-potential to carry out the quantum chemistry simulation"
    "potential": str,

    # Charge of the system
    Optional("charge", default=0): int,

    # Multiplicity
    Optional("multiplicity", default=1): int,

    # Specify the Cartesian components for the cell vector
    "cell_parameters": Or(
        Real,
        lambda xs: len(xs) == 3 and isinstance(xs, list),
        lambda xs: len(xs) == 3 and all(len(r) == 3 for r in xs)),

    # Type of periodicity
    "periodic": any_lambda(("none", "x", "y", "z", "xy", "xz", "yz", "xyz")),

    # Specify the angles between the vectors defining the unit cell
    Optional("cell_angles"): list,
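
The CP2K schema above leans on Optional(..., default=...); here is a minimal
standalone sketch (values invented for illustration) of how validate() fills
in those defaults when the keys are missing.

from schema import Optional, Schema

general_settings_sketch = Schema({
    "basis": str,
    Optional("charge", default=0): int,
    Optional("multiplicity", default=1): int,
})

# Missing optional keys come back populated with their declared defaults:
# the result contains the basis plus charge=0 and multiplicity=1.
print(general_settings_sketch.validate({"basis": "DZVP-MOLOPT-SR-GTH"}))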
Exemple #19
0
class WorkflowCustomNoticeDetailView(LoonBaseView):
    patch_schema = Schema({
        'name':
        And(str, lambda n: n != '', error='name is needed'),
        'hook_url':
        And(str, lambda n: n != '', error='hook_url is needed'),
        'hook_token':
        And(str, lambda n: n != '', error='hook_token is needed'),
        Optional('description'):
        str,
    })

    @manage_permission_check('admin')
    def patch(self, request, *args, **kwargs):
        """
        Update a notice
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        notice_id = kwargs.get('notice_id')

        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'post parameters are empty', {})
        request_data_dict = json.loads(json_str)

        name = request_data_dict.get('name', '')
        description = request_data_dict.get('description', '')
        hook_url = request_data_dict.get('hook_url', '')
        hook_token = request_data_dict.get('hook_token', '')
        creator = request.user.username

        flag, result = account_base_service_ins.admin_permission_check(creator)
        if flag is False:
            return api_response(-1, result, {})

        result, msg = workflow_custom_notice_service_ins.update_custom_notice(
            notice_id, name, description, hook_url, hook_token)
        if result is not False:
            data = {}
            code, msg, = 0, ''
        else:
            code, data = -1, {}
        return api_response(code, msg, data)

    @manage_permission_check('admin')
    def delete(self, request, *args, **kwargs):
        """
        Delete a custom notice
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        notice_id = kwargs.get('notice_id')
        result, msg = workflow_custom_notice_service_ins.del_custom_notice(
            notice_id)
        if result is not False:
            code, msg, data = 0, '', {}
        else:
            code, data = -1, {}
        return api_response(code, msg, data)
Exemple #20
0
def valid_pycron_expr(txt):
    logging.debug("validating as crontab entry - '%s'" % txt)
    if validate_crontab_time_format_regex.match(txt):
        return True
    raise SchemaError('Bad crontab format: %s' % txt)


def port_range(port):
    return 0 <= port <= 65535
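

# `validate_crontab_time_format_regex` is referenced above but not shown in
# this excerpt. The stand-in below is an illustrative assumption (five
# whitespace-separated crontab fields), not the project's actual pattern;
# it only makes the excerpt runnable as a sketch.
import re

validate_crontab_time_format_regex = re.compile(
    r"^\s*\S+\s+\S+\s+\S+\s+\S+\s+\S+\s*$")

# Callables that return True or raise SchemaError plug straight into a
# schema key, e.g.:
#   Schema({'cron': valid_pycron_expr}).validate({'cron': '*/5 * * * *'})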


schema = Schema({
    'mqtt': {
        'host': And(str, len),
        'port': And(int, port_range),
        Optional('username'): And(str, len),
        Optional('password'): And(str, len),
        Optional('cafile'): os.path.exists,
        Optional('certfile'): os.path.exists,
        Optional('keyfile'): os.path.exists,
    },
    Optional('http'): {
        'destination': And(str, len),
        'action': And(str, len),
        Optional('username'): And(str, len),
        Optional('password'): And(str, len)
    },

    'influxdb': {
        'host': And(str, len),
        'port': And(int, port_range),
Exemple #21
0
class WorkflowCustomFieldView(LoonBaseView):
    post_schema = Schema({
        'field_key':
        And(str, lambda n: n != '', error='field_key is needed'),
        'field_name':
        And(str, lambda n: n != '', error='field_name is needed'),
        'field_type_id':
        And(int,
            lambda n: n != 0,
            error='field_type_id is needed and should be a number'),
        'order_id':
        And(int, error='order_id is needed and should be a number'),
        Optional('description'):
        str,
        Optional('label'):
        str,
        Optional('field_template'):
        str,
        Optional('default_value'):
        str,
        Optional('boolean_field_display'):
        str,
        Optional('field_choice'):
        str,
    })

    def get(self, request, *args, **kwargs):
        """
        Get the workflow custom field list
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        request_data = request.GET
        # username = request_data.get('username', '')  # permission checks based on username will be added later
        username = request.META.get('HTTP_USERNAME')
        if not username:
            username = request.user.username
        search_value = request_data.get('search_value', '')
        per_page = int(request_data.get('per_page', 10)) if request_data.get(
            'per_page', 10) else 10
        page = int(request_data.get('page', 1)) if request_data.get('page',
                                                                    1) else 1
        if not username:
            return api_response(-1, 'please provide username', '')
        flag, result = workflow_custom_field_service_ins.get_workflow_custom_field_list(
            kwargs.get('workflow_id'), search_value, page, per_page)

        if flag is not False:
            paginator_info = result.get('paginator_info')
            data = dict(
                value=result.get('workflow_custom_field_result_restful_list'),
                per_page=paginator_info.get('per_page'),
                page=paginator_info.get('page'),
                total=paginator_info.get('total'))
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, result
        return api_response(code, msg, data)

    @manage_permission_check('workflow_admin')
    def post(self, request, *args, **kwargs):
        """
        Add a workflow custom field
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        app_name = request.META.get('HTTP_APPNAME')
        username = request.META.get('HTTP_USERNAME')
        workflow_id = kwargs.get('workflow_id')
        # check whether the app has permission for this workflow
        app_permission, msg = account_base_service_ins.app_workflow_permission_check(
            app_name, workflow_id)
        if not app_permission:
            return api_response(
                -1,
                'APP:{} has no permission to get this workflow info'.format(
                    app_name), '')
        json_str = request.body.decode('utf-8')
        if not json_str:
            return api_response(-1, 'post parameters are empty', {})
        request_data_dict = json.loads(json_str)
        field_key = request_data_dict.get('field_key', '')
        field_name = request_data_dict.get('field_name', '')
        field_type_id = request_data_dict.get('field_type_id', '')
        order_id = int(request_data_dict.get('order_id', 0))
        label = request_data_dict.get('label', '')
        description = request_data_dict.get('description', '')
        field_template = request_data_dict.get('field_template', '')
        default_value = request_data_dict.get('default_value', '')
        boolean_field_display = request_data_dict.get('boolean_field_display',
                                                      '')
        field_choice = request_data_dict.get('field_choice', '')
        flag, result = workflow_custom_field_service_ins.add_record(
            workflow_id, field_type_id, field_key, field_name, order_id,
            default_value, description, field_template, boolean_field_display,
            field_choice, label, username)

        if flag is not False:
            data = dict(
                value={'custom_field_id': result.get('custom_field_id')})
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, result
        return api_response(code, msg, data)
Exemple #22
0
class LoonDeptView(LoonBaseView):
    post_schema = Schema({
        'name': And(str, lambda n: n != ''),
        Optional('parent_dept_id'): int,
        Optional('leader'): str,
        Optional('approver'): str,
        Optional('label'): str,
    })

    @manage_permission_check('admin')
    def get(self, request, *args, **kwargs):
        """
        Department list
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        request_data = request.GET
        search_value = request_data.get('search_value', '')
        per_page = int(request_data.get('per_page', 10))
        page = int(request_data.get('page', 1))
        flag, result = account_base_service_ins.get_dept_list(
            search_value, page, per_page)
        if flag is not False:
            paginator_info = result.get('paginator_info')
            data = dict(value=result.get('dept_result_object_format_list'),
                        per_page=paginator_info.get('per_page'),
                        page=paginator_info.get('page'),
                        total=paginator_info.get('total'))
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, result
        return api_response(code, msg, data)

    @manage_permission_check('admin')
    def post(self, request, *args, **kwargs):
        """
        Add a department
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        request_data_dict = json.loads(json_str)
        name = request_data_dict.get('name')
        parent_dept_id = request_data_dict.get('parent_dept_id')
        leader_id = request_data_dict.get('leader')
        approver_str_list = request_data_dict.get('approver')
        label = request_data_dict.get('label')
        creator = request.user.username
        approver_id_list = [
            int(approver_str) for approver_str in approver_str_list
        ]
        if approver_id_list:
            flag, result = account_base_service_ins.get_user_name_list_by_id_list(
                approver_id_list)
            if flag is False:
                return api_response(-1, result, {})
            approver_username_list = result.get('username_list')
            approver_username_str = ','.join(approver_username_list)
        else:
            approver_username_str = ''

        if leader_id:
            flag, result = account_base_service_ins.get_user_by_user_id(
                int(leader_id))
            if flag is False:
                return api_response(-1, result, {})
            leader = result.username
        else:
            leader = ''
        flag, result = account_base_service_ins.add_dept(
            name, parent_dept_id, leader, approver_username_str, label,
            creator)
        if flag is False:
            return api_response(-1, result, {})
        return api_response(0, result, {})
Exemple #23
0
from schema import Schema, And, Or, Use, Optional, SchemaError

all_products = Schema({
    Optional('caffe'):
    And(Or(float, int), lambda x: x > 0),
    Optional('cioccolato'):
    And(Or(float, int), lambda x: x > 0),
    Optional('te'):
    And(Or(float, int), lambda x: x > 0),
    Optional('acqua calda'):
    And(Or(float, int), lambda x: x > 0),
    Optional('bicchiere vuoto'):
    And(Or(float, int), lambda x: x > 0),
    Optional('cappucino'):
    And(Or(float, int), lambda x: x > 0),
    Optional('laghine'):
    And(Or(float, int), lambda x: x > 0),
})
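
# Illustrative check of the price map above (sample data only): every product
# key is optional, but any price supplied must be a positive float or int.
print(all_products.validate({'caffe': 0.50, 'te': 0.40}))
try:
    all_products.validate({'caffe': -1})
except SchemaError:
    print('prices must be > 0')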

all_consumable_S = Schema({
    Optional('bicchiere'):
    And(Or(float, int), lambda x: x >= 0 and x <= 100),
    Optional('palettina'):
    And(Or(float, int), lambda x: x >= 0 and x <= 100),
    Optional('caffe'):
    And(Or(float, int), lambda x: x >= 0 and x <= 100),
    Optional('zucchero'):
    And(Or(float, int), lambda x: x >= 0 and x <= 100),
    Optional('te'):
    And(Or(float, int), lambda x: x >= 0 and x <= 100),
    Optional('cioccolato'):
Exemple #24
0
class LoonAppTokenView(LoonBaseView):
    post_schema = Schema({
        'app_name':
        And(str, lambda n: n != '', error='app_name is needed'),
        Optional('ticket_sn_prefix'):
        str,
        'workflow_ids':
        And(str, lambda n: n != '', error='workflow_ids is needed'),
    })

    @manage_permission_check('admin')
    def get(self, request, *args, **kwargs):
        """
        Get the API call permission list
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        request_data = request.GET
        search_value = request_data.get('search_value', '')
        per_page = int(request_data.get('per_page', 10))
        page = int(request_data.get('page', 1))
        flag, result = account_base_service_ins.get_token_list(
            search_value, page, per_page)
        if flag is not False:
            paginator_info = result.get('paginator_info')
            data = dict(value=result.get('token_result_object_format_list'),
                        per_page=paginator_info.get('per_page'),
                        page=paginator_info.get('page'),
                        total=paginator_info.get('total'))
            code, msg, = 0, ''
        else:
            code, data, msg = -1, {}, result
        return api_response(code, msg, data)

    @manage_permission_check('admin')
    def post(self, request, *args, **kwargs):
        """
        Add an API call permission record
        :param request:
        :param args:
        :param kwargs:
        :return:
        """
        json_str = request.body.decode('utf-8')
        request_data_dict = json.loads(json_str)
        app_name = request_data_dict.get('app_name', '')
        ticket_sn_prefix = request_data_dict.get('ticket_sn_prefix', '')
        workflow_ids = request_data_dict.get('workflow_ids', '')
        username = request.user.username
        flag, result = account_base_service_ins.add_token_record(
            app_name, ticket_sn_prefix, workflow_ids, username)
        if flag is False:
            code, data = -1, {}
        else:
            code, data = 0, {'id': result.get('app_token_id')}

        return api_response(code, result, data)
Exemple #25
0
class Stage(object):
    STAGE_FILE = "Dvcfile"
    STAGE_FILE_SUFFIX = ".dvc"

    PARAM_MD5 = "md5"
    PARAM_CMD = "cmd"
    PARAM_WDIR = "wdir"
    PARAM_DEPS = "deps"
    PARAM_OUTS = "outs"
    PARAM_LOCKED = "locked"
    PARAM_META = "meta"

    SCHEMA = {
        Optional(PARAM_MD5): Or(str, None),
        Optional(PARAM_CMD): Or(str, None),
        Optional(PARAM_WDIR): Or(str, None),
        Optional(PARAM_DEPS): Or(And(list, Schema([dependency.SCHEMA])), None),
        Optional(PARAM_OUTS): Or(And(list, Schema([output.SCHEMA])), None),
        Optional(PARAM_LOCKED): bool,
        Optional(PARAM_META): object,
    }

    TAG_REGEX = r"^(?P<path>.*)@(?P<tag>[^\\/@:]*)$"

    def __init__(
        self,
        repo,
        path=None,
        cmd=None,
        wdir=os.curdir,
        deps=None,
        outs=None,
        md5=None,
        locked=False,
        tag=None,
        state=None,
    ):
        if deps is None:
            deps = []
        if outs is None:
            outs = []

        self.repo = repo
        self.path = path
        self.cmd = cmd
        self.wdir = wdir
        self.outs = outs
        self.deps = deps
        self.md5 = md5
        self.locked = locked
        self.tag = tag
        self._state = state or {}

    def __repr__(self):
        return "Stage: '{path}'".format(
            path=self.relpath if self.path else "No path")

    @property
    def relpath(self):
        return os.path.relpath(self.path)

    @property
    def is_data_source(self):
        """Whether the stage file was created with `dvc add` or `dvc import`"""
        return self.cmd is None

    @staticmethod
    def is_valid_filename(path):
        return (
            # path.endswith doesn't work for encoded unicode filenames on
            # Python 2 and since Stage.STAGE_FILE_SUFFIX is ascii then it is
            # not needed to decode the path from py2's str
            path[-len(Stage.STAGE_FILE_SUFFIX):] == Stage.STAGE_FILE_SUFFIX
            or os.path.basename(path) == Stage.STAGE_FILE)

    @staticmethod
    def is_stage_file(path):
        return os.path.isfile(path) and Stage.is_valid_filename(path)

    def changed_md5(self):
        return self.md5 != self._compute_md5()

    @property
    def is_callback(self):
        """
        A callback stage is always considered as changed,
        so it runs on every `dvc repro` call.
        """
        return not self.is_data_source and len(self.deps) == 0

    @property
    def is_import(self):
        """Whether the stage file was created with `dvc import`."""
        return not self.cmd and len(self.deps) == 1 and len(self.outs) == 1

    def _changed_deps(self):
        if self.locked:
            return False

        if self.is_callback:
            logger.warning(
                "Dvc file '{fname}' is a 'callback' stage "
                "(has a command and no dependencies) and thus always "
                "considered as changed.".format(fname=self.relpath))
            return True

        for dep in self.deps:
            status = dep.status()
            if status:
                logger.warning(
                    "Dependency '{dep}' of '{stage}' changed because it is "
                    "'{status}'.".format(dep=dep,
                                         stage=self.relpath,
                                         status=status[str(dep)]))
                return True

        return False

    def _changed_outs(self):
        for out in self.outs:
            status = out.status()
            if status:
                logger.warning(
                    "Output '{out}' of '{stage}' changed because it is "
                    "'{status}'".format(out=out,
                                        stage=self.relpath,
                                        status=status[str(out)]))
                return True

        return False

    def _changed_md5(self):
        if self.changed_md5():
            logger.warning("Dvc file '{}' changed.".format(self.relpath))
            return True
        return False

    def changed(self):
        ret = any(
            [self._changed_deps(),
             self._changed_outs(),
             self._changed_md5()])

        if ret:
            logger.warning("Stage '{}' changed.".format(self.relpath))
        else:
            logger.info("Stage '{}' didn't change.".format(self.relpath))

        return ret

    def remove_outs(self, ignore_remove=False, force=False):
        """Used mainly for `dvc remove --outs` and :func:`Stage.reproduce`."""
        for out in self.outs:
            if out.persist and not force:
                out.unprotect()
            else:
                logger.debug("Removing output '{out}' of '{stage}'.".format(
                    out=out, stage=self.relpath))
                out.remove(ignore_remove=ignore_remove)

    def unprotect_outs(self):
        for out in self.outs:
            out.unprotect()

    def remove(self, force=False):
        self.remove_outs(ignore_remove=True, force=force)
        os.unlink(self.path)

    def reproduce(self,
                  force=False,
                  dry=False,
                  interactive=False,
                  no_commit=False):
        if not self.changed() and not force:
            return None

        msg = ("Going to reproduce '{stage}'. "
               "Are you sure you want to continue?".format(stage=self.relpath))

        if interactive and not prompt.confirm(msg):
            raise DvcException("reproduction aborted by the user")

        logger.info("Reproducing '{stage}'".format(stage=self.relpath))

        self.run(dry=dry, no_commit=no_commit, force=force)

        logger.debug("'{stage}' was reproduced".format(stage=self.relpath))

        return self

    @staticmethod
    def validate(d, fname=None):
        from dvc.utils import convert_to_unicode

        try:
            Schema(Stage.SCHEMA).validate(convert_to_unicode(d))
        except SchemaError as exc:
            raise StageFileFormatError(fname, exc)

    @classmethod
    def _stage_fname(cls, fname, outs, add):
        if fname:
            return fname

        if not outs:
            return cls.STAGE_FILE

        out = outs[0]
        path_handler = out.remote.ospath

        fname = path_handler.basename(out.path) + cls.STAGE_FILE_SUFFIX

        fname = Stage._expand_to_path_on_add_local(add, fname, out,
                                                   path_handler)

        return fname

    @staticmethod
    def _expand_to_path_on_add_local(add, fname, out, path_handler):
        if (add and out.is_in_repo
                and not contains_symlink_up_to(out.path, out.repo.root_dir)):
            fname = path_handler.join(path_handler.dirname(out.path), fname)
        return fname

    @staticmethod
    def _check_stage_path(repo, path):
        assert repo is not None

        real_path = os.path.realpath(path)
        if not os.path.exists(real_path):
            raise StagePathNotFoundError(path)

        if not os.path.isdir(real_path):
            raise StagePathNotDirectoryError(path)

        proj_dir = os.path.realpath(repo.root_dir) + os.path.sep
        if not (real_path + os.path.sep).startswith(proj_dir):
            raise StagePathOutsideError(path)

    @property
    def is_cached(self):
        """
        Checks whether this stage has already been run and its results stored
        """
        from dvc.remote.local import RemoteLOCAL
        from dvc.remote.s3 import RemoteS3

        old = Stage.load(self.repo, self.path)
        if old._changed_outs():
            return False

        # NOTE: need to save checksums for deps in order to compare them
        # with what is written in the old stage.
        for dep in self.deps:
            dep.save()

        old_d = old.dumpd()
        new_d = self.dumpd()

        # NOTE: need to remove checksums from old dict in order to compare
        # it to the new one, since the new one doesn't have checksums yet.
        old_d.pop(self.PARAM_MD5, None)
        new_d.pop(self.PARAM_MD5, None)
        outs = old_d.get(self.PARAM_OUTS, [])
        for out in outs:
            out.pop(RemoteLOCAL.PARAM_CHECKSUM, None)
            out.pop(RemoteS3.PARAM_CHECKSUM, None)

        if old_d != new_d:
            return False

        # NOTE: committing to prevent potential data duplication. For example
        #
        #    $ dvc config cache.type hardlink
        #    $ echo foo > foo
        #    $ dvc add foo
        #    $ rm -f foo
        #    $ echo foo > foo
        #    $ dvc add foo # should replace foo with a link to cache
        #
        old.commit()

        return True

    @staticmethod
    def create(
        repo=None,
        cmd=None,
        deps=None,
        outs=None,
        outs_no_cache=None,
        metrics=None,
        metrics_no_cache=None,
        fname=None,
        cwd=None,
        wdir=None,
        locked=False,
        add=False,
        overwrite=True,
        ignore_build_cache=False,
        remove_outs=False,
        validate_state=True,
        outs_persist=None,
        outs_persist_no_cache=None,
    ):
        if outs is None:
            outs = []
        if deps is None:
            deps = []
        if outs_no_cache is None:
            outs_no_cache = []
        if metrics is None:
            metrics = []
        if metrics_no_cache is None:
            metrics_no_cache = []
        if outs_persist is None:
            outs_persist = []
        if outs_persist_no_cache is None:
            outs_persist_no_cache = []

        # Backward compatibility for `cwd` option
        if wdir is None and cwd is not None:
            if fname is not None and os.path.basename(fname) != fname:
                raise StageFileBadNameError(
                    "stage file name '{fname}' may not contain subdirectories"
                    " if '-c|--cwd' (deprecated) is specified. Use '-w|--wdir'"
                    " along with '-f' to specify stage file path and working"
                    " directory.".format(fname=fname))
            wdir = cwd
        else:
            wdir = os.curdir if wdir is None else wdir

        stage = Stage(repo=repo, wdir=wdir, cmd=cmd, locked=locked)

        Stage._fill_stage_outputs(
            stage,
            outs,
            outs_no_cache,
            metrics,
            metrics_no_cache,
            outs_persist,
            outs_persist_no_cache,
        )
        stage.deps = dependency.loads_from(stage, deps)

        stage._check_circular_dependency()
        stage._check_duplicated_arguments()

        fname = Stage._stage_fname(fname, stage.outs, add=add)
        wdir = os.path.abspath(wdir)

        if cwd is not None:
            path = os.path.join(wdir, fname)
        else:
            path = os.path.abspath(fname)

        Stage._check_stage_path(repo, wdir)
        Stage._check_stage_path(repo, os.path.dirname(path))

        stage.wdir = wdir
        stage.path = path

        # NOTE: remove outs before we check build cache
        if remove_outs:
            logger.warning("--remove-outs is deprecated."
                           " It is now the default behavior,"
                           " so there's no need to use this option anymore.")
            stage.remove_outs(ignore_remove=False)
            logger.warning("Build cache is ignored when using --remove-outs.")
            ignore_build_cache = True
        else:
            stage.unprotect_outs()

        if os.path.exists(path) and any(out.persist for out in stage.outs):
            logger.warning("Build cache is ignored when persisting outputs.")
            ignore_build_cache = True

        if validate_state:
            if os.path.exists(path):
                if not ignore_build_cache and stage.is_cached:
                    logger.info("Stage is cached, skipping.")
                    return None

                msg = (
                    "'{}' already exists. Do you wish to run the command and "
                    "overwrite it?".format(stage.relpath))

                if not overwrite and not prompt.confirm(msg):
                    raise StageFileAlreadyExistsError(stage.relpath)

                os.unlink(path)

        return stage

    @staticmethod
    def _fill_stage_outputs(
        stage,
        outs,
        outs_no_cache,
        metrics,
        metrics_no_cache,
        outs_persist,
        outs_persist_no_cache,
    ):
        stage.outs = output.loads_from(stage, outs, use_cache=True)
        stage.outs += output.loads_from(stage,
                                        metrics,
                                        use_cache=True,
                                        metric=True)
        stage.outs += output.loads_from(stage,
                                        outs_persist,
                                        use_cache=True,
                                        persist=True)
        stage.outs += output.loads_from(stage, outs_no_cache, use_cache=False)
        stage.outs += output.loads_from(stage,
                                        metrics_no_cache,
                                        use_cache=False,
                                        metric=True)
        stage.outs += output.loads_from(stage,
                                        outs_persist_no_cache,
                                        use_cache=False,
                                        persist=True)

    @staticmethod
    def _check_dvc_filename(fname):
        if not Stage.is_valid_filename(fname):
            raise StageFileBadNameError(
                "bad stage filename '{}'. Stage files should be named"
                " 'Dvcfile' or have a '.dvc' suffix (e.g. '{}.dvc').".format(
                    os.path.relpath(fname), os.path.basename(fname)))

    @staticmethod
    def _check_file_exists(repo, fname):
        if not repo.tree.exists(fname):
            raise StageFileDoesNotExistError(fname)

    @staticmethod
    def _check_isfile(repo, fname):
        if not repo.tree.isfile(fname):
            raise StageFileIsNotDvcFileError(fname)

    @classmethod
    def _get_path_tag(cls, s):
        regex = re.compile(cls.TAG_REGEX)
        match = regex.match(s)
        if not match:
            return s, None
        return match.group("path"), match.group("tag")

    @staticmethod
    def load(repo, fname):
        fname, tag = Stage._get_path_tag(fname)

        # it raises the proper exceptions by priority:
        # 1. when the file doesn't exist
        # 2. filename is not a dvc filename
        # 3. path doesn't represent a regular file
        Stage._check_file_exists(repo, fname)
        Stage._check_dvc_filename(fname)
        Stage._check_isfile(repo, fname)

        with repo.tree.open(fname) as fd:
            d = load_stage_fd(fd, fname)
        # Making a deepcopy since the original structure
        # loses keys in deps and outs load
        state = copy.deepcopy(d)

        Stage.validate(d, fname=os.path.relpath(fname))
        path = os.path.abspath(fname)

        stage = Stage(
            repo=repo,
            path=path,
            wdir=os.path.abspath(
                os.path.join(os.path.dirname(path),
                             d.get(Stage.PARAM_WDIR, "."))),
            cmd=d.get(Stage.PARAM_CMD),
            md5=d.get(Stage.PARAM_MD5),
            locked=d.get(Stage.PARAM_LOCKED, False),
            tag=tag,
            state=state,
        )

        stage.deps = dependency.loadd_from(stage, d.get(Stage.PARAM_DEPS, []))
        stage.outs = output.loadd_from(stage, d.get(Stage.PARAM_OUTS, []))

        return stage

    def dumpd(self):
        from dvc.remote.base import RemoteBASE

        return {
            key: value
            for key, value in {
                Stage.PARAM_MD5:
                self.md5,
                Stage.PARAM_CMD:
                self.cmd,
                Stage.PARAM_WDIR:
                RemoteBASE.to_posixpath(
                    os.path.relpath(self.wdir, os.path.dirname(self.path))),
                Stage.PARAM_LOCKED:
                self.locked,
                Stage.PARAM_DEPS: [d.dumpd() for d in self.deps],
                Stage.PARAM_OUTS: [o.dumpd() for o in self.outs],
                Stage.PARAM_META:
                self._state.get("meta"),
            }.items() if value
        }

    def dump(self):
        fname = self.path

        self._check_dvc_filename(fname)

        logger.info("Saving information to '{file}'.".format(
            file=os.path.relpath(fname)))
        d = self.dumpd()
        apply_diff(d, self._state)
        dump_stage_file(fname, self._state)

        self.repo.scm.track_file(os.path.relpath(fname))

    def _compute_md5(self):
        from dvc.output.base import OutputBase

        d = self.dumpd()

        # NOTE: removing md5 manually in order to not affect md5s in deps/outs
        if self.PARAM_MD5 in d.keys():
            del d[self.PARAM_MD5]

        # Ignore the wdir default value. In this case stage file w/o
        # wdir has the same md5 as a file with the default value specified.
        # It's important for backward compatibility with pipelines that
        # didn't have WDIR in their stage files.
        if d.get(self.PARAM_WDIR) == ".":
            del d[self.PARAM_WDIR]

        # NOTE: excluding parameters that don't affect the state of the
        # pipeline. Not excluding `OutputLOCAL.PARAM_CACHE`, because if
        # it has changed, we might not have that output in our cache.
        m = dict_md5(
            d,
            exclude=[
                self.PARAM_LOCKED,
                OutputBase.PARAM_METRIC,
                OutputBase.PARAM_TAGS,
                OutputBase.PARAM_PERSIST,
            ],
        )
        logger.debug("Computed stage '{}' md5: '{}'".format(self.relpath, m))
        return m

    def save(self):
        for dep in self.deps:
            dep.save()

        for out in self.outs:
            out.save()

        self.md5 = self._compute_md5()

    @staticmethod
    def _changed_entries(entries):
        ret = []
        for entry in entries:
            if entry.checksum and entry.changed_checksum():
                ret.append(entry.rel_path)
        return ret

    def check_can_commit(self, force):
        changed_deps = self._changed_entries(self.deps)
        changed_outs = self._changed_entries(self.outs)

        if changed_deps or changed_outs or self.changed_md5():
            msg = ("dependencies {}".format(changed_deps)
                   if changed_deps else "")
            msg += " and " if (changed_deps and changed_outs) else ""
            msg += "outputs {}".format(changed_outs) if changed_outs else ""
            msg += "md5" if not (changed_deps or changed_outs) else ""
            msg += " of '{}' changed. Are you sure you want to commit it?".format(
                self.relpath)
            if not force and not prompt.confirm(msg):
                raise StageCommitError(
                    "unable to commit changed '{}'. Use `-f|--force` to "
                    "force.".format(self.relpath))
            self.save()

    def commit(self):
        for out in self.outs:
            out.commit()

    def _check_missing_deps(self):
        missing = [dep for dep in self.deps if not dep.exists]

        if any(missing):
            raise MissingDep(missing)

    @staticmethod
    def _warn_if_fish(executable):  # pragma: no cover
        if (executable is None
                or os.path.basename(os.path.realpath(executable)) != "fish"):
            return

        logger.warning(
            "DVC detected that you are using fish as your default "
            "shell. Be aware that it might cause problems by overwriting "
            "your current environment variables with values defined "
            "in '.fishrc', which might affect your command. See "
            "https://github.com/iterative/dvc/issues/1307. ")

    def _check_circular_dependency(self):
        from dvc.exceptions import CircularDependencyError

        circular_dependencies = set(d.path for d in self.deps) & set(
            o.path for o in self.outs)

        if circular_dependencies:
            raise CircularDependencyError(circular_dependencies.pop())

    def _check_duplicated_arguments(self):
        from dvc.exceptions import ArgumentDuplicationError
        from collections import Counter

        path_counts = Counter(edge.path for edge in self.deps + self.outs)

        for path, occurrence in path_counts.items():
            if occurrence > 1:
                raise ArgumentDuplicationError(path)

    def _run(self):
        self._check_missing_deps()
        executable = os.getenv("SHELL") if os.name != "nt" else None
        self._warn_if_fish(executable)

        p = subprocess.Popen(
            self.cmd,
            cwd=self.wdir,
            shell=True,
            env=fix_env(os.environ),
            executable=executable,
        )
        p.communicate()

        if p.returncode != 0:
            raise StageCmdFailedError(self)

    def run(self, dry=False, resume=False, no_commit=False, force=False):
        if (self.cmd or self.is_import) and not self.locked and not dry:
            self.remove_outs(ignore_remove=False, force=False)

        if self.locked:
            logger.info("Verifying outputs in locked stage '{stage}'".format(
                stage=self.relpath))
            if not dry:
                self.check_missing_outputs()

        elif self.is_import:
            logger.info("Importing '{dep}' -> '{out}'".format(
                dep=self.deps[0].path, out=self.outs[0].path))
            if not dry:
                if self._already_cached() and not force:
                    self.outs[0].checkout()
                else:
                    self.deps[0].download(self.outs[0].path_info,
                                          resume=resume)

        elif self.is_data_source:
            msg = "Verifying data sources in '{}'".format(self.relpath)
            logger.info(msg)
            if not dry:
                self.check_missing_outputs()

        else:
            logger.info("Running command:\n\t{}".format(self.cmd))
            if not dry:
                if (not force and not self.is_callback
                        and self._already_cached()):
                    self.checkout()
                else:
                    self._run()

        if not dry:
            self.save()
            if not no_commit:
                self.commit()

    def check_missing_outputs(self):
        paths = [
            out.path if out.scheme != "local" else out.rel_path
            for out in self.outs if not out.exists
        ]

        if paths:
            raise MissingDataSource(paths)

    def checkout(self, force=False, progress_callback=None):
        for out in self.outs:
            out.checkout(force=force,
                         tag=self.tag,
                         progress_callback=progress_callback)

    @staticmethod
    def _status(entries):
        ret = {}

        for entry in entries:
            ret.update(entry.status())

        return ret

    def status(self):
        ret = []

        if not self.locked:
            deps_status = self._status(self.deps)
            if deps_status:
                ret.append({"changed deps": deps_status})

        outs_status = self._status(self.outs)
        if outs_status:
            ret.append({"changed outs": outs_status})

        if self.changed_md5():
            ret.append("changed checksum")

        if self.is_callback:
            ret.append("always changed")

        if ret:
            return {self.relpath: ret}

        return {}

    def _already_cached(self):
        return (not self.changed_md5()
                and all(not dep.changed() for dep in self.deps)
                and all(not out.changed_cache() if out.
                        use_cache else not out.changed() for out in self.outs))

    def get_all_files_number(self):
        return sum(out.get_files_number() for out in self.outs)
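
A self-contained sketch of the Or(And(list, Schema([...])), None) pattern used
in Stage.SCHEMA above; ITEM_SCHEMA here is a made-up stand-in, since
dependency.SCHEMA and output.SCHEMA are not part of this excerpt.

from schema import And, Optional, Or, Schema

ITEM_SCHEMA = {"path": str, Optional("md5"): Or(str, None)}

stage_like_schema = Schema({
    Optional("cmd"): Or(str, None),
    Optional("deps"): Or(And(list, Schema([ITEM_SCHEMA])), None),
})

# Each list element is validated against ITEM_SCHEMA; None is also accepted.
print(stage_like_schema.validate(
    {"cmd": "python train.py", "deps": [{"path": "data.csv", "md5": None}]}))
print(stage_like_schema.validate({"deps": None}))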
Exemple #26
0
class SMSAppTemplateAPI(SQLModelSchemaResource):
    """SMS app template management"""

    model = SMSAppTemplate
    business_unique_fields = ("template_title", )
    allow_query_all = True
    has_is_delete = True
    filter_fields = (
        ("id", "==", "id", int),
        ("app_id", "==", "app_id", int),
        ("template_content_code", "==", "template_content_code", str),
        ("template_title", "contains", "template_title", str),
        ("operator", "contains", "operator", str),
        ("operator_id", "==", "operator_id", str),
        ("is_valid", "==", "is_valid", int),
        ("is_delete", "==", "is_delete", int),
        ("channel_name", "contains", "channel_name", str),
    )

    can_not_be_empty = And(Use(lambda s: str(s).strip()), len)
    is_bool = And(Use(int), lambda n: n in (0, 1))
    validate_schemas = {
        "post":
        Schema({
            "app_id": Use(int),
            Optional("template_content_code"): And(str),
            "template_title": can_not_be_empty,
            "channel_name": can_not_be_empty,
            "template": can_not_be_empty,
            "price": can_not_be_empty,
            "operator_id": can_not_be_empty,
            "operator": can_not_be_empty,
        }),
        "put":
        Schema({
            "id": Use(int),
            Optional("app_id"): Use(int),
            Optional("template_content_code"): And(str),
            Optional("template_title"): can_not_be_empty,
            Optional("channel_name"): can_not_be_empty,
            Optional("template"): can_not_be_empty,
            Optional("price"): can_not_be_empty,
            Optional("operator_id"): can_not_be_empty,
            Optional("operator"): can_not_be_empty,
            Optional("is_valid"): is_bool,
        }),
        "delete":
        Schema({"id": Use(int)}),
    }

    def get_queryset(self, args):
        queryset = super().get_queryset(args)
        return queryset.join(SMSApp,
                             SMSAppTemplate.app_id == SMSApp.id).add_columns(
                                 SMSApp.app_name)

    def custom_serializable(self, data):
        """因为做了 join 查询,所有需要自定义序列化返回结果"""

        result_list = []
        for (model, app_name) in data:
            result = {k: v for k, v in model.__dict__.items()}
            result.pop("_sa_instance_state")
            result["app_name"] = app_name
            result_list.append(result)
        return result_list

    def delete(self):
        """如果短信模板中引用了相关应用模板,则禁止删除该短信模板
        """

        pk = self.validate_data.get(self.pk_name)
        if (SmsBussinessTemplate.query.filter(
                SmsBussinessTemplate.app_templates.contains(
                    '"{}"'.format(pk))).count() > 0):
            return Msg.SMS_APP_TEMP_DEPENDENT_DELETE
        return super().delete()
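
A minimal standalone sketch (validators re-declared here; the field subset and sample payload are illustrative only) of how the And/Use combinators used in validate_schemas above coerce and check input:

from schema import And, Optional, Schema, Use

can_not_be_empty = And(Use(lambda s: str(s).strip()), len)
is_bool = And(Use(int), lambda n: n in (0, 1))

put_schema = Schema({
    "id": Use(int),
    Optional("template_title"): can_not_be_empty,
    Optional("is_valid"): is_bool,
})

# Use(int) coerces "42" -> 42, the title is stripped, and is_valid becomes 1.
put_schema.validate({"id": "42", "template_title": "  promo  ", "is_valid": "1"})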
Exemple #27
0
        params = {}  # Fix in case the yaml file is empty

    try:
        params = param_schema.validate(params)
    except SchemaError as e:
        message = 'Invalid input file {0}\n{1}'.format(fname, e)
        raise RuntimeError(message)

    return params


# Parameter file schemas and defaults
MuAirssSchema = Schema({
    # Name to call the folder for containing each structure. This name will
    # be postfixed with a unique number. e.g. struct_001
    Optional('name', default='struct'):
    validate_str,
    # Calculator to generate structure files for. Must be a comma separated
    # list of values. Currently supported calculators are CASTEP, DFTB+ and
    # UEP. Can also pass all as an option to generate files for all
    # calculators.
    Optional('calculator', default='dftb+'):
    validate_all_of('castep', 'dftb+', 'uep', 'all', case_sensitive=False),
    # Command to use to run CASTEP.
    Optional('castep_command', default='castep.serial'):
    validate_str,
    # Command to use to run DFTB+.
    Optional('dftb_command', default='dftb+'):
    validate_str,
    # Path to script file to copy in all folders
    Optional('script_file', default=None):
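
(The MuAirssSchema above is truncated in the source.) As a hedged sketch of how the Optional(..., default=...) keys it relies on behave when the input omits them:

from schema import Optional, Schema

mini = Schema({
    Optional('name', default='struct'): str,
    Optional('calculator', default='dftb+'): str,
})

mini.validate({})                    # -> {'name': 'struct', 'calculator': 'dftb+'}
mini.validate({'name': 'muon_run'})  # -> {'name': 'muon_run', 'calculator': 'dftb+'}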
Exemple #28
0
from schema import Schema, And, Or, Optional, Use

feature_point = Schema({
    'position': And(int, lambda n: n >= 0),
    Optional('uncertain'): And(bool, True)
})

feature = Schema({
    'features': Schema({}, ignore_extra_keys=True),
    'start': feature_point,
    'end': feature_point,
    'type': And(str, len)
})


def validate(current):
    feature.validate(current)
    for next in current['features'].values():
        validate(next)


TEST = {
    'features': {
        'nested': {
            'features': {},
            'start': {
                'position': 0
            },
            'end': {
                'position': 0
            },
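
(The TEST fixture above is cut off in the source.) A sketch of data that the recursive validate() helper would accept; field values are illustrative and assume feature, feature_point and validate from this example are in scope:

sample = {
    'features': {
        'nested': {
            'features': {},
            'start': {'position': 0},
            'end': {'position': 4},
            'type': 'child',
        },
    },
    'start': {'position': 0},
    'end': {'position': 10},
    'type': 'root',
}
validate(sample)  # raises SchemaError if any node in the tree is malformed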
Exemple #29
0
class Config(object):
    CONFIG = 'config'
    CONFIG_LOCAL = 'config.local'

    SECTION_CORE = 'core'
    SECTION_CORE_LOGLEVEL = 'loglevel'
    SECTION_CORE_LOGLEVEL_SCHEMA = And(Use(str.lower), supported_loglevel)
    SECTION_CORE_REMOTE = 'remote'
    SECTION_CORE_INTERACTIVE_SCHEMA = And(str, is_bool, Use(to_bool))
    SECTION_CORE_INTERACTIVE = 'interactive'

    SECTION_CACHE = 'cache'
    SECTION_CACHE_DIR = 'dir'
    SECTION_CACHE_TYPE = 'type'
    SECTION_CACHE_TYPE_SCHEMA = supported_cache_type
    SECTION_CACHE_LOCAL = 'local'
    SECTION_CACHE_S3 = 's3'
    SECTION_CACHE_GS = 'gs'
    SECTION_CACHE_SSH = 'ssh'
    SECTION_CACHE_HDFS = 'hdfs'
    SECTION_CACHE_AZURE = 'azure'
    SECTION_CACHE_SCHEMA = {
        Optional(SECTION_CACHE_LOCAL): str,
        Optional(SECTION_CACHE_S3): str,
        Optional(SECTION_CACHE_GS): str,
        Optional(SECTION_CACHE_HDFS): str,
        Optional(SECTION_CACHE_SSH): str,
        Optional(SECTION_CACHE_AZURE): str,

        # backward compatibility
        Optional(SECTION_CACHE_DIR, default='cache'): str,
        Optional(SECTION_CACHE_TYPE, default=None): SECTION_CACHE_TYPE_SCHEMA,
    }

    # backward compatibility
    SECTION_CORE_CLOUD = 'cloud'
    SECTION_CORE_CLOUD_SCHEMA = And(Use(str.lower), supported_cloud)
    SECTION_CORE_STORAGEPATH = 'storagepath'

    SECTION_CORE_SCHEMA = {
        Optional(SECTION_CORE_LOGLEVEL,
                 default='info'): And(str, Use(str.lower),
                                      SECTION_CORE_LOGLEVEL_SCHEMA),
        Optional(SECTION_CORE_REMOTE, default=''): And(str, Use(str.lower)),
        Optional(SECTION_CORE_INTERACTIVE,
                 default=False): SECTION_CORE_INTERACTIVE_SCHEMA,

        # backward compatibility
        Optional(SECTION_CORE_CLOUD, default=''): SECTION_CORE_CLOUD_SCHEMA,
        Optional(SECTION_CORE_STORAGEPATH, default=''): str,
    }

    # backward compatibility
    SECTION_AWS = 'aws'
    SECTION_AWS_STORAGEPATH = 'storagepath'
    SECTION_AWS_CREDENTIALPATH = 'credentialpath'
    SECTION_AWS_ENDPOINT_URL = 'endpointurl'
    SECTION_AWS_REGION = 'region'
    SECTION_AWS_PROFILE = 'profile'
    SECTION_AWS_SCHEMA = {
        SECTION_AWS_STORAGEPATH: str,
        Optional(SECTION_AWS_REGION): str,
        Optional(SECTION_AWS_PROFILE, default='default'): str,
        Optional(SECTION_AWS_CREDENTIALPATH, default=''): str,
        Optional(SECTION_AWS_ENDPOINT_URL, default=None): str,
    }

    # backward compatibility
    SECTION_GCP = 'gcp'
    SECTION_GCP_STORAGEPATH = SECTION_AWS_STORAGEPATH
    SECTION_GCP_PROJECTNAME = 'projectname'
    SECTION_GCP_SCHEMA = {
        SECTION_GCP_STORAGEPATH: str,
        Optional(SECTION_GCP_PROJECTNAME): str,
    }

    # backward compatibility
    SECTION_LOCAL = 'local'
    SECTION_LOCAL_STORAGEPATH = SECTION_AWS_STORAGEPATH
    SECTION_LOCAL_SCHEMA = {
        SECTION_LOCAL_STORAGEPATH: str,
    }

    SECTION_REMOTE_REGEX = r'^\s*remote\s*"(?P<name>.*)"\s*$'
    SECTION_REMOTE_FMT = 'remote "{}"'
    SECTION_REMOTE_URL = 'url'
    SECTION_REMOTE_USER = 'user'
    SECTION_REMOTE_PORT = 'port'
    SECTION_REMOTE_KEY_FILE = 'keyfile'
    SECTION_REMOTE_TIMEOUT = 'timeout'
    SECTION_REMOTE_PASSWORD = 'password'
    SECTION_REMOTE_ASK_PASSWORD = 'ask_password'
    SECTION_REMOTE_SCHEMA = {
        SECTION_REMOTE_URL: And(supported_url, error="Unsupported URL"),
        Optional(SECTION_AWS_REGION): str,
        Optional(SECTION_AWS_PROFILE, default='default'): str,
        Optional(SECTION_AWS_CREDENTIALPATH, default=''): str,
        Optional(SECTION_AWS_ENDPOINT_URL, default=None): str,
        Optional(SECTION_GCP_PROJECTNAME): str,
        Optional(SECTION_CACHE_TYPE): SECTION_CACHE_TYPE_SCHEMA,
        Optional(SECTION_REMOTE_USER): str,
        Optional(SECTION_REMOTE_PORT): Use(int),
        Optional(SECTION_REMOTE_KEY_FILE): str,
        Optional(SECTION_REMOTE_TIMEOUT): Use(int),
        Optional(SECTION_REMOTE_PASSWORD): str,
        Optional(SECTION_REMOTE_ASK_PASSWORD): And(str, is_bool, Use(to_bool)),
    }

    SECTION_STATE = 'state'
    SECTION_STATE_ROW_LIMIT = 'row_limit'
    SECTION_STATE_ROW_CLEANUP_QUOTA = 'row_cleanup_quota'
    SECTION_STATE_SCHEMA = {
        Optional(SECTION_STATE_ROW_LIMIT): And(Use(int), is_whole),
        Optional(SECTION_STATE_ROW_CLEANUP_QUOTA): And(Use(int), is_percent),
    }

    SCHEMA = {
        Optional(SECTION_CORE, default={}): SECTION_CORE_SCHEMA,
        Optional(Regex(SECTION_REMOTE_REGEX)): SECTION_REMOTE_SCHEMA,
        Optional(SECTION_CACHE, default={}): SECTION_CACHE_SCHEMA,
        Optional(SECTION_STATE, default={}): SECTION_STATE_SCHEMA,

        # backward compatibility
        Optional(SECTION_AWS, default={}): SECTION_AWS_SCHEMA,
        Optional(SECTION_GCP, default={}): SECTION_GCP_SCHEMA,
        Optional(SECTION_LOCAL, default={}): SECTION_LOCAL_SCHEMA,
    }

    def __init__(self, dvc_dir):
        self.dvc_dir = os.path.abspath(os.path.realpath(dvc_dir))
        self.config_file = os.path.join(dvc_dir, self.CONFIG)
        self.config_local_file = os.path.join(dvc_dir, self.CONFIG_LOCAL)

        try:
            self._config = configobj.ConfigObj(self.config_file)
            local = configobj.ConfigObj(self.config_local_file)

            # NOTE: schema doesn't support ConfigObj.Section validation, so we
            # need to convert our config to dict before passing it to Schema.
            self._config = self._lower(self._config)
            local = self._lower(local)
            self._config = self._merge(self._config, local)

            self._config = Schema(self.SCHEMA).validate(self._config)

            # NOTE: now converting back to ConfigObj
            self._config = configobj.ConfigObj(self._config,
                                               write_empty_values=True)
            self._config.filename = self.config_file
        except Exception as ex:
            raise ConfigError(ex)

    @staticmethod
    def _merge(first, second):
        res = {}
        sections = list(first.keys()) + list(second.keys())
        for section in sections:
            f = first.get(section, {}).copy()
            s = second.get(section, {}).copy()
            f.update(s)
            res[section] = f
        return res

    @staticmethod
    def _lower(config):
        new_config = {}
        for s_key, s_value in config.items():
            new_s = {}
            for key, value in s_value.items():
                new_s[key.lower()] = value
            new_config[s_key.lower()] = new_s
        return new_config

    @staticmethod
    def init(dvc_dir):
        config_file = os.path.join(dvc_dir, Config.CONFIG)
        open(config_file, 'w+').close()
        return Config(dvc_dir)
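
As an illustrative aside, the Optional(Regex(...)) key in SCHEMA above is what lets arbitrarily named remote "..." sections be validated against SECTION_REMOTE_SCHEMA. A minimal standalone sketch (constants inlined, values hypothetical):

from schema import Optional, Regex, Schema

mini = Schema({
    Optional(Regex(r'^\s*remote\s*"(?P<name>.*)"\s*$')): {'url': str},
})

mini.validate({'remote "origin"': {'url': 's3://bucket/path'}})  # passes
# mini.validate({'origin': {'url': 's3://bucket/path'}})         # raises SchemaError: unmatched key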
Exemple #30
0
    def __init__(self):
        super().__init__(
            priority=50,
            schema=Schema({"type": And(str, len), Optional("context"): Or(dict, None)}),
        )
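
A hedged sketch of what the schema passed to super().__init__ above accepts (re-declared standalone; the base class is not shown in this fragment):

from schema import And, Optional, Or, Schema

payload_schema = Schema({"type": And(str, len), Optional("context"): Or(dict, None)})

payload_schema.validate({"type": "greeting"})                       # passes
payload_schema.validate({"type": "greeting", "context": None})      # passes
payload_schema.validate({"type": "greeting", "context": {"a": 1}})  # passes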
Exemple #31
0

def port_range(port):
    return 0 <= port <= 65535


schema = Schema({
    'zmq': {
        'host': And(str, len),
        'port': And(Use(int), port_range),
        'timeout': And(Use(int), lambda x: x > 0)
    },
    'azure': {
        'connection_string': And(str, len)
    },
    Optional('log'): dict
})
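
As an illustrative sketch (values are placeholders; assumes the schema object and port_range above are in scope), a YAML document that load_config below would accept:

import yaml

sample = yaml.safe_load("""
zmq:
  host: localhost
  port: 5555
  timeout: 30
azure:
  connection_string: "UseDevelopmentStorage=true"
""")
schema.validate(sample)  # passes; raises SchemaError if a field is missing or malformed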


def load_config(config_file):
    config = yaml.safe_load(config_file)
    try:
        config = schema.validate(config)
    except SchemaError as e:
        # Collapse the multi-line SchemaError into a single-line message (dropping its second line)
        error = str(e).splitlines()
        del error[1]
        raise Exception(' '.join(error))

    _apply_default(config, DEFAULT)