'Connection': 'close'
}

# Sentinel table identity used by the periodic detector: (namespace, name, partitions).
DETECT_TABLE = ("fate_flow_detect_table_namespace", "fate_flow_detect_table_name", 16)

# fate-serving
# ZooKeeper registry paths where serving / flow transfer providers publish themselves,
# and the HTTP path used for model transfer.
SERVINGS_ZK_PATH = '/FATE-SERVICES/serving/online/publishLoad/providers'
FATE_FLOW_ZK_PATH = '/FATE-SERVICES/flow/online/transfer/providers'
FATE_FLOW_MODEL_TRANSFER_PATH = '/v1/model/transfer'

# fate-manager
# HTTP endpoints exposed by fate-manager that FATE-Flow calls.
FATE_MANAGER_GET_NODE_INFO = '/node/info'
FATE_MANAGER_NODE_CHECK = '/node/management/check'

# logger
# 10 == DEBUG; numeric levels: {CRITICAL: 50, FATAL: 50, ERROR: 40, WARNING: 30, WARN: 30, INFO: 20, DEBUG: 10, NOTSET: 0}
log_utils.LoggerFactory.LEVEL = 10
# All FATE-Flow log files go under <project_base>/logs/fate_flow.
log_utils.LoggerFactory.set_directory(os.path.join(file_utils.get_project_base_directory(), 'logs', 'fate_flow'))
# Dedicated loggers for statistics, detection, access and audit records.
stat_logger = log_utils.getLogger("fate_flow_stat")
detect_logger = log_utils.getLogger("fate_flow_detect")
access_logger = log_utils.getLogger("fate_flow_access")
audit_logger = log_utils.audit_logger()

"""
Services
"""
# Bind address and ports read from the "fate_flow" section of the base config.
# NOTE(review): get_base_config is defined outside this chunk — presumably reads the
# service configuration file; host falls back to 0.0.0.0, ports have no fallback here.
IP = get_base_config("fate_flow", {}).get("host", "0.0.0.0")
HTTP_PORT = get_base_config("fate_flow", {}).get("http_port")
GRPC_PORT = get_base_config("fate_flow", {}).get("grpc_port")
# A standalone job will be sent to the standalone job server when FATE-Flow runs in
# cluster deploy mode; this is NOT the port FATE-Flow itself uses in standalone deploy mode.
CLUSTER_STANDALONE_JOB_SERVER_PORT = 9381
from fate_flow.entity.metric import Metric
from fate_flow.entity.metric import MetricMeta
from federatedml.feature.instance import Instance
from federatedml.feature.sparse_vector import SparseVector
from federatedml.util import consts
from federatedml.util import abnormal_detection
from federatedml.statistic import data_overview
from federatedml.model_base import ModelBase
from federatedml.protobuf.generated.data_io_meta_pb2 import DataIOMeta
from federatedml.protobuf.generated.data_io_param_pb2 import DataIOParam
from federatedml.protobuf.generated.data_io_meta_pb2 import ImputerMeta
from federatedml.protobuf.generated.data_io_param_pb2 import ImputerParam
from federatedml.protobuf.generated.data_io_meta_pb2 import OutlierMeta
from federatedml.protobuf.generated.data_io_param_pb2 import OutlierParam

# Module-level logger; log_utils is imported above this chunk (not visible here).
LOGGER = log_utils.getLogger()


# =============================================================================
# DenseFeatureReader
# =============================================================================
class DenseFeatureReader(object):
    """Reader for dense-format input data.

    Configured entirely from a data-io parameter object; this view only shows
    the constructor — the rest of the class is outside this chunk.
    """

    def __init__(self, data_io_param):
        # Copy parsing / cleaning settings from the parameter object verbatim.
        # NOTE: 'delimitor' spelling is kept — it is a public attribute name.
        self.delimitor = data_io_param.delimitor                        # field separator for input lines
        self.data_type = data_io_param.data_type
        self.missing_fill = data_io_param.missing_fill                  # presumably: whether to impute missing values — confirm against DataIOParam
        self.default_value = data_io_param.default_value
        self.missing_fill_method = data_io_param.missing_fill_method
        self.missing_impute = data_io_param.missing_impute
        self.outlier_replace = data_io_param.outlier_replace            # presumably: whether to replace outlier values — confirm against DataIOParam
        self.outlier_replace_method = data_io_param.outlier_replace_method
# you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from collections import deque from arch.api.utils.log_utils import getLogger LOGGER = getLogger() class Rubbish(object): """ a collection collects all tables / objects in federation tagged by `tag`. """ def __init__(self, name, tag): self._name = name self._tag = tag self._tables = [] self._kv = {} @property def tag(self): return self._tag