示例#1
0
    def __init__(self):
        """Set up the singleton's state exactly once.

        The ``self._initialized`` guard makes repeated construction a
        no-op, so the logger and the cached levels are created a single
        time per process.
        """
        if not self._initialized:
            self._initialized = True
            # NOTE(review): level 1 sits below logging.DEBUG — presumably
            # chosen so the logger passes everything through; confirm.
            self._logger = log_helper.get_logger(
                __name__, 1, fmt='%(asctime)s-%(levelname)s: %(message)s')
            # Levels start unset; callers are expected to configure them later.
            self._verbosity_level = None
            self._transformed_code_level = None
示例#2
0
    def __init__(self):
        """Populate the singleton's state on first construction only.

        Re-running ``__init__`` on an already-initialized instance does
        nothing, so the named logger and the option caches are built once.
        """
        if not self._initialized:
            self._initialized = True
            # Dedicated channel name for the dynamic-to-static machinery.
            self.logger_name = "Dynamic-to-Static"
            # NOTE(review): level 1 is below logging.DEBUG — presumably
            # deliberate so every record is emitted; confirm upstream.
            self._logger = log_helper.get_logger(
                self.logger_name,
                1,
                fmt='%(asctime)s %(name)s %(levelname)s: %(message)s')
            # All options start unset until explicitly configured.
            self._verbosity_level = None
            self._transformed_code_level = None
            self._need_to_echo_log_to_stdout = None
            self._need_to_echo_code_to_stdout = None
示例#3
0
import os
import sys
import subprocess
import multiprocessing
from datetime import datetime

import re
import copy
import errno

import logging
from paddle.fluid.log_helper import get_logger

# Public API of this module: the HDFS client plus the parallel
# download/upload helpers.
__all__ = ["HDFSClient", "multi_download", "multi_upload"]

# Module-level logger at INFO with a compact "time-LEVEL: message" format.
_logger = get_logger(
    __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s')


class HDFSClient(object):
    """
    A tool of HDFS 

    Args:
        hadoop_home (string): hadoop_home 
        configs (dict): hadoop config, it is a dict, please contain \
            key "fs.default.name" and "hadoop.job.ugi"
        Can be a float value
    Examples:
        hadoop_home = "/home/client/hadoop-client/hadoop/"

        configs = {
示例#4
0
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging
import time

import paddle.fluid as fluid
import paddle.fluid.incubate.fleet.base.role_maker as role_maker
from paddle.fluid.incubate.fleet.parameter_server.distribute_transpiler import fleet
from paddle.fluid.transpiler.distribute_transpiler import DistributeTranspilerConfig
from paddle.fluid.log_helper import get_logger

import ctr_dataset_reader

# Script-wide logger on the "fluid" channel at INFO level.
logger = get_logger(
    "fluid", logging.INFO, fmt='%(asctime)s - %(levelname)s - %(message)s')


def parse_args():
    parser = argparse.ArgumentParser(description="PaddlePaddle Fleet ctr")

    # the following arguments is used for distributed train, if is_local == false, then you should set them
    parser.add_argument(
        '--role',
        type=str,
        default='pserver',  # trainer or pserver
        help='The path for model to store (default: models)')
    parser.add_argument(
        '--endpoints',
        type=str,
        default='127.0.0.1:6000',
示例#5
0
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import argparse
import json
import logging
import sys
from collections import defaultdict

import paddle.fluid.core as core
import paddle.fluid.proto.framework_pb2 as framework_pb2
from paddle.fluid.log_helper import get_logger

# Module-level logger at INFO level (no explicit format override).
logger = get_logger(__name__, logging.INFO)

# graphviz is required for drawing a network; without it this module is
# useless, so explain how to install it and leave the interpreter early.
try:
    from .graphviz import Graph
except ImportError:
    logger.info(
        'Cannot import graphviz, which is required for drawing a network. This '
        'can usually be installed in python with "pip install graphviz". Also, '
        'pydot requires graphviz to convert dot files to pdf: in ubuntu, this '
        'can usually be installed with "sudo apt-get install graphviz".')
    print('net_drawer will not run correctly. Please install the correct '
          'dependencies.')
    # Fix: the bare `exit` builtin is injected by the `site` module for
    # interactive use and may be absent (e.g. under `python -S`);
    # `sys.exit` is the supported API. Exit status 0 is kept from the
    # original — NOTE(review): a nonzero status may be more appropriate
    # for a missing dependency; confirm with the module's users.
    sys.exit(0)

OP_STYLE = {
    'shape': 'oval',
示例#6
0
import time
import logging

import paddle
from paddle.fluid import core
from paddle.fluid import io
from paddle.fluid import Program
from paddle.fluid.log_helper import get_logger

# Names exported by this lookup-table utility module.
__all__ = [
    "load_persistables_for_increment", "load_persistables_for_inference",
    "convert_dist_to_sparse_program"
]

# Dedicated logger for the lookup-table utilities, INFO level.
_logger = get_logger(
    'lookup_table_utils',
    logging.INFO,
    fmt='%(asctime)s-%(levelname)s: %(message)s')

# NOTE(review): by their names these look like the serialized-program file
# name and the directory holding persisted lookup-table parameters —
# confirm against the save/load helpers that consume them.
model_filename = "__model__"
lookup_table_dir = "__lookup_table__"


def __insert_lookup_sparse_table_op(main_program, idx, ids, w, out):
    main_program.global_block()._insert_op(
        index=idx,
        type="lookup_sparse_table",
        inputs={"Ids": [ids],
                "W": [w]},
        outputs={"Out": [out]},
        attrs={
            "is_distributed": False,
示例#7
0
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import paddle
import tarfile

from paddle.fluid.log_helper import get_logger

# Logger on the shared "paddle" channel at INFO level.
logger = get_logger("paddle", logging.INFO)

# Remote Avazu CTR dataset archive and its expected MD5 checksum; both are
# passed to paddle.dataset.common.download in read_data below.
DATA_URL = "http://paddle-ctr-data.bj.bcebos.com/avazu_ctr_data.tgz"
DATA_MD5 = "c11df99fbd14e53cd4bfa6567344b26e"
# Files contained in the downloaded archive:
"""
avazu_ctr_data/train.txt
avazu_ctr_data/infer.txt
avazu_ctr_data/test.txt
avazu_ctr_data/data.meta.txt
"""


def read_data(file_name):
    path = paddle.dataset.common.download(DATA_URL, "avazu_ctr_data", DATA_MD5)
    tar = tarfile.open(path, "r:gz")
    tar_info = None
示例#8
0
from __future__ import print_function
import numpy as np
import logging
import six

from paddle.fluid import log_helper
from paddle.fluid import framework, backward, core
from paddle.fluid.dygraph import layers
from paddle.fluid.dygraph.base import switch_to_static_graph
from paddle.fluid.dygraph.dygraph_to_static.return_transformer import RETURN_NO_VALUE_MAGIC_NUM
from paddle.fluid.layers.utils import flatten
from paddle.fluid.layers.utils import pack_sequence_as
import paddle.compat as cpt

# Module-level logger, WARNING level, "time-LEVEL: message" format.
_logger = log_helper.get_logger(__name__,
                                logging.WARNING,
                                fmt='%(asctime)s-%(levelname)s: %(message)s')


class NestSequence(object):
    """
    A wrapper class that easily to flatten and restore the nest structure of
    given sequence.
    """
    def __init__(self, raw_input, need_check=False):
        """Store *raw_input* and pre-compute bookkeeping for it.

        Args:
            raw_input: arbitrarily nested sequence this wrapper will
                flatten and restore.
            need_check (bool): forwarded to ``_check_non_variable``;
                presumably toggles validation of non-Variable entries —
                confirm in that helper (not visible here).
        """
        self.__raw_input = raw_input
        # _get_var_ids is defined elsewhere in the class; by its name it
        # collects the positions/ids of Variable entries — TODO confirm.
        self.__var_ids = self._get_var_ids()
        self._check_non_variable(need_check)

    def tolist(self):
        """