# Example 1
def test_start_local():
    """A non-global local server is registered without replacing the global one."""
    if not core.SERVER:
        core.start_local_server()
    global_server_id = id(core.SERVER)
    count_before = len(core._server_instances)
    # Keep a reference so the new (non-global) server is not garbage-collected
    # before we inspect the instance registry.
    server = core.start_local_server(
        as_global=False, ansys_path=core.SERVER.ansys_path
    )
    assert len(core._server_instances) == count_before + 1
    core._server_instances[-1]().shutdown()

    # The global server must still be the exact same object as before.
    assert id(core.SERVER) == global_server_id
# Example 2
 def __getitem__(self, item):
     """Return the local server at index ``item``, starting servers on demand.

     The pool is grown lazily until index ``item`` exists.  The requested
     server is then health-checked by reading its ``info`` property; a dead
     server is replaced with a fresh one, retrying up to ``self._max_iter``
     times before the (possibly still broken) entry is returned.
     """
     # Grow the pool lazily until index `item` exists.  The original
     # redundant `if` guard around this loop has been dropped: the
     # `while` condition already covers the "large enough" case.
     while len(self._local_servers) <= item:
         self._local_servers.append(
             core.start_local_server(as_global=False))
     try:
         # Accessing `info` raises if the server process has died.
         self._local_servers[item].info
         return self._local_servers[item]
     except Exception:
         # Was a bare `except:`: narrowed so Ctrl-C / SystemExit still
         # propagate.  Retry with a fresh server a bounded number of times.
         for _ in range(self._max_iter):
             try:
                 self._local_servers[item] = core.start_local_server(
                     as_global=False)
                 self._local_servers[item].info
                 break
             except Exception:
                 # Best-effort: keep retrying until the budget is spent.
                 pass
         return self._local_servers[item]
# Example 3
        "Ans.Dpf.Hdf5.dll",
    ]

# Regenerate the python operator modules under ansys/dpf/core/operators.
local_dir = os.path.dirname(os.path.abspath(__file__))
TARGET_PATH = os.path.join(local_dir, os.pardir, "ansys", "dpf", "core",
                           "operators")
# Wipe previously generated modules, keeping the hand-maintained
# "specification" module in place.
files = glob.glob(os.path.join(TARGET_PATH, "*"))
for f in files:
    if Path(f).stem == "specification":
        continue
    try:
        if os.path.isdir(f):
            shutil.rmtree(f)
        else:
            os.remove(f)
    except OSError:
        # Was a bare `except:` — narrowed to filesystem errors so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.  Cleanup
        # stays best-effort: a locked entry must not stop generation.
        pass
core.start_local_server()
code_gen = core.Operator("python_generator")
code_gen.connect(1, TARGET_PATH)
for lib in LIB_TO_GENERATE:
    code_gen.connect(0, lib)
    # NOTE(review): pin 2 appears to control whether the target package is
    # (re)created — True only for the first library.  Confirm against the
    # python_generator operator specification.
    if lib != LIB_TO_GENERATE[0]:
        code_gen.connect(2, False)
    else:
        code_gen.connect(2, True)
    code_gen.run()
    time.sleep(0.1)

core.SERVER.shutdown()
# Example 4
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

###############################################################################
# Configure the servers
# ~~~~~~~~~~~~~~~~~~~~~~
# Build the list of IP addresses and port numbers on which DPF servers are
# started.  Operator instances will be created on each of those servers so
# that each one addresses a different result file.
# This example post-processes an analysis distributed across 2 files and
# therefore requires 2 remote processes.  For simplicity, local servers are
# started here, but any existing server on the network could be connected to.
# NOTE(review): `dpf` is presumably imported as `from ansys.dpf import core
# as dpf` earlier in this script — confirm against the full example.

remote_servers = [
    dpf.start_local_server(as_global=False),
    dpf.start_local_server(as_global=False)
]
ips = [remote_server.ip for remote_server in remote_servers]
ports = [remote_server.port for remote_server in remote_servers]

###############################################################################
# Print the ips and ports
print("ips:", ips)
print("ports:", ports)

###############################################################################
# Demonstrate sending files to a temporary directory, as would be required
# when the servers do not share the client's file system.
files = examples.download_distributed_files()
server_file_paths = [
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

###############################################################################
# Configure the servers
# ~~~~~~~~~~~~~~~~~~~~~~
# Build the pool of DPF servers, each of which will address one result file.
# This distributed analysis is split over 2 files, so 2 remote processes are
# required.  Local servers are started here for simplicity, but any existing
# server reachable on the network could be connected to instead.

remote_servers = [
    dpf.start_local_server(as_global=False)
    for _ in range(2)
]
ips = [srv.ip for srv in remote_servers]
ports = [srv.port for srv in remote_servers]

###############################################################################
# Print the ips and ports.
print("ips:", ips)
print("ports:", ports)

###############################################################################
# Choose the file path.

base_path = examples.distributed_msup_folder
files = [base_path + '/file0.mode', base_path + '/file1.mode']
files_aux = [base_path + '/file0.rst', base_path + '/file1.rst']
# Example 6
def test_server_info_cache():
    """Reading server info should populate the method-result cache."""
    if not dpf.SERVER:
        dpf.start_local_server()
    # First access fills the cache on the base service.
    dpf.SERVER.info
    key = dpf.cache.MethodIdentifier("_get_server_info", (), {})
    assert key in dpf.SERVER._base_service._cache.cached
from ansys.dpf import core as dpf
from ansys.dpf.core import examples
from ansys.dpf.core import operators as ops

###############################################################################
# Create 2 servers
# ~~~~~~~~~~~~~~~~~
# Both servers are started on the local machine with ``start_local_server``;
# to use an already-running server anywhere on the network, use
# ``connect_to_server`` instead.

# The ``as_global`` argument chooses whether a server is stored by the module
# and used by default.  Here, the first server is made the default.
server1 = dpf.start_local_server(as_global=True)
server2 = dpf.start_local_server(as_global=False)

# Check that the 2 servers are on different ports.
print(server1.port, server2.port)

###############################################################################
# Send the result file
# ~~~~~~~~~~~~~~~~~~~~~
# The result file is uploaded to a temporary directory of the first server.
# The upload is redundant in this case, since both servers are local and
# already share the client's file system.
file = examples.complex_rst
file_path_in_tmp = dpf.upload_file_in_tmp_folder(file)

###############################################################################
# Create a workflow on the first server
# Example 8
def test_start_local_failed():
    """An empty ``ansys_path`` (with docker fallback disabled) must raise."""
    with pytest.raises(NotADirectoryError):
        core.start_local_server(ansys_path="", use_docker_by_default=False)