Example 1
import os
import sys
import pickle
import socket
import logging


def main():
    # Extract the trunk directory and append it to PYTHONPATH because we need
    # the specific operation process class; backends, environments and pySPACE
    # are the path components to go back over,
    # e.g. '/opt/trunk/pySPACE/environments/backends' -> '/opt/trunk'
    file_path = os.path.dirname(os.path.abspath(__file__))
    pyspace_path = file_path[:file_path.rfind('pySPACE') - 1]
    if pyspace_path not in sys.path:
        sys.path.append(pyspace_path)
    # LoadLevelerComHandler lives inside pySPACE and can only be imported once
    # the trunk directory is on the path (the exact module path is assumed here)
    from pySPACE.environments.backends.ll_backend import LoadLevelerComHandler

    # Get the file name of the process to unpickle and call
    proc_file_name = sys.argv[1]

    # Unpickle the process
    with open(proc_file_name, 'rb') as proc_file:
        proc = pickle.load(proc_file)

    # noinspection PyBroadException
    try:
        # Do the actual call
        proc()
    except Exception:
        logging.exception("Error while executing the process:")
    finally:
        # Delete the proc_file since we don't need it any more
        try:
            os.unlink(proc_file_name)
        except OSError:
            logging.exception("Error while unlinking the process file:")

        # Inform the listener socket in the Backend that this job has finished
        send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        host = sys.argv[2]
        port = int(sys.argv[3])
        send_sock.connect((host, port))
        try:
            LoadLevelerComHandler.send_message(
                send_sock, host, port, LoadLevelerComHandler.MESSAGES.FINISHED)
        finally:
            send_sock.shutdown(socket.SHUT_RDWR)
            send_sock.close()
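For illustration, here is a minimal sketch of the backend side of this handshake. It assumes only what the runner above shows: the runner opens a TCP connection to the host and port it received as sys.argv[2] and sys.argv[3] and sends a FINISHED message. The exact wire format is defined by LoadLevelerComHandler and is not reproduced here; wait_for_finished and the port value are hypothetical.

import socket

def wait_for_finished(host='localhost', port=9999):
    """Accept one connection from a runner and print what it sent."""
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Bind to the (host, port) pair that is passed to the runner
    # as sys.argv[2] and sys.argv[3]
    listener.bind((host, port))
    listener.listen(1)
    conn, _ = listener.accept()
    try:
        # Read whatever the runner sent; the real backend parses this
        # with LoadLevelerComHandler instead of printing it
        data = conn.recv(4096)
        print('runner reported: %r' % (data,))
    finally:
        conn.close()
        listener.close()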
Example 2
import os
import sys
import time
import pickle
import socket

# LoadLevelerComHandler lives inside pySPACE; it is assumed to be importable
# here (the exact module path is an assumption)
from pySPACE.environments.backends.ll_backend import LoadLevelerComHandler


def main():
    # Parse input: argv[1] is the working directory, argv[2] and argv[3] are
    # the backend host and port, argv[4] the number of runs, argv[5] the flow ids
    dir_path = sys.argv[1]
    train_instances_file_name = os.path.join(dir_path,
                                             'subflow_data.pickle')
    # argv[4] is expected to be a Python literal, e.g. '10'
    runs = eval(sys.argv[4])
    # Construct a socket to communicate with the backend
    send_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    send_sock.connect((sys.argv[2], int(sys.argv[3])))
    try:
        # argv[5] is expected to be a Python literal list, e.g. "['flow_0', 'flow_1']"
        flow_ids = eval(sys.argv[5])
        for flow_id in flow_ids:
            flow_file_name = os.path.join(dir_path, flow_id + '.pickle')
            # Unpickle the flow and train_instances
            with open(flow_file_name, 'rb') as flow_file:
                flow = pickle.load(flow_file)
            with open(train_instances_file_name, 'rb') as train_instances_file:
                train_instances = pickle.load(train_instances_file)
            try:
                # Execute the flow
                # .. note:: the flows executed here cannot store anything;
                #           the meta data of the result collection is NOT updated!
                _, result_collection = flow(train_instances=train_instances, runs=runs)
                # Store results
                result_file_name = flow_file_name.replace('.pickle', '_result.pickle')
                with open(result_file_name, 'wb') as result_file:
                    pickle.dump(result_collection, result_file, protocol=pickle.HIGHEST_PROTOCOL)
            finally:
                # Delete the flow_file since we don't need it any more;
                # the training file may be used by other flows, so don't delete it
                os.remove(flow_file_name)
                # Inform the backend that this flow has finished
                LoadLevelerComHandler.send_message(
                    send_sock, sys.argv[2], int(sys.argv[3]),
                    LoadLevelerComHandler.MESSAGES.FINISHED, flow_id)
        # Give the backend some time to pick up the information
        time.sleep(1)
    finally:
        send_sock.shutdown(socket.SHUT_RDWR)
        send_sock.close()
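Both runners follow the same convention: the backend serializes work to disk, spawns the script with positional arguments, and is notified of completion over its listener socket. As a usage sketch, assuming the example above is saved as subflow_runner.py with the usual if __name__ == '__main__': main() guard (the file name, paths, port, and flow ids are all illustrative):

import subprocess
import sys

# Positional arguments as parsed in main() above:
#   argv[1] working directory with subflow_data.pickle and the flow pickles
#   argv[2] backend host, argv[3] backend listener port
#   argv[4] number of runs as a Python literal
#   argv[5] list of flow ids as a Python literal
subprocess.check_call([
    sys.executable, 'subflow_runner.py',
    '/tmp/subflow_job', 'localhost', '9999',
    '10', "['flow_0', 'flow_1']",
])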