Example no. 1
def demonstrateSOS():
    SOS = SSOS()

    print "Analysis: Initializing SOS..."
    SOS.init()
    time.sleep(5.0)

    iterations = 0
    while iterations < 48:
        sql_string = "select v.val, v.guid, max(v.time_pack), count(v.time_pack) from tblvals v inner join tbldata d on v.guid = d.guid and d.name like 'Iteration' group by (v.guid);"
        #print "Sending this query to the SOS daemon: "
        #print "    " + sql_string
        results, col_names = SOS.query(sql_string, "localhost",
                                       os.environ['SOS_CMD_PORT'])
        values = []
        themax = 5.0
        for i in range(0, len(results)):
            values.append(float(results[i][0]))
            if iterations < int(results[i][3]):
                iterations = int(results[i][3])
        if len(values) > 0:
            themax = check_balance(SOS, values, iterations)
        time.sleep(themax)

    SOS.finalize()
    print "Analysis: DONE!"
    print
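
Example no. 1 calls a check_balance() helper that is not shown on this page; it evidently takes the SOS handle, the list of values, and the iteration count, and returns how long to sleep before the next poll (themax defaults to 5.0 seconds). A minimal hypothetical sketch of such a helper is below; the real implementation may use a different heuristic, so treat the thresholds as assumptions.

def check_balance(SOS, values, iterations):
    # Hypothetical stand-in for the check_balance() helper used above.
    # SOS and iterations are accepted only to match the call site.
    # Returns the number of seconds to sleep before polling again:
    # poll faster when the reported values are spread far apart,
    # slower when they are close together. The thresholds are made up.
    if not values:
        return 5.0
    mean = sum(values) / float(len(values))
    spread = max(values) - min(values)
    imbalance = spread / mean if mean != 0 else 0.0
    return max(1.0, 5.0 - 4.0 * min(imbalance, 1.0))
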
Example no. 2
def demonstrateManifest():
    SOS = SSOS()

    sos_host = "localhost"
    sos_port = os.environ.get("SOS_CMD_PORT")

    SOS.init()

    max_frame, manifest, col_names = SOS.request_pub_manifest(
        "", sos_host, sos_port)

    print "Manifest:"
    print(str(col_names))

    # Print out the manifest in a pretty column-aligned way:
    widths = [max(map(len, col)) for col in zip(*manifest)]
    for row in manifest:
        print "  ".join((val.ljust(width) for val, width in zip(row, widths)))

    print ""
    print "    Pub count .....: " + str(len(manifest))
    print "    Max frame .....: " + str(max_frame)
    print ""
    SOS.finalize()
    print
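
The column-aligned printing in Example no. 2 is plain Python: zip(*manifest) regroups the rows into columns, and the longest string in each column sets that column's width. A self-contained sketch with made-up rows (no SOS calls involved):

rows = [["pub_guid", "node", "frame"],
        ["140235", "nid00012", "42"],
        ["140299", "nid00013", "7"]]

# One width per column: the longest string found in that column.
widths = [max(map(len, col)) for col in zip(*rows)]
for row in rows:
    print("  ".join(val.ljust(width) for val, width in zip(row, widths)))
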
Example no. 3
def queryUniqueNames():
    SOS = SSOS()

    sos_host = "localhost"
    sos_port = os.environ.get("SOS_CMD_PORT")

    print "Initializing SOS..."
    SOS.init()
    print "DONE initializing SOS..."

    sql_string = """
    SELECT
    DISTINCT value_name 
    FROM viewCombined
    ;
    """
    results, col_names = SOS.query(sql_string, sos_host, sos_port)

    numeric_fields = dict()
    numeric_fields['name'] = [el[0] for el in results]
    name_count = len(numeric_fields['name'])

    print str(numeric_fields['name'])

    print str(name_count) + " unique names."

    SOS.finalize()
    print "   ...DONE!"
    print
Example no. 4
def my_main():
    global SOS
    global config
    parseConfigFile()
    SOS = SSOS()

    print("Initializing SOS: ...")
    SOS.init()
    print("OK!\n")

    # Get at least one active aggregator
    lookupAggregators()
    
    # wait for a frame to show up.
    next_frame = 0
    # while config["aggregators"]["runtime"] or next_frame < config["aggregators"]["maxframe"]:
    while next_frame < config["aggregators"]["maxframe"]:
        # wait for the next batch of frames
        waitForServer(SOS, next_frame)
        print "Processing frame", next_frame
        start = time.time()
        do_something(next_frame)
        # clean up the database for long runs
        cleanDB(SOS, next_frame)
        next_frame = next_frame + 1
        end = time.time()
        print "loop time:", str(end-start)

    # finalize SOS
    SOS.finalize()

    print "   ...DONE!"
    return
Example no. 5
def demonstrateSOS():
    SOS = SSOS()

    sos_host = "localhost"
    sos_port = os.environ.get("SOS_CMD_PORT")

    print "Initializing SOS..."
    SOS.init()

    sql_string = "SELECT * FROM viewCombined;"

    print "Sending this query to the SOS daemon: "
    print "    " + sql_string
    results, col_names = SOS.query(sql_string, sos_host, sos_port)
    print "Results:"
    print "    Output.........: "
    print str(results)
    print ""
    print "    Row count......: " + str(len(results))
    print "    Column count...: " + str(len(col_names))
    print "    Column names...: "  # + str(col_names)    #pp.pprint(col_names)
    pp.pprint(col_names)
    print ""
    print "Finalizing..."

    SOS.finalize()
    print "   ...DONE!"
    print
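
SOS.query() returns the rows and the column names as two separate lists. When a result set has many columns, as the SELECT * above does, it can be easier to zip them together first; this is plain Python over the (results, col_names) pair and not an additional SOS call. The column names in the commented usage are taken from the other queries on this page.

def rows_as_dicts(results, col_names):
    # Pair each row with the column names so fields can be read by name.
    return [dict(zip(col_names, row)) for row in results]

# For example, after the query above:
#   for row in rows_as_dicts(results, col_names):
#       print(row["value_name"], row["value"])
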
Example no. 6
def demonstrateSOS():
    SOS = SSOS()

    print "Initializing SOS..."
    SOS.init()

    count = 0
    count_max = 10
    print "    Packing " + str(count_max) + " integer values in a loop..."
    while (count < count_max):
        count = count + 1
        SOS.pack(("loop_val_" + str(count)), SOS.INT, count)
        #SOS.announce()
        SOS.publish()

    sql_pubs = "SELECT * FROM tblPubs;"
    sql_data = "SELECT * FROM tblData;"

    pubs, col_names = SOS.query(sql_pubs, "localhost",
                                os.environ.get("SOS_CMD_PORT"))

    print "-----"
    print "Pubs: (" + str(len(pubs)) + ")"
    count = 0
    print str(col_names)
    while count < len(pubs):
        print str(pubs[count])
        count = count + 1

    data, col_names = SOS.query(sql_data, "localhost",
                                os.environ.get("SOS_CMD_PORT"))

    print "-----"
    print "Data: (" + str(len(data)) + ")"
    count = 0
    print str(col_names)
    while count < len(data):
        print str(data[count])
        count = count + 1

    print ""

    SOS.finalize()
    print "   ...DONE!"
    print
Example no. 7
def triggerSOSD():
    SOS = SSOS()

    sense_handle = "example_sense"
    payload_data = "Hello, I've been triggered by Python!"
    #payload_data = "adjust;100ms"
    payload_size = len(payload_data)

    print "Initializing SOS..."
    SOS.init()
    print "Triggering SOSD w/the following:"
    print "   sense_handle = " + str(sense_handle)
    print "   payload_size = " + str(payload_size)
    print "   payload_data = " + str(payload_data)
    SOS.trigger(sense_handle, payload_size, payload_data)
    SOS.finalize()
    print "DONE!"
    print
Example no. 8
def demonstrateSOS():
    SOS = SSOS()

    sos_host = "localhost"
    sos_port = os.environ.get("SOS_CMD_PORT")

    print "Initializing SOS..."
    SOS.init()

    frame_start = -1  #-1 == latest_frame
    frame_depth = 1  #-1 == all frames
    pub_filter = ""
    val_filter = ""

    print "Sending this cache_grab to the SOS daemon: "
    print "    pub_filter  == " + str(pub_filter)
    print "    val_filter  == " + str(val_filter)
    print "    frame_start == " + str(frame_start)
    print "    frame_depth == " + str(frame_depth)

    results, col_names = SOS.cache_grab(pub_filter, val_filter,
                                        frame_start, frame_depth,
                                        sos_host, sos_port)
    print "Results:"
    print "    Output.........: "
    print str(results)
    print ""
    print "    Row count......: " + str(len(results))
    print "    Column count...: " + str(len(col_names))
    print "    Column names...: "  # + str(col_names)    #pp.pprint(col_names)
    pp.pprint(col_names)
    print ""
    print "Finalizing..."

    SOS.finalize()
    print "   ...DONE!"
    print
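
The comments in Example no. 8 note that frame_start == -1 means the latest frame and frame_depth == -1 means all frames. Two hedged variations of the same call, reusing the sos_host and sos_port variables from above (the filter strings are left empty exactly as in the example):

# Latest frame only (what the example above requests):
results, col_names = SOS.cache_grab("", "", -1, 1, sos_host, sos_port)

# Every frame the daemon currently has cached (-1 depth == all frames,
# per the comments above); this can return a large result set:
results, col_names = SOS.cache_grab("", "", -1, -1, sos_host, sos_port)
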
Example no. 9
def sosToADIOS():
    global SOS
    global config
    parseConfigFile()
    SOS = SSOS()

    printf("Initializing SOS: ...\b\b\b")
    SOS.init()
    printf("OK!\n")

    #####
    #
    # Get the maximum simulation cycle found in the database.
    #
    # NOTE: The cycleFieldName variable should match what is being used
    #       either by your application or SOSflow. If you are not using
    #       an explicit cycle value, you can use SOSflow's internal
    #       field named "frame" that is updated every time SOS_publish(...)
    #       is called. As long as you are publishing to SOS at the end
    #       of major program steps, this will give you what you want.
    #
    # NOTE: For online queries, if you want to ensure that your most
    #       current projection represents a complete set of values,
    #       and you're investigating a block-synchronous code, you can
    #       grab the current maximum and subtract one.
    #
    #
    num_rows = 0
    # Get at least one active aggregator
    lookupAggregators()

    g = None
    if config["output_adios"]:
        # ADIOS file output
        ad.init_noxml()
        g = ad.declare_group("TAU_metrics", "", ad.FLAG.YES)
        ad.define_var(g, "program_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "comm_rank_count", "", ad.DATATYPE.unsigned_integer,
                      "", "", "")
        ad.define_var(g, "thread_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "metric_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "timer_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "timer_value_count", "", ad.DATATYPE.unsigned_integer,
                      "", "", "")
        ad.define_var(g, "timer_values", "", ad.DATATYPE.unsigned_integer,
                      "timer_value_count,6", "timer_value_count,6", "0,0")
        ad.define_var(g, "counter_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "counter_value_count", "",
                      ad.DATATYPE.unsigned_integer, "", "", "")
        ad.define_var(g, "counter_values", "", ad.DATATYPE.double,
                      "counter_value_count,5", "counter_value_count,5", "0,0")
        print "using ADIOS method:", str(config["adios_method"])
        ad.select_method(g, str(config["adios_method"]), "verbose=3", "")

    # wait for a frame to show up. Frame 0 (and maybe 1) are TAU metadata.
    # The rest should be just timers.
    next_frame = 0
    # first iteration, we are writing the file. after that, appending.
    adios_mode = "w"

    # Keep running until there are no more frames to wait for.
    # At runtime, this is a moving target, since next_frame gets updated.
    while config["aggregators"][
            "runtime"] or next_frame < config["aggregators"]["maxframe"]:
        # wait for the next batch of frames
        timeout = waitForServer(SOS, next_frame)
        if timeout:
            break
        print "Processing frame", next_frame
        start = time.time()
        fd = ad.open("TAU_metrics", "tau-metrics.bp", adios_mode)
        writeMetaData(SOS, next_frame, g, fd)
        writeTimerData(SOS, next_frame, g, fd)
        writeCounterData(SOS, next_frame, g, fd)
        ad.close(fd)
        # future iterations are appending, not writing
        adios_mode = "a"
        # clean up the database for long runs
        cleanDB(SOS, next_frame)
        next_frame = next_frame + 1
        end = time.time()
        print "loop time:", str(end - start)

    # finalize adios
    if config["output_adios"]:
        ad.finalize()

    # finalize SOS
    SOS.finalize()

    print "   ...DONE!"
    return
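
The long NOTE near the top of this example suggests that, for online queries against a block-synchronous code, you can take the current maximum frame and subtract one so that the projection only covers completed frames. Example no. 9 itself relies on waitForServer() instead, but a sketch of that suggestion, using the same SOS.query() interface and viewCombined view seen elsewhere on this page (and assuming the SOS handle and os module from the surrounding code), would look like this:

sql_string = "SELECT MAX(frame) FROM viewCombined;"
results, col_names = SOS.query(sql_string, "localhost",
                               os.environ.get("SOS_CMD_PORT"))
# Step back one frame so only fully published frames are projected.
latest_complete_frame = int(results[0][0]) - 1
print("Using frame: " + str(latest_complete_frame))
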
Example no. 10
def queryAndPlot():
    SOS = SSOS()

    print "Initializing SOS..."
    SOS.init()

    #####
    #
    #  Get the maximum simulation frame.
    #
    #sql_string = """
    #SELECT
    #MAX(frame)
    #FROM viewCombined
    #WHERE viewCombined.value_name LIKE "lulesh.time"
    #;
    #"""
    #results, col_names = SOS.query(sql_string,
    #        "localhost",
    #        os.environ.get("SOS_CMD_PORT"))
    #max_cycle = int(results[0][0])
    #print "Max cycle: " + str(max_cycle)
    #
    #####

    #####
    #
    #  Get the list of field names for non-string values.
    #
    #  Removed:  AND frame = """ + str(max_cycle) + """
    #
    sql_string = """
    SELECT
    DISTINCT value_name
    FROM viewCombined
    WHERE value_type NOT LIKE "SOS_VAL_TYPE_STRING"
    ;
    """
    results, col_names = SOS.query(sql_string, "localhost",
                                   os.environ.get("SOS_CMD_PORT"))
    print "Field names:"
    for field_name in results:
        print "    " + str(field_name)
    attr = dict()
    attr['value_name'] = [el[0] for el in results]
    name_count = len(attr['value_name'])
    print str(name_count) + " unique names."
    #
    #####

    #####
    #
    #  Compose a query with those unique fields as columns in the results.
    #
    #  Removed: sql_string += " WHERE frame = " + str(max_cycle) + " "
    #
    sql_string = """ """
    sql_string += """ SELECT """
    sql_string += """ comm_rank """
    sql_string += """,frame """
    for field_name in attr['value_name']:
        sql_string += """,GROUP_CONCAT( CASE WHEN """
        sql_string += ' value_name LIKE "' + field_name + '" '
        sql_string += ' THEN value END) AS "' + field_name + '" '
    sql_string += """ FROM viewCombined """
    sql_string += """ GROUP BY """
    sql_string += """ comm_rank """
    sql_string += """,frame """
    sql_string += """;"""
    print "Composite SQL statement: "
    print sql_string
    print ""
    print "Running composite query..."
    results, col_names = SOS.query(sql_string, "localhost",
                                   os.environ.get("SOS_CMD_PORT"))
    print ""
    #
    #  Print out the results:
    #
    print "=========="
    for col in col_names:
        print str(col) + " "
    print "=========="
    for row in results:
        for col_index in range(len(row)):
            print str(col_names[col_index]) + ": " + str(row[col_index])
        print "----------"
    print "=========="
    #
    #####

    SOS.finalize()
    print "   ...DONE!"
    print
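
The composite query built above pivots the long-format viewCombined rows (one row per value_name) into one column per name using SQLite's GROUP_CONCAT(CASE WHEN ... THEN value END) pattern. The same pattern can be tried outside SOS with nothing but the sqlite3 module and a toy table; the table name and sample values below are invented for the demo, only the pivot construction matches the query above.

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE vals (comm_rank INT, frame INT, "
            "value_name TEXT, value REAL)")
con.executemany("INSERT INTO vals VALUES (?,?,?,?)", [
    (0, 1, "lulesh.time", 0.10), (0, 1, "lulesh.energy", 9.5),
    (1, 1, "lulesh.time", 0.12), (1, 1, "lulesh.energy", 9.1),
])

names = ["lulesh.time", "lulesh.energy"]
sql = "SELECT comm_rank, frame"
for name in names:
    # One output column per value_name, exactly as in the loop above.
    # (Double quotes around the name follow the example's style; standard
    # SQL would use single quotes for the string literal.)
    sql += (',GROUP_CONCAT(CASE WHEN value_name LIKE "' + name +
            '" THEN value END) AS "' + name + '"')
sql += " FROM vals GROUP BY comm_rank, frame;"

for row in con.execute(sql):
    print(row)
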
Example no. 11
def sosScatterplotGenerator():
    global SOS
    global config
    parseConfigFile()
    SOS = SSOS()

    printf("Initializing SOS: ...\b\b\b")
    SOS.init()
    printf("OK!\n")

    # NOTE: When allocation time is scarce, 'stride' here can be
    #       set so that intermediate cycles can be skipped, which is
    #       especially useful when there are thousands of cycles.
    #
    stride = 1

    #####
    #
    # Get the maximum simulation cycle found in the database.
    #
    # NOTE: The cycleFieldName variable should match what is being used
    #       either by your application or SOSflow. If you are not using
    #       an explicit cycle value, you can use SOSflow's internal
    #       field named "frame" that is updated every time SOS_publish(...)
    #       is called. As long as you are publishing to SOS at the end
    #       of major program steps, this will give you what you want.
    #
    # NOTE: For online queries, if you want to ensure that your most
    #       current projection represents a complete set of values,
    #       and you're investigating a block-synchronous code, you can
    #       grab the current maximum and subtract one.
    #
    cycleFieldName = "frame"
    #
    num_rows = 0
    # Get at least one active aggregator
    lookupAggregators()
    # Wait for a few frames to show up. Frames 0 and 1 are TAU metadata;
    # frame 2 represents a true iteration.
    max_cycle, maxtime = waitForServer(SOS, cycleFieldName, max(stride, 1),
                                       True)
    print "Maximum observed '" + cycleFieldName + "' value: " + str(
        max_cycle) + " (so far)"
    #
    sqlMaxFrame = "SELECT count(*) FROM tblpubs;"
    results, col_names = queryAllAggregators(sqlMaxFrame)
    # We got results from each aggregator, so sum them up
    rank_maxes = [int(x[0]) for x in results]
    rank_max = sum(rank_maxes)
    print "Maximum observed pub_guids: " + str(rank_max)
    #
    #####

    if config["output_adios"]:
        # ADIOS file output
        ad.init_noxml()
        g = ad.declare_group("TAU_metrics", "", ad.FLAG.YES)
        ad.define_var(g, "process_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "program_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "process_index", "", ad.DATATYPE.unsigned_integer,
                      "process_count", "process_count", "0")
        ad.define_var(g, "memory_HWM", "", ad.DATATYPE.double, "process_count",
                      "process_count", "0")
        ad.define_var(g, "memory_RSS", "", ad.DATATYPE.double, "process_count",
                      "process_count", "0")
        ad.define_var(g, "total_FLOPS", "", ad.DATATYPE.double,
                      "process_count", "process_count", "0")
        ad.define_var(g, "latest_FLOPS", "", ad.DATATYPE.double,
                      "process_count", "process_count", "0")
        #ad.define_var(g, "program_name", "", ad.DATATYPE.string, "program_count", "program_count", "0")
        ad.define_var(g, "program_index", "", ad.DATATYPE.unsigned_integer,
                      "process_count", "process_count", "0")
        ad.define_var(g, "MPI_rank", "", ad.DATATYPE.unsigned_integer,
                      "process_count", "process_count", "0")
        print "using ADIOS method:", str(config["adios_method"])
        ad.select_method(g, str(config["adios_method"]), "verbose=3", "")

    #
    #
    # EXAMPLE A: Generate output for ALL simulation cycles:
    print "Generating output files..."
    lastX = [0.0] * (rank_max + 1)
    lastY = [0.0] * (rank_max + 1)
    lastZ = [0.0] * (rank_max + 1)
    simCycle = max_cycle
    mintime = 0.0
    # Keep running until there are no more frames to wait for.
    # At runtime, this is a moving target, since max_cycle gets updated.
    while config["aggregators"][
            "runtime"] or simCycle < config["aggregators"]["maxframe"]:
        print "Processing frame", simCycle
        start = time.time()
        vtkOutputFileName = generateADIOSFile(SOS, cycleFieldName, simCycle,
                                              lastX, lastY, lastZ, stride,
                                              mintime, maxtime)
        # clean up the database for long runs
        cleanDB(SOS, cycleFieldName, simCycle)
        simCycle = simCycle + stride
        # wait for the next batch of frames
        mintime = maxtime
        max_cycle, maxtime = waitForServer(SOS, cycleFieldName, simCycle,
                                           False)
        end = time.time()
        print "loop time:", str(end - start)

    #####
    #
    # Whew!  All done!
    #
    # NOTE: See vtkWriter.py for more details.
    #
    if config["output_adios"]:
        ad.finalize()
    SOS.finalize()
    #
    #####
    print "   ...DONE!"
    print
    return
Example no. 12
def queryAndPlot():
    SOS = SSOS()

    print "Initializing SOS..."
    SOS.init()
    print "DONE init SOS..."
    sql_string = """
    SELECT MAX(frame) FROM viewCombined WHERE viewCombined.value_name LIKE "lulesh.time"
    ;
    """
    results, col_names = SOS.query(sql_string, "localhost",
                                   os.environ.get("SOS_CMD_PORT"))

    max_cycle = int(results[0][0])
    print "Max cycle: " + str(max_cycle)

    #####
    #
    #  Get the list of field names for non-string values.
    #
    sql_string = """
    SELECT
    DISTINCT value_name
    FROM viewCombined
    WHERE value_type != 3
    AND frame = """ + str(max_cycle) + """
    ;
    """
    results, col_names = SOS.query(sql_string, "localhost",
                                   os.environ.get("SOS_CMD_PORT"))
    numeric_fields = dict()
    numeric_fields['name'] = [el[0] for el in results]
    name_count = len(numeric_fields['name'])
    print str(name_count) + " unique names."
    #
    #####

    filenames = []
    for c in range(0, (max_cycle + 1)):
        print "******* CYCLE " + str(c) + " *********"
        #####
        #
        #  Compose a query with the unique numeric fields as columns:
        #
        sql_string = """ """
        sql_string += """ SELECT """
        sql_string += """ comm_rank """
        # sql_string += """,frame """
        for field_name in numeric_fields['name']:
            sql_string += """,GROUP_CONCAT( CASE WHEN """
            sql_string += ' value_name LIKE "' + field_name + '" '
            sql_string += ' THEN value END) AS "' + field_name + '" '
        sql_string += """, GROUP_CONCAT( CASE WHEN """
        sql_string += ' value_name LIKE "lulesh.coords" '
        sql_string += ' THEN value END) AS "lulesh.coords" '
        sql_string += """ FROM viewCombined """
        sql_string += " WHERE frame = " + str(c) + " "
        sql_string += """ GROUP BY """
        sql_string += """ comm_rank """
        # sql_string += """,frame """
        sql_string += """;"""
        #print "Composite SQL statement: "
        #print sql_string
        #print ""
        #print "Running composite query..."
        results, col_names = SOS.query(sql_string, "localhost",
                                       os.environ.get("SOS_CMD_PORT"))
        #print ""
        #
        #  Print out the results:
        #
        #print "=========="
        #for col in col_names:
        #    print str(col) + " "
        #print "=========="
        #for row in results:
        #    for col_index in range(len(row)):
        #        print str(col_names[col_index]) + ": " + str(row[col_index])
        #    print "----------"
        #print "=========="
        #
        #####

        #####
        #
        #  Build an attribute dictionary of the values.
        #
        attr = dict()
        attr['comm_rank'] = [el[0] for el in results]

        position = 1
        for field_name in numeric_fields['name']:
            attr[field_name] = [el[position] for el in results]
            #print str(field_name) + " in position " + str(position) + " = " + str(attr[field_name])
            position += 1
        res_coords = [el[position] for el in results]
        #print "lulesh.coords in position " + str(position) + " = " + str(res_coords)

        for field_name in numeric_fields['name']:
            rank = 0
            for this_ranks_value in attr[field_name]:
                print "comm_rank(" + str(
                    rank) + ")." + field_name + " = " + this_ranks_value
                rank += 1

        rank_max = len(attr['comm_rank'])
        coords = list()
        coords = [el.split() for el in res_coords]
        #print attr
        dset = vtk_writer.vtk_hex_data_set()
        dset.clear()
        dset.set_cycle(c)
        for rank in range(rank_max):
            fields = {}
            for field_name in numeric_fields['name']:
                fields[field_name] = attr[field_name][rank]
            fields["rank"] = rank
            hex_coords = [None] * 24
            xpt = [None] * 8
            ypt = [None] * 8
            zpt = [None] * 8
            cpt = [None] * 8
            for i in range(24):
                hex_coords[i] = float(coords[rank][i])
            dset.add_hex(hex_coords, fields, rank)
        dset.write_vtk_file()
        filenames.append(dset.get_file_name())

    vtk_writer.write_visit_file(filenames)

    visit.AddArgument("-par")
    visit.Launch()
    OpenDatabase("dataset.visit")
    AddPlot("Pseudocolor", "rank")
    AddPlot("Mesh", "mesh")
    DrawPlots()
    # loop through times
    tsNames = GetWindowInformation().timeSliders
    for ts in tsNames:
        SetActiveTimeSlider(ts)
    for state in list(range(TimeSliderGetNStates()))[::10] + [0]:
        SetTimeSliderState(state)
    print "Setting share_power permissions on the newly created VTK files..."
    subprocess.call("$PROJECT_BASE/share_power .", shell=True)
    print ""
    print "Sleeping for 100 seconds..."
    print ""
    time.sleep(100)

    SOS.finalize()
    print "   ...DONE!"
    print
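
Example no. 12 receives lulesh.coords as one space-separated string per rank and converts it into 24 floats (8 hexahedron corners times 3 coordinates). A standalone sketch of just that parsing step, with a fabricated coordinate string standing in for real query output:

# 8 corners, 3 coordinates each, flattened into one string by the producer.
res_coord = " ".join(str(float(i)) for i in range(24))

hex_coords = [float(tok) for tok in res_coord.split()]
assert len(hex_coords) == 24

# Regrouped as (x, y, z) triples if a per-corner view is easier to work with:
corners = [tuple(hex_coords[i:i + 3]) for i in range(0, 24, 3)]
print(corners[0])   # first corner of the hexahedron
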
Example no. 13
def sosToADIOS():
    global SOS
    global config
    global validation
    parseConfigFile()
    SOS = SSOS()

    printf("Initializing SOS: ...\b\b\b")
    SOS.init()
    printf("OK!\n")

    #####
    #
    # Get the maximum simulation cycle found in the database.
    #
    # NOTE: The cycleFieldName variable should match what is being used
    #       either by your application or SOSflow. If you are not using
    #       an explicit cycle value, you can use SOSflow's internal
    #       field named "frame" that is updated every time SOS_publish(...)
    #       is called. As long as you are publishing to SOS at the end
    #       of major program steps, this will give you what you want.
    #
    # NOTE: For online queries, if you want to ensure that your most
    #       current projection represents a complete set of values,
    #       and you're investigating a block-synchronous code, you can
    #       grab the current maximum and subtract one.
    #
    #
    num_rows = 0
    # Get at least one active aggregator
    lookupAggregators()

    g = None
    if config["output_adios"]:
        # ADIOS file output
        ad.init_noxml()
        g = ad.declare_group("TAU_metrics", "", ad.FLAG.YES)
        ad.define_var(g, "program_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "comm_rank_count", "", ad.DATATYPE.unsigned_integer,
                      "", "", "")
        ad.define_var(g, "thread_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "event_type_count", "", ad.DATATYPE.unsigned_integer,
                      "", "", "")
        ad.define_var(g, "timer_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "timer_event_count", "", ad.DATATYPE.unsigned_integer,
                      "", "", "")
        ad.define_var(g, "event_timestamps", "", ad.DATATYPE.unsigned_long,
                      "timer_event_count,6", "timer_event_count,6", "0,0")
        ad.define_var(g, "counter_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "counter_event_count", "",
                      ad.DATATYPE.unsigned_integer, "", "", "")
        ad.define_var(g, "counter_values", "", ad.DATATYPE.unsigned_long,
                      "counter_event_count,6", "counter_event_count,6", "0,0")
        ad.define_var(g, "comm_count", "", ad.DATATYPE.unsigned_integer, "",
                      "", "")
        ad.define_var(g, "comm_timestamps", "", ad.DATATYPE.unsigned_long,
                      "comm_count,8", "comm_count,8", "0,0")
        print("using ADIOS method:", str(config["adios_method"]))
        ad.select_method(g, str(config["adios_method"]), "verbose=3", "")

    # wait for a frame to show up. Frame 0 (and maybe 1) are TAU metadata.
    # The rest should be just timers.
    next_frame = 0
    # first iteration, we are writing the file. after that, appending.
    adios_mode = "w"

    waitForServer(SOS, 0)
    buildColumnMap(SOS)

    # Keep running until there are no more frames to wait for.
    # At runtime, this is a moving target, since next_frame gets updated.
    done = False
    total_count = 0
    while (not done or total_count > 0) and (
            config["aggregators"]["runtime"]
            or next_frame < config["aggregators"]["maxframe"]):
        # wait for the next batch of frames
        if not done:
            timeout = waitForServer(SOS, next_frame + 1)
        if timeout:
            done = True
        #if len(column_map) == 0:
        #    buildColumnMap(SOS)
        print("Processing frame", next_frame)
        start = time.time()
        fd = ad.open("TAU_metrics",
                     str(config["outputdir"]) + "/tau-metrics.bp", adios_mode)
        meta_count = writeMetaData(SOS, next_frame, g, fd)
        timer_count = writeTimerData(SOS, next_frame, g, fd)
        total_count = meta_count + timer_count
        ad.close(fd)
        # future iterations are appending, not writing
        adios_mode = "a"
        print("Processed", total_count, "rows")
        if total_count == 0 and done:
            break
        next_frame = next_frame + 1
        end = time.time()
        print("loop time:", str(end - start))

    # finalize adios
    if config["output_adios"]:
        ad.finalize()

    # finalize SOS
    SOS.finalize()

    for p in validation:
        for r in validation[p]:
            for t in validation[p][r]:
                if len(validation[p][r][t]) != 0:
                    print("VALIDATION ERROR!", p, r, t, validation[p][r][t],
                          "was not exited")
    print("   ...DONE!")
    return
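
The validation check at the end of Example no. 13 walks a nested dictionary and reports any timer that was entered but never exited. How the dictionary is populated is not shown on this page, so the {program: {rank: {thread: [open timers]}}} layout below is inferred from the loop itself, and the toy data is invented:

# Inferred shape: validation[program][rank][thread] is a list (stack) of
# timers that have been entered but not yet exited.
validation = {
    "app": {
        0: {0: [], 1: ["MPI_Wait"]},   # thread 1 never exited MPI_Wait
        1: {0: []},
    }
}

for p in validation:
    for r in validation[p]:
        for t in validation[p][r]:
            if len(validation[p][r][t]) != 0:
                print("VALIDATION ERROR!", p, r, t, validation[p][r][t],
                      "was not exited")
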
Example no. 14
def sosAutoTranspose():
    SOS = SSOS()

    sosHost = "localhost"
    sosPort = os.environ.get("SOS_CMD_PORT")

    printf("Initializing SOS: ...\b\b\b")
    SOS.init()
    printf("OK!\n")

    #####
    #
    # Get the maximum simulation cycle found in the database.
    #
    # NOTE: The cycleFieldName variable should match what is being used
    #       either by your application or SOSflow. If you are not using
    #       an explicit cycle value, you can use SOSflow's internal
    #       field named "frame" that is updated every time SOS_publish(...)
    #       is called. As long as you are publishing to SOS at the end
    #       of major program steps, this will give you what you want.
    #
    # NOTE: For online queries, if you want to ensure that your most
    #       current projection represents a complete set of values,
    #       and you're investigating a block-synchronous code, you can
    #       grab the current maximum and subtract one.
    #
    cycleFieldName = "frame"
    #
    sqlMaxFrame = "SELECT MAX(" + cycleFieldName + ") FROM viewCombined;"
    results, col_names = SOS.query(sqlMaxFrame, sosHost, sosPort)
    max_cycle = int(results[0][0])
    print "Maximum observed '" + cycleFieldName + "' value: " + str(max_cycle)
    #
    sqlMaxFrame = "SELECT MAX(comm_rank) FROM viewCombined;"
    results, col_names = SOS.query(sqlMaxFrame, sosHost, sosPort)
    rank_max = int(results[0][0])
    print "Maximum observed  'comm_rank' value: " + str(rank_max)
    #
    #####

    #####
    #
    # Get the list of field names we will use to build a custom query.
    #
    # NOTE: To filter out SOS_VAL_TYPE_STRING fields, add in:
    #            ... += "WHERE value_type != 3"
    sqlFieldNames = """
    SELECT
    DISTINCT value_name
    FROM viewCombined
    ;
    """
    results, col_names = SOS.query(sqlFieldNames, sosHost, sosPort)
    selectedFields = dict()
    selectedFields['name'] = [el[0] for el in results]
    name_count = len(selectedFields['name'])

    printf("(%d fields)", name_count)
    printf("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")

    #
    # NOTE: Debug output...
    #
    #print "Selected " + str(name_count) + " unique names:"
    #for name in selectedFields['name']:
    #    print "    " + str(name)
    #print ""
    #
    #####

    #####
    #
    #  Compose a query with the unique numeric fields as columns:
    sqlValsToColsByRank = """ """
    sqlValsToColsByRank += """ SELECT """
    sqlValsToColsByRank += """ comm_rank """
    for field_name in selectedFields['name']:
        sqlValsToColsByRank += """,GROUP_CONCAT( CASE WHEN """
        sqlValsToColsByRank += ' value_name LIKE "' + field_name + '" '
        sqlValsToColsByRank += ' THEN value END) AS "' + field_name + '" '
    #end:for field_name
    #
    # NOTE: We can now manually grab some hardcoded field names
    #       that might not have been included in selectedFields
    #       if things were being filtered by type:
    #
    #
    sqlValsToColsByRank += """ FROM viewCombined """
    #
    #  NOTE: Uncomment this, and comment out the 'GROUP BY' frame below,
    #        for cases where we only want to see the largest frame.
    #
    #sqlValsToColsByRank += " WHERE frame = " + str(simCycle) + " "
    sqlValsToColsByRank += """ GROUP BY """
    sqlValsToColsByRank += """ comm_rank """
    sqlValsToColsByRank += """,frame """
    sqlValsToColsByRank += """;"""
    #
    results, col_names = SOS.query(sqlValsToColsByRank, sosHost, sosPort)
    #
    #
    #####
    print str(col_names)
    print str(results)
    #####
    #
    # Whew!  All done!
    #
    # NOTE: See vtkWriter.py for more details.
    #
    SOS.finalize()
    #
    #####
    print "   ...DONE!"
    print
    return
Example no. 15
def sosVTKProjector():
    SOS = SSOS()

    sosHost = "localhost"
    sosPort = os.environ.get("SOS_CMD_PORT")
    printf("Initializing SOS: ...\b\b\b")
    SOS.init()
    printf("OK!\n")

    #####
    #
    # Get the maximum simulation cycle found in the database.
    #
    # NOTE: The cycleFieldName variable should match what is being used
    #       either by your application or SOSflow. If you are not using
    #       an explicit cycle value, you can use SOSflow's internal
    #       field named "frame" that is updated every time SOS_publish(...)
    #       is called. As long as you are publishing to SOS at the end
    #       of major program steps, this will give you what you want.
    #
    # NOTE: For online queries, if you want to ensure that your most
    #       current projection represents a complete set of values,
    #       and you're investigating a block-synchronous code, you can
    #       grab the current maximum and subtract one.
    #
    cycleFieldName = "frame"
    #
    sqlMaxFrame = "SELECT MAX(" + cycleFieldName + ") FROM viewCombined;"
    results, col_names = SOS.query(sqlMaxFrame, sosHost, sosPort)
    max_cycle = int(results[0][0])
    print "Maximum observed '" + cycleFieldName + "' value: " + str(max_cycle)
    #
    sqlMaxFrame = "SELECT MAX(comm_rank) FROM viewCombined;"
    results, col_names = SOS.query(sqlMaxFrame, sosHost, sosPort)
    rank_max = int(results[0][0])
    print "Maximum observed  'comm_rank' value: " + str(rank_max)
    #
    #####

    #####
    #
    # Here we drive the generation of the .vtk file[s]:
    filenames = []
    #
    # NOTE: When allocation time is scarce, 'stride' here can be
    #       set so that intermediate cycles can be skipped, which is
    #       especially useful when there are thousands of cycles.
    #
    stride = 1
    #
    # EXAMPLE A: Generate .vtk set for ALL simulation cycles:
    print "Generating VTK files..."
    lastX = [0.0] * (rank_max + 1)
    lastY = [0.0] * (rank_max + 1)
    lastZ = [0.0] * (rank_max + 1)
    for simCycle in range(0, max_cycle, stride):
        printf("    ... %d of %d ", (simCycle + 1), max_cycle)
        vtkOutputFileName = generateVTKFile(SOS, cycleFieldName, simCycle,
                                            lastX, lastY, lastZ)
        filenames.append(vtkOutputFileName)
    #end:for simCycle
    printf("                                        ")
    printf("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
    printf("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b")
    # -----
    # EXAMPLE B: Generate .vtk file for MOST RECENT cycle:
    #vtkOutputFile = generateVTKFile(selectedFields, max_cycle)
    #filenames.append(vtkOutputFile)
    #
    #####

    #####
    #
    # Produce a dataset.visit 'group file' that tells VisIt about our per-
    # cycle .vtk files, to explore them in sequence:
    vtk_writer.write_visit_file(filenames)
    #
    #####

    #####
    #
    # NOTE: This block of code can be used to launch VisIt automatically
    #       after the script generates the input file.
    #
    #visit.AddArgument("-par")
    #visit.Launch()
    #OpenDatabase("dataset.visit")
    #AddPlot("Pseudocolor", "rank")
    #AddPlot("Mesh", "mesh")
    #DrawPlots()
    # loop through times
    #tsNames = GetWindowInformation().timeSliders
    #for ts in tsNames:
    #    SetActiveTimeSlider(ts)
    #for state in list(range(TimeSliderGetNStates()))[::10] + [0]:
    #    SetTimeSliderState(state)
    #print "Setting share_power permissions on the newly created VTK files..."
    #subprocess.call("$PROJECT_BASE/share_power .", shell=True)
    #print ""
    #print "Sleeping for 100 seconds..."
    #print ""
    #time.sleep(100)
    #
    #####

    #####
    #
    # Whew!  All done!
    #
    # NOTE: See vtkWriter.py for more details.
    #
    SOS.finalize()
    #
    #####
    print "   ...DONE!"
    print
    return