Example #1
    def _min_length_nozzle_setup(self):
        # Generate Prandtl-Meyer table for calculations
        self.mach_numbers, self.mach_angles, self.prandtl_meyer = generate_table(
            self.gamma, (1.0, self.design_mach), 0.01, 4, write_csv=False)

        # Calculate theta max
        theta_max = self.prandtl_meyer[self.mach_numbers.index(
            self.design_mach)] / 2.0

        # Define characteristic line thetas
        self.characteristic_thetas = [
            round(x, 8) for x in np.linspace(self.start_angle, theta_max,
                                             self.characteristics)
        ]

        # Generate list of point indices for the given number of characteristic lines
        self.points = list(range(self._number_of_points(self.characteristics)))

        # Create empty array for MoC data
        self.moc_table_headers = [
            'R+', 'R-', 'theta', 'nu', 'M', 'm_angle', 'theta+m_angle',
            'theta-m_angle', 'x', 'y'
        ]
        self.moc_table = np.zeros(len(self.points),
                                  dtype=[(header, 'f')
                                         for header in self.moc_table_headers])

        # The last two points form the line of the final expansion before uniform flow
        self.moc_table['M'][self.points[-1]] = self.design_mach
        self.moc_table['M'][self.points[-2]] = self.design_mach

        for idx, theta in enumerate(self.characteristic_thetas):
            # From definitions
            self.moc_table['theta'][idx] = theta
            # R+ = 0 at the throat, since nu = theta there
            self.moc_table['R+'][idx] = 0
            # Characteristic lines start at the top of the throat (0, 1)
            self.moc_table['x'][idx] = 0
            self.moc_table['y'][idx] = 1

            # Calculate nu (nu at throat = theta) and hence R- (R- = nu + theta)
            nu = theta
            self.moc_table['nu'][idx] = nu
            self.moc_table['R-'][idx] = nu + theta

            # Retrieve Mach number and angle from Prandtl-Meyer table
            nu_idx = min(enumerate(self.prandtl_meyer),
                         key=lambda y: abs(y[1] - nu))  # Nearest (index, value) pair to nu
            m_angle = self.mach_angles[nu_idx[0]]
            self.moc_table['M'][idx] = self.mach_numbers[nu_idx[0]]
            self.moc_table['m_angle'][idx] = m_angle
            self.moc_table['theta+m_angle'][idx] = theta + m_angle
            self.moc_table['theta-m_angle'][idx] = theta - m_angle

            # Remove idx from points list
            self.points.remove(idx)
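In the loop above each throat point has nu = theta, R+ = 0 and R- = nu + theta, which is consistent with the convention R+ = nu - theta. For the interior points that the rest of the routine would compute, those two invariants determine the local flow; a minimal illustrative sketch of that step (not part of the original class):

# Illustration only: where a right-running characteristic (carrying
# R- = nu + theta) meets a left-running one (carrying R+ = nu - theta),
# the Prandtl-Meyer angle and flow angle follow from the two invariants.
def interior_point(r_minus, r_plus):
    nu = (r_minus + r_plus) / 2.0
    theta = (r_minus - r_plus) / 2.0
    return nu, theta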
Example #2
def main():
    print("[INFO]: ############################################################")
    print("[INFO]: # Starting integirty checker")
    print("[INFO]: ############################################################\n")

    print("[INFO]: Sending request ...")
    answer = requests.get(base_url + request, headers=headers)

    if answer.status_code == 200:
        print("[INFO]: Request successfull...")
    else:
        print("[ERR]: Request failed! Received {0} status code from codebeamer".format(answer.status_code))
        sys.exit(1)

    content = json.loads(answer.content)
    items = parse_items_to_named_dict(content['items'])

    if TABLE_DISABLED == 'true':
        print("[INFO]: Skipping table generation ...")
    else:
        generate_table(items)

    message = ""
    if LINK_CHECKER_DISABLED == 'true':
        print("[INFO]: Skipping link checker ...")
    else:
        message += check_links(items)

    if WIKI_CHECKER_DISABLED == 'true':
        print("[INFO]: Skipping wiki checker ...")
    else:
        message += check_wiki_text(items)

    print("[INFO]: Displaying message to be send ...")
    print(message)
    print("[INFO]: End of message ...")

    send_mail(TABLE_DISABLED, message)

    print("[INFO]: ############################################################")
    print("[INFO]: # Integirty checker finished")
    print("[INFO]: ############################################################\n")
Example #3
def build_table(param_args):
    """Generates a string representing a table in html format.

        param_args - a dictionary that has the parameters for building up the
            html table. The dictionary includes the following:

            param_args['attributes'] - a dictionary of html table attributes.
                Each key is an attribute name whose value becomes the
                attribute's value. (optional)
                Example: {'class': 'sorttable', 'id': 'parcel_table'}

            param_args['sortable'] - a boolean value that determines whether the
                table should be sortable (required)

            param_args['data_type'] - a string depicting the type of input to
                build the table from. Either 'shapefile', 'csv', or 'dictionary'
                (required)

            param_args['data'] - a URI to a csv or shapefile OR a list of
                dictionaries. If a list of dictionaries the data should be
                represented in the following format: (required)
                    [{col_name_1: value, col_name_2: value, ...},
                     {col_name_1: value, col_name_2: value, ...},
                     ...]

            param_args['key'] - a string that depicts which column (csv) or
                field (shapefile) will be the unique key to use in extracting
                the data into a dictionary. (required for 'data_type'
                'shapefile' and 'csv')

            param_args['columns'] - a list of dictionaries that defines the
                    column structure for the table (required). The order of
                    the columns from left to right is depicted by the index
                    of the column dictionary in the list. Each dictionary
                    in the list has the following keys and values:
                        'name' - a string for the column name (required)
                        'total' - a boolean for whether the column should be
                            totaled (required)
                        'attr' - a dictionary that has key value pairs for
                            optional tag attributes (optional). Ex:
                            'attr': {'class': 'offsets'}
                        'td_class' - a string to assign as a class name to
                            the table data tags under the column. Each
                            table data tag under the column will have its
                            class attribute set to the 'td_class' value
                            (optional)

            param_args['total'] - a boolean value; if True, a total row that
                sums the columns is placed at the bottom of the table
                (required)

        returns - a string that represents an html table
    """
    LOGGER.debug('Building Table Structure')
    # Initialize an intermediate dictionary which will hold the physical data
    # elements of the table
    data_dict = {}

    # Initialize the final dictionary which will have the data of the table as
    # well as parameters needed to build up the html table
    table_dict = {}

    # Get the data type of the input being passed in so that it can properly be
    # pre-processed
    data_type = param_args['data_type']

    # Get a handle on the input data being passed in, whether it is a URI to a
    # shapefile / csv file or a list of dictionaries
    input_data = param_args['data']

    # Depending on the type of input being passed in, pre-process it
    # accordingly
    if data_type == 'shapefile':
        key = param_args['key']
        data_dict = natcap.invest.pygeoprocessing_0_3_3.geoprocessing.extract_datasource_table_by_key(
            input_data, key)
        # Convert the data_dict to a list of dictionaries where each dictionary
        # in the list represents a row of the table
        data_list = data_dict_to_list(data_dict)
    elif data_type == 'csv':
        key = param_args['key']
        data_dict = natcap.invest.pygeoprocessing_0_3_3.geoprocessing.get_lookup_from_csv(input_data, key)
        # Convert the data_dict to a list of dictionaries where each dictionary
        # in the list represents a row of the table
        data_list = data_dict_to_list(data_dict)
    else:
        data_list = input_data

    #LOGGER.debug('Data Collected from Input Source: %s', data_list)
    LOGGER.debug('Data Collected from Input Source')

    # Add the columns data to the final dictionary that is to be passed
    # off to the table generator
    table_dict['cols'] = param_args['columns']

    # Add the properly formatted row data to the final dictionary that is
    # to be passed to the table generator
    table_dict['rows'] = data_list

    # If a totals row is present, add it to the final dictionary
    if 'total' in param_args:
        table_dict['total'] = param_args['total']

    # If table attributes were passed in check to see if the 'sortable' class
    # needs to be added to that list
    if 'attributes' in param_args:
        table_dict['attributes'] = param_args['attributes']
        if param_args['sortable']:
            try:
                class_list = table_dict['attributes']['class'] + ' sortable'
                table_dict['attributes']['class'] = class_list
            except KeyError:
                table_dict['attributes']['class'] = 'sortable'
    else:
        # Attributes were not passed in, however if sortable is True
        # create attributes key and dictionary to pass in to table
        # handler
        if param_args['sortable']:
            table_dict['attributes'] = {'class': 'sortable'}

    # If a checkbox column is wanted pass in the table dictionary
    if 'checkbox' in param_args and param_args['checkbox']:
        table_dict['checkbox'] = True
        if 'checkbox_pos' in param_args:
            table_dict['checkbox_pos'] = param_args['checkbox_pos']

    LOGGER.debug('Calling table_generator')
    # Call generate_table, passing in the final dictionary. Return the
    # generated string
    return table_generator.generate_table(table_dict)
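A minimal usage sketch, assuming the 'dictionary' data type; the column names and row values below are hypothetical:

# Hypothetical parameters for a sortable two-column table built from a
# list of dictionaries; build_table returns the html string.
sample_args = {
    'sortable': True,
    'data_type': 'dictionary',
    'data': [
        {'parcel_id': 1, 'area': 10.5},
        {'parcel_id': 2, 'area': 7.25},
    ],
    'columns': [
        {'name': 'parcel_id', 'total': False},
        {'name': 'area', 'total': True},
    ],
    'total': True,
    'attributes': {'id': 'parcel_table'},
}

html_string = build_table(sample_args)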
Example #4
This will generate an assembly file containing the definition of the sine table.
The table will be named "sine_table" and stored in program space.

Usage:
    python gen_sine_table.py <c file> <h file> <inc file>
"""
import sys
import math
import table_generator

# Number of entries in the table
SINE_TABLE_LENGTH = 2048

# Maximum value for table entries
MAX_VALUE = 0x7FFF

# Variable type for C data structure
VAR_TYPE = '__attribute__((space(psv))) const int16_t'

# Name of the array in C
VAR_NAME = 'sine_table'

# Calculate sine table values
sine_values = []
for i in range(SINE_TABLE_LENGTH):
    radians = i * 2 * math.pi / SINE_TABLE_LENGTH
    value = int(round(math.sin(radians) * MAX_VALUE))
    sine_values.append('{:#06x}'.format(value))

table_generator.generate_table(sine_values, VAR_TYPE, VAR_NAME)
Example #5
OCTAVE_SCALAR = 2

# Sample rate in samples per second
SAMPLE_RATE = 44100

# Sine table length
SINE_TABLE_LENGTH = 2048

# Variable type for C data structure
VAR_TYPE = '__attribute__((section(".text"))) const uint32_t'

# Name of the array in C
VAR_NAME = 'note_freqs'

freq_values = []
for note_num in range(MIDI_NOTES):

    # Calculate note frequency in Hz
    freq_ratio = (note_num - MIDDLE_A_NOTE_NUM) / NOTES_PER_OCTAVE
    freq_hz = (OCTAVE_SCALAR ** freq_ratio) * TUNING

    # Convert note frequency to sine table entries per frame.
    freq_ipf = freq_hz * (1 / SAMPLE_RATE) * SINE_TABLE_LENGTH

    # Convert to 16.16 fixed point
    freq_ipf_fixed = round(freq_ipf * (1 << 16))

    freq_values.append('{:#010x}'.format(freq_ipf_fixed))

table_generator.generate_table(freq_values, VAR_TYPE, VAR_NAME)
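A minimal sketch (illustration only) of how the generated 16.16 fixed-point increments could drive a sine-table oscillator; note_freqs and sine_table stand in for the generated C arrays, and the accumulator logic below is an assumption rather than the firmware's actual code:

def next_sample(phase, note_num, note_freqs, sine_table):
    # Advance the phase accumulator by the per-frame increment (16.16 fixed
    # point), wrap to 32 bits, then index the sine table with the integer part.
    phase = (phase + note_freqs[note_num]) & 0xFFFFFFFF
    index = (phase >> 16) % SINE_TABLE_LENGTH
    return phase, sine_table[index]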
Example #6
    best_tab = [[1, 2, 3, 3, 2, 1], [3, 4, 5, 5, 4, 3], [7, 9, 11, 11, 9, 7],
                [9, 10, 13, 13, 10, 9], [7, 9, 11, 11, 9, 7],
                [3, 4, 5, 5, 4, 3], [1, 2, 3, 3, 2, 1]]
    nb_players = 5

    with open("fights.txt", "a") as log:

        player = AIPlayer(best_tab)
        player.name = "champion"
        dicc[player.name] = {"victoires": 0, "player": player}

        log.write("\nPlayer : " + player.name)
        log.write("\n" + str(best_tab))

        for players in range(nb_players):
            tab = generate_table()

            player = AIPlayer(tab)
            player.name = "player" + str(players)
            dicc[player.name] = {"victoires": 0, "player": player}

            log.write("\n\nPlayer : " + player.name)
            log.write("\n" + str(tab))

        print("total combats : " + str(nb_players * nb_players / 2))

        num_combat = 1
        for i in range(nb_players):
            for j in range(i + 1, nb_players):
                print("combat " + str(num_combat))
                num_combat += 1
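The nested index loop above visits each unordered pair of players exactly once, so nb_players players give nb_players * (nb_players - 1) / 2 combats (10 for 5 players). An equivalent sketch using itertools.combinations, shown for illustration only:

import itertools

# Illustration only: enumerate every unordered pair of player indices once.
for num_combat, (i, j) in enumerate(
        itertools.combinations(range(nb_players), 2), start=1):
    print("combat " + str(num_combat))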
Example #7
def main():
    persistence.init_database()
    current_season_years = firstSeason

    while current_season_years != lastSeason:
        season_years = current_season_years.split("-")

        current_season = season.Season(current_season_years, dict(), dict(),
                                       set(), False)
        season_id = str(season_years[0]) + "-" + str(season_years[1])
        season_repository.save_season(season_id, str(season_years[0]),
                                      str(season_years[1]), teams_by_season)

        if season_repository.is_season_completed(season_id):
            logging.info("Season " + season_id + " is already completed")

        else:

            csv_url = 'https://www.football-data.co.uk/mmz4281/' + str(
                season_years[0]) + str(season_years[1]) + '/SP1.csv'
            logging.debug("Calling %s", csv_url)
            # newSeason
            try:
                season_data = pd.read_csv(csv_url)
            # Some seasons need an explicit column list; avoid a bare except
            except Exception:
                columns = [
                    'Div', 'Date', 'HomeTeam', 'AwayTeam', 'FTHG', 'FTAG',
                    'FTR', 'HTHG', 'HTAG', 'HTR', 'B365H', 'B365D', 'B365A',
                    'BWH', 'BWD', 'BWA', 'GBH', 'GBD', 'GBA', 'IWH', 'IWD',
                    'IWA', 'LBH', 'LBD', 'LBA', 'SBH', 'SBD', 'SBA', 'WHH',
                    'WHD', 'WHA', 'GB>2.5', 'GB<2.5', 'B365>2.5', 'B365<2.5',
                    'GBAHH', 'GBAHA', 'GBAH', 'LBAHH', 'LBAHA', 'LBAH',
                    'B365AHH', 'B365AHA', 'B365AH'
                ]
                season_data = pd.read_csv(csv_url, usecols=columns)

            all_odds = []
            for row in season_data.itertuples():
                current_match = match.Match(season_id, row.Date, row.HomeTeam,
                                            row.AwayTeam, str(row.HTHG),
                                            str(row.HTAG), str(row.FTHG),
                                            str(row.FTAG))

                try:
                    match_stats = matchstats.MatchStats(
                        current_match, row.HS, row.AS, row.HST, row.AST,
                        row.HHW, row.AHW, row.HC, row.AC, row.HF, row.AF,
                        row.HO, row.AO, row.HR, row.AR, row.HY, row.AY)
                    print("Match stats ", match_stats)
                except Exception:
                    print("No Match stats found")

                add_match(current_season, current_match)
                all_odds.append(
                    odd_reader.read_odds(current_match.get_id(), row))

            persistence.save(current_season.get_matches())
            odd_repository.save(all_odds)
            table_generator.generate_table(current_season)
            if (len(current_season.get_matches()) == (teams_by_season / 2) *
                ((teams_by_season - 1) * 2)):
                current_season.set_completed(True)
                season_repository.season_completed(season_id)

            logging.info("Season " + current_season.years + " is completed: " +
                         str(current_season.is_completed))

        nextYear = nextSeasonYear(int(season_years[1]))
        current_season_years = str(season_years[1]) + "-" + nextYear
        logging.info("Season %s", current_season)