def addColumnsToItems(items, config):
    """Derive new columns on each item per config["metadata"]["cols"].

    Each column spec maps item[fromKey] -> item[toKey], optionally casting
    to int/float ("type") and optionally replacing the value with its index
    into the sorted set of unique values ("asIndex").

    Args:
        items: list of dicts, mutated in place (toKey entries are added).
        config: dict with config["metadata"]["cols"], a list of column specs
            ({"fromKey", "toKey", optional "type", optional "asIndex"}).

    Returns:
        (sets, items): sets maps toKey -> sorted unique source values for
        asIndex columns; items is the same (mutated) list passed in.
    """
    columns = config["metadata"]["cols"]

    # Build sorted unique-value lists for columns flagged asIndex,
    # plus a value->position dict so per-item lookup is O(1) instead of
    # the previous list.index() scan (which made the main loop O(n*m)).
    # Assumes source values are hashable (they were already sorted/deduped
    # in the original, so strings/numbers are expected).
    sets = {}
    indexMaps = {}
    for col in columns:
        if col.get("asIndex"):
            prop = col["fromKey"]
            values = sorted(set(item[prop] for item in items if prop in item))
            sets[col["toKey"]] = values
            indexMaps[col["toKey"]] = {v: i for i, v in enumerate(values)}

    for i, item in enumerate(items):
        for col in columns:
            if "fromKey" not in col:
                continue
            fromKey = col["fromKey"]
            toKey = col["toKey"]
            value = item.get(fromKey, "")
            colType = col.get("type")
            # Guard against int("")/float("") ValueError when the source
            # key is missing (the fallback default is the empty string).
            if colType == "int":
                value = int(value) if value != "" else 0
            elif colType == "float":
                value = float(value) if value != "" else 0.0
            if col.get("asIndex") and toKey in indexMaps:
                # -1 marks values absent from the unique set (e.g. a
                # type-cast value that no longer matches the raw strings).
                value = indexMaps[toKey].get(value, -1)
            items[i][toKey] = value
    return (sets, items)
# Project-local helper modules (I/O, item loading, list utils, math utils).
import lib.io_utils as io
import lib.item_utils as tu
import lib.list_utils as lu
import lib.math_utils as mu

# input
# NOTE(review): `argparse` is used below but not imported in this chunk —
# presumably imported earlier in the file; TODO confirm.
parser = argparse.ArgumentParser()
parser.add_argument("-config", dest="CONFIG_FILE", default="config-sample.json", help="Config file")
a = parser.parse_args()

# Load app config; views each declare a layout (deduped here, value unused in this chunk).
config = io.readJSON(a.CONFIG_FILE)
configViews = config["views"]
layouts = lu.unique([view["layout"] for key, view in configViews.items()])

PRECISION = 5
# Output paths are derived from the app name in the config.
OUTPUT_DIR = "apps/{appname}/".format(appname=config["name"])
OUTPUT_POS_DIR_REL = "data/positions/"
OUTPUT_POS_DIR = OUTPUT_DIR + OUTPUT_POS_DIR_REL
CONFIG_FILE = OUTPUT_DIR + "js/config/config.positions.js"

# Make sure output dirs exist
io.makeDirectories([OUTPUT_POS_DIR, CONFIG_FILE])

# Remove existing data
# Clears any previously generated position JSON before regenerating.
io.removeFiles(OUTPUT_POS_DIR + "*.json")

# Load the items to be positioned; tu.getItems also returns index sets.
sets, items = tu.getItems(config)
itemCount = len(items)
# Make sure output dirs exist io.makeDirectories([a.OUTPUT_FILE]) if len(a.DETAILED_OUTPUT_FILE) > 0: io.makeDirectories([a.DETAILED_OUTPUT_FILE]) items = [] fieldNames = [] if "*" in a.INPUT_FILE: files = glob.glob(a.INPUT_FILE) for fn in files: fFieldNames, fItems = io.readCsv(fn) fieldNames += fFieldNames items += fItems fieldNames = lu.unique(fieldNames) else: fieldNames, items = io.readCsv(a.INPUT_FILE) # make unique based on id items = list({item['Catalog No']: item for item in items}.values()) itemCount = len(items) # Parse lists for i, item in enumerate(items): for field in FIELDS_LISTS: items[i][field] = [ value.strip() for value in item[field].strip().split(",") ]
# NOTE(review): help string contains a typo ("the an item group") — left
# unchanged here because it is a runtime string literal.
parser.add_argument('-gwidth', dest="ITEM_GROUP_WIDTH", default=4.0, type=float, help="Max width of the an item group as a percentage of min(year width, region height)")
parser.add_argument('-plot', dest="PLOT", action="store_true", help="Plot the data?")
parser.add_argument('-probe', dest="PROBE", action="store_true", help="Just output details?")
a = parser.parse_args()

# Fixed palette for item groups.
COLORS = ["#612323", "#204f1c", "#4d1e59", "#112e6b", "#4b5713", "#571330"]
colorCount = len(COLORS)

# Make sure output dirs exist
io.makeDirectories([a.OUTPUT_FILE, a.CACHE_FILE])

# Label font for rendering; ImageFont presumably comes from PIL, imported
# earlier in the file — TODO confirm.
font = ImageFont.truetype(font="fonts/Open_Sans/OpenSans-Regular.ttf", size=a.FONT_SIZE)

print("Reading data...")
fieldNames, items = io.readCsv(a.INPUT_FILE)
# Candidate y-axis labels: unique values of the configured y-axis column.
yLabels = lu.unique([item[a.Y_AXIS] for item in items])

if a.Y_AXIS == "Region":
    # Europe is excluded from the region axis (deliberate filter).
    items = [item for item in items if item["Region"] != "Europe"]
    itemsByRegion = lu.groupList(items, "Region")
    # Mean latitude per region (only values in the valid [-90, 90] range),
    # used to order regions north-to-south. `np` presumably numpy, imported
    # earlier in the file — TODO confirm.
    for i, region in enumerate(itemsByRegion):
        itemsByRegion[i]["lat"] = np.mean([item["Latitude"] for item in region["items"] if -90 <= item["Latitude"] <= 90])
    itemsByRegion = sorted(itemsByRegion, key=lambda region: -region["lat"])
    yLabels = [region["Region"] for region in itemsByRegion]
else:
    yLabels = sorted(yLabels)
yLabelCount = len(yLabels)

xLabels = []
yearStart = yearEnd = None
if "Year" in a.X_AXIS:
    # 9999 acts as a sentinel for unknown years; drop those items.
    items = [item for item in items if item[a.X_AXIS] < 9999]