def one_of_condition_column_from_a1(worksheet, categories, start_cell, count):
    """Restrict a vertical run of ``count`` cells, starting at ``start_cell``
    (A1 notation), to the given list of ``categories``.

    The rule is strict (invalid input is rejected) and shows the dropdown UI.
    """
    row, col = gspread.utils.a1_to_rowcol(start_cell)
    last_cell = gspread.utils.rowcol_to_a1(row + count - 1, col)
    rule = gf.DataValidationRule(
        gf.BooleanCondition("ONE_OF_LIST", categories),
        showCustomUi=True,
        strict=True,
    )
    gf.set_data_validation_for_cell_range(
        worksheet, f"{start_cell}:{last_cell}", rule
    )
def checkbox_condition(worksheet, cell_range):
    """Apply a checkbox (BOOLEAN) data-validation rule to every cell in
    ``cell_range`` (A1 notation) of ``worksheet``.

    The rule is strict and shows the checkbox UI; the input message asks the
    user (in Czech) to please use the checkbox.
    """
    # Fix 1: the original computed worksheet.range(cell_range)[0].address into
    # an unused local, which costs an extra Sheets API round-trip — dropped.
    # Fix 2: typo in the Czech message corrected ("zašrktávací" -> "zaškrtávací").
    validation_rule = gf.DataValidationRule(
        gf.BooleanCondition("BOOLEAN", []),
        showCustomUi=True,
        strict=True,
        inputMessage="Použijte prosím zaškrtávací políčko.",
    )
    gf.set_data_validation_for_cell_range(worksheet, cell_range, validation_rule)
def at_most_n_chars_condition(worksheet, n, cell_range, input_message):
    """Limit every cell in ``cell_range`` to at most ``n`` characters via a
    strict CUSTOM_FORMULA validation rule; ``input_message`` is shown to the
    user on the validated cells.
    """
    # Anchor the LEN() formula on the first cell of the range; Sheets then
    # applies it relatively across the whole range.
    anchor = worksheet.range(cell_range)[0].address
    length_formula = f"=(LEN({anchor}) <= {n})"
    rule = gf.DataValidationRule(
        gf.BooleanCondition("CUSTOM_FORMULA", [length_formula]),
        showCustomUi=True,
        strict=True,
        inputMessage=input_message,
    )
    gf.set_data_validation_for_cell_range(worksheet, cell_range, rule)
def createSheetFromFile(filePath):
    """Create a checklist spreadsheet named after ``filePath``, share it with
    ``ownerEmail`` (module-level), and fill it with the items returned by
    ``getItems(filePath)``, each with a validated status dropdown in column B.
    """
    # create the spreadsheet
    gc = gspread.service_account()
    spreadsheet = gc.create(filePath)
    spreadsheet.share(ownerEmail, perm_type='user', role='writer')

    # edit the spreadsheet
    items = gc.open(filePath).sheet1

    # creating/configuring headers
    items.update('A1', 'Item:')
    gsf.set_column_width(items, 'A', 450)
    items.update('B1', 'Status:')
    gsf.set_column_width(items, 'B', 200)

    # data validation for status column
    defaultItemStatus = '❌ INCOMPLETE'
    validation_rule = gsf.DataValidationRule(
        gsf.BooleanCondition(
            'ONE_OF_LIST', [defaultItemStatus, '⚠️ WIP', '✅ COMPLETE']),
        showCustomUi=True)

    toAdd = getItems(filePath)

    # The list starts at row 3: row 1 holds the headers and row 2 is a
    # one-row gap below them. (The original tracked this with two always-equal
    # counters plus a second +1 offset; collapsed to a single row index.)
    firstRow = 3
    for offset, item in enumerate(toAdd):
        row = firstRow + offset
        items.update(f'A{row}', item)
        items.update(f'B{row}', defaultItemStatus)

    # Same range as before: B3 through the last status row (B2 when empty,
    # matching the original's behavior for an empty item list).
    lastRow = firstRow + len(toAdd) - 1
    columnBRange = f'B3:B{lastRow}'
    gsf.set_data_validation_for_cell_range(items, columnBRange, validation_rule)
def ends_with_str_condition(worksheet, ending, cell_range, input_message):
    """Require every non-blank cell in ``cell_range`` to end with ``ending``,
    using a strict CUSTOM_FORMULA validation rule.

    Blank cells pass; numeric cells are rendered as text (TEXT(.., "0")) and
    checked against the regex ``.*<ending>$``; any other content fails.
    """
    anchor = worksheet.range(cell_range)[0].address
    suffix_formula = (
        f'=IF(ISBLANK({anchor}),TRUE, IF(ISNUMBER({anchor}),'
        f'REGEXMATCH(TEXT({anchor},"0"),".*{ending}$"), FALSE))'
    )
    rule = gf.DataValidationRule(
        gf.BooleanCondition("CUSTOM_FORMULA", [suffix_formula]),
        showCustomUi=True,
        strict=True,
        inputMessage=input_message,
    )
    gf.set_data_validation_for_cell_range(worksheet, cell_range, rule)
def fetch(verbose: bool = False):
    """Fetch all sheets from project spreadsheet to .cogs/ directory.

    Downloads every remote worksheet as TSV into the local cache, collects
    per-sheet formatting, notes, data-validation rules, and frozen row/col
    counts, then rewrites the local tracking files (formats.json, format.tsv,
    note.tsv, validation.tsv, renamed.tsv, sheet.tsv) to match the remote
    state. Sheets marked Ignore, and new remote sheets (auto-added as
    ignored), are skipped. Raises whatever validate_cogs_project/get_config
    raise when not inside a valid COGS project.
    """
    set_logging(verbose)
    cogs_dir = validate_cogs_project()
    config = get_config(cogs_dir)
    gc = get_client_from_config(config)
    spreadsheet = gc.open_by_key(config["Spreadsheet ID"])

    # Get the remote sheets from spreadsheet
    sheets = spreadsheet.worksheets()
    remote_sheets = get_remote_sheets(sheets)
    tracked_sheets = get_tracked_sheets(cogs_dir, include_no_id=True)
    # Map remote sheet ID -> locally tracked title (sheets without an ID are
    # not yet pushed, so they are excluded here)
    id_to_title = {
        int(details["ID"]): sheet_title
        for sheet_title, details in tracked_sheets.items()
        if details.get("ID")
    }

    # Get details about renamed sheets
    renamed_local = get_renamed_sheets(cogs_dir)
    renamed_remote = {}

    # Format ID to format for cell formatting
    id_to_format = get_format_dict(cogs_dir)
    if id_to_format:
        # Format to format ID (keyed by canonical JSON so equal dicts collide)
        format_to_id = {
            json.dumps(v, sort_keys=True): k for k, v in id_to_format.items()
        }
        # Next ID for new formats
        format_ids = list(id_to_format.keys())
        format_ids.sort()
        next_fmt_id = int(format_ids[-1]) + 1
    else:
        format_to_id = {}
        next_fmt_id = 1

    # Export the sheets as TSV to .cogs/
    # We also collect the formatting and note data for each sheet during this step
    sheet_formats = {}
    sheet_notes = {}
    sheet_dv_rules = {}
    sheet_frozen = {}
    # Lines to add to sheet.tsv of sheets to ignore
    new_ignore = []
    for sheet in sheets:
        remote_title = sheet.title
        # Ignore flag is stored as the string "True" in sheet.tsv
        if remote_title in tracked_sheets and tracked_sheets[remote_title].get(
                "Ignore") == "True":
            logging.info(f"Skipping ignored sheet '{remote_title}'...")
            continue

        # Download the sheet as the renamed sheet if necessary
        if remote_title in renamed_local:
            st = renamed_local[remote_title]["new"]
            logging.info(
                f"Downloading sheet '{remote_title}' as {st} (renamed locally)"
            )
        else:
            st = remote_title
            if sheet.id in id_to_title:
                local_title = id_to_title[sheet.id]
                if local_title != remote_title:
                    # The sheet title has been changed remotely
                    # This will be updated in tracking but the local sheet will remain
                    old_path = tracked_sheets[local_title]["Path"]
                    logging.warning(
                        f"Local sheet '{local_title}' has been renamed to '{st}' remotely:"
                        f"\n  - '{local_title}' is removed from tracking and replaced with '{st}'"
                        f"\n  - {old_path} will not be updated when running `cogs pull` "
                        f"\n  - changes to {old_path} will not be pushed to the remote spreadsheet"
                    )
                    renamed_remote[local_title] = {
                        "new": st,
                        # Derive a filesystem-safe TSV path from the new title
                        "path": re.sub(r"[^A-Za-z0-9]+", "_", st.lower()) + ".tsv",
                    }
                    logging.info(
                        f"Downloading sheet '{local_title}' as '{st}' (renamed remotely)"
                    )
                else:
                    logging.info(f"Downloading sheet '{st}'")
            else:
                # Add as an ignored sheet (new remote sheet we are not tracking)
                new_ignore.append({
                    "ID": sheet.id,
                    "Title": st,
                    "Ignore": True
                })
                logging.info(f"Ignoring new remote sheet '{st}'")
                continue

        # Get frozen rows & columns
        sheet_frozen[st] = {
            "row": sheet.frozen_row_count,
            "col": sheet.frozen_col_count,
        }

        # Get the cells with format, value, and note from remote sheet
        # (presumably a dict of A1 label -> cell-data dict; keys observed below:
        # "data_validation", "format", "note" — confirm against get_cell_data)
        cells = get_cell_data(cogs_dir, sheet)

        # Create a map of rule -> locs for data validation
        dv_rules = {}
        str_to_rule = {}
        for loc, cell_data in cells.items():
            if "data_validation" not in cell_data:
                continue
            data_validation = cell_data["data_validation"]
            condition = data_validation["condition"]
            bc = gf.BooleanCondition(condition, data_validation["value"])
            dv = gf.DataValidationRule(bc)
            # Rules are grouped by their string form, so identical rules on
            # different cells share one entry
            if str(dv) not in str_to_rule:
                str_to_rule[str(dv)] = dv
            if str(dv) in dv_rules:
                locs = dv_rules[str(dv)]
            else:
                locs = []
            locs.append(loc)
            dv_rules[str(dv)] = locs

        # Aggregate by location and format for validate.tsv
        dv_rows = clean_data_validation_rules(dv_rules, str_to_rule)
        sheet_dv_rules[st] = dv_rows

        # Get the ending row & col that have values
        # Otherwise we end up with a bunch of empty rows/columns
        # Also get the data validation
        # NOTE(review): max_row/max_col are computed here but not read later in
        # this visible span — possibly dead code; confirm before removing.
        max_row = 0
        max_col = 0
        for c in cells.keys():
            row, col = gspread.utils.a1_to_rowcol(c)
            if row > max_row:
                max_row = row
            if col > max_col:
                max_col = col

        # Cell label to format dict
        cell_to_format = {
            cell: data["format"] for cell, data in cells.items()
            if "format" in data
        }

        # Create a cell to format ID dict based on the format dict for each
        # cell. Runs of consecutive cells sharing one format ID are coalesced
        # into "start:end" range keys; single-cell runs keep the plain label.
        cell_to_format_id = {}
        last_fmt = None
        cell_range_start = None
        cell_range_end = None
        for cell, fmt in cell_to_format.items():
            if not fmt:
                # Empty format ends any open run: flush it as a single cell or
                # a range, then reset the run state
                if last_fmt:
                    if not cell_range_end or cell_range_start == cell_range_end:
                        cell_to_format_id[cell_range_start] = last_fmt
                    else:
                        cell_to_format_id[
                            f"{cell_range_start}:{cell_range_end}"] = last_fmt
                last_fmt = None
                cell_range_start = None
                cell_range_end = None
                continue
            key = json.dumps(fmt, sort_keys=True)
            if key in format_to_id:
                # Format already exists, assign that ID
                fmt_id = format_to_id[key]
            else:
                # Assign new ID
                fmt_id = next_fmt_id
                format_to_id[key] = fmt_id
                id_to_format[fmt_id] = fmt
                next_fmt_id += 1
            if last_fmt and fmt_id == last_fmt:
                # The last cell had a format and the this cell's format is the same as the last
                # so we increase the range
                cell_range_end = cell
            elif last_fmt and fmt_id != last_fmt:
                # The last cell had a format but it was different than the current format
                if cell_range_start == cell_range_end or not cell_range_end:
                    # Not a range, just a single cell (the previous cell)
                    cell_to_format_id[cell_range_start] = last_fmt
                else:
                    cell_to_format_id[
                        f"{cell_range_start}:{cell_range_end}"] = last_fmt
                # Restarting a new range at this cell
                cell_range_start = cell
                cell_range_end = None
            else:
                # No last formatting to compare to, start a new range
                cell_range_start = cell
                cell_range_end = cell
            last_fmt = fmt_id
        if cell_to_format_id:
            sheet_formats[st] = cell_to_format_id

        # Add the cell to note
        cell_to_note = {
            cell: data["note"] for cell, data in cells.items() if "note" in data
        }
        if cell_to_note:
            sheet_notes[st] = cell_to_note

        # Write values to .cogs/tracked/{sheet title}.tsv
        cached_path = get_cached_path(cogs_dir, st)
        with open(cached_path, "w") as f:
            lines = sheet.get_all_values()
            writer = csv.writer(f, delimiter="\t", lineterminator="\n")
            writer.writerows(lines)

    # Write or rewrite formats JSON with new dict
    with open(f"{cogs_dir}/formats.json", "w") as f:
        f.write(json.dumps(id_to_format, sort_keys=True, indent=4))

    # Update local sheets details in sheet.tsv with new IDs & details for current tracked sheets
    all_sheets = get_updated_sheet_details(tracked_sheets, remote_sheets,
                                           sheet_frozen)

    removed_titles = remove_sheets(cogs_dir, sheets, tracked_sheets,
                                   renamed_local, renamed_remote)

    # Add renamed-remote entries to renamed.tsv (append mode preserves the
    # locally-renamed entries already in the file)
    for old_title, details in renamed_remote.items():
        with open(f"{cogs_dir}/renamed.tsv", "a") as f:
            new_title = details["new"]
            new_path = details["path"]
            f.write(f"{old_title}\t{new_title}\t{new_path}\tremote\n")

    # Rewrite format.tsv and note.tsv with current remote formats & notes
    update_format(cogs_dir, sheet_formats, removed_titles)
    update_note(cogs_dir, sheet_notes, removed_titles)

    # Remove old data validation rules and rewrite with new
    with open(f"{cogs_dir}/validation.tsv", "w") as f:
        f.write("Sheet\tRange\tCondition\tValue\n")
    update_data_validation(cogs_dir, sheet_dv_rules, removed_titles)

    # Then update sheet.tsv
    all_sheets.extend(new_ignore)
    update_sheet(cogs_dir, all_sheets, removed_titles)