def __init__(self, required_labels, sheets_factory=None):
    """Record the labels a parsed table must provide and pick a sheets factory.

    :param required_labels:
        labels that parsed tables are required to contain.
    :param sheets_factory:
        optional `xleash.SheetsFactory`; when omitted, an already-set
        (class-wide) ``_sheets_factory`` attribute is reused, and a fresh
        factory is created only if none exists.
    """
    self.required_labels = required_labels
    if sheets_factory:
        self._sheets_factory = sheets_factory
    else:
        # Permit class-wide sheets-factory: build a new one only when
        # neither the argument nor a pre-existing attribute supplies it.
        # NOTE(review): presumes the class declares `_sheets_factory`
        # (e.g. ``_sheets_factory = None``) — not visible in this chunk.
        if not self._sheets_factory:
            self._sheets_factory = xleash.SheetsFactory()
    # Parse results accumulated later; reset per instance.
    self.headers, self.tables = [], []
    self.ref_fpath = self.ref_sh_name = None
mydir = osp.dirname(__file__) _sync_fname = 'datasync.xlsx' _synced_fname = 'datasync.sync.xlsx' def _abspath(fname): return osp.join(mydir, _sync_fname) def _file_url(fname): return ('file://' + _abspath(fname)).replace('\\', '/') _shfact = xleash.SheetsFactory() @ddt.ddt class TTables(unittest.TestCase): """UnitTest TCs for datasync.""" @classmethod def setUpClass(cls): os.chdir(mydir) @ddt.data( (('datasync.xlsx#Sheet1!', ), ('datasync.xlsx#Sheet1!', 'datasync.xlsx#Sheet2!', 'datasync.xlsx#Sheet3!')), (('datasync.xlsx', ), ('datasync.xlsx#Sheet1!', 'datasync.xlsx#Sheet2!',
def prepare_data(raw_data, variation, input_file_name, overwrite_cache,
                 output_folder, timestamp, type_approval_mode, modelconf):
    """
    Prepare the data to be processed.

    Merges *variation* overrides (resolving xl-ref strings through xleash)
    into *raw_data*, then splits the result into the ``base`` and ``plan``
    inputs, each carrying the validated execution flags.

    :param raw_data:
        Raw data from the input file.
    :type raw_data: dict

    :param variation:
        Variations to be applied.
    :type variation: dict

    :param input_file_name:
        Input file name.
    :type input_file_name: str

    :param overwrite_cache:
        Overwrite saved cache?
    :type overwrite_cache: bool

    :param output_folder:
        Output folder.
    :type output_folder: str

    :param timestamp:
        Run timestamp.
    :type timestamp: str

    :param type_approval_mode:
        Is launched for TA?
    :type type_approval_mode: bool

    :param modelconf:
        Path of modelconf that has modified the defaults.
    :type modelconf: str

    :return:
        Prepared ``(base, plan)`` data; ``({}, pd.DataFrame([]))`` when
        TA-mode or flag validation rejects the input.
    :rtype: tuple
    """
    # A non-empty 'plan' sheet switches variation matching to plan scope.
    has_plan = 'plan' in raw_data and (not raw_data['plan'].empty)
    match = {
        'scope': 'plan' if has_plan else 'base',
    }
    r = {}
    sheets_factory = xleash.SheetsFactory()
    from co2mpas.io import check_xlasso
    # Resolve each variation value: strings that look like xl-refs are
    # fetched via xleash (relative to the input file), then stored at
    # their nested key-path inside `r`.
    for k, v in excel._parse_values(variation, match, "in variations"):
        if isinstance(v, str) and check_xlasso(v):
            v = xleash.lasso(v, sheets_factory, url_file=input_file_name)
        dsp_utl.get_nested_dicts(r, *k[:-1])[k[-1]] = v

    if 'plan' in r:
        if has_plan:
            # Overlay plan variations as dotted-path columns on the
            # existing plan DataFrame.
            plan = raw_data['plan'].copy()
            for k, v in dsp_utl.stack_nested_keys(r['plan'], 4):
                plan['.'.join(k)] = v
        else:
            # No plan sheet in the input: synthesize a one-row plan
            # purely from the variations.
            gen = dsp_utl.stack_nested_keys(r['plan'], 4)
            plan = pd.DataFrame([{'.'.join(k): v for k, v in gen}])
            excel._add_index_plan(plan, input_file_name)

        r['plan'] = plan
        has_plan = True

    if 'base' in r:
        # Deep-merge (4 levels) so variations override, not replace, base.
        r['base'] = dsp_utl.combine_nested_dicts(raw_data.get('base', {}),
                                                 r['base'], depth=4)

    if 'flag' in r:
        r['flag'] = dsp_utl.combine_nested_dicts(raw_data.get('flag', {}),
                                                 r['flag'], depth=1)

    data = dsp_utl.combine_dicts(raw_data, r)

    if type_approval_mode:
        # TA runs ignore variations/plan; bail out early if the merged
        # data is not TA-eligible.
        variation, has_plan = {}, False
        if not schema._ta_mode(data):
            return {}, pd.DataFrame([])

    flag = data.get('flag', {}).copy()

    # By default run exactly one of base/plan, driven by plan presence,
    # unless the input flags already decided.
    if 'run_base' not in flag:
        flag['run_base'] = not has_plan

    if 'run_plan' not in flag:
        flag['run_plan'] = has_plan

    flag['type_approval_mode'] = type_approval_mode
    flag['output_folder'] = output_folder
    flag['overwrite_cache'] = overwrite_cache

    if modelconf:
        flag['modelconf'] = modelconf

    if timestamp is not None:
        flag['timestamp'] = timestamp

    flag = schema.validate_flags(flag)

    # dsp_utl.NONE is the validator's rejection sentinel.
    if flag is dsp_utl.NONE:
        return {}, pd.DataFrame([])

    schema.check_data_version(flag)

    res = {
        'flag': flag,
        'variation': variation,
        'input_file_name': input_file_name,
    }
    # Flatten the validated flags into the shared result mapping, then
    # attach each branch's own payload under 'data'.
    res = dsp_utl.combine_dicts(flag, res)
    base = dsp_utl.combine_dicts(res, {'data': data.get('base', {})})
    plan = dsp_utl.combine_dicts(res, {'data': data.get('plan',
                                                        pd.DataFrame([]))})

    return base, plan