def check_update():
    # Nested helper: compare the running version against the latest
    # published release and notify the channel if a newer one exists.
    nonlocal self
    new_version, changelog = util.check_update(self.get_version())
    if new_version:
        self.send_channel_msg(
            tr('new_version_found', new_version=new_version,
               changelog=changelog))
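# --- Illustrative only: a minimal sketch of what a helper such as
# util.check_update could look like, assuming it fetches a "latest release"
# record over HTTP and returns (new_version, changelog) when the remote
# version differs from the running one, or (None, None) otherwise. The URL
# and JSON shape below are hypothetical, not the project's actual API.
import json
import urllib.request

def check_update_sketch(current_version,
                        url='https://example.com/releases/latest.json'):
    with urllib.request.urlopen(url, timeout=10) as resp:
        latest = json.load(resp)
    if latest.get('version') and latest['version'] != current_version:
        return latest['version'], latest.get('changelog', '')
    return None, None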
    # Classify each CSV in the bucket by geographic level based on its path.
    val = str(file).split('/foreign_trade/')[1]
    if 'Municipal' in val and '.csv' in val:
        mun.append(val)
    elif 'State' in val and '.csv' in val:
        ent.append(val)
    elif 'National' in val and '.csv' in val:
        nat.append(val)

print('nat files: {}, mun files: {}, ent files: {}'.format(
    len(nat), len(mun), len(ent)))

# For each table (names defined for the cubes), check whether there are new
# files to ingest, comparing against the files already loaded into the
# ClickHouse database. The "check_update" helper returns only the new files
# stored in GCP, and the "foreign_trade_pipeline" script is then run for
# each of them with the appropriate parameters.
for table in ['economy_foreign_trade_', 'economy_foreign_trade_unanonymized_']:
    nat_ = check_update(nat, table)
    for url in nat_:
        type_, name_, level_name_ = get_level(url, LEVELS)
        os.system(
            'bamboo-cli --folder . --entry foreign_trade_pipeline --url={} --type={} --name={} --table={}'
            .format(url, type_, name_, table))

    ent_ = check_update(ent, table)
    for url in ent_:
        type_, name_, level_name_ = get_level(url, LEVELS)
        os.system(
            'bamboo-cli --folder . --entry foreign_trade_pipeline --url={} --type={} --name={} --table={}'
            .format(url, type_, name_, table))

    mun_ = check_update(mun, table)
    for url in mun_:
        type_, name_, level_name_ = get_level(url, LEVELS)
        os.system(
            'bamboo-cli --folder . --entry foreign_trade_pipeline --url={} --type={} --name={} --table={}'
            .format(url, type_, name_, table))
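# --- Illustrative only: a minimal sketch of what the check_update(files, table)
# helper used above could look like, assuming the names of already-ingested
# files are recorded in a ClickHouse table and queried via the
# clickhouse_driver package. The "url" column, the connection details, and
# the table layout are assumptions, not the project's actual schema.
from clickhouse_driver import Client

def check_update_sketch(files, table, host='localhost'):
    client = Client(host=host)
    ingested = {
        row[0]
        for row in client.execute('SELECT DISTINCT url FROM {}'.format(table))
    }
    # Keep only the GCP files that have not been ingested yet.
    return [f for f in files if f not in ingested]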