def test_translate_datetime_timestamp(self):
    """Round-trip a known instant through TimeUtils timestamp conversions."""
    timestamp = 1511219002
    date_string = "2017-11-20T23:03:22"
    # BUGFIX: the seconds field was written `03`, a Python-2-only octal
    # literal (SyntaxError on Python 3); plain decimal 3 is equivalent.
    datetime_ = datetime(2017, 11, 20, 23, 3, 22)
    self.assertEqual(
        TimeUtils.datetime2localtimestamp(datetime_), timestamp)
    self.assertEqual(TimeUtils.timestamp2datetime(timestamp), datetime_)
    self.assertEqual(datetime_.isoformat('T'), date_string)
def test_override(self):
    """Overriding the clock pins current_tsecs and has_happened_yet."""
    TimeUtils.set_override_time(self.override_time_struct)
    now_secs = self.override_time_secs
    self.assertEqual(TimeUtils.current_tsecs(), now_secs)
    # Assertions preserved from the original suite: one second before
    # the override point is False, one second after is True.
    self.assertFalse(TimeUtils.has_happened_yet(now_secs - 1))
    self.assertTrue(TimeUtils.has_happened_yet(now_secs + 1))
def test_has_happened_yet(self):
    """With 'now' overridden to 2018-01-01, EOFY2016 must be finished."""
    parser = CsvParseSpecial(**self.special_parser_args)
    parser.analyse_file(self.spec_path)
    TimeUtils.set_override_time(
        time.strptime("2018-01-01", TimeUtils.wp_date_format))
    eofy_special = parser.rule_groups.get('EOFY2016')
    start_tsecs = TimeUtils.datetime2utctimestamp(eofy_special.start_time)
    # The group started before the (overridden) current time ...
    self.assertLess(start_tsecs, TimeUtils.current_tsecs())
    # ... so it has both started and finished, and is no longer active.
    self.assertTrue(eofy_special.has_started)
    self.assertTrue(eofy_special.has_finished)
    self.assertFalse(eofy_special.is_active)
def setUp(self):
    """Prepare dummy metadata and silence Registrar debug output."""
    self.local_work_dir = TESTS_DATA_DIR
    self.newmeta = {
        'title': u'TITLE \xa9 \u2014',
        # `unicode` builtin: this module targets Python 2.
        'description': unicode(TimeUtils.get_ms_timestamp()),
    }
    # Quiet all Registrar chatter for the duration of the test.
    for flag in ('DEBUG_ERROR', 'DEBUG_WARN',
                 'DEBUG_MESSAGE', 'DEBUG_PROGRESS'):
        setattr(Registrar, flag, False)
def test_determine_groups(self):
    """determine_current_spec_grps resolves the correct specials group.

    Checks the explicit 'override' mode, then 'auto_next' mode at
    several overridden dates around the EOFY2016 and SP2016-08-12
    special periods.
    """
    special_parser = CsvParseSpecial(**self.special_parser_args)
    special_parser.analyse_file(self.spec_path)

    # Explicit override: the named group is returned regardless of time.
    override_groups = special_parser.determine_current_spec_grps(
        'override', 'EOFY2016')
    # NOTE: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual throughout.
    self.assertEqual(
        override_groups, [special_parser.rule_groups.get('EOFY2016')])

    def auto_next_at(date_string):
        # Override "now" to the given date and ask for the next group.
        TimeUtils.set_override_time(
            time.strptime(date_string, TimeUtils.wp_date_format))
        return special_parser.determine_current_spec_grps('auto_next')

    # After every special has passed there is no upcoming group.
    self.assertEqual(auto_next_at("2018-01-01"), [])
    self.assertEqual(
        auto_next_at("2016-08-11"),
        [special_parser.rule_groups.get('SP2016-08-12')])
    self.assertEqual(
        auto_next_at("2016-06-11"),
        [special_parser.rule_groups.get('EOFY2016')])
    self.assertEqual(
        auto_next_at("2016-06-13"),
        [special_parser.rule_groups.get('EOFY2016')])
def setUp(self):
    """Build a fresh settings namespace and configure logging verbosity."""
    self.import_name = TimeUtils.get_ms_timestamp()
    settings = self.settings_namespace_class()
    settings.local_work_dir = self.local_work_dir
    settings.local_live_config = None
    settings.local_test_config = self.config_file
    # Verbose logging only when the suite runs with self.debug set.
    if self.debug:
        settings.verbosity = 3
        settings.quiet = False
        logging.basicConfig(level=logging.DEBUG)
    else:
        settings.verbosity = 0
        settings.quiet = True
        logging.basicConfig(level=logging.WARN)
    self.settings = settings
    Registrar.DEBUG_MESSAGE = False
    Registrar.DEBUG_PROGRESS = False
    Registrar.DEBUG_TRACE = False
def setUp(self):
    """Initialise settings from the test config and set Registrar flags.

    Registrar debug flags are quiet by default; when ``self.debug`` is
    truthy they are switched on (plus DEBUG_UPDATE) for verbose output.
    """
    self.import_name = TimeUtils.get_ms_timestamp()
    self.settings = self.settings_namespace_class()
    self.settings.local_work_dir = self.local_work_dir
    self.settings.local_live_config = None
    self.settings.local_test_config = self.config_file
    self.settings.init_settings(self.override_args)

    # Collapsed from set-all-False followed by a conditional
    # set-all-True: the flags simply mirror the debug switch.
    # (A large block of commented-out legacy SyncUpdateUsr/FieldGroup
    # configuration was removed here.)
    debug = bool(self.debug)
    Registrar.DEBUG_ERROR = debug
    Registrar.DEBUG_WARN = debug
    Registrar.DEBUG_MESSAGE = debug
    Registrar.DEBUG_PROGRESS = debug
    if debug:
        Registrar.DEBUG_UPDATE = True
import yaml
import os
import paramiko
from coldata import ColDataUser
from parsing.flat import CsvParseUser, UsrObjList
from woogenerator.utils import TimeUtils, SanitationUtils
from itertools import chain

# Folder layout and config file for the ACT user export.
src_folder = "../source/"
in_folder = "../input/"
remote_export_folder = "act_usr_exp/"
yaml_path = "merger_config.yaml"

import_name = TimeUtils.get_ms_timestamp()
dateStamp = TimeUtils.get_datestamp()

# SECURITY: use safe_load — plain yaml.load can construct arbitrary
# Python objects from the config file.
with open(yaml_path) as stream:
    config = yaml.safe_load(stream)

# Master-side SSH / DB connection parameters (test environment).
m_ssh_user = config.get('test_m_ssh_user')
m_ssh_pass = config.get('test_m_ssh_pass')
m_ssh_host = config.get('test_m_ssh_host')
m_ssh_port = config.get('test_m_ssh_port', 22)
m_db_host = config.get('test_m_db_host', '127.0.0.1')
m_db_user = config.get('test_m_db_user')
m_db_pass = config.get('test_m_db_pass')
m_db_name = config.get('test_m_db_name')
m_command = config.get('test_m_command')

# Per-run export filename, stamped with the import time.
exportFilename = "act_x_test_" + import_name + ".csv"
remote_export_path = os.path.join(remote_export_folder, exportFilename)
def datetimes_simultaneous(self, datetimes):
    """Assert every datetime maps to the same UTC timestamp.

    NOTE: pops the first element, so the caller's list is mutated.
    """
    reference = TimeUtils.datetime2utctimestamp(datetimes.pop(0))
    for candidate in datetimes:
        self.assertEqual(
            TimeUtils.datetime2utctimestamp(candidate), reference)
def test_normalize_timestamp(self):
    """Each system's normalizer maps its local timestamp to the same UTC instant."""
    utc_timestamp = 1485907200

    utc_dt = TimeUtils.normalize_timestamp(utc_timestamp)
    # Dropping tzinfo from the UTC datetime must not move the instant.
    naiive_dt = utc_dt.replace(tzinfo=None)
    self.assertEqual(
        TimeUtils.datetime2utctimestamp(naiive_dt), utc_timestamp)

    # The same moment expressed in each source system's local offset.
    system_cases = [
        (TimeUtils.normalize_timestamp_wp, 1485910800),
        (TimeUtils.normalize_timestamp_act, 1485910800),
        (TimeUtils.normalize_timestamp_gdrive, 1485914400),
        (TimeUtils.normalize_timestamp_xero, 1485914400),
        (TimeUtils.normalize_timestamp_local, 1485918000),
    ]
    for normalize, local_timestamp in system_cases:
        normalized_dt = normalize(local_timestamp)
        self.assertEqual(
            TimeUtils.datetime2utctimestamp(normalized_dt), utc_timestamp)
def test_normalize_iso8601_dst(self):
    """ISO8601 normalize/denormalize round-trips agree across systems (DST date)."""
    utc_iso8601 = '2017-04-01T00:00:00'
    wp_iso8601 = '2017-04-01 01:00:00'
    act_iso8601 = '2017-04-01T02:00:00'
    gdrive_iso8601 = '2017-04-01 02:00:00'
    xero_iso8601 = '2017-04-01T03:00:00'
    local_iso8601 = '2017-04-01T03:00:00'

    utc_dt = TimeUtils.normalize_iso8601(utc_iso8601)
    naiive_dt = utc_dt.replace(tzinfo=None)
    wp_dt = TimeUtils.normalize_iso8601_wp(wp_iso8601)
    act_dt = TimeUtils.normalize_iso8601_act(act_iso8601)
    gdrive_dt = TimeUtils.normalize_iso8601_gdrive(gdrive_iso8601)
    xero_dt = TimeUtils.normalize_iso8601_xero(xero_iso8601)
    local_dt = TimeUtils.normalize_iso8601_local(local_iso8601)

    # All representations describe the same instant.
    self.datetimes_simultaneous(
        [utc_dt, naiive_dt, wp_dt, act_dt, gdrive_dt, xero_dt, local_dt])

    self.assertEqual(TimeUtils.denormalize_iso8601(utc_dt), utc_iso8601)
    # A naive datetime cannot be denormalized.
    with self.assertRaises(AssertionError):
        TimeUtils.denormalize_iso8601(naiive_dt)

    # Round-trip back to each system's local representation.
    round_trips = [
        (TimeUtils.denormalize_iso8601_wp, wp_dt, wp_iso8601),
        (TimeUtils.denormalize_iso8601_act, act_dt, act_iso8601),
        (TimeUtils.denormalize_iso8601_gdrive, gdrive_dt, gdrive_iso8601),
        (TimeUtils.denormalize_iso8601_xero, xero_dt, xero_iso8601),
    ]
    for denormalize, dt, expected in round_trips:
        self.assertEqual(denormalize(dt), expected)
def setUp(self):
    """Configure a CA-schema product parser over the dummy generator CSV.

    Verbose shop/parser debugging is enabled; image, specials and
    dynamic-pricing processing are disabled so only the core product
    columns are exercised.
    """
    import_name = TimeUtils.get_ms_timestamp()
    self.master_parser_args = {
        'import_name': import_name,
        'cols': ColDataProductMeridian.get_import_cols(),
        'defaults': ColDataProductMeridian.get_defaults(),
        'taxo_depth': 3,
        'item_depth': 2,
        'schema': 'CA',
    }
    self.gen_path = os.path.join(
        TESTS_DATA_DIR, "generator_master_dummy.csv")
    self.analysis_kwargs = {
        'file_name': self.gen_path,
        'encoding': 'utf8',
        'dialect_suggestion': 'SublimeCsvTable',
        'limit': 10,
    }

    # Cleaned up: the original set these four flags to False and then
    # immediately overwrote them with True (plus a no-op loop and a
    # large commented-out parser-args dict); only the effective final
    # values are kept.
    Registrar.DEBUG_PROGRESS = True
    Registrar.DEBUG_MESSAGE = True
    Registrar.DEBUG_ERROR = True
    Registrar.DEBUG_WARN = True
    Registrar.DEBUG_SHOP = True
    Registrar.DEBUG_PARSER = True

    CsvParseTT.do_images = False
    CsvParseTT.do_specials = False
    CsvParseTT.do_dyns = False