def add_arguments(self, parser):
    """Register command line options for the FABS transaction reload command."""
    # Help texts are hoisted so the registrations below stay compact.
    reload_all_help = (
        "Reload all FABS transactions. Does not clear out USAspending "
        "FABS transactions beforehand. If omitted, all submissions from "
        "the last successful run will be loaded. THIS SETTING SUPERSEDES "
        "ALL OTHER PROCESSING OPTIONS."
    )
    ids_help = (
        "A list of Broker transaction IDs (published_award_financial_assistance_id) to "
        "reload. IDs provided here will be combined with any --afa-id-file and --afa-ids "
        "IDs provided. Nonexistent IDs will be ignored."
    )
    afa_ids_help = (
        "A list of Broker transaction IDs (afa_generated_unique) to "
        "reload. IDs provided here will be combined with any --afa-id-file and --ids "
        "IDs provided. Nonexistent IDs will be ignored. If any AFA IDs start "
        "with a dash or other special shell character, use the --afa-id-file "
        "option."
    )
    afa_id_file_help = (
        "A file containing only Broker transaction IDs (afa_generated_unique) "
        "to reload, one ID per line. IDs provided here will be combined with any "
        "--afa-ids and --ids IDs provided. Nonexistent IDs will be ignored."
    )
    start_help = (
        "Processes transactions updated on or after the UTC date/time "
        "provided. yyyy-mm-dd hh:mm:ss is always a safe format. Wrap in "
        "quotes if date/time contains spaces."
    )
    end_help = (
        "Processes transactions updated prior to the UTC date/time "
        "provided. yyyy-mm-dd hh:mm:ss is always a safe format. Wrap in "
        "quotes if date/time contains spaces."
    )
    parser.add_argument("--reload-all", action="store_true", help=reload_all_help)
    parser.add_argument("--ids", default=[], metavar="ID", nargs="+", type=int, help=ids_help)
    parser.add_argument("--afa-ids", default=[], metavar="AFA_ID", nargs="+", help=afa_ids_help)
    parser.add_argument("--afa-id-file", metavar="FILEPATH", help=afa_id_file_help)
    # Broker date/times are naive, hence naive=True on both bounds.
    parser.add_argument("--start-datetime", type=datetime_command_line_argument_type(naive=True), help=start_help)
    parser.add_argument("--end-datetime", type=datetime_command_line_argument_type(naive=True), help=end_help)
def add_arguments(self, parser):
    """Register command line options for the FABS submission reload command."""
    parser.add_argument(
        "--reload-all",
        action="store_true",
        help="Reload all FABS transactions. Does not clear out USAspending "
        "FABS transactions beforehand. If omitted, all submissions from "
        "the last successful run will be loaded. THIS SETTING SUPERSEDES "
        "ALL OTHER PROCESSING OPTIONS.",
    )
    parser.add_argument(
        "--submission-ids",
        metavar="ID",
        nargs="+",
        type=int,
        help="Broker submission IDs to reload. Nonexistent IDs will be ignored.",
    )
    parser.add_argument(
        "--afa-id-file",
        metavar="FILEPATH",
        type=str,
        help="A file containing only broker transaction IDs (afa_generated_unique) "
        "to reload, one ID per line. Nonexistent IDs will be ignored.",
    )
    # Both datetime bounds share identical wording except for the comparison
    # phrase, so they are registered in one pass. Broker date/times are naive.
    for flag, phrase in (("--start-datetime", "on or after"), ("--end-datetime", "prior to")):
        parser.add_argument(
            flag,
            type=datetime_command_line_argument_type(naive=True),
            help=f"Processes transactions updated {phrase} the UTC date/time "
            f"provided. yyyy-mm-dd hh:mm:ss is always a safe format. Wrap in "
            f"quotes if date/time contains spaces.",
        )
    parser.add_argument(
        "--do-not-log-deletions",
        action="store_true",
        help="Disable the feature that creates a file of deleted FABS transactions for clients to download.",
    )
def add_arguments(self, parser):
    """Declare the CLI flags accepted by the FABS loader command."""
    group_all = (
        "Reload all FABS transactions. Does not clear out USAspending "
        "FABS transactions beforehand. If omitted, all submissions from "
        "the last successful run will be loaded. THIS SETTING SUPERSEDES "
        "ALL OTHER PROCESSING OPTIONS."
    )
    parser.add_argument("--reload-all", action="store_true", help=group_all)
    parser.add_argument(
        "--submission-ids",
        metavar="ID",
        nargs="+",
        type=int,
        help="Broker submission IDs to reload. Nonexistent IDs will be ignored.",
    )
    parser.add_argument(
        "--afa-id-file",
        metavar="FILEPATH",
        type=str,
        help="A file containing only broker transaction IDs (afa_generated_unique) "
        "to reload, one ID per line. Nonexistent IDs will be ignored.",
    )
    # Broker date/times are naive.
    naive_datetime = datetime_command_line_argument_type(naive=True)
    parser.add_argument(
        "--start-datetime",
        type=naive_datetime,
        help="Processes transactions updated on or after the UTC date/time "
        "provided. yyyy-mm-dd hh:mm:ss is always a safe format. Wrap in "
        "quotes if date/time contains spaces.",
    )
    parser.add_argument(
        "--end-datetime",
        type=naive_datetime,
        help="Processes transactions updated prior to the UTC date/time "
        "provided. yyyy-mm-dd hh:mm:ss is always a safe format. Wrap in "
        "quotes if date/time contains spaces.",
    )
    parser.add_argument(
        "--do-not-log-deletions",
        action="store_true",
        help="Disable the feature that creates a file of deleted FABS "
        "transactions for clients to download.",
    )
def add_arguments(self, parser):
    """Declare CLI arguments for the Elasticsearch index ETL command."""
    parser.add_argument(
        "fiscal_years",
        nargs="+",
        type=str,
        metavar="fiscal-years",
        help="Provide a list of fiscal years to process. For convenience, provide 'all' for FY2008 to current FY",
    )
    parser.add_argument(
        "--process-deletes",
        action="store_true",
        help="When this flag is set, the script will include the process to "
        "obtain records of deleted transactions from S3 and remove from the index",
    )
    # Output defaults to the directory containing this module.
    parser.add_argument(
        "--dir",
        default=str(Path(__file__).resolve().parent),
        type=str,
        help="Set for a custom location of output files",
        dest="directory",
    )
    parser.add_argument(
        "--skip-counts",
        action="store_true",
        help="When this flag is set, the ETL process will skip the record counts to reduce operation time",
    )
    parser.add_argument(
        "--index-name",
        type=str,
        help="Provide name for new index about to be created. Only used when --create-new-index is provided",
    )
    parser.add_argument(
        "--create-new-index",
        action="store_true",
        help="It needs a new unique index name and set aliases used by API logic to the new index",
    )
    parser.add_argument(
        "--snapshot",
        action="store_true",
        help="Create a new Elasticsearch snapshot of the current index state which is stored in S3",
    )
    parser.add_argument(
        "--start-datetime",
        type=datetime_command_line_argument_type(naive=False),
        help="Processes transactions updated on or after the UTC date/time provided. yyyy-mm-dd hh:mm:ss is always "
        "a safe format. Wrap in quotes if date/time contains spaces.",
    )
    parser.add_argument(
        "--skip-delete-index",
        action="store_true",
        help="When creating a new index skip the step that deletes the old indexes and swaps the aliases. "
        "Only used when --create-new-index is provided.",
    )
def add_arguments(self, parser):
    """Set up mutually exclusive submission-selection options plus --list-ids-only."""
    # Exactly one selection mode must be chosen.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        "--submission-ids",
        nargs="+",
        type=int,
        help="Optionally supply one or more Broker submission_ids to be created or updated.",
    )
    group.add_argument(
        "--incremental",
        action="store_true",
        help="Loads newly created or updated submissions.",
    )
    group.add_argument(
        "--start-datetime",
        type=datetime_command_line_argument_type(naive=True),  # Broker date/times are naive.
        help="Manually set the date from which to start loading submissions. This was originally designed "
        "to be used for testing, but there are definitely real world usages for it... just be careful.",
    )
    # Dry-run style flag; valid alongside any of the selection modes.
    parser.add_argument(
        "--list-ids-only",
        action="store_true",
        help="Only list submissions to be loaded. Do not actually load them.",
    )
def add_arguments(self, parser):
    """Register the mutually exclusive selection options for the FPDS loader.

    Exactly one of --ids, --date, --since-last-load, --file, or --reload-all
    must be provided.
    """
    mutually_exclusive_group = parser.add_mutually_exclusive_group(required=True)
    mutually_exclusive_group.add_argument(
        "--ids",
        nargs="+",
        type=int,
        help="Load/Reload transactions using this detached_award_procurement_id list (space-separated)",
    )
    mutually_exclusive_group.add_argument(
        "--date",
        dest="date",
        type=datetime_command_line_argument_type(naive=True),  # Broker date/times are naive.
        help="Load/Reload all FPDS records from the provided datetime to the script execution start time.",
    )
    mutually_exclusive_group.add_argument(
        "--since-last-load",
        action="store_true",
        help="Equivalent to loading from date, but date is drawn from last update date recorded in DB",
    )
    mutually_exclusive_group.add_argument(
        "--file",
        metavar="FILEPATH",
        type=str,
        # Fixed: the previous help text ran two fragments together with no
        # separating space ("...(one ID per line)to reload...") and repeated
        # the "one ID per line" phrase.
        help="Load/Reload transactions using the detached_award_procurement_id list stored at this "
        "file path (one ID per line). Nonexistent IDs will be ignored.",
    )
    mutually_exclusive_group.add_argument(
        "--reload-all",
        action="store_true",
        help="Script will load or reload all FPDS records in source tables, from all time. "
        "This does NOT clear the USAspending database first",
    )
def add_arguments(self, parser):
    """CLI arguments for the Elasticsearch loader variant that requires --index-name."""
    parser.add_argument(
        "fiscal_years",
        nargs="+",
        type=str,
        metavar="fiscal-years",
        help="Provide a list of fiscal years to process. For convenience, provide 'all' for FY2008 to current FY",
    )
    parser.add_argument(
        "--deleted",
        action="store_true",
        dest="provide_deleted",
        help="When this flag is set, the script will include the process to "
        "obtain records of deleted transactions from S3 and remove from the index",
    )
    # Default output location is the directory containing this module.
    default_output_dir = os.path.dirname(os.path.abspath(__file__))
    parser.add_argument(
        "--dir",
        default=default_output_dir,
        type=str,
        dest="directory",
        help="Set for a custom location of output files",
    )
    parser.add_argument(
        "--fast",
        action="store_true",
        help="When this flag is set, the ETL process will skip the record counts to reduce operation time",
    )
    parser.add_argument(
        "--index-name",
        type=str,
        required=True,
        help="Provide the target index which will be indexed with the new data and process deletes (if --deleted)",
    )
    parser.add_argument(
        "--reload-all",
        action="store_true",
        help="Load all transactions. It will close existing indexes "
        "and set aliases used by API logic to the new index",
    )
    parser.add_argument(
        "--snapshot",
        action="store_true",
        help="Create a new Elasticsearch snapshot of the current index state which is stored in S3",
    )
    parser.add_argument(
        "--start-datetime",
        type=datetime_command_line_argument_type(naive=False),
        help="Processes transactions updated on or after the UTC date/time "
        "provided. yyyy-mm-dd hh:mm:ss is always a safe format. Wrap in "
        "quotes if date/time contains spaces.",
    )
def add_arguments(self, parser):
    """Register options for the submission queue loader.

    Exactly one of the mutually exclusive selection options is required.
    --file-c-chunk-size tunes the File C batch size, and parser.epilog
    documents that the command can be run standalone.
    """
    mutually_exclusive_group = parser.add_mutually_exclusive_group(required=True)
    mutually_exclusive_group.add_argument(
        "--submission-ids",
        help=(
            "One or more Broker submission_ids to be reloaded. These submissions are added to "
            "the submission queue and processing begins on them immediately. Due to the "
            "asynchronous, multiprocessing nature of the submission queue, it is possible that "
            "another loader might nab and/or complete one or more of these submissions before "
            "we get to them. This is just the nature of the beast. The logs will document "
            "when this happens. Submissions loaded in this manner will be fully reloaded unless "
            "another process is currently loading the submission."
        ),
        nargs="+",
        type=int,
    )
    mutually_exclusive_group.add_argument(
        "--incremental",
        action="store_true",
        help=(
            "Loads new or updated submissions in Broker since the most recently published "
            "submission in USAspending. Submissions loaded in this manner will be updated "
            "where possible. Otherwise they will be fully reloaded."
        ),
    )
    mutually_exclusive_group.add_argument(
        "--start-datetime",
        type=datetime_command_line_argument_type(naive=True),  # Broker date/times are naive.
        help=(
            "Loads new or updated submissions in Broker since the timestamp provided. This is "
            "effectively the same as the --incremental option except the start date/time is "
            "specified on the command line."
        ),
    )
    mutually_exclusive_group.add_argument(
        "--report-queue-status-only",
        action="store_true",
        help="Just reports the queue status. Nothing is loaded.",
    )
    parser.add_argument(
        "--file-c-chunk-size",
        type=int,
        default=self.file_c_chunk_size,
        help=(
            f"Controls the number of File C records processed in a single batch. Theoretically, "
            f"bigger should be faster... right up until you run out of memory. Balance carefully. "
            f"Default is {self.file_c_chunk_size:,}."
        ),
    )
    # Fixed: this string literal was broken across a raw physical newline in the
    # source (a syntax error); reconstructed as implicitly concatenated literals.
    parser.epilog = (
        "And to answer your next question, yes this can be run standalone. The parallelization "
        "code is pretty minimal and should not add significant time to the overall run time of "
        "serial submission loads."
    )
def add_arguments(self, parser):
    """Register the generic transaction loader's selection options and --process-deletes."""
    # Exactly one source-selection mode must be chosen.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        "--ids",
        nargs="+",
        help=f"Load/Reload transactions using this {self.shared_pk} list (space-separated)",
    )
    group.add_argument(
        "--date",
        dest="datetime",
        type=datetime_command_line_argument_type(naive=True),  # Broker date/times are naive.
        help="Load/Reload records from the provided datetime to the script execution start time.",
    )
    group.add_argument(
        "--since-last-load",
        dest="incremental_date",
        action="store_true",
        help="Equivalent to loading from date, but date is drawn from last update date recorded in DB.",
    )
    group.add_argument(
        "--file",
        dest="file",
        type=filepath_command_line_argument_type(chunk_count=self.chunk_size),
        help=(
            f"Load/Reload transactions using {self.shared_pk} values stored at this file path"
            f" (one ID per line) {SCHEMA_HELP_TEXT}"
        ),
    )
    group.add_argument(
        "--reload-all",
        action="store_true",
        help=(
            f"Script will load or reload all {self.broker_source_table_name} records from broker database,"
            " from all time. This does NOT clear the USASpending database first."
        ),
    )
    # Independent of the selection mode above.
    parser.add_argument(
        "--process-deletes",
        action="store_true",
        help=(
            "If not in local mode, process deletes before beginning the upsert operations."
            " This shouldn't be used with --file or --ids parameters"
        ),
    )
def add_arguments(self, parser):
    """Options for the deleted-records sweep: a required --date plus a --dry-run switch."""
    parser.add_argument(
        "--date",
        dest="datetime",
        required=True,
        type=datetime_command_line_argument_type(naive=True),  # Broker and S3 date/times are naive.
        help="Load/Reload records from the provided datetime to the script execution start time.",
    )
    # --dry-run maps onto skip_deletes: gather the list but never delete.
    parser.add_argument(
        "--dry-run",
        dest="skip_deletes",
        action="store_true",
        help="Obtain the list of removed transactions, but skip the delete step.",
    )
def add_arguments(self, parser):
    """Register delete-command options: one required selection mode plus two modifiers."""
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        "--ids",
        nargs="+",
        type=int,
        help=f"Delete transactions using this {self.shared_pk} list (space-separated)",
    )
    group.add_argument(
        "--date",
        dest="datetime",
        type=datetime_command_line_argument_type(naive=True),  # Broker and S3 date/times are naive.
        help="Load/Reload records from the provided datetime to the script execution start time.",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Obtain the list of removed transactions, but skip the delete step.",
    )
    parser.add_argument(
        "--skip-upload",
        action="store_true",
        help="Don't store the list of IDs for downline ETL. Automatically skipped if --dry-run is provided",
    )
def test_datetime_command_line_argument_type():
    """Table-driven check of naive vs UTC-aware parsing and rejection of bad input."""
    # (input text, expected datetime sans tzinfo); each case is parsed with
    # naive=True (tzinfo stripped) and naive=False (coerced to UTC).
    cases = [
        ('2000-01-02', datetime(2000, 1, 2)),
        ('2 jan 2000', datetime(2000, 1, 2)),
        ('jan 2 2000', datetime(2000, 1, 2)),
        ('1/2/2000', datetime(2000, 1, 2)),
        ('2000-01-02 3:04:05.06', datetime(2000, 1, 2, 3, 4, 5, 60000)),
        ('2 jan 2000 3:04:05.06', datetime(2000, 1, 2, 3, 4, 5, 60000)),
        ('1/2/2000 3:04:05.06', datetime(2000, 1, 2, 3, 4, 5, 60000)),
        ('2000-01-02 7:04:05.06+0400', datetime(2000, 1, 2, 3, 4, 5, 60000)),
        ('2000-01-02 3:04:05Z', datetime(2000, 1, 2, 3, 4, 5)),
        ('2000-01-02T3:04:05Z', datetime(2000, 1, 2, 3, 4, 5)),
        ('2000-1-2 3:4:5', datetime(2000, 1, 2, 3, 4, 5)),
    ]
    for text, expected in cases:
        assert datetime_command_line_argument_type(True)(text) == expected.replace(tzinfo=None)
        assert datetime_command_line_argument_type(False)(text) == expected.replace(tzinfo=timezone.utc)
    # Unparseable values must raise ArgumentTypeError in both modes.
    for bad in (None, 'a', '#', '2000-01-35'):
        for naive in (True, False):
            with pytest.raises(ArgumentTypeError):
                datetime_command_line_argument_type(naive)(bad)
def add_arguments(self, parser):
    """Register CLI arguments for the Elasticsearch rapid loader ETL command."""
    parser.add_argument(
        "--process-deletes",
        action="store_true",
        # Fixed: the two literals previously concatenated with no separating
        # space ("...extra stepsto calculate...").
        help="When this flag is set, the script will include the extra steps "
        "to calculate deleted records and remove from the target index",
    )
    parser.add_argument(
        "--deletes-only",
        action="store_true",
        # Fixed: same missing-space defect ("...not relatedto deleting...").
        help="When this flag is set, the script will skip any steps not related "
        "to deleting records from target index",
    )
    parser.add_argument(
        "--index-name",
        type=str,
        help="Provide name for new index about to be created. Only used when --create-new-index is provided",
        metavar="",
    )
    parser.add_argument(
        "--create-new-index",
        action="store_true",
        help="It needs a new unique index name and set aliases used by API logic to the new index",
    )
    parser.add_argument(
        "--start-datetime",
        type=datetime_command_line_argument_type(naive=False),
        help="Processes transactions updated on or after the UTC date/time provided. yyyy-mm-dd hh:mm:ss "
        "is always a safe format. Wrap in quotes if date/time contains spaces.",
        metavar="",
    )
    parser.add_argument(
        "--skip-delete-index",
        action="store_true",
        help="When creating a new index, skip the steps that delete old indexes and swap aliases. "
        "Only applicable when --create-new-index is provided.",
    )
    parser.add_argument(
        "--load-type",
        type=str,
        required=True,
        help="Select which data the ETL will process.",
        choices=["transaction", "award", "covid19-faba"],
    )
    parser.add_argument(
        "--processes",
        type=int,
        help="Number of parallel processes to operate. psycopg2 kicked the bucket with 100.",
        default=10,
        choices=range(1, 101),  # capped because psycopg2 failed at 100 processes
        metavar="[1-100]",
    )
    parser.add_argument(
        "--partition-size",
        type=int,
        help="Set the batch-size of a single partition of data to process.",
        default=10000,
        metavar="(default: 10,000)",
    )
    parser.add_argument(
        "--drop-db-view",
        action="store_true",
        help="After completing the ETL, drop the SQL view used for the data extraction",
    )
def test_datetime_command_line_argument_type():
    """Verify parsing of many date/time formats plus rejection of garbage input."""
    parse_naive = lambda value: datetime_command_line_argument_type(True)(value)
    parse_aware = lambda value: datetime_command_line_argument_type(False)(value)
    # Each entry: input string -> expected naive datetime; the aware variant
    # must produce the same instant tagged with UTC.
    expectations = {
        "2000-01-02": datetime(2000, 1, 2),
        "2 jan 2000": datetime(2000, 1, 2),
        "jan 2 2000": datetime(2000, 1, 2),
        "1/2/2000": datetime(2000, 1, 2),
        "2000-01-02 3:04:05.06": datetime(2000, 1, 2, 3, 4, 5, 60000),
        "2 jan 2000 3:04:05.06": datetime(2000, 1, 2, 3, 4, 5, 60000),
        "1/2/2000 3:04:05.06": datetime(2000, 1, 2, 3, 4, 5, 60000),
        "2000-01-02 7:04:05.06+0400": datetime(2000, 1, 2, 3, 4, 5, 60000),
        "2000-01-02 3:04:05Z": datetime(2000, 1, 2, 3, 4, 5),
        "2000-01-02T3:04:05Z": datetime(2000, 1, 2, 3, 4, 5),
        "2000-1-2 3:4:5": datetime(2000, 1, 2, 3, 4, 5),
    }
    for text, expected in expectations.items():
        assert parse_naive(text) == expected
        assert parse_aware(text) == expected.replace(tzinfo=timezone.utc)
    # Bad inputs raise ArgumentTypeError regardless of the naive flag.
    for garbage in (None, "a", "#", "2000-01-35"):
        with pytest.raises(ArgumentTypeError):
            parse_naive(garbage)
        with pytest.raises(ArgumentTypeError):
            parse_aware(garbage)
def test_datetime_command_line_argument_type():
    """Check naive (tz-stripped) and aware (UTC) parsing, then failure cases."""
    samples = (
        ('2000-01-02', (2000, 1, 2)),
        ('2 jan 2000', (2000, 1, 2)),
        ('jan 2 2000', (2000, 1, 2)),
        ('1/2/2000', (2000, 1, 2)),
        ('2000-01-02 3:04:05.06', (2000, 1, 2, 3, 4, 5, 60000)),
        ('2 jan 2000 3:04:05.06', (2000, 1, 2, 3, 4, 5, 60000)),
        ('1/2/2000 3:04:05.06', (2000, 1, 2, 3, 4, 5, 60000)),
        ('2000-01-02 7:04:05.06+0400', (2000, 1, 2, 3, 4, 5, 60000)),
        ('2000-01-02 3:04:05Z', (2000, 1, 2, 3, 4, 5)),
        ('2000-01-02T3:04:05Z', (2000, 1, 2, 3, 4, 5)),
        ('2000-1-2 3:4:5', (2000, 1, 2, 3, 4, 5)),
    )
    for raw, parts in samples:
        # naive=True drops any offset; naive=False normalizes to UTC.
        assert datetime_command_line_argument_type(True)(raw) == datetime(*parts, tzinfo=None)
        assert datetime_command_line_argument_type(False)(raw) == datetime(*parts, tzinfo=timezone.utc)
    for invalid in (None, 'a', '#', '2000-01-35'):
        with pytest.raises(ArgumentTypeError):
            datetime_command_line_argument_type(True)(invalid)
        with pytest.raises(ArgumentTypeError):
            datetime_command_line_argument_type(False)(invalid)