def elapsed_time(self, subjobs: Sequence[dict]):
    """ returns the time (in seconds) that the subjobs took, if complete """
    completed_dates = [
        subjob["CompletedDate"] for subjob in subjobs if subjob.get("CompletedDate")
    ]
    if completed_dates:
        most_recently_completed = max(completed_dates)
        completed_date = parse_api_datetime(most_recently_completed)
    else:
        completed_date = datetime.now()
    created_date = parse_api_datetime(
        min(subjob["CreatedDate"] for subjob in subjobs)
    )
    td = completed_date - created_date
    return td.total_seconds()
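A minimal usage sketch for elapsed_time, assuming Tooling API-style subjob records; the `task` variable and the timestamps below are hypothetical, not taken from the codebase. The method measures from the earliest CreatedDate to the latest CompletedDate.

subjobs = [
    {"CreatedDate": "2018-08-07T16:00:00.000+0000",
     "CompletedDate": "2018-08-07T16:05:00.000+0000"},
    {"CreatedDate": "2018-08-07T16:01:00.000+0000",
     "CompletedDate": "2018-08-07T16:10:00.000+0000"},
]
# Earliest CreatedDate is 16:00:00 and latest CompletedDate is 16:10:00,
# so the result would be 600.0 seconds. `task` stands in for an instance
# of the class that defines elapsed_time.
assert task.elapsed_time(subjobs) == 600.0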
def org_connect(config, org_name, sandbox, login_url, default, global_org):
    config.check_org_overwrite(org_name)
    connected_app = config.keychain.get_service("connected_app")
    if sandbox:
        login_url = "https://test.salesforce.com"
    oauth_capture = CaptureSalesforceOAuth(
        client_id=connected_app.client_id,
        client_secret=connected_app.client_secret,
        callback_url=connected_app.callback_url,
        auth_site=login_url,
        scope="web full refresh_token",
    )
    oauth_dict = oauth_capture()
    org_config = OrgConfig(oauth_dict, org_name)
    org_config.load_userinfo()
    org_config._load_orginfo()

    if org_config.organization_sobject["TrialExpirationDate"] is None:
        org_config.config["expires"] = "Persistent"
    else:
        org_config.config["expires"] = parse_api_datetime(
            org_config.organization_sobject["TrialExpirationDate"]
        ).date()

    config.keychain.set_org(org_config, global_org)

    if default:
        config.keychain.set_default_org(org_name)
        click.echo(f"{org_name} is now the default org")
def _poll_action(self):
    # get batch status
    if not self.original_created_date:
        query_results = self.tooling.query(self._batch_query(date_limit=None))
        if not query_results["records"]:
            raise SalesforceException(
                f"No {self.options['class_name']} job found."
            )
        self.original_created_date = parse_api_datetime(
            query_results["records"][0]["CreatedDate"]
        )
    else:
        query_results = self.tooling.query(
            self._batch_query(date_limit=self.original_created_date)
        )
    self.subjobs = query_results["records"]
    current_subjob = self.subjobs[0]

    summary = self.summarize_subjobs(self.subjobs)

    if len(self.subjobs) > 1:
        subjob_info = f" in {len(self.subjobs)} sub-jobs."
    else:
        subjob_info = ""

    self.logger.info(
        f"{self.options['class_name']}: "
        f"Job: {current_subjob['Id']} "
        f"{summary['JobItemsProcessed']} of {summary['TotalJobItems']} "
        f"({summary['NumberOfErrors']} failures)" + subjob_info
    )
    self.poll_complete = summary["Completed"]
def calculate_org_days(info):
    """Returns the difference in days between created_date (ISO 8601)
    and expiration_date (%Y-%m-%d)"""
    created_date = parse_api_datetime(info["created_date"]).date()
    expires_date = datetime.strptime(info["expiration_date"], "%Y-%m-%d").date()
    return abs((expires_date - created_date).days)
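A quick example of the input shapes calculate_org_days expects; the dict below is made up for illustration. created_date arrives from the API as ISO 8601, while expiration_date uses %Y-%m-%d.

info = {
    "created_date": "2018-08-07T16:00:56.000+0000",  # ISO 8601 from the API
    "expiration_date": "2018-08-14",                 # %Y-%m-%d
}
# 2018-08-07 to 2018-08-14 is seven days.
assert calculate_org_days(info) == 7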
def org_import(config, username_or_alias, org_name):
    org_config = {"username": username_or_alias}
    scratch_org_config = ScratchOrgConfig(org_config, org_name)
    scratch_org_config.config["created"] = True

    info = scratch_org_config.scratch_info
    scratch_org_config.config["days"] = calculate_org_days(info)
    scratch_org_config.config["date_created"] = parse_api_datetime(
        info["created_date"]
    )
    config.keychain.set_org(scratch_org_config)
    click.echo(
        "Imported scratch org: {org_id}, username: {username}".format(
            **scratch_org_config.scratch_info
        )
    )
def _process_response(self, response):
    metadata = []
    tags = [
        "createdById",
        "createdByName",
        "createdDate",
        "fileName",
        "fullName",
        "id",
        "lastModifiedById",
        "lastModifiedByName",
        "lastModifiedDate",
        "manageableState",
        "namespacePrefix",
        "type",
    ]
    # These tags will be interpreted into dates
    parse_dates = ["createdDate", "lastModifiedDate"]

    for result in parseString(response.content).getElementsByTagName("result"):
        result_data = {}

        # Parse fields
        for tag in tags:
            result_data[tag] = self._get_element_value(result, tag)

        # Parse dates
        for key in parse_dates:
            if result_data[key]:
                try:
                    result_data[key] = parse_api_datetime(result_data[key])
                except Exception as e:
                    raise MetadataParseError(
                        "Could not parse a datetime in the MDAPI response: {}, {}".format(
                            str(e), str(result)
                        ),
                        response=response,
                    )

        metadata.append(result_data)

    self.metadata[self.metadata_type].extend(metadata)
    return self.metadata
def test_parse_api_datetime__bad(self):
    bad_str = "2018-08-07T16:00:56.000-20000"
    with self.assertRaises(AssertionError):
        utils.parse_api_datetime(bad_str)
def test_parse_api_datetime__good(self):
    good_str = "2018-08-07T16:00:56.000+0000"
    dt = utils.parse_api_datetime(good_str)
    self.assertAlmostEqual(
        dt, datetime(2018, 8, 7, 16, 0, 56), delta=timedelta(seconds=1)
    )
def delta(self):
    """ returns the time (in seconds) that the batch took, if complete """
    completed_date = parse_api_datetime(self.batch["CompletedDate"])
    created_date = parse_api_datetime(self.batch["CreatedDate"])
    td = completed_date - created_date
    return td.total_seconds()
def test_parse_api_datetime__bad(self):
    bad_str = "2018-08-07T16:00:56.000-20000"
    with pytest.raises(AssertionError):
        utils.parse_api_datetime(bad_str)
def test_parse_api_datetime__good(self):
    good_str = "2018-08-07T16:00:56.000+0000"
    dt = utils.parse_api_datetime(good_str)
    assert dt == datetime(2018, 8, 7, 16, 0, 56)
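The tests above pin down the behavior the other snippets rely on: parse_api_datetime accepts the API's fixed "+0000" timestamps and raises an AssertionError on anything else. Below is a minimal sketch that would satisfy both tests; it is an assumption for illustration, not CumulusCI's actual implementation, and the API_DATE_FORMAT name is invented here.

from datetime import datetime

API_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"  # assumed constant, not from the codebase

def parse_api_datetime(value):
    # Parse the first 23 characters ("2018-08-07T16:00:56.000") as a naive
    # datetime, then assert that the remainder is the API's "+0000" offset.
    dt = datetime.strptime(value[0:23], API_DATE_FORMAT)
    offset_str = value[23:]
    assert offset_str in ["+0000", "Z", ""], f"Unexpected timezone offset: {value}"
    return dt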