def test_parse_datetime_delta(self):
    """parse_datetime_delta() must map each delta spec string to the
    corresponding dateutil relativedelta.

    Covers every field combination, multi-digit values, a spec with
    letters but no digits ('ymdHMS'), and the empty string (both of
    which must yield a zero delta).
    """
    # Arrange: pair each input spec with its expected delta so the
    # assertion checks the mapping, not just the multiset of results.
    rd = relativedelta  # Alias for improved readability
    cases = [
        ('1y', rd(years=1, months=0, days=0, hours=0, minutes=0, seconds=0)),
        ('1y1m', rd(years=1, months=1, days=0, hours=0, minutes=0, seconds=0)),
        ('1y1m1d', rd(years=1, months=1, days=1, hours=0, minutes=0, seconds=0)),
        ('1y1m1d1H', rd(years=1, months=1, days=1, hours=1, minutes=0, seconds=0)),
        ('1y1m1d1H1M', rd(years=1, months=1, days=1, hours=1, minutes=1, seconds=0)),
        ('1y1m1d1H1M1S', rd(years=1, months=1, days=1, hours=1, minutes=1, seconds=1)),
        ('1m1d1H1M1S', rd(years=0, months=1, days=1, hours=1, minutes=1, seconds=1)),
        ('1d1H1M1S', rd(years=0, months=0, days=1, hours=1, minutes=1, seconds=1)),
        ('1H1M1S', rd(years=0, months=0, days=0, hours=1, minutes=1, seconds=1)),
        ('1M1S', rd(years=0, months=0, days=0, hours=0, minutes=1, seconds=1)),
        ('1S', rd(years=0, months=0, days=0, hours=0, minutes=0, seconds=1)),
        ('ymdHMS', rd(years=0, months=0, days=0, hours=0, minutes=0, seconds=0)),
        ('11y11m11d11H11M11S', rd(years=11, months=11, days=11, hours=11, minutes=11, seconds=11)),
        ('', rd(years=0, months=0, days=0, hours=0, minutes=0, seconds=0)),
    ]

    for spec, expected in cases:
        # Act
        actual = utilities.parse_datetime_delta(spec)

        # Assert: assertEqual per pair is stricter than the previous
        # assertCountEqual, which ignored ordering and so could not
        # detect results attached to the wrong input spec.
        with self.subTest(spec=spec):
            self.assertEqual(expected, actual)
def run(self, project_id, repository_root):
    """Run every enabled attribute against one project.

    Each attribute executes in its own child process with a timeout
    parsed from ``attribute.timeout``; results are collected from a
    one-slot queue. Returns a dict mapping attribute name -> raw
    result (None for disabled/timed-out attributes are simply absent
    or None respectively).

    Parameters:
        project_id: numeric project identifier (used for paths/logs).
        repository_root: directory under which per-project clones live.
    """
    rresults = dict()
    repository_home = os.path.join(repository_root, str(project_id))
    outq = multiprocessing.Queue(maxsize=1)
    try:
        self.database.connect()

        repository_path = None
        if self.requires_source:
            # Only clone/initialize the working copy when some
            # attribute actually needs source on disk.
            repository_path = self._init_repository(
                project_id, repository_home
            )

        for attribute in self.attributes:
            bresult = False
            rresult = None

            if not attribute.enabled:
                continue

            # Optional per-attribute initialization hook.
            with self.database.cursor() as cursor:
                if hasattr(attribute.reference, 'init'):
                    attribute.reference.init(cursor)

            with self.database.cursor() as cursor:
                timeout = utilities.parse_datetime_delta(attribute.timeout)
                process = multiprocessing.Process(
                    target=attribute.run,
                    args=(project_id, repository_path, cursor, outq)
                )
                process.start()
                process.join(timeout=timeout.total_seconds())
                if not outq.empty():
                    (bresult, rresult) = outq.get()
                else:
                    # join() returned with no result: the attribute
                    # exceeded its timeout (or died without reporting).
                    sys.stderr.write(
                        (
                            ' \033[91mWARNING\033[0m [{0:10d}] '
                            '{1} timed out\n'
                        ).format(project_id, attribute.name)
                    )
                if process.is_alive():
                    process.terminate()
                    # Reap the terminated child so it does not linger
                    # as a zombie for the lifetime of this process.
                    process.join()

            rresults[attribute.name] = rresult
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to Exception and keep the report.
        sys.stderr.write('Exception\n\n')
        sys.stderr.write('  Project ID {0}\n'.format(project_id))
        extype, exvalue, extrace = sys.exc_info()
        traceback.print_exception(extype, exvalue, extrace)
    finally:
        self.database.disconnect()
        if self.cleanup:
            self._cleanup(repository_home)

    return rresults
def run(project_id, repo_path, cursor, **options):
    """Classify a project as 'active' or 'dormant' from its last commit.

    A project is active when its last commit falls within
    ``options['threshold']`` (a delta spec parsed by
    utilities.parse_datetime_delta) of ``options['today']`` (defaults
    to the current date; may be a 'YYYY-MM-DD' string).

    Returns:
        (bresult, rresult): bool activity flag and 'active'/'dormant'.
    """
    bresult = False
    rresult = 'dormant'

    last_commit_date = getLastCommitDate(project_id)
    if last_commit_date is not None:
        today = options.get('today', datetime.today().date())
        if isinstance(today, str):
            today = datetime.strptime(today, '%Y-%m-%d')
        # Normalize to a plain date so both sides of the comparison
        # below have the same type (today may be date or datetime).
        if isinstance(today, datetime):
            today = today.date()

        # last_commit_date arrives as a 'YYYY-MM-DD...' string.
        ymd = tuple(map(int, last_commit_date.split('-')))
        last_commit = datetime(*ymd).date()

        threshold = utilities.parse_datetime_delta(options['threshold'])
        # relativedelta objects do not support ordering comparisons
        # (the old `delta <= threshold` raised TypeError). Anchor the
        # threshold at the last commit instead: active iff the last
        # commit plus the threshold reaches today.
        bresult = (last_commit + threshold) >= today
        if bresult:
            rresult = 'active'

    print("----- METRIC: STATE -----")
    print('state: ', rresult, ",", bresult)
    return bresult, rresult
def run(project_id, repo_path, cursor, **options):
    """Classify a project as 'active' or 'dormant' from the database.

    Fetches the project's last commit date via QUERY and marks the
    project active when that date falls within
    ``options['threshold']`` (a delta spec parsed by
    utilities.parse_datetime_delta) of ``options['today']``.

    Returns:
        (bresult, rresult): bool activity flag and 'active'/'dormant'.
    """
    bresult = False
    rresult = 'dormant'

    # NOTE(review): string-formatting the query is injection-prone if
    # project_id can ever be attacker-controlled; prefer a
    # parameterized query (cursor.execute(QUERY, (project_id,))).
    cursor.execute(QUERY.format(project_id))
    result = cursor.fetchone()

    last_commit_date = result[0]
    if last_commit_date is not None:
        # Compute the delta between the last commit in the database and today.
        # Note: today may be the date the GHTorrent dump was published by
        # ghtorrent.org
        today = options.get('today', datetime.today().date())
        if isinstance(today, str):
            today = datetime.strptime(today, '%Y-%m-%d')
        # Normalize both sides to plain dates so the comparison below
        # never mixes date and datetime (which raises TypeError).
        if isinstance(today, datetime):
            today = today.date()
        if isinstance(last_commit_date, datetime):
            last_commit_date = last_commit_date.date()

        threshold = utilities.parse_datetime_delta(options['threshold'])
        # relativedelta objects do not support ordering comparisons
        # (the old `delta <= threshold` raised TypeError). Anchor the
        # threshold at the last commit instead: active iff the last
        # commit plus the threshold reaches today.
        bresult = (last_commit_date + threshold) >= today
        if bresult:
            rresult = 'active'

    return bresult, rresult
def run(self, project_id, repository_root):
    """Run every enabled attribute against one project and score it.

    Each attribute executes in its own child process with a timeout
    parsed from ``attribute.timeout``. The score accumulates
    ``bresult * attribute.weight`` per attribute; a failed essential
    attribute zeroes the score and stops further accumulation.

    Parameters:
        project_id: numeric project identifier (used for paths/logs).
        repository_root: directory under which per-project clones live.

    Returns:
        (score, rresults): numeric score and a dict mapping attribute
        name -> raw result.
    """
    invalidated = False
    score = 0
    rresults = dict()
    repository_home = os.path.join(repository_root, str(project_id))
    outq = multiprocessing.Queue(maxsize=1)
    try:
        self.database.connect()

        repository_path = self._init_repository(
            project_id, repository_home
        )

        for attribute in self.attributes:
            bresult = False
            rresult = None

            if not attribute.enabled:
                continue

            # Optional per-attribute initialization hook.
            with self.database.cursor() as cursor:
                if hasattr(attribute.reference, 'init'):
                    attribute.reference.init(cursor)

            with self.database.cursor() as cursor:
                timeout = utilities.parse_datetime_delta(attribute.timeout)
                process = multiprocessing.Process(
                    target=attribute.run,
                    args=(project_id, repository_path, cursor, outq)
                )
                process.start()
                process.join(timeout=timeout.total_seconds())
                if not outq.empty():
                    (bresult, rresult) = outq.get()
                else:
                    # join() returned with no result: the attribute
                    # exceeded its timeout (or died without reporting).
                    sys.stderr.write(
                        (
                            ' \033[91mWARNING\033[0m [{0:10d}] '
                            '{1} timed out\n'
                        ).format(project_id, attribute.name)
                    )
                if process.is_alive():
                    process.terminate()
                    # Reap the terminated child so it does not linger
                    # as a zombie for the lifetime of this process.
                    process.join()

            rresults[attribute.name] = rresult

            # A failed essential attribute invalidates the project:
            # zero the score and stop accumulating (but keep running
            # the remaining attributes for their raw results).
            if not bresult and attribute.essential:
                score = 0
                invalidated = True
            if not invalidated:
                score += bresult * attribute.weight
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to Exception and keep the report.
        sys.stderr.write('Exception\n\n')
        sys.stderr.write('  Project ID {0}\n'.format(project_id))
        extype, exvalue, extrace = sys.exc_info()
        traceback.print_exception(extype, exvalue, extrace)
    finally:
        self.database.disconnect()
        if self.cleanup:
            self._cleanup(repository_home)

    return (score, rresults)