def runWorker(status):
    """Cloning worker main loop.

    Pops repo keys off the cloning queue, checks each repo out locally,
    and pushes the key onto the parsing queue.  Sleeps when the cloning
    queue is empty or the parsing queue is at capacity.

    status -- shared worker-status value; the parent process sets it to
              WorkerStatus.Dead to request shutdown.
    """
    #This causes this thread to ignore interrupt signals so they are only handled by parent
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    #Loop will be closed externally (parent flips status to Dead)
    while status.value != WorkerStatus.Dead:
        try:
            cloneCount = redis.llen(RepoQueues.Cloning)
            parseCount = redis.llen(RepoQueues.Parsing)
        except Exception:
            #was a bare except: - narrowed so SystemExit/KeyboardInterrupt are not swallowed
            log(WarningLevels.Fatal, "Cloning Worker unable to reach Redis")
            break

        if cloneCount > 0 and parseCount < int(settings.maxParseQueueCount):
            repoKey = redis.lpop(RepoQueues.Cloning)
            repo = Repo()
            repo.loadFromKey(repoKey)

            #sanity check our loaded key
            assert repo.key() == repoKey, "Bad repo saved in cloning Queue! Key %s not found!"%(repoKey)

            #clone the repo and add it to the parse queue
            src.todoMelvin.checkoutRepo(repo)
            redis.rpush(RepoQueues.Parsing, repoKey)
        else:
            sleepTime = float(settings.clonerSleepTime)
            log(WarningLevels.Debug, "Cloning Worker going to sleep...")

            #Set to sleeping for faster shutdown
            status.value = WorkerStatus.Sleeping
            time.sleep(sleepTime)
            status.value = WorkerStatus.Working
def runWorker(status):
    """Posting worker main loop.

    Pops repo keys off the posting queue, posts at most one un-posted todo
    from the repo as a Github issue, moves that todo to the todo graveyard,
    and finally moves the repo to the repo graveyard.  Sleeps when the
    posting queue is empty.

    status -- shared worker-status value; the parent process sets it to
              WorkerStatus.Dead to request shutdown.
    """
    # This causes this thread to ignore interrupt signals so they are only handled by parent
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    # Loop will be closed externally (parent flips status to Dead)
    while status.value != WorkerStatus.Dead:
        try:
            postCount = redis.llen(RepoQueues.Posting)
        except Exception:
            # was a bare except: - narrowed so SystemExit/KeyboardInterrupt are not swallowed
            log(WarningLevels.Fatal, "Posting Worker unable to reach Redis")
            break

        if postCount > 0:
            repoKey = redis.lpop(RepoQueues.Posting)
            repo = Repo()
            repo.loadFromKey(repoKey)

            # sanity check our loaded key
            assert repo.key() == repoKey, "Bad repo saved in posting Queue! Key %s not found!" % (repoKey)

            # post only the first todo without an issue URL, then stop
            for todo in repo.Todos:
                if len(todo.issueURL) == 0:
                    repo.lastTodoPosted = todo.key(repo)
                    repo.lastTodoPostDate = datetime.now().strftime("%m/%d/%Y %H:%M:%S")

                    # Generate the issue
                    data = src.todoIssueGenerator.buildIssue(todo, repo)

                    # post the damn issue and save the url
                    issue = None
                    if settings.debug.lower() == "true":
                        issue = gh.issues.create(data, "p4r4digm", "todo-helper")  # post to our todo-helper
                    else:
                        issue = gh.issues.create(data, repo.userName, repo.repoName)

                    todo.issueURL = issue.url

                    # put todo in todo graveyard
                    redis.rpush(RepoQueues.TodoGY, todo.key(repo))
                    repo.save()
                    log(WarningLevels.Info, "Issue posted to Github!")
                    break

            # throw repo into graveyard
            redis.rpush(RepoQueues.RepoGY, repo.key())
        else:
            sleepTime = float(settings.posterSleepTime)
            log(WarningLevels.Debug, "Posting Worker going to sleep...")

            # Set to sleeping for faster shutdown
            status.value = WorkerStatus.Sleeping
            time.sleep(sleepTime)
            status.value = WorkerStatus.Working
def runWorker(status):
    """Parsing worker main loop.

    Pops repo keys off the parsing queue and parses each repo for todos in
    a child process, enforcing settings.parserRepoTimeout.  On timeout the
    repo key is deleted; on shutdown mid-parse the key is returned to the
    parsing queue.  Sleeps when the parsing queue is empty.

    status -- shared worker-status value; the parent process sets it to
              WorkerStatus.Dead to request shutdown.
    """
    #This causes this thread to ignore interrupt signals so they are only handled by parent
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    #Loop will be closed externally (parent flips status to Dead)
    while status.value != WorkerStatus.Dead:
        try:
            parseCount = redis.llen(RepoQueues.Parsing)
        except Exception:
            #was a bare except: - narrowed so SystemExit/KeyboardInterrupt are not swallowed
            log(WarningLevels.Fatal, "Parsing Worker unable to reach Redis")
            break

        if parseCount > 0:
            repoKey = redis.lpop(RepoQueues.Parsing)
            repo = Repo()
            repo.loadFromKey(repoKey)

            #sanity check our loaded key
            assert repo.key() == repoKey, "Bad repo saved in parsing Queue! Key %s not found!"%(repoKey)

            #Parse repo for todos and then delete local content
            parser = multiprocessing.Process(target = parseRepo, args = (repo,))
            startTime = time.time()
            parser.start()

            #poll the child so we can enforce the timeout and react to shutdown
            while parser.is_alive():
                time.sleep(0.5)

                if time.time() - startTime > float(settings.parserRepoTimeout):
                    parser.terminate()
                    parser.join()
                    log(WarningLevels.Warn, "Parse timed out, skipping the rest of the parse.")
                    redis.delete(repoKey)
                    break

                if status.value == WorkerStatus.Dead:
                    #Worker was killed during parsing, cleanup
                    parser.terminate()
                    parser.join()
                    log(WarningLevels.Debug, "Parsing Interrupted, returning to parsing queue.")
                    redis.rpush(RepoQueues.Parsing, repoKey)

                    #Skip the rest and kill the process
                    return

            src.todoMelvin.deleteLocalRepo(repo)
        else:
            sleepTime = float(settings.parserSleepTime)
            log(WarningLevels.Debug, "Parsing Worker going to sleep...")

            #Set to sleeping for faster shutdown
            status.value = WorkerStatus.Sleeping
            time.sleep(sleepTime)
            status.value = WorkerStatus.Working
class TestUnicode(): def setup(self): # get a repo with a unicode author string targetRepo = GithubRepo('nnombela','graph.js') # self.repo = Repo() # print "repoexists" gh = Github(login = settings.ghLogin, password = settings.ghPassword) loaded = repoExists(targetRepo.user, targetRepo.repo) # loaded = self.repo.loadFromKey('repos::%s/%s' % ('nnombela', 'graph.js')) if loaded == False: print "cloning" ghr = getGithubRepos(gh, [targetRepo])[0] self.repo = addRepoToRedis(ghr) # if self.repo: # checkoutRepo(repo) # parseRepoForTodos(repo) # deleteLocalRepo(repo) else: self.repo = Repo() self.repo.loadFromKey('repos::%s/%s' % targetRepo) def test_unicodeAuthor(self): pass if self.repo: checkoutRepo(self.repo) parseRepoForTodos(self.repo) print "the todos!!" print self.repo.Todos for todo in self.repo.Todos: data = buildTemplateData(todo) print "----" print data['BlameUserName']
def test_loadFromKey(self): r = Repo() badRepo = GithubRepo('testingderp', 'derp') print 'repos::%s/%s' % badRepo loaded = r.loadFromKey('repos::%s/%s' % badRepo) assert loaded == False