def main(): # Connect to LP print("connecting to launchpad") try: Launchpad.login_with('openstack-releasing', 'production') except Exception as error: abort(2, 'Could not connect to Launchpad: ' + str(error))
def __init__(self, *args, **kwargs): IBugtracker.__init__(self, *args, **kwargs) self.lp = None # A word to the wise: # The Launchpad API is much better than the /+text interface we currently use, # it's faster and easier to get the information we need. # The current /+text interface is not really maintained by Launchpad and most, # or all, of the Launchpad developers hate it. For this reason, we are dropping # support for /+text in the future in favour of launchpadlib. # Terence Simpson (tsimpson) 2010-04-20 try: # Attempt to use launchpadlib, python bindings for the Launchpad API from launchpadlib.launchpad import Launchpad cachedir = os.path.join(conf.supybot.directories.data.tmp(), 'lpcache') if hasattr(Launchpad, 'login_anonymously'): self.lp = Launchpad.login_anonymously("Ubuntu Bots - Bugtracker", 'production', cachedir) else: #NOTE: Most people should have a launchpadlib new enough for .login_anonymously self.lp = Launchpad.login("Ubuntu Bots - Bugtracker", '', '', 'production', cachedir) except ImportError: # Ask for launchpadlib to be installed supylog.warning("Please install python-launchpadlib, the old interface is deprecated") except Exception: # Something unexpected happened self.lp = None supylog.exception("Unknown exception while accessing the Launchpad API")
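# A minimal sketch (not part of the snippet above) of the launchpadlib route the
# note recommends over the /+text interface: anonymous, read-only access with a
# local cache directory. The consumer name and cache path here are illustrative.
import os
from launchpadlib.launchpad import Launchpad

cachedir = os.path.expanduser("~/.launchpadlib/cache")
lp = Launchpad.login_anonymously("bugtracker-example", "production", cachedir)
bug = lp.bugs[1]      # look a bug up by id
print(bug.title)      # read-only attributes need no credentials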
def _lp_login(self, access_level=['READ_PRIVATE']): """ internal LP login code """ self._logger.debug("lp_login") # use cachedir cachedir = SOFTWARE_CENTER_CACHE_DIR if not os.path.exists(cachedir): os.makedirs(cachedir) # login into LP with GUI try: self._launchpad = Launchpad.login_with( 'software-center', SERVICE_ROOT, cachedir, allow_access_levels=access_level, authorizer_class=AuthorizeRequestTokenFromThread) self.display_name = self._launchpad.me.display_name except Exception as e: if type(e) == UserCancelException: return self._logger.exception("Launchpad.login_with()") # remove token on failure, it may be e.g. expired # FIXME: store the token in a different place and to avoid # having to use _get_paths() (service_root, launchpadlib_dir, cache_path, service_root_dir) = Launchpad._get_paths(SERVICE_ROOT, cachedir) credentials_path = os.path.join(service_root_dir, 'credentials') consumer_credentials_path = os.path.join(credentials_path, 'software-center') # --- if os.path.exists(consumer_credentials_path): os.remove(consumer_credentials_path) self._lp_login(access_level) return self.login_state = LOGIN_STATE_SUCCESS self._logger.debug("/done %s" % self._launchpad)
def get_launchpad(use_staging=False, use_cred_file=os.path.expanduser(CRED_FILE_PATH)): '''Get THE Launchpad''' global launchpad if not launchpad: if use_staging: server = 'staging' else: server = 'production' # as launchpadlib isn't multiproc, fiddling the cache dir if any launchpadlib_dir = os.getenv("JOB_NAME") if launchpadlib_dir: launchpadlib_dir = os.path.join(COMMON_LAUNCHPAD_CACHE_DIR, launchpadlib_dir) if use_cred_file: launchpad = Launchpad.login_with('cupstream2distro', server, allow_access_levels=["WRITE_PRIVATE"], version='devel', # devel because copyPackage is only available there credentials_file=use_cred_file, launchpadlib_dir=launchpadlib_dir) else: launchpad = Launchpad.login_with('cupstream2distro', server, allow_access_levels=["WRITE_PRIVATE"], version='devel', # devel because copyPackage is only available there launchpadlib_dir=launchpadlib_dir) return launchpad
def __init__(self, config_filename): with open(config_filename, "r") as f: self.config = yaml.load(f.read()) self.teams = self.config['teams'] self.trunc = self.config['trunc_report'] cache_dir = self.config['cache_dir'] if self.config['use_auth']: lp = Launchpad.login_with( 'lp-report-bot', 'production', cache_dir, version='devel' ) else: lp = Launchpad.login_anonymously( 'lp-report-bot', 'production', version='devel' ) #import pdb; pdb.set_trace() self.projects = [lp.projects[prj] for prj in self.config['project']] # for backward compatibility #self.project = lp.projects[self.config['project'][0]] self.blueprint_series = {}
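# A hedged example of the YAML layout that the __init__ above reads; the keys come
# from the attribute accesses in the snippet, while the values are illustrative only.
EXAMPLE_CONFIG = """
teams: [example-team]
trunc_report: 20
cache_dir: /tmp/lp-report-bot-cache
use_auth: false
project: [example-project]
"""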
def set_authorization(self, application=None, server=None): if application is not None and server is not None: self.launchpad = Launchpad.login_with(application, server, self.__cachedir, credential_save_failed=self.__no_credential, version='devel') else: self.launchpad = Launchpad.login_anonymously('just testing', 'production', self.__cachedir, version='devel')
def get_lp(login=True): if not login: return Launchpad.login_anonymously('review-queue', 'production') return Launchpad.login_with( 'review-queue', 'production', credentials_file='lp-creds', credential_save_failed=lambda: None)
def get_lp(login=False): def no_creds(): pass if not login: return Launchpad.login_anonymously('review-queue', 'production') return Launchpad.login_with('review-queue', 'production', credentials_file='lp-creds', credential_save_failed=no_creds)
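# Hedged usage of the get_lp() helpers above: anonymous access is enough for
# read-only queries such as searching bug tasks, while login_with() (login=True)
# is only needed to modify bugs. The project name below is an example.
lp = get_lp(login=False)
for task in lp.projects["juju-core"].searchTasks(status=["New", "Confirmed"])[:5]:
    print(task.title, task.web_link)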
def main(args): parser = ArgumentParser("Generate red / green chart for release tracking") parser.add_argument('series') args = parser.parse_args() series = args.series try: lp = Launchpad.login_with("pypypy", "production", version="devel") except: print "** ERROR: Could not open LP " sys.exit(1) try: project = lp.projects["juju-core"] except KeyError: print "** ERROR: Project name does not exist: juju-core" sys.exit(1) # We need to find both the trunk and the specified series found = False lpSeries = project.series for ls in lpSeries: if ls.name == "trunk": trunk = ls if ls.name == series: repSeries = ls found = True if not found: print "** ERROR: Unable to find series: %s" % series sys.exit(1) writeSeriesFile(series, repSeries, trunk)
def main(): description = """ Command line tool to operate with bugs and blueprints """ parser = argparse.ArgumentParser(epilog=description) parser.add_argument('project', type=str) parser.add_argument('cmd', type=str, choices=['get', 'set']) parser.add_argument('item_type', type=str, choices=['bp', 'bug']) parser.add_argument('item_id', type=str, nargs='+') parser.add_argument('--milestone', type=str) parser.add_argument('--series', type=str) parser.add_argument('--approve', dest='approved', action='store_true') parser.add_argument('--create', action='store_true') parser.add_argument('--delete', action='store_true') parser.add_argument('--priority', type=str) parser.add_argument('--status', type=str) params, other_params = parser.parse_known_args() global lp global prj lp = Launchpad.login_with('lp-client', 'production', version='devel') prj = lp.projects[params.project] if params.cmd == 'set': for item_id in params.item_id: if params.item_type == 'bp': update_bp(item_id, params) if params.item_type == 'bug': update_bug(item_id, params)
def main_(options, args): logging.basicConfig(level=log_level(options.verbose), format='%(levelname)s:%(message)s') if len(args): charm_dir = args[0] else: charm_dir = os.getcwd() proof_charm(charm_dir, options.force, options.ignore_warnings) logging.debug('login with %s launchpad:', options.lp_instance) lp = Launchpad.login_with('promulgate', options.lp_instance) if options.unpromulgate: logging.info('unpromulgating...') charm_branch = None # makes LP delete the source package. else: logging.info('promulgating...') charm_branch = find_branch_to_promulgate(lp, charm_dir, options.branch) if not is_valid_owner(charm_branch, options.promulgate_owner_branch): logging.error(" Invalid branch owner: %s", branch_owner(charm_branch)) logging.error(" Branch push location must be owned by '~charmers'\n" " use `bzr push --remember lp:~charmers/charms/" "<series>/<charm-name>/trunk`\n or override this " "behavior using the '--owner-branch'" " option") return 1 update_official_charm_branch(lp, options.series, charm_branch, charm_name_from_metadata(charm_dir)) return 0
def remove_tags_from_launchpad_bug(bug_id, tags): launchpad = Launchpad.login_with('Filch', 'production', version='devel') bug = launchpad.bugs[bug_id] for tag in tags: bug.tags.remove(tag) bug.tags = bug.tags bug.lp_save()
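# An equivalent, arguably clearer form of the tag update above (a sketch, not the
# original code): build the new tag list first and assign it once, so launchpadlib
# registers the change, then save.
from launchpadlib.launchpad import Launchpad

def remove_tags_from_launchpad_bug_v2(bug_id, tags):
    launchpad = Launchpad.login_with('Filch', 'production', version='devel')
    bug = launchpad.bugs[bug_id]
    bug.tags = [t for t in bug.tags if t not in tags]
    bug.lp_save()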
def main(): args = parse_options() logging.basicConfig(level=log_level(args.verbose), format='%(levelname)s:%(message)s') review_message = get_message(args.message) # before connecting to lp logging.debug('login with %s launchpad:', args.lp_instance) lp = Launchpad.login_with('charm-pilot', args.lp_instance) bug_id = args.bug_id logging.debug('find bug %s:', bug_id) bug = lp.bugs[bug_id] if bug: logging.debug('found bug') if args.skip_prompt or prompt_to_continue(bug_id): logging.debug('adding comment') # TODO check return or catch exception bug.newMessage(content=review_message) else: logging.debug('not adding comment') else: logging.error("no bug: %s", bug_id) return 1 return 0
def fetch_gui_release(origin, version): """Retrieve a Juju GUI release. Return the release tarball local path. The release file can be retrieved from: - an arbitrary URL (if origin is "url"); - the local releases repository (if origin is "local" or if a release version is specified and the corresponding file is present locally); - Launchpad (in all the other cases). """ log('Retrieving Juju GUI release.') if origin == 'url': # "version" is a url. _, _, extension = version.rpartition('.') if extension not in ('tgz', 'xz'): extension = 'xz' return download_release(version, 'url-release.' + extension) if origin == 'local': path = get_release_file_path() log('Using a local release: {}'.format(path)) return path # Handle "stable" and "trunk" origins. if version is not None: # If the user specified a version, before attempting to download the # requested release from Launchpad, check if that version is already # stored locally. path = get_release_file_path(version) if path is not None: log('Using a local release: {}'.format(path)) return path # Retrieve a release from Launchpad. launchpad = Launchpad.login_anonymously('Juju GUI charm', 'production') project = launchpad.projects['juju-gui'] url, filename = get_launchpad_release(project, origin, version) return download_release(url, filename)
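# Hedged call examples for fetch_gui_release() above; the version strings and the
# URL are illustrative only.
path = fetch_gui_release('stable', None)       # newest stable release, fetched from Launchpad
path = fetch_gui_release('stable', '1.2.0')    # specific version, local copy used if present
path = fetch_gui_release('url', 'https://example.com/juju-gui.tar.xz')  # arbitrary URL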
def _get_suggested_ppa_message(user): try: msg = [] from launchpadlib.launchpad import Launchpad lp = Launchpad.login_anonymously(lp_application_name, "production") try: user_inst = lp.people[user] entity_name = _("team") if user_inst.is_team else _("user") if len(user_inst.ppas) > 0: # Translators: %(entity)s is either "team" or "user" msg.append(_("The %(entity)s named '%(user)s' has no PPA named '%(ppa)s'") % { 'entity' : entity_name, 'user' : user, 'ppa' : ppa_name}) msg.append(_("Please choose from the following available PPAs:")) for ppa in user_inst.ppas: msg.append(_(" * '%(name)s': %(displayname)s") % { 'name' : ppa.name, 'displayname' : ppa.displayname}) else: # Translators: %(entity)s is either "team" or "user" msg.append(_("The %(entity)s named '%(user)s' does not have any PPA") % { 'entity' : entity_name, 'user' : user}) return '\n'.join(msg) except KeyError: return '' except ImportError: return _("Please check that the PPA name or format is correct.")
def take_action(self, parsed_args): self.log.debug('connecting to Launchpad') lp = Launchpad.login_with( 'lp-report-bot', 'production', version='devel') teams_map = {} for team_filter in parsed_args.substring: for team in lp.people.findTeam(text=team_filter): self.log.debug("found team: %s" % team.name) teams_map[team.name] = [] self.log.debug("Getting teams members") i = 0 for t in teams_map: i += 1 self.log.debug("%d/%d %s" % (i, len(teams_map), t)) for p in lp.people[t].members: try: teams_map[t] += [p.name] except KeyError: teams_map[t] = [p.name] df_teams = pd.DataFrame(columns=teams_map.keys()) for t in teams_map.keys(): for p in teams_map[t]: try: df_teams.loc[p][t] = t except KeyError: df_teams.loc[p] = float('nan') df_teams.loc[p][t] = t self.log.debug("Saving data to %s" % parsed_args.outfile) df_teams.to_csv(parsed_args.outfile, encoding='utf-8')
def main(): args = _parse_args() lp = Launchpad.login_with('openstack-releasing', 'production') for project in args.projects: _cleanup_project(lp, project)
def __init__(self, categorizer, **kwargs): self.gerrit_port = kwargs.pop("gerrit_port") self.trusted = kwargs.get("trusted", []) self.tag = kwargs.get("tag") query = kwargs.get("query") if not query: project = kwargs.get("project") if project: self.query = "status:open project:%s" % project else: self.query = "status:open" message_text = kwargs.pop("message_text") if message_text: self.query = "%s message:%s" % (self.query, message_text) else: self.query = query self.launchpad = Launchpad.login_anonymously("anon", "https://api.launchpad.net/", CACHE_DIR) self.categorizer = categorizer self.gerrit = datasources.gerrit.Gerrit(self.query, self.gerrit_port, self.categorizer) self._data = []
def _run(self): global lxml try: from launchpadlib.launchpad import Launchpad import lxml.html except ImportError: sys.stderr.write("For this developer profile to work, you need launchpadlib and lxml to be installed.\n") return a = raw_input("Please only use this script if you want to update the openteacherAuthors module, since this script puts a lot of load on launchpad and your internet connection, and takes a lot of time to execute. Continue (y/n)? ") if a != "y": return lp = Launchpad.login_anonymously("Get OpenTeacher translators", "production", "~/.launchpadlib/cache/") links = self._translationFileLinks(lp) persons = dict( (href, info) for link in links for href, info in self._personsForFile(link) ) print "\nResults:\n" for l, p in sorted(persons.values()): print ' r.add(a.registerAuthor(_("Translator (%%s)") %% "%s", u"%s"))' % (l, p)
def main(): parser = ArgumentParser( description="Calculate some statistics about project bugs.", epilog=dedent("""\ Known caveats: Historical data uses the current task metadata rather than historical values. This is primarily due to performance considerations with the LP API and may be rectified in future (e.g. by mirroring the data persistently). As an example, if a bug is currently set to 'critical', it will show as critical in all time periods rather than progressing through different categories as it is triaged. """)) parser.add_argument( '-p', '--project', default='projects/nova.json', help='JSON file describing the project to generate stats for.') args = parser.parse_args() projects = utils.get_projects_info(args.project, False) lp_project_listeners = {} listeners = set() if not projects: sys.stderr.write('No projects found: please specify one or more.\n') return 1 launchpad = Launchpad.login_with( 'openstack-releasing', 'production', credentials_file='.lpcreds') for project in projects: lp_projects = project.get('lp_projects', []) if not lp_projects: print "Please specify a project." return 1 listener = Listener(project['name'], lp_projects) listeners.add(listener) for lp_project in project.get('lp_projects', []): lp_project_listeners.setdefault(lp_project, []).append(listener) statuses = ['New', 'Incomplete', 'Opinion', 'Invalid', "Won't Fix", 'Confirmed', 'Triaged', 'In Progress', "Fix Committed", "Fix Released"] bugs_by_bug_link = {} for lp_project, receivers in lp_project_listeners.items(): proj = launchpad.projects[lp_project] # Sort by id to make creating time periods easy. bugtasks = proj.searchTasks(status=statuses, order_by="id") for task in bugtasks: if task.bug_link not in bugs_by_bug_link: bugs_by_bug_link[task.bug_link] = task.bug bug = bugs_by_bug_link[task.bug_link] for receiver in receivers: receiver.categorise_task(task, bug) for listener in listeners: sys.stdout.write("Project: %s\n" % listener.name) sys.stdout.write("LP Projects: %s\n" % listener.lp_projects) table = prettytable.PrettyTable( ('Period', 'critical', 'high', 'undecided', 'other', 'total', 'created', 'closed', 'critical-tags')) for period in listener.summarise(): table.add_row(period) sys.stdout.write("%s\n" % table)
def main(args): global launchpad global dist usage = """%s: bug-id %s""" % (sys.argv[0], __doc__) parser = OptionParser(usage=usage) parser.add_option( '-n', '--dryrun', action='store_true', help='Describe what the script would do without doing it.') parser.add_option( '-p', '--project', type="string", action="store", dest="project", default="juniperopenstack", help="launchpad project to work on") parser.add_option( '-e', '--series', type="string", action="store", dest="series", default="all", help="Edit or create series. default is 'all'") (options, args) = parser.parse_args(args=args) if len(args) < 2: parser.print_usage() return 1 for bug_id in args[1:]: launchpad = Launchpad.login_with('delete_scope', 'production', version='devel') dist = launchpad.distributions[options.project] bug = launchpad.bugs[bug_id] if options.series is not None: delete_scope(bug, options)
def get_launchpad_object(self, filename=None, staging=False): '''Return a Launchpad object for making API requests.''' # XXX: rockstar - 2009 Dec 13 - Ideally, we should be using # Launchpad.login_with, but currently, it doesn't support the option of # putting the credentials file somewhere other than where the cache # goes, and that's kinda nasty (and a security issue according to # Kees). if not filename: filename = self.config.CREDENTIALS if staging: SERVICE_ROOT = STAGING_SERVICE_ROOT else: SERVICE_ROOT = LPNET_SERVICE_ROOT self.logger.debug( "Connecting to the Launchpad API at {0}".format(SERVICE_ROOT)) self.logger.debug(" Loading credentials from {0}".format(filename)) if not os.path.exists(filename): self.logger.debug(" No existing API credentials were found") self.logger.debug(" Fetching new credentials from {0}".format( SERVICE_ROOT)) launchpad = Launchpad.login_with( u'Tarmac', service_root=SERVICE_ROOT, credentials_file=filename, launchpadlib_dir=self.config.CACHE_HOME) self.logger.debug("Connected") return launchpad
def get_npm_cache_archive_url(Launchpad=Launchpad): """Figure out the URL of the most recent NPM cache archive on Launchpad.""" launchpad = Launchpad.login_anonymously('Juju GUI charm', 'production') project = launchpad.projects['juju-gui'] # Find the URL of the most recently created NPM cache archive. npm_cache_url, _ = get_launchpad_release(project, 'npm-cache', None) return npm_cache_url
def main(): # Argument parsing parser = argparse.ArgumentParser(description='Rename Launchpad milestones') parser.add_argument('project', help='Project the milestone is defined in') parser.add_argument('from_milestone', help='Milestone to rename') parser.add_argument('to_milestone', help='New milestone name') parser.add_argument("--test", action='store_const', const='staging', default='production', help='Use LP staging server to test') args = parser.parse_args() # Connect to Launchpad print("Connecting to Launchpad...") try: launchpad = Launchpad.login_with('openstack-releasing', args.test) except Exception as error: abort(2, 'Could not connect to Launchpad: ' + str(error)) # Retrieve project try: project = launchpad.projects[args.project] except KeyError: abort(2, 'Could not find project: %s' % args.project) # Retrieve milestone milestone = project.getMilestone(name=args.from_milestone) if milestone is None: abort(2, 'Could not find milestone %s in project %s' % ( args.from_milestone, args.project)) milestone.name = args.to_milestone milestone.code_name = '' milestone.lp_save() print("Renamed")
def new_bug_notify_ircbot_process_hook(config, log, irc): """ Monitor LaunchPad for new bugs, and post to irc. """ lp_ids = [] first_run = True while True: log.debug('checking LaunchPad for new bugs') lp = Launchpad.login_anonymously('ius-tools', 'production') ius = lp.projects.search(text='ius')[0] tasks = ius.searchTasks() for task in tasks: bugid = task.bug.id if first_run and bugid not in lp_ids: # just append all ids to the list log.debug('Adding %s to known ids' % bugid) lp_ids.append(bugid) elif not first_run and bugid not in lp_ids: # if not first run post to channel url = shorten_url(unicode(task.web_link)) reply = "New %s - %s" % (task.title, url) irc.send_to_channel(reply) log.debug('Adding %s to known ids' % bugid) lp_ids.append(bugid) first_run = False sleep(300)
def main(): parser = argparse.ArgumentParser() parser.add_argument('hook') #common parser.add_argument('--change', default=None) parser.add_argument('--change-url', default=None) parser.add_argument('--project', default=None) parser.add_argument('--branch', default=None) parser.add_argument('--commit', default=None) #change-merged parser.add_argument('--submitter', default=None) # patchset-created parser.add_argument('--uploader', default=None) parser.add_argument('--patchset', default=None) args = parser.parse_args() launchpad = Launchpad.login_with('Gerrit User Sync', LPNET_SERVICE_ROOT, GERRIT_CACHE_DIR, credentials_file = GERRIT_CREDENTIALS, version='devel') conn = MySQLdb.connect(user = DB_USER, passwd = DB_PASS, db = DB_DB) find_specs(launchpad, conn, args)
def main(): parser = argparse.ArgumentParser( description="Release Ceilometer bugs for a milestone.") parser.add_argument( '--milestone', help="The name of the milestone to release for.", required=True) args = parser.parse_args() lp = Launchpad.login_with( "ceilometer-bug-release-script", SERVICE_ROOT) the_project = lp.projects[PROJECT_NAME] milestone = lp.load( "%s/+milestone/%s" % (the_project.self_link, args.milestone)) bugs_for_milestone = the_project.searchTasks( status=PRE_RELEASE_STATUS, milestone=milestone) bug_count = len(bugs_for_milestone) if bug_count == 0: print("No bugs to release for milestone %s" % milestone.name) sys.exit(0) mark_released = raw_input(RELEASE_PROMPT.format( bug_count=bug_count, pre_release_status=PRE_RELEASE_STATUS, milestone_title=milestone.name)) if mark_released.lower() != "y": print("Not releasing bugs.") sys.exit(0) for bug_task in bugs_for_milestone: # We re-load the bugtask to avoid having bug 369293 bite us. bug_task = lp.load(bug_task.self_link) sys.stdout.write("Updating %s..." % bug_task.bug.id) bug_task.status = "Fix Released" bug_task.lp_save() bug_task.bug.newMessage( MESSAGE_TEMPLATE.format(milestone_title=milestone.title)) sys.stdout.write("DONE\n")
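# A hedged alternative to the lp.load() milestone lookup used above: a project's
# milestone can also be resolved by name directly, which avoids hand-building the
# "+milestone" URL. A minimal sketch; the helper name is illustrative.
def get_milestone_by_name(lp, project_name, milestone_name):
    project = lp.projects[project_name]
    return project.getMilestone(name=milestone_name)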
def _login_lp(self, staging=False, saved_credential=False): cachedir = os.path.join(os.environ["HOME"], ".launchpadlib/cache") if saved_credential and os.path.exists(CONFIG_PATH): if staging: fatal(_("Cannot use saved credential on staging server.")); else: staging_level = "production" parser = SafeConfigParser() parser.read(CONFIG_PATH) credentials = Credentials(parser.get('credential', 'consumer')) content = ''.join(['oauth_token=', parser.get('credential', 'token'), '&oauth_token_secret=', parser.get('credential', 'secret'), '&lp.context=', parser.get('credential', 'context')]) credentials.access_token = AccessToken.from_string(content) lp = Launchpad(credentials, None, None, service_root="production") else: if staging: staging_level = "staging" else: staging_level = "production" lp = Launchpad.login_with(sys.argv[0], staging_level, cachedir , version="devel") logging.info("Use {0} server".format(staging_level)) if not lp: fatal(_("failed to connect to launchpad")); return lp
def is_bug_open(cls, bug_id): """Checks whether the Launchpad bug for the given bug id is open. An issue is considered open as long as its status is neither "Fix Committed" nor "Fix Released." """ config = LaunchpadTrackerConfig() log = logging.getLogger('RunnerLog') launchpad = Launchpad.login_anonymously(consumer_name='test', service_root='production') try: bug = launchpad.bugs[bug_id] except KeyError as error: # Invalid bug ID log.info('Invalid bug ID. Key Error: {0}'.format(error)) return False tasks = bug.bug_tasks entries = tasks.entries for bug_entry in entries: if bug_entry['bug_target_name'] == config.project: return bug_entry['status'] not in ('Fix Committed', 'Fix Released') log.info('Bug does not affect project {0} ' 'or project name is not correct.'.format(config.project)) return False
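# Hedged usage sketch for is_bug_open() above; the class name and bug id are
# illustrative, since the snippet only shows the classmethod body.
if LaunchpadTracker.is_bug_open(bug_id=1234567):
    print("bug still open upstream, skipping this test")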
def main(): parser = argparse.ArgumentParser() parser.add_argument('hook') #common parser.add_argument('--change', default=None) parser.add_argument('--change-url', default=None) parser.add_argument('--project', default=None) parser.add_argument('--branch', default=None) parser.add_argument('--commit', default=None) #change-merged parser.add_argument('--submitter', default=None) #patchset-created parser.add_argument('--uploader', default=None) parser.add_argument('--patchset', default=None) args = parser.parse_args() # Connect to Launchpad launchpad = Launchpad.login_with('Gerrit User Sync', LPNET_SERVICE_ROOT, GERRIT_CACHE_DIR, credentials_file=GERRIT_CREDENTIALS, version='devel') # Get git log git_log = extract_git_log(args) # Process bugtasks found in git log for bugtask in find_bugs(launchpad, git_log, args): process_bugtask(launchpad, bugtask, git_log, args)
#!/usr/bin/python import time from calendar import timegm from datetime import datetime from launchpadlib.launchpad import Launchpad # For what we need anonymous access should be sufficient cachedir = './cache' launchpad = Launchpad.login_anonymously('bug report', 'production', cachedir, version='devel') # set the project to openstack-ansible project = launchpad.projects['openstack-ansible'] # must have UTC time start_date = 'Aug-01-2017Z' end_date = 'Sep-01-2017Z' start_timestamp = timegm( time.strptime(start_date.replace('Z', 'UTC'), '%b-%d-%Y%Z')) end_timestamp = timegm( time.strptime(end_date.replace('Z', 'UTC'), '%b-%d-%Y%Z')) # get the bug list bugs = project.searchTasks( status=[ "New", "Confirmed", "Triaged", "Opinion", "Invalid", "In Progress", "Won't Fix", "Expired", "Fix Committed", "Fix Released" ],
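# A hedged continuation sketch: if the searchTasks call above does not already
# filter by date, the window computed from start_timestamp/end_timestamp can be
# applied to each task's date_created afterwards.
from datetime import timezone

start_dt = datetime.fromtimestamp(start_timestamp, tz=timezone.utc)
end_dt = datetime.fromtimestamp(end_timestamp, tz=timezone.utc)
in_window = [task for task in bugs if start_dt <= task.date_created < end_dt]
print("bugs created in the window: %d" % len(in_window))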
def __init__(self, *args, **kwargs): super(LaunchpadPlugin, self).__init__(*args, **kwargs) self.lp = Launchpad.login_anonymously(self.login, 'production')
# Add scripts/lib/ to the PYTHONPATH import os sys.path.append( os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'lib'))) from helpers import LocoTeamPortal try: api = dbusapi.Accomplishments() f = api.get_extra_information("ubuntu-community", "launchpad-email") if bool(f[0]["launchpad-email"]) == False: sys.exit(2) else: email = f[0]["launchpad-email"] lp = Launchpad.login_anonymously('ubuntu-community accomplishments', 'production') me = lp.people.getByEmail(email=email) if me is None: sys.exit(1) ltp = LocoTeamPortal() # Get group membership member_groups = ltp.getCollection('groups', user__username=me.name) member_teams = [ member_groups[group_id]['name'] for group_id in member_groups ] # Get all attended events attended_teams = ltp.getCollection( 'teams', teamevent__attendee__attendee_profile__user__username=me.name,
def publish_release(version): launchpad = Launchpad.login_with('Releasebot', 'production', 'lpcache') chunky = launchpad.projects['chunky'] # check if release exists release = None for r in chunky.releases: if r.version == version.milestone: release = r print "Previous %s release found: proceeding to upload additional files." \ % version.milestone break is_new_release = release is None if release is None: # check if milestone exists milestone = None for ms in chunky.all_milestones: if ms.name == version.milestone: milestone = ms break # create milestone (and series) if needed if milestone is None: series = None for s in chunky.series: if s.name == version.series: series = s break if series is None: series = chunky.newSeries( name=version.series, summary= "The current stable series for Chunky. NB: The code is maintained separately on GitHub." ) print "Series %s created. Please manually update the series summary:" % version.series print series milestone = series.newMilestone(name=version.milestone) print "Milestone %s created." % version.milestone # create release print "TODO: include proper changelog and release notes" release = milestone.createProductRelease( release_notes=version.release_notes, changelog=version.changelog, date_released=datetime.today()) milestone.is_active = False print "Release %s created" % version.milestone assert release is not None # upload release files tarball_url = lp_upload_file(version, release, 'chunky-%s.tar.gz' % version.full, 'Source Code', 'application/x-tar', 'tarball') assert tarball_url print tarball_url zip_url = lp_upload_file(version, release, 'Chunky-%s.zip' % version.full, 'Binaries', 'application/zip', 'installer') assert zip_url print zip_url exe_url = lp_upload_file(version, release, 'Chunky-%s.exe' % version.full, 'Windows Installer', 'application/octet-stream', 'installer') assert exe_url print exe_url return (is_new_release, exe_url, zip_url)
def get_launchpad_project(): cachedir = "/home/%s/.launchpadlib/cache/" % getpass.getuser() # Connect to Launchpad launchpad = Launchpad.login_anonymously('helioviewer', 'edge', cachedir) return launchpad.projects['helioviewer.org']
def __init__(self, bug_id): self.launchpad = Launchpad.login_anonymously('just testing', 'production', '.cache') self.bug = self.launchpad.bugs[int(bug_id)]
def launchpad(): return Launchpad.login_with("pop-os/pop", "production", "scripts/__lpcache__", version="devel")
def run(self): print("Running Bicho with delay of %s seconds" % (str(self.delay))) url = self.url pname = None pname = self.__get_project_from_url() bugsdb = get_database(DBLaunchpadBackend()) printdbg(url) # launchpad needs a temp directory to store cached data homedir = pwd.getpwuid(os.getuid()).pw_dir cachedir = os.path.join(homedir, ".cache/bicho/") if not os.path.exists(cachedir): os.makedirs(cachedir) cre_file = os.path.join(cachedir + 'launchpad-credential') self.lp = Launchpad.login_with('Bicho', 'production', credentials_file=cre_file) aux_status = [ "New", "Incomplete", "Opinion", "Invalid", "Won't Fix", "Expired", "Confirmed", "Triaged", "In Progress", "Fix Committed", "Fix Released", "Incomplete (with response)", "Incomplete (without response)" ] # still useless bugsdb.insert_supported_traker("launchpad", "x.x") trk = Tracker(url, "launchpad", "x.x") dbtrk = bugsdb.insert_tracker(trk) last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id) if last_mod_date: bugs = self.lp.projects[pname].searchTasks( status=aux_status, omit_duplicates=False, order_by='date_last_updated', modified_since=last_mod_date) else: bugs = self.lp.projects[pname].searchTasks( status=aux_status, omit_duplicates=False, order_by='date_last_updated') printdbg("Last bug already cached: %s" % last_mod_date) nbugs = len(bugs) if nbugs == 0: printout("No bugs found. Did you provide the correct url?") sys.exit(0) analyzed = [] for bug in bugs: if bug.web_link in analyzed: continue # for the bizarre error #338 try: issue_data = self.analyze_bug(bug) except Exception: #FIXME it does not handle the e printerr("Error in function analyzeBug with URL: ' \ '%s and Bug: %s" % (url, bug)) raise try: # we can have meta-trackers but we want to have the original #tracker name tr_url = self.__get_tracker_url_from_bug(bug) if (tr_url != url): aux_trk = Tracker(tr_url, "launchpad", "x.x") dbtrk = bugsdb.insert_tracker(aux_trk) bugsdb.insert_issue(issue_data, dbtrk.id) except UnicodeEncodeError: printerr( "UnicodeEncodeError: the issue %s couldn't be stored" % (issue_data.issue)) except NotFoundError: printerr("NotFoundError: the issue %s couldn't be stored" % (issue_data.issue)) except Exception, e: printerr("Unexpected Error: the issue %s couldn't be stored" % (issue_data.issue)) print e analyzed.append(bug.web_link) # for the bizarre error #338 time.sleep(self.delay)
except (errors.NoSuchRevision, errors.GhostRevisionsHaveNoRevno, errors.UnsupportedOperation): pass # Find all fixed bugs in those revisions bugs = [] for rev in release_revisions: for bug in rev.iter_bugs(): bugs.append(bug[0][27:]) bugs = sorted(bugs) # Connect to launchpad lp = Launchpad.login_anonymously("Compiz Release Script", "production") # Create a pretty looking formatted list of bugs bugs_formatted = [] for bug in bugs: lpBug = lp.bugs[bug] bugTitle = lpBug.title bugTitleWords = bugTitle.split(" ") maximumLineLength = 65 currentLineLength = 0 line = 0 lineParts = [""] for word in bugTitleWords:
"Authorization": f"Token {GITHUB_PAT}", } s = requests.Session() s.headers.update(github_headers) r = s.get(base_workflows_url) if not r.ok: raise Exception("workflows request failed") workflows = r.json()["workflows"] cache_dir = f"{Path.home()}/.launchpadlib/cache/" launchpad = Launchpad.login_anonymously("read-only", "production", cache_dir, version="devel") launchpad_project = launchpad.projects[LAUNCHPAD_PROJECT] pypi_releases = {} for build, config in matrix.items(): print(f"checking '{build}' package(s)") github_branch_name = config.get("github_branch", build) print(f" github_branch = {github_branch_name}") launchpad_ppa_name = config.get("launchpad_ppa", build) print(f" launchpad_ppa = {launchpad_ppa_name}") launchpad_ppa = list( filter(lambda x: x.name == launchpad_ppa_name, launchpad_project.ppas))[0]
#!/usr/bin/python3 import os import tempfile from launchpadlib.launchpad import Launchpad from github import Github github = Github(os.getenv("GITHUB_TOKEN")) github_repo = github.get_repo("canonical-web-and-design/maas-ui") with tempfile.NamedTemporaryFile(mode="w") as f: f.write(os.getenv("LAUNCHPAD_CREDENTIALS")) f.flush() launchpad = Launchpad.login_with( service_root="production", credentials_file=f.name, application_name="prod-design-maas-ui-lp-bot", ) project = launchpad.projects["maas"] ui_project = launchpad.projects["maas-ui"] def generate_open_bugs(): for task in project.searchTasks(status=["New", "Confirmed", "Triaged"], tags=["ui"]): has_ui_task = any(t.target == ui_project for t in task.related_tasks) if not has_ui_task: yield task.bug
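# Hedged usage of generate_open_bugs() above: list the MAAS bugs tagged "ui" that
# still lack a maas-ui task (attribute names as used in the snippet).
for bug in generate_open_bugs():
    print(bug.id, bug.title, bug.web_link)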
def take_action(self, parsed_args): err_count = 0 logging.getLogger("requests").setLevel(logging.WARNING) self.log.info('Connecting to Launchpad') self.lp = Launchpad.login_with('lp-report-bot', 'production', version='devel') self.tr = TrelloClient(api_key=parsed_args.trello_key, api_secret=parsed_args.trello_secret, token=parsed_args.trello_token, token_secret=parsed_args.trello_token_secret) try: self.board = [ board for board in self.tr.list_boards() if board.name == parsed_args.board ][0] except IndexError: if parsed_args.create_board: self.board = self.tr.add_board(parsed_args.board) # for label in self.board.all_lists(): # #label.delete() # # self.client.fetch_json( # # '/cards/' + self.id, # # http_method='DELETE') for list in self.board.open_lists(): list.close() else: raise Exception( "Board {0} doesn't exist. Use --create-board argument" + " in order to create it".format(parsed_args.board)) self.log.info("Working with board {0}".format(self.board)) self.tag_labels = parsed_args.use_labels self.cards = dict() self.untouched_cards = dict() for card in self.board.open_cards(): groups = re.search('(\d+)', card.name) if not (groups is None): bug_id = groups.group(0) if bug_id not in self.cards: self.untouched_cards[bug_id] = card self.log.debug( "Found existing card for bug {0}".format(bug_id)) self.cards[bug_id] = card else: self.log.info( "Killing duplicate card for bug {0}".format(bug_id)) card.delete() self.log.info("Found {0} existing cards".format( len(self.untouched_cards))) for prj_name in parsed_args.project: prj = self.lp.projects[prj_name] for f in parsed_args.filter: self.log.debug(f) filt = json.loads(f) if filt['milestone']: filt['milestone'] = prj.getMilestone( name=filt['milestone']) if 'assignee' in filt: filt['assignee'] = self.lp.people[filt['assignee']] if 'status' not in filt: filt['status'] = [ 'New', 'Incomplete', 'Opinion', 'Invalid', 'Won\'t Fix', 'Expired', 'Confirmed', 'Triaged', 'In Progress', 'Fix Committed', 'Fix Released' ] self.log.debug(filt) self.log.info("Searching for tasks in project %s" % prj_name) for task in prj.searchTasks(**filt): self.log.info("Proceeding task %s" % task) retries = 3 for i in range(retries): try: self.proceed_task(task) except Exception as e: if i < retries: self.log.exception(e) self.log.warning( "Got an exception for task %s, retrying" % task) continue else: self.log.exception(e) self.log.warning("Failed to proceed task %s" % task) err_count += 1 break for series in prj.series: self.log.info("Searching for tasks in {0}:{1}".format( str(prj.name), str(series.name))) for task in series.searchTasks(**filt): self.log.info("Proceeding task %s" % task) retries = 3 for i in range(retries): try: self.proceed_task(task) except Exception as e: if i < retries: continue else: self.log.exception(e) self.log.warning( "Failed to proceed task %s" % task) err_count += 1 break if self.untouched_cards: self.log.info("%d cards are out of scope" % len(self.untouched_cards)) try: out_of_scope_list = [ list for list in self.board.open_lists() if list.name == 'Trash/Out of scope' ][0] except IndexError: out_of_scope_list = self.board.add_list('Trash/Out of scope') for card in self.untouched_cards.values(): card.change_list(out_of_scope_list.id) self.log.info("Finished with %d errors" % err_count) if err_count > 0: return 1 return 0
#!/usr/bin/env python from launchpadlib.launchpad import Launchpad import requests, json url = "https://launchpad.net/~staticfloat/+archive/julianightlies" lp = Launchpad.login_anonymously('ppastats', 'production') archive = lp.people['staticfloat'].getPPAByName(name="julianightlies") binaries = archive.getPublishedBinaries(status='Published') timestr = min([z['date_published'] for z in binaries.entries]) requests.post("http://status.julialang.org/put/nightly", data=json.dumps({ 'target': 'Ubuntu', 'time': timestr, 'url': url, 'log_url': url }))
parser = argparse.ArgumentParser('') parser.add_argument('project') parser.add_argument('--quiet', action='store_true') parser.add_argument('--stat-file') args = parser.parse_args() if args.stat_file: try: with open(args.stat_file) as f: saved_data = json.loads(f.read()) except FileNotFoundError: saved_data = {} lp = Launchpad.login_anonymously('just testing', 'production') proj = lp.projects[args.project] bugtasks = proj.searchTasks() stat = {} for bugtask in bugtasks: status = bugtask.status priority = bugtask.importance if status not in stat: stat[status] = collections.defaultdict(int) stat[status][priority] += 1 # Calculate total per status
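# A hedged sketch of the step announced by the closing comment above: sum the
# per-priority counters into a per-status total and an overall total.
totals = {status: sum(per_priority.values()) for status, per_priority in stat.items()}
for status in sorted(totals):
    print("%-15s %d" % (status, totals[status]))
print("total: %d" % sum(totals.values()))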
def __init__(self, options): self.lp = Launchpad.login_anonymously( 'oops-prune', options.lpinstance, version='devel')
def launchpad_anon(): return Launchpad.login_anonymously("pop-os/pop", "production", "scripts/__lpcache__", version="devel")
def main(args=None): global jira_server opt_parser = argparse.ArgumentParser( description="Report the status of all active LaunchPad bug " "imported into a JIRA project with lp-to-jira", formatter_class=argparse.RawTextHelpFormatter, epilog=textwrap.dedent('''\ Examples: lp-to-jira-report FR lp-to-jira-report --csv results.csv FR lp-to-jira-report --json results.json FR lp-to-jira-report --html results.html FR ''') ) opt_parser.add_argument( 'project', type=str, help="The JIRA project string key") opt_parser.add_argument( '--csv', dest='csv', help='export the results of the report into FILE in csv format', ) opt_parser.add_argument( '--html', dest='html', help='export the results of the report into FILE in html format', ) opt_parser.add_argument( '--json', dest='json', help='export the results of the report into FILE in json format', ) opt_parser.add_argument( '--sync', dest='sync', action='store_true', help='Sync JIRA items with corresponding bugs in LP', ) opts = opt_parser.parse_args(args) jira_project = opts.project # 1. Initialize JIRA API api = jira_api() jira_server = api.server jira = JIRA(api.server, basic_auth=(api.login, api.token)) # TODO: catch exception if the Launchpad API isn't open # 2. Initialize Launchpad API # Connect to Launchpad API # TODO: catch exception if the Launchpad API isn't open snap_home = os.getenv("SNAP_USER_COMMON") if snap_home: credential_store = UnencryptedFileCredentialStore( "{}/.lp_creds".format(snap_home)) else: credential_store = UnencryptedFileCredentialStore( os.path.expanduser("~/.lp_creds")) lp = Launchpad.login_with( 'foundations', 'production', version='devel', credential_store=credential_store) print( "Searching for JIRA issues in project %s imported with lp-to-jira..." % opts.project, flush=True ) # Create a Database of all JIRA issues imported by lp-to-jira jira_lp_db = find_issues_in_project(jira, jira_project) print("Found %s issues" % len(jira_lp_db)) # For each issue retrieve latest lp data and sync if required merge_lp_data_with_jira_issues(jira, lp, jira_lp_db, opts.sync) # Create a table version of the database jira_lp_db_table = [] if jira_lp_db: jira_lp_db_table.append(list(jira_lp_db[0].keys())) jira_lp_db_table += [list(x.values()) for x in jira_lp_db] else: return 1 # Display the content of jira_lp_db based on the output option if opts.json: with open(opts.json, 'w') as fp: json.dump(jira_lp_db, fp, indent=2) print("JSON report saved as %s" % opts.json) if opts.html: print_html_report(jira_lp_db, opts.html) print("HTML report saved as %s" % opts.html) if opts.csv: print_table(jira_lp_db_table, sep=";", limit=1024, align=False, draw_title=False, file=opts.csv) print("CSV report saved as %s" % opts.csv) if not opts.csv and not opts.html and not opts.json: print_table(jira_lp_db_table, sep=" | ", limit=60, align=True, draw_title=True, file="/dev/stdout") return 0
#!/usr/bin/python import os import sys from launchpadlib.launchpad import Launchpad cachedir = os.path.expanduser("~/.launchpadlib/cache/") launchpad = Launchpad.login_anonymously('just testing', 'production', cachedir) args = sys.argv[1:] statuses = [ 'Work in progress', 'Approved', 'Needs review', 'Rejected', 'Merged', 'Code failed to merge', 'Queued', 'Superseded' ] if len(args) > 2: dist = launchpad.distributions[args[0]] project = dist.getSourcePackage(name=args[1]) merge_id = args[2] else: project = launchpad.projects[args[0]] merge_id = args[1] proposals = project.getMergeProposals(status=statuses) result = None for prop in proposals: url = prop.web_link prop_merge_id = os.path.basename(url)
def get_launchpad(self): if self.launchpad is None: client_id = 'rtfd-client-%s' % self.repo.name self.launchpad = Launchpad.login_anonymously( client_id, 'production') return self.launchpad
def do_periodic_import(self): """ See GenericBackend for an explanation of this function. Connect to launchpad and updates the state of GTG tasks to reflect the bugs on launchpad. """ # IMPORTANT NOTE! # Bugs can be splitted in bug tasks (such as, you can assign a single # bug to multiple projects: you have one bug and several bug tasks). # At least, one bug contains a bug task (if it's referring to a single # project). # Here, we process bug tasks, since those are the ones that get # assigned to someone. # To avoid having multiple GTG Tasks for the same bug (because we use # bug tasks, this may happen if somebody is working at the same bug for # different projects), we use the bug self_link for indexing the tasks. # Connecting to Launchpad CACHE_DIR = os.path.join(SYNC_CACHE_DIR, self.get_id()) try: self.cancellation_point() self.launchpad = Launchpad.login_anonymously( GTG_NAME, EDGE_SERVICE_ROOT, CACHE_DIR) except: # The connection is not working (the exception type can be # anything) BackendSignals().backend_failed(self.get_id(), BackendSignals.ERRNO_NETWORK) return # Getting the user data try: self.cancellation_point() me = self.launchpad.people[self._parameters["username"]] except KeyError: self.quit(disable=True) BackendSignals().backend_failed( self.get_id(), BackendSignals.ERRNO_AUTHENTICATION) return # Fetching the bugs self.cancellation_point() my_bugs_tasks = me.searchTasks(assignee=me, status=[ "New", "Incomplete", "Confirmed", "Triaged", "In Progress", "Fix Committed" ]) # Adding and updating for bug_task in my_bugs_tasks: self.cancellation_point() self._process_launchpad_bug(bug_task) # removing the old ones last_bug_list = self.sync_engine.get_all_remote() new_bug_list = [bug.self_link for bug in my_bugs_tasks] for bug_link in set(last_bug_list).difference(set(new_bug_list)): self.cancellation_point() # we make sure that the other backends are not modifying the task # set with self.datastore.get_backend_mutex(): tid = self.sync_engine.get_local_id(bug_link) self.datastore.request_task_deletion(tid) try: self.sync_engine.break_relationship(remote_id=bug_link) except KeyError: pass
sys.exit(1) if not os.path.isfile(configpath): print('%s does not contain config.js' % basepath, file=sys.stderr) sys.exit(1) cachedir = os.path.expanduser("~/.launchpadlib/cache/") if not os.path.exists(cachedir): os.makedirs(cachedir, 0o0700) with open(configpath, 'r') as configfile: config = json.load(configfile) projects = config['projects'] rotation = config.get('rotation') daily = config.get('daily') daily_rotation = config.get('daily_rotation') openstack_status = config.get('openstack_status') # Create files in output directory, if needed create_files(templatepath, outputpath, projects, config) # Refresh JSON stats files launchpad = Launchpad.login_anonymously('bugdaystats', 'production', cachedir) for p in projects: update_stats(outputpath, p['project'], rotation) if (daily): update_stats(outputpath, p['project'], daily_rotation, daily=True)
#!/usr/bin/python import sys import re import subprocess from launchpadlib.launchpad import Launchpad if sys.version_info[0] > 2: xrange = range cachedir = '/.launchpadlib/cache/' launchpad = Launchpad.login_anonymously('grab build info', 'production', cachedir, version='devel') processed_packages = [] deb_file_list = [] def get_url(pkg, distro): build_ref = launchpad.archives.getByReference( reference='ubuntu').getPublishedBinaries( binary_name=pkg[0], distro_arch_series='https://api.launchpad.net/devel/ubuntu/' + distro + '/amd64', version=pkg[1], exact_match=True, order_by_date=True).entries[0] build_link = build_ref['build_link'] deb_name = '{}_{}_{}.deb'.format(
import sys import datetime from launchpadlib.launchpad import Launchpad if len(sys.argv) != 2: print 'Usage:\n\t%s projectname' % sys.argv[0] sys.exit(1) projectname = sys.argv[1] sys.stderr.write('Logging in...\n') cachedir = '/tmp/launchpadlib-cache' launchpad = Launchpad.login_with('openstack-lp-scripts', 'production', cachedir, version='devel') statuses = ['New', 'Incomplete', 'Confirmed', 'Triaged', 'In Progress', 'Fix Committed'] sys.stderr.write('Retrieving project...\n') proj = launchpad.projects[projectname] sys.stderr.write('Considering bugs changed in the last 30 days...\n') now = datetime.datetime.now() since = datetime.datetime(now.year, now.month, now.day) since -= datetime.timedelta(days=30) triagers = {}
try: notices = subprocess.check_output([CHECK_NOTICES_PATH] + snaps) logger.debug("Got check_notices output:\n%s", notices.decode()) except subprocess.CalledProcessError as e: logger.error("Failed to check notices:\n%s", e.output) else: notices = json.loads(notices) return notices if __name__ == '__main__': try: lp = Launchpad.login_with( APPLICATION, LAUNCHPAD, version="devel", authorization_engine=RequestTokenAuthorizationEngine( LAUNCHPAD, APPLICATION), credential_store=UnencryptedFileCredentialStore( os.path.expanduser("~/.launchpadlib/credentials"))) except NotImplementedError: raise RuntimeError("Invalid credentials.") check_notices = (os.path.isfile(CHECK_NOTICES_PATH) and os.access(CHECK_NOTICES_PATH, os.X_OK) and CHECK_NOTICES_PATH) if not check_notices: logger.info("`review-tools` not found, skipping USN checks…") ubuntu = lp.distributions["ubuntu"] logger.debug("Got ubuntu: %s", ubuntu)
def take_action(self, parsed_args): self.log.debug('connecting to Launchpad') lp = Launchpad.login_with('lp-report-bot', 'production', version='devel') prj = lp.projects[parsed_args.project] if parsed_args.milestone: milestone = prj.getMilestone(name=parsed_args.milestone) else: milestone = None if parsed_args.open_only: search_states = [ 'Incomplete', 'Confirmed', 'Triaged', 'In Progress', 'Incomplete (with response)', 'Incomplete (without response)', ] else: search_states = [ 'New', 'Incomplete', 'Confirmed', 'Triaged', 'In Progress', 'Incomplete (with response)', 'Incomplete (without response)', 'Opinion', 'Invalid', 'Won\'t Fix', 'Expired', 'Fix Committed', 'Fix Released', ] text_fields = [ 'title', 'heat', 'message_count', 'tags', 'private', 'security_related', 'users_affected_count', 'number_of_duplicates', 'users_unaffected_count', 'users_affected_count_with_dupes' ] person_fields = ['owner'] date_fields = ['date_created', 'date_last_updated'] collection_size_fields = [ 'activity_collection', 'attachments_collection', 'bug_tasks_collection', 'bug_watches_collection', 'cves_collection' ] bt_text_fields = ['importance', 'status', 'is_complete'] bt_person_fields = ['assignee'] bt_date_fields = [ 'date_assigned', 'date_closed', 'date_confirmed', 'date_created', 'date_fix_committed', 'date_fix_released', 'date_in_progress', 'date_incomplete', 'date_left_closed', 'date_left_new', 'date_triaged' ] def get_user_id_by_link(link): # Guess id from link in order to avoid extra network request # Link looks like https://api.launchpad.net/devel/~dpyzhov if link is None: return None return link[link.find('~') + 1:] def get_project_name_by_link(link): # Guess project from project or series link for speedup return link.split('/')[4] def get_milestone_name_by_link(link): # Save some time. Link looks like this: # https://api.launchpad.net/devel/fuel/+milestone/8.0 if link is None: return None return link.split('/')[6] def get_bug_by_link(link): # https://api.launchpad.net/devel/bugs/1523901 if link is None: return None return link.split('/')[5] def collect_bug(bug_task): bug = bug_task.bug s = pd.Series(name=bug.id) for f in text_fields: s[f] = getattr(bug, f) for f in date_fields: s[f] = str(getattr(bug, f)) for f in person_fields: s[f] = get_user_id_by_link(getattr(bug, f + '_link')) s['duplicates'] = get_bug_by_link(bug.duplicate_of_link) if parsed_args.add_collections: for f in collection_size_fields: s[f + '_size'] = len(getattr(bug, f)) for bt in bug.bug_tasks: prj_name = get_project_name_by_link(bt.target_link) ms_name = get_milestone_name_by_link(bt.milestone_link) col_prefix = '%s_%s_' % (prj_name, ms_name) for f in bt_text_fields: s[col_prefix + f] = getattr(bt, f) for f in bt_person_fields: s[col_prefix + f] = get_user_id_by_link( getattr(bt, f + '_link')) for f in bt_date_fields: s[col_prefix + f] = str(getattr(bt, f)) return s df = pd.DataFrame() collection = prj.searchTasks(status=search_states, milestone=milestone, modified_since=parsed_args.updated_since, omit_duplicates=False) s = len(collection) self.log.info("Found %d bugs" % s) i = 0 for bt in collection: i += 1 series = collect_bug(bt) self.log.debug("%s: %d/%d %s" % (prj.name, i, s, series.title)) df = df.append(series) self.log.debug("Report size is %d lines" % len(df)) if milestone: lp_series = milestone.series_target # For some reason I can't get access to methods if I don't access # some property before. 
# Magic stuff lp_series.name collection = lp_series.searchTasks( status=search_states, milestone=milestone, modified_since=parsed_args.updated_since, omit_duplicates=False) s = len(collection) self.log.info("Found %d bugs on %s series" % (s, lp_series.name)) i = 0 for bt in collection: i += 1 series = collect_bug(bt) self.log.debug("%s: %d/%d %s" % (prj.name, i, s, series.title)) df = df.append(series) self.log.debug("Report size is %d lines" % len(df)) self.log.debug("Saving data to %s" % parsed_args.outfile) df.to_csv(parsed_args.outfile, encoding='utf-8')
def publish_launchpad(version): # check that required files exist check_file_exists(version.jar_file()) check_file_exists(version.tar_file()) check_file_exists(version.zip_file()) check_file_exists(version.exe_file()) if raw_input('Publish to production? [y/N] ') == "y": server = 'production' else: server = 'staging' launchpad = Launchpad.login_with('Releasebot', server, 'lpcache') chunky = launchpad.projects['chunky'] # check if release exists release = None for r in chunky.releases: if r.version == version.milestone: release = r print "Previous %s release found: proceeding to upload additional files." \ % version.milestone break is_new_release = release is None if release is None: # check if milestone exists milestone = None for ms in chunky.all_milestones: if ms.name == version.milestone: milestone = ms break # create milestone (and series) if needed if milestone is None: series = None for s in chunky.series: if s.name == version.series: series = s break if series is None: series = chunky.newSeries( name=version.series, summary= "The current stable series for Chunky. NB: The code is maintained separately on GitHub." ) print "Series %s created. Please manually update the series summary:" % version.series print series milestone = series.newMilestone(name=version.milestone) print "Milestone %s created." % version.milestone # create release release = milestone.createProductRelease( release_notes=version.release_notes, changelog=version.changelog, date_released=datetime.today()) milestone.is_active = False print "Release %s created" % version.milestone assert release is not None # upload release files jar_url = lp_upload_file(version, release, version.jar_file(), 'Core Library', 'application/java-archive', 'installer') assert jar_url print jar_url tarball_url = lp_upload_file(version, release, version.tar_file(), 'Source Code', 'application/x-tar', 'tarball') assert tarball_url print tarball_url zip_url = lp_upload_file(version, release, version.zip_file(), 'Binaries', 'application/zip', 'installer') assert zip_url print zip_url exe_url = lp_upload_file(version, release, version.exe_file(), 'Windows Installer', 'application/octet-stream', 'installer') assert exe_url print exe_url return (is_new_release, exe_url, zip_url, jar_url)
def publish_launchpad(version): # Check that required files exist. check_file_exists(version.jar_file()) check_file_exists(version.tar_file()) check_file_exists(version.zip_file()) check_file_exists(version.exe_file()) check_file_exists(version.dmg_file()) if raw_input('Publish to production? [y/N] ') == "y": server = 'production' service = launchpadlib.uris.LPNET_SERVICE_ROOT else: server = 'staging' service = launchpadlib.uris.STAGING_SERVICE_ROOT app_name = 'Releasebot' launchpad = Launchpad.login_with(app_name, server, 'lpcache') chunky = launchpad.projects['chunky'] # Check if release exists. release = None for r in chunky.releases: if r.version == version.milestone: release = r print( "Previous %s release found: will to upload additional files." % version.milestone) break is_new_release = release is None if release is None: # Check if milestone exists. milestone = None for ms in chunky.all_milestones: if ms.name == version.milestone: milestone = ms break # Create milestone (and series) if needed. if milestone is None: series = None for s in chunky.series: if s.name == version.series: series = s break if series is None: series = chunky.newSeries( name=version.series, summary= "The current stable series for Chunky. NB: The code is maintained separately on GitHub." ) print( "Series %s created. Please manually update the series summary:" % version.series) print(series) milestone = series.newMilestone(name=version.milestone) print("Milestone %s created." % version.milestone) # Create release. release = milestone.createProductRelease( release_notes=version.release_notes, changelog=version.changelog, date_released=datetime.today()) milestone.is_active = False print("Release %s created" % version.milestone) assert release is not None # Upload release files. jar_url = lp_upload_file(version, release, version.jar_file(), 'Core Library', 'application/java-archive', 'installer') assert jar_url print(jar_url) tarball_url = lp_upload_file(version, release, version.tar_file(), 'Source Code', 'application/x-tar', 'tarball') assert tarball_url print(tarball_url) zip_url = lp_upload_file(version, release, version.zip_file(), 'Binaries', 'application/zip', 'installer') assert zip_url print(zip_url) dmg_url = lp_upload_file(version, release, version.dmg_file(), 'Mac Bundle', 'application/octet-stream', 'installer') assert dmg_url print(dmg_url) exe_url = lp_upload_file(version, release, version.exe_file(), 'Windows Installer', 'application/octet-stream', 'installer') assert exe_url print(exe_url) return (is_new_release, exe_url, dmg_url, zip_url, jar_url)
# Checks launchpad for an orig.tar.gz # first argument: upstream version # second argument: full path of destination to save file if it's found import sys import os from launchpadlib.launchpad import Launchpad import urllib cachedir = os.path.join(os.environ['HOME'], '.launchpadlib', 'cache') launchpad = Launchpad.login_anonymously('mythtv daily builder', 'production', cachedir) ubuntu = launchpad.distributions["ubuntu"] archive = ubuntu.main_archive series = ubuntu.current_series full_version = archive.getPublishedSources( exact_match=True, source_name="mythtv", distro_series=series)[0].source_package_version upstream_version = full_version.split(':')[1].split('-')[0] print "Current version in the archive is: %s" % upstream_version if len(sys.argv) > 1 and sys.argv[1] == upstream_version: urls = archive.getPublishedSources( exact_match=True, source_name="mythtv")[0].sourceFileUrls() for url in urls: if 'orig.tar.gz' in url: if len(sys.argv) > 2: destination = sys.argv[2] else: destination = os.path.basename(url) print "Fetching %s to %s" % (url, destination) urllib.urlretrieve(url, destination)
if environ.get('PKG_LOG') is not None: pkglog = environ.get('PROJ_ROOT') + environ.get('LOG_DIR') + environ.get( 'PKG_LOG') else: print("no log file for getpkgurls.py") launchpad_spec = util.find_spec("launchpadlib") if util.find_spec("launchpadlib") == "": print("You need to install launchpadlib for python") sys.exit(1) from launchpadlib.launchpad import Launchpad try: launchpad = Launchpad.login_anonymously('rpm_maker.sh', 'production') team = launchpad.people[lp_team] ubuntu = launchpad.distributions["ubuntu"] ppa = team.getPPAByName(distribution=ubuntu, name=lp_ppa) ds1 = ubuntu.getSeries(name_or_version="trusty") ds2 = ubuntu.getSeries(name_or_version="lucid") ds3 = ubuntu.getSeries(name_or_version="xenial") ds4 = ubuntu.getSeries(name_or_version="bionic") d_s = [ds1, ds2, ds3, ds4] d_a_s = [] for i in d_s: d_a_s.append(i.getDistroArchSeries(archtag=arch))
def launchpadticket(ticket_id=None): launchpad = LP.login_anonymously("dark-reflector", "production") return launchpad.bugs[ticket_id].title