def inner(mock_listdir):
    # Case 1: os.listdir() raises -- pruning must log an error and
    # delete nothing.
    mock_listdir.side_effect = OSError
    posix._prune_old_backups(entry)
    self.assertTrue(posix.logger.error.called)
    self.assertFalse(mock_remove.called)
    mock_listdir.assert_called_with(setup['ppath'])
    mock_listdir.reset_mock()
    mock_remove.reset_mock()
    # Case 2: listing succeeds -- exactly the 'remove' entries are
    # deleted, the 'keep' entries are untouched.
    mock_listdir.side_effect = None
    mock_listdir.return_value = keep + remove
    posix._prune_old_backups(entry)
    mock_listdir.assert_called_with(setup['ppath'])
    self.assertItemsEqual(mock_remove.call_args_list,
                          [call(os.path.join(setup['ppath'], p))
                           for p in remove])
    mock_listdir.reset_mock()
    mock_remove.reset_mock()
    # Case 3: os.remove() raises -- every candidate must still be
    # attempted and the failure logged.
    mock_remove.side_effect = OSError
    posix.logger.error.reset_mock()
    # test to ensure that we call os.remove() for all files that
    # need to be removed even if we get an error
    posix._prune_old_backups(entry)
    mock_listdir.assert_called_with(setup['ppath'])
    self.assertItemsEqual(mock_remove.call_args_list,
                          [call(os.path.join(setup['ppath'], p))
                           for p in remove])
    self.assertTrue(posix.logger.error.called)
def build_repo_messages(catkin_packages, docspace, ros_distro):
    """Generate messages for a catkin workspace.

    Returns (source, build_errors): a shell 'source' line for the
    workspace's devel setup (empty string on failure) and a list of
    failed components.
    """
    # we'll replace cmake files with our own since we only want to do
    # message generation
    replace_catkin_cmake_files(catkin_packages)
    build_errors = []
    # For groovy, this isn't too bad, we just set up a workspace
    old_dir = os.getcwd()
    repo_devel = os.path.join(docspace, 'build_repo')
    if not os.path.exists(repo_devel):
        os.makedirs(repo_devel)
    os.chdir(repo_devel)
    # Python 2-only `print` statements replaced with the parenthesized
    # form, which behaves identically on both Python 2 and 3 for a
    # single argument (the rest of this file already uses print()).
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(docspace, 'CMakeLists.txt'))
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    print("Calling cmake...")
    call("catkin_init_workspace %s" % docspace, ros_env)
    try:
        call("cmake ..", ros_env)
        ros_env = get_ros_env(os.path.join(repo_devel, 'devel/setup.bash'))
        generate_messages_catkin(ros_env)
        source = 'source %s' % (os.path.abspath(
            os.path.join(repo_devel, 'devel/setup.bash')))
    except BuildException as e:
        print("FAILED TO CALL CMAKE ON CATKIN REPOS")
        print("There will be no messages in documentation and some python docs may fail")
        print("Exception: %s" % e)
        source = ''
        build_errors.append("catkin_workspace for repository")
    os.chdir(old_dir)
    return (source, build_errors)
def create(dbg, path):
    """Create a new loop device backed by the given file and return it."""
    # losetup resolves symlinks itself, and find() compares paths by
    # string equality, so canonicalize before doing anything else.
    real_path = os.path.realpath(path)
    call(dbg, ["losetup", "-f", real_path])
    return find(dbg, real_path)
def __init__(self, ros_distro, sudo=False, no_chroot=False):
    """Build ros-name <-> apt-name lookup maps from the rosdep database.

    no_chroot: skip initializing/updating rosdep (assume it is ready).
    """
    self.r2a = {}  # ros name -> list of apt package names
    self.a2r = {}  # apt package name -> ros name
    # NOTE(review): this mutates the process-wide environment, not a
    # copy -- callers appear to rely on ROS_DISTRO being exported.
    self.env = os.environ
    self.env['ROS_DISTRO'] = ros_distro
    if no_chroot:
        print("Skip initializing and updating rosdep database")
    else:
        # Typo fixed ("Ininitalize"); matches the message used by the
        # extended variant of this constructor elsewhere in the file.
        print("Initialize rosdep database")
        apt_get_install(['lsb-release', 'python-rosdep'], sudo=sudo)
        try:
            call("rosdep init", self.env)
        except Exception:
            # `rosdep init` fails when it has already been run; that is
            # expected and harmless. (Was a bare `except:`.)
            print("Rosdep is already initialized")
        call("rosdep update", self.env)
    print("Building dictionaries from a rosdep's db")
    raw_db = check_output("rosdep db", self.env, verbose=False).split('\n')
    for entry in raw_db:
        split_entry = entry.split(' -> ')
        if len(split_entry) < 2:
            continue
        ros_entry = split_entry[0]
        if split_entry[1]:
            apt_entries = split_entry[1].split(' ')
        else:
            apt_entries = []
        self.r2a[ros_entry] = apt_entries
        for a in apt_entries:
            self.a2r[a] = ros_entry
def testCreateOneTag(self):
    """Tagging a file once records its topic, issue date and type."""
    call("stag", testpath("somefile1"), "atag")
    graph = read_or_create_graph()
    uri = testfileuri("somefile1")
    self.assertEqual(graph.value(uri, FOAF['topic']), TAG['atag'])
    # An issue timestamp must have been written, whatever its value.
    self.assertNotEqual(graph.value(uri, DC['issued'], None), None)
    self.assertEqual(graph.value(uri, RDF.type), FOAF['Document'])
def build_repo_messages(catkin_packages, docspace, ros_distro):
    """Generate messages for a catkin workspace.

    Returns (source, build_errors): a shell 'source' line for the devel
    setup (empty on failure) plus a list of failed components.
    """
    # we'll replace cmake files with our own since we only want to do
    # message generation
    replace_catkin_cmake_files(catkin_packages)
    build_errors = []
    # For groovy, this isn't too bad, we just set up a workspace
    old_dir = os.getcwd()
    repo_devel = os.path.join(docspace, 'build_repo')
    if not os.path.exists(repo_devel):
        os.makedirs(repo_devel)
    os.chdir(repo_devel)
    # py2-only print statements converted to the parenthesized form,
    # identical behavior on py2 and py3 for a single argument.
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(docspace, 'CMakeLists.txt'))
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    print("Calling cmake...")
    call("catkin_init_workspace %s" % docspace, ros_env)
    try:
        call("cmake ..", ros_env)
        ros_env = get_ros_env(os.path.join(repo_devel, 'devel/setup.bash'))
        generate_messages_catkin(ros_env)
        source = 'source %s' % (os.path.abspath(
            os.path.join(repo_devel, 'devel/setup.bash')))
    except BuildException as e:
        print("FAILED TO CALL CMAKE ON CATKIN REPOS")
        print("There will be no messages in documentation and some python docs may fail")
        print("Exception: %s" % e)
        source = ''
        build_errors.append("catkin_workspace for repository")
    os.chdir(old_dir)
    return (source, build_errors)
def test_verify(self, mock_verify, mock_walk, mock_stat):
    """verify() honors _verify's verdict and, when recursive, checks
    every path that os.walk reports beneath the entry."""
    entry = lxml.etree.Element("Path", name="/test", type="file")
    mock_stat.return_value = MagicMock()

    # Non-recursive: the verdict is whatever _verify says.
    mock_verify.return_value = False
    self.assertFalse(self.ptool.verify(entry, []))
    mock_verify.assert_called_with(entry)

    mock_verify.reset_mock()
    mock_verify.return_value = True
    self.assertTrue(self.ptool.verify(entry, []))
    mock_verify.assert_called_with(entry)

    # Recursive: every dir and file from the walk must be verified.
    mock_verify.reset_mock()
    entry.set("recursive", "true")
    walk_rv = [("/", ["dir1", "dir2"], ["file1", "file2"]),
               ("/dir1", ["dir3"], []),
               ("/dir2", [], ["file3", "file4"])]
    mock_walk.return_value = walk_rv
    self.assertTrue(self.ptool.verify(entry, []))
    mock_walk.assert_called_with(entry.get("name"))
    expected = [call(entry)]
    for root, dirs, files in walk_rv:
        expected.extend(call(entry, path=os.path.join(root, child))
                        for child in dirs + files)
    self.assertItemsEqual(mock_verify.call_args_list, expected)
def install_repo(docspace, workspace, repo, doc_conf, depends_conf):
    """Write a rosinstall file for *repo* and install it into docspace."""
    with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
        # py2-only print statements converted to the parenthesized form
        # (identical output for a single argument on py2 and py3).
        print("Rosinstall for repo %s:\n%s" % (repo, doc_conf + depends_conf))
        yaml.safe_dump(doc_conf + depends_conf, f, default_flow_style=False)
    print("Created rosinstall file for repo %s, installing repo..." % repo)
    # TODO Figure out why rosinstall insists on having ROS available when
    # called with nobuild, but not catkin
    call("rosinstall %s %s --nobuild --catkin" % (
        docspace, os.path.join(workspace, "repo.rosinstall")))
def generate_messages_catkin(env):
    """Run every *genpy message-generation target exposed by `make help`."""
    try:
        targets = call("make help", env).split('\n')
    except BuildException:
        # No build system available; nothing to generate.
        # (The bound exception was unused, so it is no longer captured.)
        return
    genpy_targets = [t.split()[1] for t in targets if t.endswith("genpy")]
    print(genpy_targets)
    for t in genpy_targets:
        call("make %s" % t, env)
def make(project):
    """Build the project's solution with xbuild; map the outcome to Status."""
    try:
        call(['xbuild', project.solution])
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def generate_messages_catkin(env):
    """Invoke each *genpy target reported by `make help`."""
    try:
        help_lines = check_output("make help", env).split('\n')
    except BuildException:
        # No makefile help available -- nothing to do.
        return
    targets = [line.split()[1] for line in help_lines
               if line.endswith("genpy")]
    print(targets)
    for target in targets:
        call("make %s" % target, env)
def create(dbg):
    """Spawn a tapdisk process, allocate a minor number and attach them.

    Returns a Tapdisk with no image attached yet.
    """
    pid = int(call(dbg, ["tap-ctl", "spawn"]).strip())
    alloc_out = call(dbg, ["tap-ctl", "allocate"]).strip()
    prefix = blktap2_prefix
    minor = None
    if alloc_out.startswith(prefix):
        minor = int(alloc_out[len(prefix):])
    if minor is None:
        # Reap the spawned process before surfacing the failure.
        os.kill(pid, signal.SIGQUIT)
        raise (xapi.InternalError(
            "tap-ctl allocate returned unexpected output: '%s'" % alloc_out))
    call(dbg, ["tap-ctl", "attach", "-m", str(minor), "-p", str(pid)])
    return Tapdisk(minor, pid, None)
def make(project):
    """Compile project.solution using xbuild."""
    command = ['xbuild', project.solution]
    try:
        call(command)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    else:
        return Status.SUCCESS
def make(project):
    """Run `bower install`, honoring a configured bower path."""
    try:
        project.bowerpath = os.path.abspath(
            os.path.expanduser(Settings.get('bower_path')))
    except Exception:
        # Setting missing/unusable: fall back to bower on $PATH.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        project.bowerpath = ""
    cmdline = [os.path.join(project.bowerpath, "bower"), "install"]
    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def test_ReceiveData(self, mock_ReceiveDataItem, mock_write_data):
    """ReceiveData hands each datum to ReceiveDataItem, then flushes."""
    # we use a simple (read: bogus) datalist here to make this
    # easy to test
    datalist = ["a", "b", "c"]
    probes = self.get_probes_object()
    client = Mock()
    client.hostname = "foo.example.com"
    probes.ReceiveData(client, datalist)
    expected = [call(client, item) for item in datalist]
    self.assertItemsEqual(mock_ReceiveDataItem.call_args_list, expected)
    mock_write_data.assert_called_with(client)
def make(project):
    """Run `yarn`, honoring a configured yarn path."""
    try:
        project.yarnpath = os.path.abspath(
            os.path.expanduser(Settings.get('yarn_path')))
    except Exception:
        # Setting missing/unusable: fall back to yarn on $PATH.
        # (Was a bare `except:`.)
        project.yarnpath = ""
    cmdline = [os.path.join(project.yarnpath, "yarn")]
    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def make(project):
    """Run `bower install`, honoring a configured bower path."""
    try:
        project.bowerpath = os.path.abspath(
            os.path.expanduser(Settings.get('bower_path')))
    except Exception:
        # Setting missing/unusable: fall back to bower on $PATH.
        # (Was a bare `except:`.)
        project.bowerpath = ""
    cmdline = [os.path.join(project.bowerpath, "bower"), "install"]
    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def comparison(pathToRef, pathToOutput):
    """Compares two directories

    Args:
        pathToRef (str): Path to the reference files.
        pathToOutput (str): Path to the output files.

    Raises:
        Exception: Raises IncorrectOutput when output differs from reference.
    """
    diff = common.get_diff_files(
        filecmp.dircmp(pathToRef, pathToOutput, ignore=[".gitkeep"]))
    if not (diff[0] or diff[1] or diff[2]):
        print('SYSTEST SUCCEEDED - Result files and reference are identical.')
        return
    # check the results numerically now
    num_diff = call("bash ../../compare_results.sh {} {}".format(
        pathToRef, pathToOutput))
    if num_diff == 1:
        raise IncorrectOutput(*diff)
    if num_diff != 0:
        raise ValueError(
            "compare_results.sh exited with unknown code '{}'".format(
                num_diff))
    print(
        'SYSTEST SUCCEEDED - Differences to referenceOutput within tolerance.'
    )
def list(dbg):
    """Return a Tapdisk for every row reported by `tap-ctl list`."""
    results = []
    for line in call(dbg, ["tap-ctl", "list"]).split("\n"):
        bits = line.split()
        if bits == []:
            continue
        prefix = "pid="
        pid = None
        if bits[0].startswith(prefix):
            pid = int(bits[0][len(prefix):])
        minor = None
        prefix = "minor="
        if bits[1].startswith(prefix):
            minor = int(bits[1][len(prefix):])
        if len(bits) <= 3:
            results.append(Tapdisk(minor, pid, None))
        else:
            prefix = "args="
            args = None
            if bits[3].startswith(prefix):
                args = bits[3][len(prefix):]
            if args is None:
                # Unrecognized fourth column: previously this crashed
                # with AttributeError on args.startswith(); skip the row.
                continue
            this = None
            prefix = "aio:"
            if args.startswith(prefix):
                this = image.Raw(os.path.realpath(args[len(prefix):]))
                results.append(Tapdisk(minor, pid, this))
            prefix = "vhd:"
            if args.startswith(prefix):
                this = image.Vhd(os.path.realpath(args[len(prefix):]))
                results.append(Tapdisk(minor, pid, this))
    return results
def _get_new_token_info(self, params):
    """POST to the token endpoint; return token info with absolute expiry."""
    token_info = call('post', self.token_url, **params).json()
    # Convert the server's relative 'expires_in' into an absolute epoch
    # timestamp so callers can compare against time.time() directly.
    token_info['expires_at'] = int(time.time()) + token_info['expires_in']
    token_info['scope'] = self.scope
    return token_info
def __init__(self, dbg, base_device):
    """Wrap an existing device-mapper device and verify its table.

    Raises xapi.InternalError if the live table does not match the one
    computed from base_device.
    """
    self.name = name_of_device(base_device)
    t = table(base_device)
    existing = call(dbg, ["dmsetup", "table", self.name]).strip()
    # `<>` is Python 2-only syntax (removed in Python 3); `!=` is the
    # equivalent, portable spelling.
    if existing != t:
        log(dbg, "Device mapper device %s has table %s, expected %s" % (
            self.name, existing, t))
        raise (xapi.InternalError(
            "Device mapper device %s has unexpected table" % self.name))
def make(project):
    """Run `npm install`, first compiling any LiveScript package manifest."""
    try:
        project.npmpath = os.path.abspath(
            os.path.expanduser(Settings.get('npm_path')))
    except Exception:
        # Setting missing/unusable: fall back to npm on $PATH.
        # (Was a bare `except:`.)
        project.npmpath = ""
    if os.path.isfile("package.json.ls") or os.path.isfile("package.lson"):
        try:
            project.lscpath = os.path.abspath(
                os.path.expanduser(Settings.get('lsc_path')))
        except Exception:
            project.lscpath = ""
        if os.path.isfile("package.json.ls"):
            lscmd = [os.path.join(project.lscpath, "lsc"), "-jc",
                     "package.json.ls"]
        elif os.path.isfile("package.lson"):
            lscmd = [os.path.join(project.lscpath, "lsc"), "-jc",
                     "package.lson"]
        try:
            call(lscmd)
        except subprocess.CalledProcessError:
            return Status.FAILURE
    cmdline = [os.path.join(project.npmpath, "npm"), "install"]
    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def make(project):
    """Run `npm install`, first compiling any LiveScript package manifest."""
    try:
        project.npmpath = os.path.abspath(
            os.path.expanduser(Settings.get('npm_path')))
    except Exception:
        # Setting missing/unusable: fall back to npm on $PATH.
        # (Was a bare `except:`.)
        project.npmpath = ""
    if os.path.isfile("package.json.ls") or os.path.isfile("package.lson"):
        try:
            project.lscpath = os.path.abspath(
                os.path.expanduser(Settings.get('lsc_path')))
        except Exception:
            project.lscpath = ""
        if os.path.isfile("package.json.ls"):
            lscmd = [os.path.join(project.lscpath, "lsc"), "-jc",
                     "package.json.ls"]
        elif os.path.isfile("package.lson"):
            lscmd = [os.path.join(project.lscpath, "lsc"), "-jc",
                     "package.lson"]
        try:
            call(lscmd)
        except subprocess.CalledProcessError:
            return Status.FAILURE
    cmdline = [os.path.join(project.npmpath, "npm"), "install"]
    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def get_repo_revision(repo_folder, vcs_type):
    """Return the current revision of repo_folder for the given VCS type.

    Unknown VCS types yield "" and a warning on stderr.
    """
    # Make sure we're in the right directory
    old_dir = os.getcwd()
    os.chdir(repo_folder)
    if vcs_type == 'git':
        rev = call("git rev-parse HEAD").split('\n')[0]
    elif vcs_type == 'hg':
        rev = call("hg id -i").split('\n')[0]
    elif vcs_type == 'bzr':
        rev = call("bzr revno").split('\n')[0]
    elif vcs_type == 'svn':
        rev = call("svnversion").split('\n')[0]
    else:
        rev = ""
        # `print >> stream` is Python 2-only syntax; write() behaves the
        # same on both interpreters (message text unchanged).
        sys.stderr.write(
            "Don't know how to get the version for vcs_type %s, "
            "doc generation will always run\n" % vcs_type)
    # Make sure we go back to the original dir
    os.chdir(old_dir)
    return rev
def create(dbg, host, name):
    """Return an active nbd device associated with the given name,
    creating a fresh one if one doesn't already exist."""
    existing = find(dbg, host, name)
    if existing:
        return existing
    used = set()
    try:
        used = set(os.listdir(persist_root))
    except OSError as exc:
        # A missing persist directory just means nothing is in use yet.
        if exc.errno == errno.ENOENT:
            pass
        else:
            raise
    # Renamed from `all`, which shadowed the builtin of the same name.
    candidates = set(n for n in os.listdir("/dev") if n.startswith("nbd"))
    for nbd in candidates.difference(used):
        # Attach the first free device; nbd-client failures propagate.
        call(dbg, ["nbd-client", host, "/dev/" + nbd, "-name", name])
        return Nbd(host, name, nbd)
    raise NoAvailableNbd()
def make(project):
    """Build with msbuild (optionally inside project.build_dir)."""
    try:
        project.makepath = os.path.abspath(
            os.path.expanduser(Settings.get('make_path')))
    except Exception:
        # Setting missing/unusable: fall back to msbuild on $PATH.
        # (Was a bare `except:`.)
        project.makepath = ""
    # TODO: detect a suitable vcvars if the environment isn't init
    cmdline = [os.path.join(project.makepath, "msbuild")]
    cmdline += ["/p:Platform=Win32"]
    if Args.option("debug"):
        cmdline += ["/p:Configuration=Debug"]
    else:
        cmdline += ["/p:Configuration=Release"]
    if project.msbuild_params:
        cmdline += project.msbuild_params
    try:
        os.chdir(project.build_dir)
    except Exception:
        pass

    def _chdir_back():
        # Best-effort return to the parent dir when a build dir was used
        # (this duplicated logic previously appeared twice inline).
        try:
            if project.build_dir:
                os.chdir("..")
        except Exception:
            pass

    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        _chdir_back()
        return Status.FAILURE
    _chdir_back()
    return Status.SUCCESS
def package(project):
    """Create an NSIS installer; fall back to wine off Windows."""
    on_windows = platform.system() == "Windows"
    if on_windows:
        try:
            call(["makensis", project.nsi_file])
        except OSError:
            return Status.UNSUPPORTED
        return Status.SUCCESS
    # Non-Windows: try a native makensis first, then the wine-hosted one.
    try:
        call(["makensis", project.nsi_file])
    except OSError:
        try:
            call([
                "wine",
                "\"c:/Program Files (x86)/NSIS/makensis.exe\"",
                project.nsi_file,
            ])
        except OSError:
            return Status.UNSUPPORTED
    return Status.SUCCESS
def testRemoveRecentOnly(self):
    # Tag two files with a >1s gap between them, then remove only the
    # taggings newer than the 1-second cutoff.
    call("stag", testpath("oldImage.jpg"), "tag1")
    time.sleep(1.2)  # ensure the two taggings are clearly separated
    call("stag", imagePath, "tag2")
    call("stag_remove", "--yes", "--max_gap_time", "1")
    graph = read_or_create_graph()
    # The old tagging's triples survive; the recent one is fully gone.
    self.assertEqual(numTriples(graph, testfileuri("oldImage.jpg")), 3)
    self.assertEqual(numTriples(graph, imageUri), 0)
def commit_db(self, exclude=None):
    """Write the tag/dep databases to disk, commit and push them.

    exclude: optional iterable of folder names to skip writing.
    """
    # `exclude=[]` was a mutable default argument; None is the safe form.
    if exclude is None:
        exclude = []
    if 'tags' not in exclude:
        self.write_folder('tags', self.tags)
    if 'deps' not in exclude:
        self.write_folder('deps', self.forward_deps)
    if 'metapackages' not in exclude:
        self.write_folder('metapackages', self.metapackages)
    if 'rosinstall_hashes' not in exclude:
        self.write_folder('rosinstall_hashes', self.rosinstall_hashes)
    old_dir = os.getcwd()
    os.chdir(self.path)
    changes = check_output('git status -s').strip()
    if changes:
        print("Commiting changes to tags and deps lists....")
        call("git add %s" % os.path.join(self.path, self.distro_name))
        command = ['git', '-c', 'user.name=jenkins.ros.org', 'commit', '-m',
                   'Updating tags and deps lists for %s' % (self.distro_name)]
        call_with_list(command)
        env = os.environ
        env['GIT_SSH'] = "%s/git_ssh" % self.jenkins_scripts_path
        # Have some tolerance for things commiting to the db at the same time
        num_retries = 3
        i = 0
        while True:
            try:
                call("git fetch origin", env)
                call("git merge origin/master", env)
                call("git push origin master", env)
            except BuildException as e:
                print("Failed to fetch and merge...")
                if i >= num_retries:
                    raise e
                time.sleep(2)
                i += 1
                print("Trying again attempt %d of %d..." % (i, num_retries))
                continue
            break
    else:
        print('No changes to tags and deps lists')
    os.chdir(old_dir)
def make(project):
    """Build a docker image tagged with the project's name."""
    try:
        project.dockerpath = os.path.abspath(
            os.path.expanduser(Settings.get('docker_path')))
    except Exception:
        # Setting missing/unusable: fall back to docker on $PATH.
        # (Was a bare `except:`.)
        project.dockerpath = ""
    cmdline = [
        os.path.join(project.dockerpath, "docker"),
        "-D=true",
        "build",
        "--no-cache",
        "-t=%s" % project.name,
        ".",
    ]
    # TODO: prefix with sudo when the current user is neither root nor
    # in the docker group.
    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def make(project):
    """Compile Gruntfile.ls if present, then run grunt."""
    try:
        project.gruntpath = os.path.abspath(
            os.path.expanduser(Settings.get('grunt_path')))
    except Exception:
        # Setting missing/unusable: fall back to grunt on $PATH.
        # (Was a bare `except:`.)
        project.gruntpath = ""
    if os.path.isfile("Gruntfile.ls"):
        try:
            project.lscpath = os.path.abspath(
                os.path.expanduser(Settings.get('lsc_path')))
        except Exception:
            project.lscpath = ""
        lscmd = [os.path.join(project.lscpath, "lsc"), "-c", "Gruntfile.ls"]
        try:
            call(lscmd)
        except subprocess.CalledProcessError:
            return Status.FAILURE
    cmdline = [os.path.join(project.gruntpath, "grunt")]
    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def test_install(self, mock_set_perms, mock_walk):
    """install() sets perms on the entry; recursive mode covers the whole
    tree and fails when any single path fails."""
    entry = lxml.etree.Element("Path", name="/test", type="file")
    mock_set_perms.return_value = True
    self.assertTrue(self.ptool.install(entry))
    mock_set_perms.assert_called_with(entry)

    mock_set_perms.reset_mock()
    entry.set("recursive", "true")
    walk_rv = [("/", ["dir1", "dir2"], ["file1", "file2"]),
               ("/dir1", ["dir3"], []),
               ("/dir2", [], ["file3", "file4"])]
    mock_walk.return_value = walk_rv
    mock_set_perms.return_value = True
    self.assertTrue(self.ptool.install(entry))
    mock_walk.assert_called_with(entry.get("name"))
    expected = [call(entry)]
    for root, dirs, files in walk_rv:
        expected.extend(call(entry, path=os.path.join(root, child))
                        for child in dirs + files)
    self.assertItemsEqual(mock_set_perms.call_args_list, expected)

    mock_walk.reset_mock()
    mock_set_perms.reset_mock()

    # One failing path makes the whole install fail, yet every path must
    # still have been attempted.
    mock_set_perms.side_effect = \
        lambda entry, path=None: path != '/dir2/file3'
    self.assertFalse(self.ptool.install(entry))
    mock_walk.assert_called_with(entry.get("name"))
    self.assertItemsEqual(mock_set_perms.call_args_list, expected)
def make(project):
    """Build a docker image tagged with the project's name."""
    try:
        project.dockerpath = os.path.abspath(
            os.path.expanduser(Settings.get('docker_path')))
    except Exception:
        # Setting missing/unusable: fall back to docker on $PATH.
        # (Was a bare `except:`.)
        project.dockerpath = ""
    cmdline = [
        os.path.join(project.dockerpath, "docker"),
        "-D=true",
        "build",
        "--no-cache",
        "-t=%s" % project.name,
        ".",
    ]
    # TODO: prefix with sudo when the current user is neither root nor
    # in the docker group.
    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        return Status.FAILURE
    return Status.SUCCESS
def find(dbg, path):
    """Return the active loop device associated with the given path"""
    # The kernel loop driver will transparently follow symlinks, so
    # we must too.
    path = os.path.realpath(path)
    for line in call(dbg, ["losetup", "-a"]).split("\n"):
        line = line.strip()
        # `<>` is Python 2-only syntax (removed in Python 3); `!=` is
        # the equivalent, portable spelling.
        if line != "":
            bits = line.split()
            loop = bits[0][0:-1]  # strip the trailing ':' after the device
            open_bracket = line.find('(')
            close_bracket = line.find(')')
            this_path = line[open_bracket + 1:close_bracket]
            if this_path == path:
                return Loop(path, loop)
    return None
def commit_db(self, exclude=None):
    """Write the tag/dep databases to disk, commit and push them.

    exclude: optional iterable of folder names to skip writing.
    """
    # `exclude=[]` was a mutable default argument; None is the safe form.
    if exclude is None:
        exclude = []
    if 'tags' not in exclude:
        self.write_folder('tags', self.tags)
    if 'deps' not in exclude:
        self.write_folder('deps', self.forward_deps)
    if 'metapackages' not in exclude:
        self.write_folder('metapackages', self.metapackages)
    if 'rosinstall_hashes' not in exclude:
        self.write_folder('rosinstall_hashes', self.rosinstall_hashes)
    old_dir = os.getcwd()
    os.chdir(self.path)
    changes = check_output('git status -s').strip()
    if changes:
        print("Commiting changes to tags and deps lists....")
        call("git add %s" % os.path.join(self.path, self.distro_name))
        command = ['git', '-c', 'user.name=jenkins.ros.org', 'commit', '-m',
                   'Updating tags and deps lists for %s' % (self.distro_name)]
        call_with_list(command)
        env = os.environ
        env['GIT_SSH'] = "%s/git_ssh" % self.jenkins_scripts_path
        # Have some tolerance for things commiting to the db at the same time
        num_retries = 3
        i = 0
        while True:
            try:
                call("git fetch origin", env)
                call("git merge origin/master", env)
                call("git push origin master", env)
            except BuildException as e:
                print("Failed to fetch and merge...")
                if i >= num_retries:
                    raise e
                time.sleep(2)
                i += 1
                print("Trying again attempt %d of %d..." % (i, num_retries))
                continue
            break
    else:
        print('No changes to tags and deps lists')
    os.chdir(old_dir)
def __init__(self, ros_distro, sudo=False, no_chroot=False, additional_rosdeps=None):
    """Build ros-name <-> apt-name lookup maps from the rosdep database.

    additional_rosdeps: optional {target_path: url} mapping of extra
    rosdep source files to download before `rosdep update`.
    """
    self.r2a = {}  # ros name -> list of apt package names
    self.a2r = {}  # apt package name -> ros name
    # NOTE(review): this mutates the process-wide environment, not a
    # copy -- callers appear to rely on ROS_DISTRO being exported.
    self.env = os.environ
    self.env['ROS_DISTRO'] = ros_distro
    if no_chroot:
        print("Skip initializing and updating rosdep database")
    else:
        print("Initialize rosdep database")
        apt_get_install(['lsb-release', 'python-rosdep'], sudo=sudo)
        try:
            call("rosdep init", self.env)
        except Exception:
            # `rosdep init` fails when it has already been run; expected.
            # (Was a bare `except:`.)
            print("Rosdep is already initialized")
        if additional_rosdeps:
            print("Installing additional rosdeps")
            for (k, v) in additional_rosdeps.items():
                print(" Installing additional rosdeps %s into %s" % (v, k))
                dn = os.path.dirname(k)
                call("mkdir -p %s" % dn, self.env)
                call("curl -o %s %s" % (k, v), self.env)
        call("rosdep update", self.env)
    print("Building dictionaries from a rosdep's db")
    raw_db = check_output("rosdep db", self.env, verbose=False).split('\n')
    for entry in raw_db:
        split_entry = entry.split(' -> ')
        if len(split_entry) < 2:
            continue
        ros_entry = split_entry[0]
        if split_entry[1]:
            apt_entries = split_entry[1].split(' ')
        else:
            apt_entries = []
        self.r2a[ros_entry] = apt_entries
        for a in apt_entries:
            self.a2r[a] = ros_entry
def generate_messages_dry(env, name, messages, services):
    """Generate rosbuild ("dry") messages/services when targets exist."""
    try:
        targets = call("make help", env).split('\n')
    except BuildException:
        # No makefile help available -- nothing to generate.
        # (The bound exception was unused, so it is no longer captured.)
        return
    if [t for t in targets if t.endswith("ROSBUILD_genaction_msgs")]:
        call("make ROSBUILD_genaction_msgs", env)
    if [t for t in targets if t.endswith("rospack_genmsg")] and messages:
        call("make rospack_genmsg", env)
        # py2-only print statements converted to the parenthesized form.
        print("Generated messages for %s" % name)
    if [t for t in targets if t.endswith("rospack_gensrv")] and services:
        call("make rospack_gensrv", env)
        print("Generated services for %s" % name)
def comparison(pathToRef, pathToOutput):
    """Compares two directories

    Args:
        pathToRef (str): Path to the reference files.
        pathToOutput (str): Path to the output files.

    Raises:
        Exception: Raises IncorrectOutput when output differs from reference.
    """
    ret = common.get_diff_files(
        filecmp.dircmp(pathToRef, pathToOutput, ignore=[".gitkeep"]))
    if ret[0] or ret[1] or ret[2]:
        # check the results numerically now
        num_diff = call("bash ../../compare_results.sh {} {}".format(
            pathToRef, pathToOutput))
        if num_diff == 1:
            raise IncorrectOutput(*ret)
        elif num_diff != 0:
            # Previously any nonzero code other than 1 was silently
            # ignored; surface it like the sibling implementation does.
            raise ValueError(
                "compare_results.sh exited with unknown code '{}'".format(
                    num_diff))
        else:
            print(
                'SYSTEST SUCCEEDED - Differences to referenceOutput within tolerance.'
            )
    else:
        print('SYSTEST SUCCEEDED - Result files and reference are identical.')
def build_repo_messages_catkin_stacks(stacks, ros_distro, local_install_path):
    """Build and locally install any catkinized stacks so their messages
    are importable.

    Returns (export_line, build_errors): a PYTHONPATH export for the
    local install and the list of stacks that failed to build.
    """
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    # Make sure to create the local install path if it doesn't exist
    if os.path.exists(local_install_path):
        shutil.rmtree(local_install_path)
    os.makedirs(local_install_path)
    os.makedirs(os.path.join(local_install_path, 'lib/python2.7/dist-packages'))
    os.makedirs(os.path.join(local_install_path, 'lib/python2.6/dist-packages'))
    os.makedirs(os.path.join(local_install_path, 'share'))
    os.makedirs(os.path.join(local_install_path, 'bin'))
    build_errors = []
    # dict.items() works on both Python 2 and 3; iteritems() is py2-only.
    for stack, path in stacks.items():
        # check to see if the stack is catkin or not
        cmake_file = os.path.join(path, 'CMakeLists.txt')
        catkin = False
        # If a CMakeLists.txt file doesn't exist, we assume the stack is
        # not catkinized
        if os.path.isfile(cmake_file):
            with open(cmake_file, 'r') as f:
                if 'catkin_stack' in f.read():
                    catkin = True
        # if the stack is a catkin stack, we want to build and install
        # it locally
        if catkin:
            old_dir = os.getcwd()
            os.chdir(path)
            if not os.path.exists('build'):
                os.makedirs('build')
            os.chdir('build')
            try:
                call("cmake -DCMAKE_INSTALL_PREFIX:PATH=%s .." % local_install_path,
                     ros_env)
                call("make", ros_env)
                call("make install", ros_env)
            except BuildException as e:
                # py2-only print statements converted to parenthesized form.
                print("FAILED TO BUILD %s, messages for this package cannot be generated." % (stack))
                print("Failure on %s, with env path %s" % (stack, ros_env))
                print("Exception: %s" % e)
                build_errors.append(stack)
            os.chdir(old_dir)
    # We'll throw the appropriate stuff on our python path
    export = "export PYTHONPATH=%s/lib/python2.7/dist-packages:$PYTHONPATH" % local_install_path
    return (export, build_errors)
def build_repo_messages_catkin_stacks(stacks, ros_distro, local_install_path):
    """Build and locally install any catkinized stacks so their messages
    are importable.

    Returns (export_line, build_errors).
    """
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    # Make sure to create the local install path if it doesn't exist
    if os.path.exists(local_install_path):
        shutil.rmtree(local_install_path)
    os.makedirs(local_install_path)
    os.makedirs(os.path.join(local_install_path, 'lib/python2.7/dist-packages'))
    os.makedirs(os.path.join(local_install_path, 'lib/python2.6/dist-packages'))
    os.makedirs(os.path.join(local_install_path, 'share'))
    os.makedirs(os.path.join(local_install_path, 'bin'))
    build_errors = []
    # dict.items() works on both Python 2 and 3; iteritems() is py2-only.
    for stack, path in stacks.items():
        # check to see if the stack is catkin or not
        cmake_file = os.path.join(path, 'CMakeLists.txt')
        catkin = False
        # If a CMakeLists.txt file doesn't exist, we assume the stack is
        # not catkinized
        if os.path.isfile(cmake_file):
            with open(cmake_file, 'r') as f:
                if 'catkin_stack' in f.read():
                    catkin = True
        # if the stack is a catkin stack, we want to build and install
        # it locally
        if catkin:
            old_dir = os.getcwd()
            os.chdir(path)
            if not os.path.exists('build'):
                os.makedirs('build')
            os.chdir('build')
            try:
                call("cmake -DCMAKE_INSTALL_PREFIX:PATH=%s .." % local_install_path,
                     ros_env)
                call("make", ros_env)
                call("make install", ros_env)
            except BuildException as e:
                # py2-only print statements converted to parenthesized form.
                print("FAILED TO BUILD %s, messages for this package cannot be generated." % (stack))
                print("Failure on %s, with env path %s" % (stack, ros_env))
                print("Exception: %s" % e)
                build_errors.append(stack)
            os.chdir(old_dir)
    # We'll throw the appropriate stuff on our python path
    export = "export PYTHONPATH=%s/lib/python2.7/dist-packages:$PYTHONPATH" % local_install_path
    return (export, build_errors)
def build_repo_messages(catkin_packages, docspace, ros_distro, local_install_path):
    """Generate and install messages for a catkin workspace.

    Returns (source, build_errors): a shell 'source' line for the devel
    setup (empty string on failure) plus a list of failed components.
    """
    # Swap in our own cmake files: we only want message generation.
    has_plain_cmake = replace_catkin_cmake_files(catkin_packages)
    build_errors = []
    if has_plain_cmake:
        print('')
        print('Skip building packages since they contain plain-cmake packages')
        print('There will be no messages in documentation')
        print('')
        return '', build_errors
    # For groovy, this isn't too bad, we just set up a workspace
    previous_cwd = os.getcwd()
    build_dir = os.path.join(docspace, 'build_repo')
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    os.chdir(build_dir)
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(docspace, 'CMakeLists.txt'))
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    print("Calling cmake...")
    call("catkin_init_workspace %s" % docspace, ros_env)
    try:
        call("cmake -DCMAKE_INSTALL_PREFIX:PATH=%s .." % local_install_path,
             ros_env)
        ros_env = get_ros_env(os.path.join(build_dir, 'devel/setup.bash'))
        generate_messages_catkin(ros_env)
        try:
            call("make install", ros_env)
        except BuildException as e:
            print("FAILED TO CALL MAKE INSTALL ON CATKIN REPOS")
            print("Exception: %s" % e)
            sys.exit(1)
        source = 'source %s' % (os.path.abspath(
            os.path.join(build_dir, 'devel/setup.bash')))
    except BuildException as e:
        print("FAILED TO CALL CMAKE ON CATKIN REPOS")
        print(
            "There will be no messages in documentation and some python docs may fail"
        )
        print("Exception: %s" % e)
        source = ''
        build_errors.append("catkin_workspace for repository")
    os.chdir(previous_cwd)
    return (source, build_errors)
def build_repo_messages(catkin_packages, docspace, ros_distro, local_install_path):
    """Generate and install messages for a catkin workspace.

    Returns (source, build_errors): a shell 'source' line for the devel
    setup (empty string on failure) plus a list of failed components.
    """
    # Swap in our own cmake files: we only want message generation.
    has_plain_cmake = replace_catkin_cmake_files(catkin_packages)
    build_errors = []
    if has_plain_cmake:
        print('')
        print('Skip building packages since they contain plain-cmake packages')
        print('There will be no messages in documentation')
        print('')
        return '', build_errors
    # For groovy, this isn't too bad, we just set up a workspace
    previous_cwd = os.getcwd()
    workspace_build = os.path.join(docspace, 'build_repo')
    if not os.path.exists(workspace_build):
        os.makedirs(workspace_build)
    os.chdir(workspace_build)
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(docspace, 'CMakeLists.txt'))
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    print("Calling cmake...")
    call("catkin_init_workspace %s" % docspace, ros_env)
    try:
        call("cmake -DCMAKE_INSTALL_PREFIX:PATH=%s .." % local_install_path,
             ros_env)
        ros_env = get_ros_env(os.path.join(workspace_build, 'devel/setup.bash'))
        generate_messages_catkin(ros_env)
        try:
            call("make install", ros_env)
        except BuildException as e:
            print("FAILED TO CALL MAKE INSTALL ON CATKIN REPOS")
            print("Exception: %s" % e)
            sys.exit(1)
        devel_setup = os.path.abspath(
            os.path.join(workspace_build, 'devel/setup.bash'))
        source = 'source %s' % devel_setup
    except BuildException as e:
        print("FAILED TO CALL CMAKE ON CATKIN REPOS")
        print("There will be no messages in documentation and some python docs may fail")
        print("Exception: %s" % e)
        source = ''
        build_errors.append("catkin_workspace for repository")
    os.chdir(previous_cwd)
    return (source, build_errors)
def package(project):
    """Create an NSIS installer; fall back to wine when not on Windows."""
    if platform.system() == "Windows":
        try:
            call(["makensis", project.nsi_file])
        except OSError:
            return Status.UNSUPPORTED
        return Status.SUCCESS
    # Non-Windows host: try native makensis, then the wine-hosted one.
    try:
        call(["makensis", project.nsi_file])
    except OSError:
        try:
            call(["wine",
                  "\"c:/Program Files (x86)/NSIS/makensis.exe\"",
                  project.nsi_file])
        except OSError:
            return Status.UNSUPPORTED
    return Status.SUCCESS
def testRemove(self):
    """stag_remove --yes strips every triple for a tagged file."""
    call("stag", imagePath, "tag1")
    call("stag_remove", "--yes")
    # Nothing about the image should remain in the graph.
    self.assertEqual(numTriples(read_or_create_graph(), imageUri), 0)
from common import call

# Unload the 'test' graph and show the command's result.
# (py2-only `print` statement replaced with the parenthesized form,
# which behaves identically for a single argument on py2 and py3.)
print(call({'cmd': 'g.unload', 'name': 'test'}))
def document_repo(workspace, docspace, ros_distro, repo, platform, arch, homepage, no_chroot, skip_garbage, doc_conf, depends_conf, tags_db):
    """Run documentation generation for one repository of a ROS distro.

    Builds the workspace, generates messages, runs rosdoc_lite over every
    package, rsyncs the results to the doc server, and records tag files in
    ``tags_db``.  Raises ``BuildException`` for indigo when no catkin
    packages are found.  Python 2 code (``dict.keys()`` concatenation,
    ``iteritems``).
    """
    doc_job = "doc-%s-%s" % (ros_distro, repo)

    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinstall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)

    repo_path = os.path.realpath("%s" % (docspace))
    print("Repo path %s" % repo_path)

    #Walk through the installed repositories and find old-style packages,
    #new-style packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    if ros_distro == 'indigo':
        # indigo dropped support for dry (rosbuild) packages entirely
        if stacks or manifest_packages:
            print("Ignoring dry packages and stacks in '%s'" % ros_distro)
            stacks = {}
            manifest_packages = {}
        if not catkin_packages:
            raise BuildException('No catkin packages found')
    print("Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys()))
    #print "Catkin packages: %s" % catkin_packages
    #print "Manifest packages: %s" % manifest_packages
    #print "Stacks: %s" % stacks

    #Get any non local apt dependencies
    ros_dep = rosdep.RosDepResolver(ros_distro, no_chroot=no_chroot)
    import rosdistro
    if ros_distro == 'electric':
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print("Apt dependencies: %s" % apt_deps)

    #Get rosdistro release file if there are catkin packages to get status
    if catkin_packages and ros_distro not in ['electric', 'fuerte']:
        print("Fetch rosdistro files for: %s" % ros_distro)
        index = rosdistro.get_index(rosdistro.get_index_url())
        rosdistro_release_file = rosdistro.get_release_file(index, ros_distro)
        rosdistro_source_file = rosdistro.get_source_file(index, ros_distro)
    else:
        rosdistro_release_file = None
        rosdistro_source_file = None

    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)

    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
    # start from a clean output directory for this distro
    if os.path.exists(doc_path):
        shutil.rmtree(doc_path)

    #Write stack manifest files for all stacks, we can just do this off the
    #stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)

    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print("Build order that honors deps:\n%s" % build_order)

    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)

    if not no_chroot:
        print("Installing all dependencies for %s" % repo)
        # XXX this is a really ugly hack to make the hydro doc job for ros_comm pass
        # otherwise roslisp pulls in the rosgraph_msgs package as a Debian dependency
        # which then breaks catkin_basic since it includes the msgs CMake files multiple
        # times, resulting in duplicate target names (https://github.com/ros/ros_comm/issues/471)
        if repo == 'ros_comm' and 'ros-hydro-roslisp' in apt_deps:
            apt_deps.remove('ros-hydro-roslisp')
        if apt_deps:
            call("apt-get install %s --yes" % (' '.join(apt_deps)))
        print("Done installing dependencies")

    #Set up the list of things that need to be sourced to run rosdoc_lite
    #TODO: Hack for electric
    if ros_distro == 'electric':
        #lucid doesn't have /usr/local on the path by default... weird
        sources = ['export PATH=/usr/local/sbin:/usr/local/bin:$PATH']
        sources.append('source /opt/ros/fuerte/setup.bash')
        sources.append('export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH')
    else:
        sources = ['source /opt/ros/%s/setup.bash' % ros_distro]

    #We assume that there will be no build errors to start
    build_errors = []

    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    local_install_path = os.path.join(docspace, 'local_installs')
    if os.path.exists(local_install_path):
        shutil.rmtree(local_install_path)

    #Make sure to create some subfolders under the local install path
    def makedirs(path):
        # local helper: mkdir -p semantics (intentionally shadows nothing
        # but os.makedirs, which it wraps)
        if not os.path.exists(path):
            os.makedirs(path)
    makedirs(os.path.join(local_install_path, 'bin'))
    makedirs(os.path.join(local_install_path, 'lib/python2.7/dist-packages'))
    makedirs(os.path.join(local_install_path, 'share'))

    if catkin_packages \
       and not 'rosdoc_lite' in catkin_packages.keys() and not 'catkin' in catkin_packages.keys():
        # NOTE(review): build_repo_messages is defined elsewhere in this file
        # with only three parameters — confirm it accepts local_install_path.
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro, local_install_path)
        build_errors.extend(errs)
        if source:
            sources.append(source)
    #For fuerte catkin, we need to check if we should build catkin stacks
    source, errs = build_repo_messages_catkin_stacks(stacks, ros_distro, local_install_path)
    build_errors.extend(errs)
    sources.append(source)

    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin, we have to build all the code and
    #install locally to get message generation
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)

    #We want to pull all the tagfiles available once from the server
    tags_location = os.path.join(workspace, ros_distro)
    if os.path.exists(tags_location):
        shutil.rmtree(tags_location)
    command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qrz [email protected]:/home/rosbot/docs/%s/tags %s' % (ros_distro, tags_location)]
    call_with_list(command)

    repo_tags = document_packages(manifest_packages, catkin_packages, build_order, repos_to_doc, sources, tags_db, full_apt_deps, ros_dep, repo_map, repo_path, docspace, ros_distro, homepage, doc_job, tags_location, doc_path, rosdistro_release_file, rosdistro_source_file)

    #Copy the files to the appropriate place
    folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
    if folders:
        dsts = ['%s/api/%s' % (doc_path, f) for f in folders]
        # touch a 'stamp' file in each destination so empty dirs still sync
        for dst in dsts:
            with open(os.path.join(dst, 'stamp'), 'w'):
                pass
        command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr --delete %s [email protected]:/home/rosbot/docs/%s/api' % (' '.join(dsts), ros_distro)]
        call_with_list(command)
    folders = ['%s/changelogs' % doc_path, '%s/tags' % doc_path]
    folders = [f for f in folders if os.path.exists(f)]
    if folders:
        command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s [email protected]:/home/rosbot/docs/%s' % (' '.join(folders), ros_distro)]
        call_with_list(command)

    if not skip_garbage:
        #Remove the autogenerated doc files since they take up a lot of space if left on the server
        shutil.rmtree(tags_location)
        shutil.rmtree(doc_path)

    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))
        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)

    #Make sure to write changes to tag files and deps
    #We don't want to write hashes on an unsuccessful build
    excludes = ['rosinstall_hashes'] if build_errors else []
    tags_db.commit_db(excludes)
    tags_db.delete_tag_index_repo()

    #Tell jenkins that we've succeeded
    print("Preparing xml test results")
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print("Created test results directory")
    except Exception:
        # best-effort: directory may already exist
        pass

    if build_errors:
        import yaml
        copy_test_results(workspace, docspace, """Failed to generate messages by calling cmake for %s. Look in the console for cmake failures, search for "CMake Error" Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below, you can update information the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at https://github.com/ros/rosdistro/%s Documentation rosinstall:\n%s Depends rosinstall:\n%s""" % (build_errors, ros_distro, repo, repo, ros_distro, yaml.safe_dump(doc_conf, default_flow_style=False), yaml.safe_dump(depends_conf, default_flow_style=False)), "message_generation_failure")
    else:
        copy_test_results(workspace, docspace)
def get_events(self, keywords, country='fi'):
    """Fetch events matching each keyword from the events API.

    Parameters:
        keywords: iterable of artist/keyword strings to query for.
        country: locale code appended to the query (default 'fi').

    Returns:
        list of dicts with keys 'name', 'artist_name', 'url' and,
        when present in the response, 'datetime', 'image_url', 'venues'.
    """
    event_data = []
    url = EVENTS_FULL_URL + '&locale=' + country
    for keyword in keywords:
        full_url = url + '&keyword=' + keyword
        response = call('get', full_url)
        response_json = response.json()
        if response_json['page']['totalElements'] == 0:
            continue
        # throttle between keyword queries to stay under API rate limits
        time.sleep(1)
        for event in response_json['_embedded']['events']:
            venue_list = []
            event_dict = {
                'name': event['name'],
                'artist_name': keyword,
                'url': event['url'],
            }
            if 'dates' in event:
                start = event['dates']['start']
                # BUG FIX: the old expression
                #   localDate or '' + " " + localTime or ''
                # bound `+` tighter than `or`, so whenever localDate was
                # truthy the time part was silently dropped. Parenthesize
                # each fallback before concatenating date and time.
                event_dict['datetime'] = (
                    (start.get('localDate') or '') + " " +
                    (start.get('localTime') or ''))
            if 'images' in event:
                event_dict['image_url'] = event['images'][0]['url'] or ''
            if '_embedded' in event:
                for venue in event['_embedded']['venues']:
                    venue_dict = {}
                    if 'name' in venue:
                        venue_dict['name'] = venue['name'] or ''
                    if 'city' in venue:
                        venue_dict['city'] = venue['city']['name'] or ''
                    if 'country' in venue:
                        venue_dict['country'] = venue['country']['name'] or ''
                    if 'address' in venue:
                        venue_dict['address'] = venue['address']['line1'] or ''
                    if 'location' in venue:
                        venue_dict['longitude'] = venue['location']['longitude'] or ''
                        venue_dict['latitude'] = venue['location']['latitude'] or ''
                    if venue_dict:
                        venue_list.append(venue_dict)
                event_dict['venues'] = venue_list
            event_data.append(event_dict)
    return event_data
def build_repo_messages_manifest(manifest_packages, build_order, ros_distro):
    """Generate messages for rosbuild (manifest) packages in dependency order.

    For each non-catkin package with a CMakeLists.txt, runs cmake in a
    'build' subdir and generates messages/services; packages without a
    CMakeLists.txt are only prepended to ROS_PACKAGE_PATH.  Returns a
    ``(export_line, build_errors)`` tuple where ``export_line`` is a shell
    PYTHONPATH export and ``build_errors`` lists package names whose cmake
    step raised BuildException.
    """
    #Now, we go through all of our manifest packages and try to generate
    #messages, or add them to the pythonpath if they turn out to be catkin
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    path_string = ''
    build_errors = []

    #Make sure to build with our special cmake file to only do message generation
    replace_manifest_cmake_files(manifest_packages)

    #Make sure to build in dependency order
    for name in build_order:
        if not name in manifest_packages or name in ['rosdoc_lite', 'catkin']:
            continue
        path = manifest_packages[name]
        cmake_file = os.path.join(path, 'CMakeLists.txt')
        if os.path.isfile(cmake_file):
            catkin = False
            messages = False
            services = False
            #Check to see whether the package is catkin or not
            #Also check whether we actually need to build messages
            #and services since rosbuild creates the build targets
            #no matter what
            with open(cmake_file, 'r') as f:
                read_file = f.read()
                if 'catkin_project' in read_file:
                    catkin = True
                if 'rosbuild_genmsg' in read_file:
                    messages = True
                if 'rosbuild_gensrv' in read_file:
                    services = True
            #If it is catkin, then we'll do our best to put the right things on the python path
            #TODO: Note that this will not generate messages, we can try to put this in later
            #but fuerte catkin makes it kind of hard to do correctly
            if catkin:
                print("Not doing anything for catkin package")
                #print "Creating an export line that guesses the appropriate python paths for each package"
                #print "WARNING: This will not properly generate message files within this repo for python documentation."
                #if os.path.isdir(os.path.join(path, 'src')):
                #    path_string = "%s:%s" %(os.path.join(path, 'src'), path_string)
            #If it's not catkin, then we'll generate python messages
            else:
                old_dir = os.getcwd()
                os.chdir(path)
                if not os.path.exists('build'):
                    os.makedirs('build')
                os.chdir('build')
                # package itself must be on ROS_PACKAGE_PATH for cmake to find it
                ros_env['ROS_PACKAGE_PATH'] = '%s:%s' % (
                    path, ros_env['ROS_PACKAGE_PATH'])
                try:
                    call("cmake ..", ros_env)
                    generate_messages_dry(ros_env, name, messages, services)
                except BuildException as e:
                    print(
                        "FAILED TO CALL CMAKE ON %s, messages for this package cannot be generated."
                        % (name))
                    print(
                        "Are you sure that the package specifies its dependencies correctly?"
                    )
                    print("Failure on %s, with env path %s" % (name, ros_env))
                    print("Exception: %s" % e)
                    build_errors.append(name)
                # restore cwd whether or not the build succeeded
                os.chdir(old_dir)
        else:
            #If the package does not have a CmakeLists.txt file, we still want
            #to add it to our package path because other packages may depend on it
            ros_env['ROS_PACKAGE_PATH'] = '%s:%s' % (
                path, ros_env['ROS_PACKAGE_PATH'])
    # NOTE(review): path_string is never assigned a non-empty value in the
    # active code (only in the commented-out catkin branch), so the first
    # return is currently dead — confirm before relying on it.
    if path_string:
        return ("export PYTHONPATH=%s:$PYTHONPATH" % path_string, build_errors)
    return ("export PYTHONPATH=$PYTHONPATH", build_errors)
def testCreateTwoTags(self):
    """Tagging an image with two tags should attach both topics to it."""
    call("stag", imagePath, "tag1", "tag2")
    stored = read_or_create_graph()
    actual = Set(stored.objects(imageUri, FOAF['topic']))
    expected = Set([TAG['tag1'], TAG['tag2']])
    self.assertEqual(actual, expected)
def testCreateImageTag(self):
    """Tagging an image should record it as a foaf:Image in the graph."""
    call("stag", imagePath, "atag")
    stored = read_or_create_graph()
    image_type = stored.value(imageUri, RDF.type)
    self.assertEqual(image_type, FOAF['Image'])
from common import call print call({'cmd': 'g.list.get', 'name': 'test', 'list': 'list'})
from common import call print call({'cmd': 'g.disable', 'name': 'test'})
def make(project):
    """Run `make` for *project* and report the outcome.

    Re-links a stale tmpfs-backed 'obj' symlink, picks a -j thread count
    from CPU cores (capped by available RAM when psutil is present),
    optionally chdirs into project.build_dir, and invokes make via call().

    Returns:
        Status.SUCCESS on a clean build, Status.FAILURE when make exits
        non-zero (subprocess.CalledProcessError).

    Fixes over the previous version: bare `except:` clauses (which also
    swallowed KeyboardInterrupt/SystemExit) are narrowed, the crash when
    LOGNAME is unset (None + str concatenation) is avoided, and the
    duplicated chdir-back cleanup is factored into one helper.
    """
    # relink a broken tmpfs-based obj dir
    if os.path.islink('obj') and not os.path.exists('obj'):
        os.unlink('obj')
        # LOGNAME may be absent in some environments (cron, containers);
        # fall back instead of raising TypeError on None concatenation.
        user = os.environ.get('LOGNAME', 'unknown')
        prefix = 'sg-' + user + '-' + project.name + '-'
        os.symlink(tempfile.mkdtemp(prefix=prefix), 'obj')

    # make_path setting is optional; any failure means "use PATH lookup"
    try:
        project.makepath = os.path.abspath(
            os.path.expanduser(Settings.get('make_path')))
    except Exception:
        project.makepath = ""
    # makefile_params is an optional project attribute
    if not hasattr(project, 'makefile_params'):
        project.makefile_params = []

    cores = multiprocessing.cpu_count()
    threads = int(cores * 1.5 + 0.5)
    # to prevent ram overusage (and thus HD hits), cap thread count to
    # GB of ram / 2
    try:
        import psutil
        threads = int(min(
            threads,
            math.ceil(psutil.virtual_memory().total / 1024 / 1024 / 1024.0) / 1.5))
    except ImportError:
        print("WARNING: psutil package not found, threaded compiles may be unstable")

    project.makefile_params += ["-j" + str(threads)]
    # example makefile params (add in project sg file):
    #   makefile_params="CXX='clang++'"
    #   makefile_params="CXX='gccfilter -c -a g++'"

    cmdline = [os.path.join(project.makepath, "make")]
    if project.makefile_params:
        cmdline += project.makefile_params

    # build_dir is optional; missing attribute or bad path means "build here"
    try:
        os.chdir(project.build_dir)
    except (AttributeError, OSError):
        pass

    def _leave_build_dir():
        # step back out if we entered a build dir; best-effort on purpose
        try:
            if project.build_dir:
                os.chdir("..")
        except AttributeError:
            pass

    try:
        call(cmdline)
    except subprocess.CalledProcessError:
        _leave_build_dir()
        return Status.FAILURE
    _leave_build_dir()
    return Status.SUCCESS
# Commit the generated job output and push it to the results repository.
# NOTE(review): os.chdir(repo_path) immediately before `with chdir(repo_path)`
# looks redundant — presumably `chdir` is a context manager that also restores
# the previous cwd; confirm and drop one of the two.
os.chdir(repo_path)
with chdir(repo_path):
    ccall("git add {}".format(job_path))
    # finally commit
    commit_msg = job_name
    commit_msg += " - Success" if job_success else " - FAILURE"
    if args.test and args.output:
        if output_missing:
            commit_msg += ", MISSING OUTPUT"
        if logs_missing:
            commit_msg += ", MISSING LOGS"
    ccall("git commit -m '{}'".format(commit_msg))
    # NOTE(review): user.name/user.email are configured *after* the commit
    # above — that commit will use whatever identity git already had.
    # Verify whether the config lines should precede the commit.
    ccall("git config user.name 'Precice Bot'")
    ccall("git config user.email ${PRECICE_BOT_EMAIL}")
    # point origin at the token-authenticated URL; output silenced so the
    # token is not echoed into the build log
    ccall(
        "git remote set-url origin https://${GH_TOKEN}@github.com/precice/precice_st_output.git > /dev/null 2>&1"
    )
    failed_push_count = 0
    failed_push_limit = 50
    # push, retry if failed: a non-zero `git push` (remote moved ahead)
    # triggers a rebase-and-retry, bounded by failed_push_limit
    while call("git push"):
        ccall("git pull --rebase")
        failed_push_count += 1
        if failed_push_count >= failed_push_limit:
            break
    print("Finished pushing to {}-{}!".format(repo_folder, args.st_branch))