def __init__(self, path, raw_file_url=None, username=None, password=None,
             local_site_name=None):
    """Initialize the Git client and probe a local repository, if any.

    For ``file://`` URLs, runs ``git config core.repositoryformatversion``
    against the repository to verify it is a readable Git repository.

    Raises:
        ImportError:
            The :command:`git` binary is not in the path.
        SCMError:
            The local repository could not be read.
    """
    if not is_exe_in_path('git'):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError

    self.path = self._normalize_git_url(path)
    self.raw_file_url = raw_file_url
    self.username = username
    self.password = password
    self.local_site_name = local_site_name

    # Only set for local (file://) repositories; remains None otherwise.
    self.git_dir = None

    url_parts = urlparse.urlparse(self.path)

    if url_parts[0] == 'file':
        # Index 2 of the parse result is the path component of the URL.
        self.git_dir = url_parts[2]

        # Cheap sanity check that this is actually a Git repository.
        p = self._run_git(['--git-dir=%s' % self.git_dir, 'config',
                           'core.repositoryformatversion'])
        failure = p.wait()

        if failure:
            # See if we have a permissions error
            if not os.access(self.git_dir, os.R_OK):
                raise SCMError(_("Permission denied accessing the local "
                                 "Git repository '%s'") % self.git_dir)
            else:
                raise SCMError(_('Unable to retrieve information from '
                                 'local Git repository'))
def __init__(self, path, raw_file_url):
    """Initialize the Git client.

    When no raw file URL is given, the path is assumed to be a local
    ``.git`` directory and is validated by running ``git config``.

    Raises:
        ImportError:
            The :command:`git` binary is not in the path, or the local
            repository could not be read.
    """
    if not is_exe_in_path('git'):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError

    self.path = path
    self.raw_file_url = raw_file_url

    if not raw_file_url:
        p = subprocess.Popen(
            ['git', '--git-dir=%s' % self.path, 'config',
             'core.repositoryformatversion'],
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            close_fds=(os.name != 'nt')
        )

        # Drain both pipes before waiting, so the child can't block on a
        # full pipe buffer. The contents themselves aren't used.
        contents = p.stdout.read()
        errmsg = p.stderr.read()
        failure = p.wait()

        if failure:
            # TODO: Provide a better error if we're using a git://
            # or equivalent URL.
            raise ImportError
def clean_tool(self):
    """Checks the SCMTool used for this repository for dependencies.

    If one or more dependencies aren't found, they will be presented
    as validation errors.
    """
    tool = self.cleaned_data['tool']
    scmtool_class = tool.get_scmtool_class()

    errors = []

    for dep in scmtool_class.dependencies.get('modules', []):
        try:
            imp.find_module(dep)
        except ImportError:
            # Bug fix: the first string fragment previously lacked a
            # trailing space, producing "installed.You may need".
            errors.append('The Python module "%s" is not installed. '
                          'You may need to restart the server '
                          'after installing it.' % dep)

    for dep in scmtool_class.dependencies.get('executables', []):
        if not is_exe_in_path(dep):
            # Show the platform-appropriate executable name in the error.
            if sys.platform == 'win32':
                exe_name = '%s.exe' % dep
            else:
                exe_name = dep

            errors.append('The executable "%s" is not in the path.' %
                          exe_name)

    if errors:
        raise forms.ValidationError(errors)

    return tool
def __init__(self, path, raw_file_url=None):
    """Initialize the Git client and probe a local repository, if any.

    For ``file://`` URLs, validates the repository by running
    ``git config core.repositoryformatversion`` against it.

    Raises:
        ImportError:
            The :command:`git` binary is not in the path.
        SCMError:
            The local repository could not be read.
    """
    if not is_exe_in_path('git'):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError

    self.path = self._normalize_git_url(path)
    self.raw_file_url = raw_file_url

    # Only set for local (file://) repositories; remains None otherwise.
    self.git_dir = None

    url_parts = urlparse.urlparse(self.path)

    if url_parts[0] == 'file':
        # Index 2 of the parse result is the path component of the URL.
        self.git_dir = url_parts[2]

        p = subprocess.Popen(
            ['git', '--git-dir=%s' % self.git_dir, 'config',
             'core.repositoryformatversion'],
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            close_fds=(os.name != 'nt')
        )
        failure = p.wait()

        if failure:
            raise SCMError(_('Unable to retrieve information from local '
                             'Git repository'))
def test_clean_commiter_unsupported(self):
    """Testing UploadCommitForm.clean when committer_ fields are present
    for a SCMTool that doesn't support them
    """
    if not is_exe_in_path('hg'):
        raise nose.SkipTest('Hg is not installed')

    # Switch the repository over to a tool without committer support.
    self.repository.tool = Tool.objects.get(name='Mercurial')
    self.repository.save()

    uploaded_diff = SimpleUploadedFile('diff',
                                       self.DEFAULT_GIT_FILEDIFF_DATA_DIFF,
                                       content_type='text/x-patch')

    form = UploadCommitForm(diffset=self.diffset,
                            data=self._default_form_data.copy(),
                            files={
                                'diff': uploaded_diff,
                            })

    self.assertTrue(form.is_valid())

    # None of the committer fields should survive cleaning.
    for field_name in ('committer_date',
                       'committer_email',
                       'committer_name'):
        self.assertNotIn(field_name, form.cleaned_data)
def __init__(self, path, raw_file_url=None, username=None, password=None,
             encoding="", local_site_name=None):
    """Initialize the Git client and probe a local repository, if any.

    For ``file://`` URLs, validates the repository by running
    ``git config core.repositoryformatversion`` against it.

    Raises:
        ImportError:
            The :command:`git` binary is not in the path.
        SCMError:
            The local repository could not be read.
    """
    super(GitClient, self).__init__(self._normalize_git_url(path),
                                    username=username,
                                    password=password)

    if not is_exe_in_path("git"):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError

    self.raw_file_url = raw_file_url
    self.encoding = encoding
    self.local_site_name = local_site_name

    # Only set for local (file://) repositories; remains None otherwise.
    self.git_dir = None

    url_parts = urllib_urlparse(self.path)

    if url_parts[0] == "file":
        # Index 2 of the parse result is the path component of the URL.
        self.git_dir = url_parts[2]

        p = self._run_git(["--git-dir=%s" % self.git_dir, "config",
                           "core.repositoryformatversion"])
        failure = p.wait()

        if failure:
            # See if we have a permissions error
            if not os.access(self.git_dir, os.R_OK):
                raise SCMError(_("Permission denied accessing the local "
                                 "Git repository '%s'") % self.git_dir)
            else:
                raise SCMError(_("Unable to retrieve information from "
                                 "local Git repository"))
def clean_tool(self):
    """
    Checks the SCMTool used for this repository for dependencies.

    If one or more dependencies aren't found, they will be presented
    as validation errors.
    """
    tool = self.cleaned_data['tool']
    scmtool_class = tool.get_scmtool_class()

    errors = []

    for dep in scmtool_class.dependencies.get('modules', []):
        try:
            imp.find_module(dep)
        except ImportError:
            # Bug fix: the first string fragment previously lacked a
            # trailing space, producing "installed.You may need".
            errors.append('The Python module "%s" is not installed. '
                          'You may need to restart the server '
                          'after installing it.' % dep)

    for dep in scmtool_class.dependencies.get('executables', []):
        if not is_exe_in_path(dep):
            # Show the platform-appropriate executable name in the error.
            if sys.platform == 'win32':
                exe_name = '%s.exe' % dep
            else:
                exe_name = dep

            errors.append('The executable "%s" is not in the path.' %
                          exe_name)

    if errors:
        raise forms.ValidationError(errors)

    return tool
def clean_tool(self):
    """Checks the SCMTool used for this repository for dependencies.

    If one or more dependencies aren't found, they will be presented
    as validation errors.
    """
    tool = self.cleaned_data["tool"]
    scmtool_class = tool.get_scmtool_class()
    deps = scmtool_class.dependencies

    errors = []

    # Check required Python modules first, then required executables.
    for dep in deps.get("modules", []):
        if not has_module(dep):
            errors.append(
                _(
                    'The Python module "%s" is not installed. '
                    "You may need to restart the server "
                    "after installing it."
                )
                % dep
            )

    for dep in deps.get("executables", []):
        if not is_exe_in_path(dep):
            # Report the platform-appropriate executable name.
            exe_name = "%s.exe" % dep if sys.platform == "win32" else dep

            errors.append(_('The executable "%s" is not in the path.') % exe_name)

    if errors:
        raise ValidationError(errors)

    return tool
def setUp(self):
    """Set up a stunnel-proxied Perforce repository for testing."""
    super(PerforceStunnelTests, self).setUp()

    if not is_exe_in_path('stunnel'):
        raise nose.SkipTest('stunnel is not installed')

    cert_path = os.path.join(os.path.dirname(__file__), '..', 'testdata',
                             'stunnel.pem')

    self.proxy = STunnelProxy('public.perforce.com:1666')
    self.proxy.start_server(cert_path)

    # Find an available port to listen on
    repo_path = 'stunnel:localhost:%d' % self.proxy.port

    self.repository = Repository(name='Perforce.com - secure',
                                 path=repo_path,
                                 username='******',
                                 encoding='none',
                                 tool=Tool.objects.get(name='Perforce'))

    try:
        self.tool = self.repository.get_scmtool()
        self.tool.use_stunnel = True
    except ImportError:
        raise nose.SkipTest('perforce/p4python is not installed')
def npm_install(self, package_spec):
    """Install a package via npm.

    This will first determine if npm is available, and then attempt
    to install the given package.

    Args:
        package_spec (unicode):
            The package specification (name and optional version range)
            to install.

    Raises:
        distutils.errors.DistutilsExecError:
            :command:`npm` could not be found, or there was an error
            installing the package.
    """
    # Only look up npm in the path once per build run.
    if not hasattr(self, '_checked_npm'):
        if not is_exe_in_path('npm'):
            raise DistutilsExecError(
                'Unable to locate npm in the path, which is needed to '
                'install %s. Static media cannot be built.'
                % package_spec)

        self._checked_npm = True

    # Ensure there's a node_modules directory here, so npm doesn't
    # install higher up in the directory hierarchy.
    if not os.path.exists('node_modules'):
        os.mkdir('node_modules', 0755)

    print 'Installing %s...' % package_spec
    result = os.system('npm install %s' % package_spec)

    if result != 0:
        raise DistutilsExecError('Installation of %s failed.'
                                 % package_spec)
def __init__(self, path, username, password, encoding='', host=None,
             client_name=None, local_site_name=None,
             use_ticket_auth=False):
    """Initialize the client.

    Args:
        path (unicode):
            The path to the repository (equivalent to :envvar:`P4PORT`).

        username (unicode):
            The username for the connection.

        password (unicode):
            The password for the connection.

        encoding (unicode, optional):
            The encoding to use for the connection.

        host (unicode, optional):
            The client's host name to use for the connection (equivalent
            to :envvar:`P4HOST`).

        client_name (unicode, optional):
            The name of the Perforce client (equivalent to
            :envvar:`P4CLIENT`).

        local_site_name (unicode, optional):
            The name of the local site used for the repository.

        use_ticket_auth (bool, optional):
            Whether to use ticket-based authentication.

            By default, this is not used.

    Raises:
        AttributeError:
            An stunnel proxy was requested but the :command:`stunnel`
            binary is not in the path.
    """
    # An 'stunnel:' prefix on the path requests a stunnel proxy; strip
    # it off before using the rest as P4PORT.
    if path.startswith('stunnel:'):
        path = path[8:]
        self.use_stunnel = True
    else:
        self.use_stunnel = False

    self.p4port = path
    self.username = username
    self.password = password or ''
    self.encoding = encoding
    self.p4host = host
    self.client_name = client_name
    self.local_site_name = local_site_name
    self.use_ticket_auth = use_ticket_auth

    # Imported lazily so that the module loads even without p4python.
    import P4
    self.p4 = P4.P4()

    if self.use_stunnel and not is_exe_in_path('stunnel'):
        raise AttributeError('stunnel proxy was requested, but stunnel '
                             'binary is not in the exec path.')
def setUp(self):
    """Skip Perforce tests when the required tooling is unavailable."""
    super(BasePerforceTestCase, self).setUp()

    skip_reason = None

    if P4 is None:
        skip_reason = 'The p4python module is not installed'
    elif not is_exe_in_path('p4'):
        skip_reason = 'The p4 command line tool is not installed'

    if skip_reason is not None:
        raise nose.SkipTest(skip_reason)
def test_create_with_parser_get_orig_commit_id(self):
    """Testing UploadDiffForm.create uses correct base revision returned
    by DiffParser.get_orig_commit_id
    """
    if not is_exe_in_path('hg'):
        raise nose.SkipTest('Hg is not installed')

    # A child changeset whose parent is the tip of the parent diff below.
    diff = (
        b'# Node ID a6fc203fee9091ff9739c9c00cd4a6694e023f48\n'
        b'# Parent 7c4735ef51a7c665b5654f1a111ae430ce84ebbd\n'
        b'diff --git a/doc/readme b/doc/readme\n'
        b'--- a/doc/readme\n'
        b'+++ b/doc/readme\n'
        b'@@ -1,3 +1,3 @@\n'
        b' Hello\n'
        b'-\n'
        b'+...\n'
        b' goodbye\n'
    )

    # The parent changeset, introducing a new file.
    parent_diff = (
        b'# Node ID 7c4735ef51a7c665b5654f1a111ae430ce84ebbd\n'
        b'# Parent 661e5dd3c4938ecbe8f77e2fdfa905d70485f94c\n'
        b'diff --git a/doc/newfile b/doc/newfile\n'
        b'new file mode 100644\n'
        b'--- /dev/null\n'
        b'+++ b/doc/newfile\n'
        b'@@ -0,0 +1,1 @@\n'
        b'+Lorem ipsum\n'
    )

    diff_file = SimpleUploadedFile('diff', diff,
                                   content_type='text/x-patch')
    parent_diff_file = SimpleUploadedFile('parent_diff', parent_diff,
                                          content_type='text/x-patch')

    repository = Repository.objects.create(
        name='Test HG',
        path='scmtools/testdata/hg_repo',
        tool=Tool.objects.get(name='Mercurial'))

    form = UploadDiffForm(
        repository=repository,
        files={
            'path': diff_file,
            'parent_diff_path': parent_diff_file,
        })
    self.assertTrue(form.is_valid())

    diffset = form.create()
    self.assertEqual(diffset.files.count(), 1)

    filediff = diffset.files.get()

    # The source revision must come from the parent diff's "# Parent"
    # line (via get_orig_commit_id), not the child's.
    self.assertEqual(filediff.source_revision,
                     '7c4735ef51a7c665b5654f1a111ae430ce84ebbd')
    self.assertEqual(filediff.extra_data.get('parent_source_revision'),
                     '661e5dd3c4938ecbe8f77e2fdfa905d70485f94c')
def __init__(self, repository, reponame, hostname, port):
    """Store connection details for the Plastic SCM server."""
    # This is technically not the right kind of error, but it's the
    # pattern we use with all the other tools.
    if not is_exe_in_path('cm'):
        raise ImportError

    self.reponame, self.hostname, self.port = reponame, hostname, port
def __init__(self, mode, target):
    """Set up the stunnel wrapper in the given mode.

    ``mode`` must be one of STUNNEL_SERVER or STUNNEL_CLIENT.
    """
    if not is_exe_in_path('stunnel'):
        raise OSError('stunnel was not found in the exec path')

    if mode != STUNNEL_SERVER and mode != STUNNEL_CLIENT:
        raise AttributeError

    self.mode = mode
    self.target = target

    # No tunnel process has been started yet.
    self.pid = None
def __init__(self, repository, path):
    """Record CVS connection state and verify the cvs binary exists."""
    self.tempdir = ""
    self.currentdir = os.getcwd()
    self.repository, self.path = repository, path

    if not is_exe_in_path('cvs'):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError
def __init__(self, path):
    """Validate the monotone database path and tool availability."""
    # This is technically not the right kind of error, but it's the
    # pattern we use with all the other tools.
    if not is_exe_in_path('mtn'):
        raise ImportError

    self.path = path

    # Monotone repositories are single database files on disk.
    if not os.path.isfile(path):
        raise SCMError("Repository %s does not exist" % path)
def __init__(self, cvsroot, path, local_site_name):
    """Record CVS connection state and verify the cvs binary exists."""
    self.tempdir = ""
    self.currentdir = os.getcwd()
    self.cvsroot, self.path = cvsroot, path
    self.local_site_name = local_site_name

    if not is_exe_in_path('cvs'):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError
def __init__(self, target):
    """Initialize the proxy.

    Args:
        target (unicode):
            The target server to proxy to.
    """
    if not is_exe_in_path('stunnel'):
        raise OSError('stunnel was not found in the exec path')

    # No tunnel process has been started yet.
    self.target, self.pid = target, None
def __init__(self, p4port, username, password, use_stunnel=False):
    """Store Perforce connection state and create the P4 instance."""
    self.p4port, self.username, self.password = p4port, username, password
    self.use_stunnel = use_stunnel
    self.proxy = None

    # Imported lazily so the module loads even without p4python.
    import P4
    self.p4 = P4.P4()

    if use_stunnel and not is_exe_in_path('stunnel'):
        raise AttributeError('stunnel proxy was requested, but stunnel '
                             'binary is not in the exec path.')
def run(self):
    """Compile localization message catalogs for the reviewboard tree."""
    import os
    import sys

    from django.core.management.commands.compilemessages import \
        compile_messages
    from djblets.util.filesystem import is_exe_in_path

    if not is_exe_in_path('msgfmt'):
        raise RuntimeError('Could not find the "msgfmt" binary.')

    # compile_messages operates on the current directory, so hop into
    # the package tree and back out again when done.
    old_cwd = os.getcwd()
    os.chdir(os.path.realpath('reviewboard'))
    compile_messages(stderr=sys.stderr)
    os.chdir(old_cwd)
def __init__(self, repository):
    """Pick an Hg client implementation based on the repository path."""
    super(HgTool, self).__init__(repository)

    repo_path = repository.path

    if repo_path.startswith('http'):
        # Remote hgweb repositories are accessed over HTTP(S).
        creds = repository.get_credentials()
        self.client = HgWebClient(repo_path,
                                  creds['username'],
                                  creds['password'])
    else:
        # Local repositories require the hg binary.
        if not is_exe_in_path('hg'):
            # This is technically not the right kind of error, but it's the
            # pattern we use with all the other tools.
            raise ImportError

        self.client = HgClient(repo_path, repository.local_site)
def __init__(self, p4port, username, password, encoding,
             use_stunnel=False, use_ticket_auth=False):
    """Store Perforce connection state and create the P4 instance."""
    self.p4port, self.username, self.password = p4port, username, password
    self.encoding = encoding
    self.use_stunnel = use_stunnel
    self.use_ticket_auth = use_ticket_auth
    self.proxy = None

    # Imported lazily so the module loads even without p4python.
    import P4
    self.p4 = P4.P4()

    if use_stunnel and not is_exe_in_path("stunnel"):
        raise AttributeError("stunnel proxy was requested, but stunnel "
                             "binary is not in the exec path.")
def __init__(self, repository):
    """Pick an Hg client implementation based on the repository path."""
    super(HgTool, self).__init__(repository)

    # The hg binary is required regardless of access method.
    if not is_exe_in_path('hg'):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError

    repo_path = repository.path

    if repo_path.startswith('http'):
        creds = repository.get_credentials()
        self.client = HgWebClient(repo_path,
                                  creds['username'],
                                  creds['password'])
    else:
        self.client = HgClient(repo_path, repository.local_site)
def guess_mimetype(uploaded_file):
    """Guess the mimetype of an uploaded file.

    Uploaded files don't necessarily have valid mimetypes provided,
    so attempt to guess them when they're blank.

    This only works if `file` is in the path. If it's not, or guessing
    fails, we fall back to a mimetype of :mimetype:`application/octet-stream`.

    Args:
        uploaded_file (django.core.files.File):
            The uploaded file object.

    Returns:
        unicode:
        The guessed mimetype.
    """
    if not is_exe_in_path('file'):
        return DEFAULT_MIMETYPE

    # The browser didn't know what this was, so we'll need to do
    # some guess work. If we have 'file' available, use that to
    # figure it out.
    p = subprocess.Popen(['file', '--mime-type', '-b', '-'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         stdin=subprocess.PIPE)

    # Write the content from the file until file has enough data to
    # make a determination.
    for chunk in uploaded_file.chunks():
        try:
            p.stdin.write(chunk)
        except IOError:
            # file closed, so we hopefully have an answer.
            break

    p.stdin.close()
    ret = p.wait()

    if ret == 0:
        mimetype = p.stdout.read().strip()
    else:
        # Bug fix: previously, mimetype was left unassigned when the
        # `file` invocation failed, raising NameError below.
        mimetype = None

    # Reset the read position so we can properly save this.
    uploaded_file.seek(0)

    return mimetype or DEFAULT_MIMETYPE
def setUp(self):
    """Set up a local Bazaar repository for testing."""
    super(BZRTests, self).setUp()

    if not is_exe_in_path('bzr'):
        raise nose.SkipTest()

    self.bzr_repo_path = os.path.join(os.path.dirname(__file__),
                                      '..', 'testdata', 'bzr_repo')

    # Normalize Windows path separators for URL use.
    url_path = self.bzr_repo_path.replace('\\', '/')
    self.bzr_ssh_path = 'bzr+ssh://localhost/%s' % url_path
    self.bzr_sftp_path = 'sftp://localhost/%s' % url_path

    self.repository = Repository(name='Bazaar',
                                 path='file://' + self.bzr_repo_path,
                                 tool=Tool.objects.get(name='Bazaar'))
    self.tool = self.repository.get_scmtool()
def __init__(self, path, raw_file_url=None, username=None, password=None,
             encoding='', local_site_name=None):
    """Initialize the Git client and probe a local repository, if any.

    For ``file://`` URLs, validates the repository by running
    ``git config core.repositoryformatversion`` against it.

    Raises:
        ImportError:
            The :command:`git` binary is not in the path.
        SCMError:
            The local repository could not be read.
    """
    super(GitClient, self).__init__(self._normalize_git_url(path),
                                    username=username,
                                    password=password)

    if not is_exe_in_path('git'):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError

    self.raw_file_url = raw_file_url
    self.encoding = encoding
    self.local_site_name = local_site_name

    # Only set for local (file://) repositories; remains None otherwise.
    self.git_dir = None

    url_parts = urllib_urlparse(self.path)

    if url_parts[0] == 'file':
        if platform.system() == "Windows":
            # Windows requires drive letter (e.g. C:/)
            self.git_dir = url_parts[1] + url_parts[2]
        else:
            self.git_dir = url_parts[2]

        p = self._run_git([
            '--git-dir=%s' % self.git_dir,
            'config',
            'core.repositoryformatversion'
        ])
        failure = p.wait()

        if failure:
            # See if we have a permissions error
            if not os.access(self.git_dir, os.R_OK):
                raise SCMError(
                    _("Permission denied accessing the local "
                      "Git repository '%s'")
                    % self.git_dir)
            else:
                raise SCMError(
                    _('Unable to retrieve information from '
                      'local Git repository'))
def __init__(self, path, username, password, encoding='', host=None,
             client_name=None, use_stunnel=False, use_ticket_auth=False):
    """Store Perforce connection state and create the P4 instance."""
    self.p4port = path
    self.username, self.password = username, password
    self.encoding = encoding
    self.p4host = host
    self.client_name = client_name
    self.use_stunnel = use_stunnel
    self.use_ticket_auth = use_ticket_auth
    self.proxy = None

    # Imported lazily so the module loads even without p4python.
    import P4
    self.p4 = P4.P4()

    if use_stunnel and not is_exe_in_path('stunnel'):
        raise AttributeError('stunnel proxy was requested, but stunnel '
                             'binary is not in the exec path.')
def npm_install(self, package_spec=None):
    """Install a package via npm.

    This will first determine if npm is available, and then attempt
    to install the given package.

    Args:
        package_spec (unicode, optional):
            The package specification (name and optional version range)
            to install.

            If not specified, this will use the default behavior of
            reading :file:`package.json`.

    Raises:
        distutils.errors.DistutilsExecError:
            :command:`npm` could not be found, or there was an error
            installing the package.
    """
    # Only look up npm in the path once per build run.
    if not hasattr(self, '_checked_npm'):
        if not is_exe_in_path('npm'):
            raise DistutilsExecError(
                'Unable to locate npm in the path, which is needed to '
                'install %s. Static media cannot be built.'
                % package_spec)

        self._checked_npm = True

    if package_spec:
        # Ensure there's a node_modules directory here, so it doesn't
        # install higher up in the directory hierarchy.
        if not os.path.exists('node_modules'):
            os.mkdir('node_modules', 0o755)

        print('Installing %s...' % package_spec)
        result = os.system('npm install %s' % package_spec)
    else:
        print('Installing node packages...')
        result = os.system('npm install')

    if result != 0:
        raise DistutilsExecError('Installation from npm failed.')
def _guess_mimetype(self, file):
    """Guess the mimetype of an uploaded file.

    Uploaded files don't necessarily have valid mimetypes provided,
    so attempt to guess them when they're blank.

    This only works if `file` is in the path. If it's not, or
    guessing fails, we fall back to a mimetype of
    application/octet-stream.
    """
    if not is_exe_in_path("file"):
        return self.DEFAULT_MIMETYPE

    # The browser didn't know what this was, so we'll need to do
    # some guess work. If we have 'file' available, use that to
    # figure it out.
    process = subprocess.Popen(
        ["file", "--mime-type", "-b", "-"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE
    )

    # Write the content from the file until file has enough data to
    # make a determination.
    for chunk in file.chunks():
        try:
            process.stdin.write(chunk)
        except IOError:
            # file closed, so we hopefully have an answer.
            break

    process.stdin.close()
    exit_code = process.wait()

    mimetype = process.stdout.read().strip() if exit_code == 0 else None

    # Reset the read position so we can properly save this.
    file.seek(0)

    return mimetype or self.DEFAULT_MIMETYPE
def __init__(self, path, raw_file_url=None, username=None, password=None,
             encoding='', local_site_name=None):
    """Initialize the Git client and probe a local repository, if any.

    For ``file://`` URLs, validates the repository by running
    ``git config core.repositoryformatversion`` against it.

    Raises:
        ImportError:
            The :command:`git` binary is not in the path.
        SCMError:
            The local repository could not be read.
    """
    super(GitClient, self).__init__(self._normalize_git_url(path),
                                    username=username,
                                    password=password)

    if not is_exe_in_path('git'):
        # This is technically not the right kind of error, but it's the
        # pattern we use with all the other tools.
        raise ImportError

    self.raw_file_url = raw_file_url
    self.encoding = encoding
    self.local_site_name = local_site_name

    # Only set for local (file://) repositories; remains None otherwise.
    self.git_dir = None

    url_parts = urllib_urlparse(self.path)

    if url_parts[0] == 'file':
        if platform.system() == "Windows":
            # Windows requires drive letter (e.g. C:/)
            self.git_dir = url_parts[1] + url_parts[2]
        else:
            self.git_dir = url_parts[2]

        p = self._run_git(['--git-dir=%s' % self.git_dir, 'config',
                           'core.repositoryformatversion'])
        failure = p.wait()

        if failure:
            # See if we have a permissions error
            if not os.access(self.git_dir, os.R_OK):
                raise SCMError(_("Permission denied accessing the local "
                                 "Git repository '%s'") % self.git_dir)
            else:
                raise SCMError(_('Unable to retrieve information from '
                                 'local Git repository'))
def get_bzr_exe():
    """Return the name of the executable used to run Bazaar/Breezy.

    If :command:`brz` is in :envvar:`PATH`, then ``brz`` will be
    returned. Otherwise, ``bzr`` will be returned, even if not found
    on the system.

    Version Added:
        4.0.7

    Returns:
        unicode:
        The name of the executable to run.
    """
    global _bzr_exe

    if _bzr_exe is None:
        # Prefer Breezy when installed; otherwise fall back to Bazaar
        # whether or not it's actually available.
        _bzr_exe = 'brz' if is_exe_in_path('brz') else 'bzr'

    return _bzr_exe
def check_updates_required():
    """Checks if there are manual updates required.

    Sometimes, especially in developer installs, some things need to be
    tweaked by hand before Review Board can be used on this server.
    """
    global _updates_required
    global _install_fine

    if not _updates_required and not _install_fine:
        # Check if the site has moved and the old media directory no longer
        # exists.
        if not os.path.exists(settings.MEDIA_ROOT):
            new_media_root = os.path.join(settings.HTDOCS_ROOT, "media")

            if os.path.exists(new_media_root):
                from djblets.siteconfig.models import SiteConfiguration

                siteconfig = SiteConfiguration.objects.get_current()
                siteconfig.set("site_media_root", new_media_root)
                settings.MEDIA_ROOT = new_media_root

        # Check if there's a media/uploaded/images directory. If not, this is
        # either a new install or is using the old-style media setup and needs
        # to be manually upgraded.
        uploaded_dir = os.path.join(settings.MEDIA_ROOT, "uploaded")

        if not os.path.isdir(uploaded_dir) or \
           not os.path.isdir(os.path.join(uploaded_dir, "images")):
            _updates_required.append((
                "admin/manual-updates/media-upload-dir.html", {
                    'MEDIA_ROOT': settings.MEDIA_ROOT
                }
            ))

        try:
            from reviewboard.changedescs.models import ChangeDescription
            ChangeDescription.objects.count()
        except Exception:
            # Bug fix: this was a bare "except:", which also swallowed
            # SystemExit/KeyboardInterrupt. We only want to catch load or
            # query failures here.
            #
            # We were unable to load this, so it's likely that the user
            # hasn't run syncdb yet.
            _updates_required.append((
                "admin/manual-updates/run-syncdb.html", {}
            ))

        if not is_exe_in_path('patch'):
            # Show the platform-appropriate binary name in the update page.
            if sys.platform == 'win32':
                binaryname = 'patch.exe'
            else:
                binaryname = 'patch'

            _updates_required.append((
                "admin/manual-updates/install-patch.html", {
                    'platform': sys.platform,
                    'binaryname': binaryname,
                    'search_path': os.getenv('PATH'),
                }
            ))

        #
        # NOTE: Add new checks above this.
        #

        _install_fine = not _updates_required

    return _updates_required
def check_dependencies(settings):
    """Check for required and recommended dependencies at install time.

    Hard requirements (Python version, django_evolution, PIL) go through
    ``settings.dependency_error``; optional SCM integrations only emit
    warnings to stderr.
    """
    # Some of our checks require access to django.conf.settings, so
    # tell Django about our settings.
    #
    from djblets.util.filesystem import is_exe_in_path
    from reviewboard.admin.import_utils import has_module

    dependency_error = settings.dependency_error

    # Python 2.6
    if sys.version_info[0] < 2 or \
       (sys.version_info[0] == 2 and sys.version_info[1] < 6):
        dependency_error('Python 2.6 or newer is required.')

    # django-evolution
    if not has_module('django_evolution'):
        dependency_error("django_evolution is required.\n"
                         "http://code.google.com/p/django-evolution/")

    # PIL
    if not has_module('PIL') and not has_module('Image'):
        dependency_error('The Python Imaging Library (Pillow or PIL) '
                         'is required.')

    # The following checks are non-fatal warnings, since these
    # dependencies are merely recommended, not required.
    def dependency_warning(string):
        # Bumps the module-level warning counter so we know whether to
        # print install help at the end.
        sys.stderr.write('Warning: %s\n' % string)

        global warnings_found
        warnings_found += 1

    if not has_module('pysvn') and not has_module('subvertpy'):
        dependency_warning('Neither subvertpy nor pysvn found. '
                           'SVN integration will not work.')

    if has_module('P4'):
        try:
            # p4python is present; make sure the p4 CLI exists too.
            subprocess.call(['p4', '-h'],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
        except OSError:
            dependency_error('p4 command not found. Perforce integration '
                             'will not work.')
    else:
        dependency_warning('p4python (>=07.3) not found. Perforce '
                           'integration will not work.')

    if not has_module('mercurial'):
        dependency_warning('hg not found. Mercurial integration will not '
                           'work.')

    if not has_module('bzrlib'):
        dependency_warning('bzrlib not found. Bazaar integration will not '
                           'work.')

    if not is_exe_in_path('cvs'):
        dependency_warning('cvs binary not found. CVS integration '
                           'will not work.')

    if not is_exe_in_path('git'):
        dependency_warning('git binary not found. Git integration '
                           'will not work.')

    if not is_exe_in_path('mtn'):
        dependency_warning('mtn binary not found. Monotone integration '
                           'will not work.')

    # Django will print warnings/errors for database backend modules and
    # flup if the configuration requires it.

    if warnings_found:
        sys.stderr.write(settings.install_help)
        sys.stderr.write('\n\n')
class UploadDiffFormTests(SpyAgency, TestCase): """Unit tests for UploadDiffForm.""" fixtures = ['test_scmtools'] def test_create(self): """Testing UploadDiffForm.create""" diff_file = SimpleUploadedFile('diff', self.DEFAULT_GIT_FILEDIFF_DATA_DIFF, content_type='text/x-patch') repository = self.create_repository(tool_name='Test') self.spy_on(repository.get_file_exists, call_fake=lambda *args, **kwargs: True) form = UploadDiffForm(repository=repository, data={ 'basedir': '/', 'base_commit_id': '1234', }, files={ 'path': diff_file, }) self.assertTrue(form.is_valid()) diffset = form.create() self.assertEqual(diffset.files.count(), 1) self.assertEqual(diffset.basedir, '/') self.assertEqual(diffset.base_commit_id, '1234') def test_create_filters_parent_diffs(self): """Testing UploadDiffForm.create filters parent diff files""" saw_file_exists = {} def get_file_exists(repository, filename, revision, *args, **kwargs): saw_file_exists[(filename, revision)] = True return True parent_diff_1 = (b'diff --git a/README b/README\n' b'index d6613f4..5b50865 100644\n' b'--- README\n' b'+++ README\n' b'@@ -2 +2 @@\n' b'-blah..\n' b'+blah blah\n') parent_diff_2 = (b'diff --git a/UNUSED b/UNUSED\n' b'index 1234567..5b50866 100644\n' b'--- UNUSED\n' b'+++ UNUSED\n' b'@@ -1,1 +1,1 @@\n' b'-foo\n' b'+bar\n') parent_diff = parent_diff_1 + parent_diff_2 diff_file = SimpleUploadedFile('diff', self.DEFAULT_GIT_FILEDIFF_DATA_DIFF, content_type='text/x-patch') parent_diff_file = SimpleUploadedFile('parent_diff', parent_diff, content_type='text/x-patch') repository = self.create_repository(tool_name='Test') self.spy_on(repository.get_file_exists, call_fake=get_file_exists) form = UploadDiffForm(repository=repository, data={ 'basedir': '/', }, files={ 'path': diff_file, 'parent_diff_path': parent_diff_file, }) self.assertTrue(form.is_valid()) diffset = form.create() self.assertEqual(diffset.files.count(), 1) filediff = diffset.files.get() self.assertEqual(filediff.diff, 
self.DEFAULT_GIT_FILEDIFF_DATA_DIFF) self.assertEqual(filediff.parent_diff, parent_diff_1) self.assertIn(('/README', 'd6613f4'), saw_file_exists) self.assertNotIn(('/UNUSED', '1234567'), saw_file_exists) self.assertEqual(len(saw_file_exists), 1) @unittest.skipIf(not is_exe_in_path('hg'), 'Hg is not installed') def test_create_with_parser_get_orig_commit_id(self): """Testing UploadDiffForm.create uses correct base revision returned by DiffParser.get_orig_commit_id """ diff = (b'# Node ID a6fc203fee9091ff9739c9c00cd4a6694e023f48\n' b'# Parent 7c4735ef51a7c665b5654f1a111ae430ce84ebbd\n' b'diff --git a/doc/readme b/doc/readme\n' b'--- a/doc/readme\n' b'+++ b/doc/readme\n' b'@@ -1,3 +1,3 @@\n' b' Hello\n' b'-\n' b'+...\n' b' goodbye\n') parent_diff = (b'# Node ID 7c4735ef51a7c665b5654f1a111ae430ce84ebbd\n' b'# Parent 661e5dd3c4938ecbe8f77e2fdfa905d70485f94c\n' b'diff --git a/doc/newfile b/doc/newfile\n' b'new file mode 100644\n' b'--- /dev/null\n' b'+++ b/doc/newfile\n' b'@@ -0,0 +1,1 @@\n' b'+Lorem ipsum\n') diff_file = SimpleUploadedFile('diff', diff, content_type='text/x-patch') parent_diff_file = SimpleUploadedFile('parent_diff', parent_diff, content_type='text/x-patch') repository = Repository.objects.create( name='Test HG', path='scmtools/testdata/hg_repo', tool=Tool.objects.get(name='Mercurial')) form = UploadDiffForm(repository=repository, files={ 'path': diff_file, 'parent_diff_path': parent_diff_file, }) self.assertTrue(form.is_valid()) diffset = form.create() self.assertEqual(diffset.files.count(), 1) filediff = diffset.files.get() self.assertEqual(filediff.source_revision, '7c4735ef51a7c665b5654f1a111ae430ce84ebbd') self.assertEqual(filediff.extra_data.get('parent_source_revision'), '661e5dd3c4938ecbe8f77e2fdfa905d70485f94c') def test_create_with_parent_filediff_with_move_and_no_change(self): """Testing UploadDiffForm.create with a parent diff consisting only of a move/rename without content change """ revisions = [ 
            b'93e6b3e8944c48737cb11a1e52b046fa30aea7a9',
            b'4839fc480f47ca59cf05a9c39410ea744d1e17a2',
        ]

        # NOTE(review): this bytes literal is never %-formatted, so the
        # parent diff will literally contain 'similarity index 100%%'.
        # Possibly intended to be a plain '100%' — confirm.
        parent_diff = SimpleUploadedFile(
            'parent_diff',
            (b'diff --git a/foo b/bar\n'
             b'similarity index 100%%\n'
             b'rename from foo\n'
             b'rename to bar\n'),
            content_type='text/x-patch')

        diff = SimpleUploadedFile(
            'diff',
            (b'diff --git a/bar b/bar\n'
             b'index %s..%s 100644\n'
             b'--- a/bar\n'
             b'+++ b/bar\n'
             b'@@ -1,2 +1,3 @@\n'
             b' Foo\n'
             b'+Bar\n') % (revisions[0], revisions[1]),
            content_type='text/x-patch')

        repository = self.create_repository(tool_name='Test')
        self.spy_on(repository.get_file_exists,
                    call_fake=lambda *args, **kwargs: True)

        # We will only be making one call to get_file and we can fake it out.
        self.spy_on(repository.get_file,
                    call_fake=lambda *args, **kwargs: b'Foo\n')
        self.spy_on(patch)

        form = UploadDiffForm(
            repository=repository,
            data={
                'basedir': '/',
            },
            files={
                'path': diff,
                'parent_diff_path': parent_diff,
            })
        self.assertTrue(form.is_valid())

        diffset = form.create()
        self.assertEqual(diffset.files.count(), 1)

        f = diffset.files.get()
        self.assertEqual(f.source_revision, revisions[0].decode('utf-8'))
        self.assertEqual(f.dest_detail, revisions[1].decode('utf-8'))

        # We shouldn't call out to patch because the parent diff is just a
        # rename.
        original_file = get_original_file(filediff=f,
                                          request=None,
                                          encoding_list=['ascii'])
        self.assertEqual(original_file, b'Foo\n')
        self.assertFalse(patch.spy.called)

        patched_file = get_patched_file(source_data=original_file,
                                        filediff=f)
        self.assertEqual(patched_file, b'Foo\nBar\n')
        self.assertTrue(patch.spy.called)

    def test_create_with_parent_filediff_with_move_and_change(self):
        """Testing UploadDiffForm.create with a parent diff consisting of a
        move/rename with content change
        """
        revisions = [
            b'5d36b88bb697a2d778f024048bafabd443d74503',
            b'9b32edcd37a88c6ada91efc562afa637ccfdad36',
            b'8a567d328293f85d68332bc693b0a98869b23b47',
        ]

        parent_diff = SimpleUploadedFile(
            'parent_diff',
            (b'diff --git a/foo b/bar\n'
             b'similarity index 55%%\n'
             b'rename from foo\n'
             b'rename to bar\n'
             b'index %s..%s 100644\n'
             b'--- a/foo\n'
             b'+++ b/bar\n'
             b'@@ -1,2 +1,3 @@\n'
             b' Foo\n'
             b'+Bar\n') % (revisions[0], revisions[1]),
            content_type='text/x-patch')

        diff = SimpleUploadedFile(
            'diff',
            (b'diff --git a/bar b/bar\n'
             b'index %s..%s 100644\n'
             b'--- a/bar\n'
             b'+++ b/bar\n'
             b'@@ -1,3 +1,4 @@\n'
             b' Foo\n'
             b' Bar\n'
             b'+Baz\n') % (revisions[1], revisions[2]),
            content_type='text/x-patch')

        repository = self.create_repository(tool_name='Test')
        self.spy_on(repository.get_file_exists,
                    call_fake=lambda *args, **kwargs: True)

        # We will only be making one call to get_file and we can fake it out.
        self.spy_on(repository.get_file,
                    call_fake=lambda *args, **kwargs: b'Foo\n')
        self.spy_on(patch)

        form = UploadDiffForm(
            repository=repository,
            data={
                'basedir': '/',
            },
            files={
                'path': diff,
                'parent_diff_path': parent_diff,
            })
        self.assertTrue(form.is_valid())

        diffset = form.create()
        self.assertEqual(diffset.files.count(), 1)

        filediff = diffset.files.get()
        self.assertEqual(filediff.source_file, 'bar')
        self.assertEqual(filediff.dest_file, 'bar')
        self.assertEqual(filediff.source_revision,
                         revisions[1].decode('utf-8'))
        self.assertEqual(filediff.dest_detail, revisions[2].decode('utf-8'))
        self.assertEqual(filediff.extra_data, {
            '__parent_diff_empty': False,
            'is_symlink': False,
            'new_unix_mode': '100644',
            'old_unix_mode': '100644',
            'parent_moved': True,
            'parent_source_filename': '/foo',
            'parent_source_revision': revisions[0].decode('utf-8'),
            'raw_delete_count': 0,
            'raw_insert_count': 1,
        })

        original_file = get_original_file(filediff=filediff,
                                          request=None,
                                          encoding_list=['ascii'])
        self.assertEqual(original_file, b'Foo\nBar\n')
        self.assertTrue(patch.spy.called)

        patched_file = get_patched_file(source_data=original_file,
                                        filediff=filediff)
        self.assertEqual(patched_file, b'Foo\nBar\nBaz\n')
        self.assertEqual(len(patch.spy.calls), 2)

    def test_create_missing_basedir(self):
        """Testing UploadDiffForm with a missing basedir field that is
        required
        """
        repository = self.create_repository(tool_name='Test')
        scmtool = repository.get_scmtool()

        self.spy_on(repository.get_file_exists,
                    call_fake=lambda *args, **kwargs: True)

        revisions = [
            b'93e6b3e8944c48737cb11a1e52b046fa30aea7a9',
            b'4839fc480f47ca59cf05a9c39410ea744d1e17a2',
        ]

        diff = SimpleUploadedFile(
            'diff',
            (b'diff --git a/bar b/bar\n'
             b'index %s..%s 100644\n'
             b'--- a/bar\n'
             b'+++ b/bar\n'
             b'@@ -1,2 +1,3 @@\n'
             b' Foo\n'
             b'+Bar\n') % (revisions[0], revisions[1]),
            content_type='text/x-patch')

        try:
            orig_use_abs_paths = scmtool.diffs_use_absolute_paths
            scmtool.diffs_use_absolute_paths = True

            form = UploadDiffForm(
                repository=repository,
                files={
                    'path': diff,
                })

            self.assertFalse(form.is_valid())
        finally:
            # Always restore the class-level flag, even on test failure.
            scmtool.diffs_use_absolute_paths = orig_use_abs_paths

        self.assertIn('basedir', form.errors)
        self.assertIn('This field is required.', form.errors['basedir'])

    def test_create_with_parent_filediff_with_new_file(self):
        """Testing UploadDiffForm.create with a parent diff consisting of a
        newly-introduced file
        """
        revisions = [
            b'0000000000000000000000000000000000000000',
            b'9b32edcd37a88c6ada91efc562afa637ccfdad36',
            b'8a567d328293f85d68332bc693b0a98869b23b47',
        ]

        parent_diff = SimpleUploadedFile(
            'parent_diff',
            (b'diff --git a/foo b/foo\n'
             b'new file mode 100644\n'
             b'index %s..%s\n'
             b'--- /dev/null\n'
             b'+++ b/foo\n'
             b'@@ -0,0 +1,2 @@\n'
             b'+Foo\n'
             b'+Bar\n') % (revisions[0], revisions[1]),
            content_type='text/x-patch')

        diff = SimpleUploadedFile(
            'diff',
            (b'diff --git a/foo b/foo\n'
             b'index %s..%s 100644\n'
             b'--- a/foo\n'
             b'+++ b/foo\n'
             b'@@ -1,3 +1,4 @@\n'
             b' Foo\n'
             b' Bar\n'
             b'+Baz\n') % (revisions[1], revisions[2]),
            content_type='text/x-patch')

        repository = self.create_repository(tool_name='Test')
        self.spy_on(repository.get_file_exists,
                    call_fake=lambda *args, **kwargs: True)

        # We will only be making one call to get_file and we can fake it out.
        self.spy_on(repository.get_file,
                    call_fake=lambda *args, **kwargs: b'Foo\n')
        self.spy_on(patch)

        form = UploadDiffForm(
            repository=repository,
            data={
                'basedir': '/',
            },
            files={
                'parent_diff_path': parent_diff,
                'path': diff,
            })
        self.assertTrue(form.is_valid())

        diffset = form.create()
        self.assertEqual(diffset.files.count(), 1)

        filediff = diffset.files.get()
        self.assertEqual(filediff.source_file, 'foo')
        self.assertEqual(filediff.dest_file, 'foo')
        self.assertEqual(filediff.source_revision,
                         revisions[1].decode('utf-8'))
        self.assertEqual(filediff.dest_detail, revisions[2].decode('utf-8'))
        self.assertEqual(filediff.extra_data, {
            '__parent_diff_empty': False,
            'is_symlink': False,
            'new_unix_mode': '100644',
            'old_unix_mode': '100644',
            'parent_source_filename': '/foo',
            'parent_source_revision': 'PRE-CREATION',
            'raw_delete_count': 0,
            'raw_insert_count': 1,
        })

        # Double-check the types.
        self.assertIsInstance(filediff.extra_data['parent_source_filename'],
                              str)
        self.assertIsInstance(filediff.extra_data['parent_source_revision'],
                              str)

        original_file = get_original_file(filediff=filediff,
                                          request=None,
                                          encoding_list=['ascii'])
        self.assertEqual(original_file, b'Foo\nBar\n')
        self.assertSpyCalled(patch)

        patched_file = get_patched_file(source_data=original_file,
                                        filediff=filediff)
        self.assertEqual(patched_file, b'Foo\nBar\nBaz\n')
        # NOTE(review): sibling tests use patch.spy.calls; this relies on
        # the spy forwarding .calls on the function itself — confirm the
        # kgb version in use supports that.
        self.assertEqual(len(patch.calls), 2)
def check_updates_required():
    """Check if there are manual updates required.

    Sometimes, especially in developer installs, some things need to be
    tweaked by hand before Review Board can be used on this server.

    Returns:
        list:
        A list of ``(template_name, context_dict)`` pairs, one per manual
        update the administrator still needs to perform. Empty when the
        install is fine.
    """
    global _install_fine

    updates_required = []

    if not _install_fine:
        site_dir = os.path.dirname(settings.HTDOCS_ROOT)
        devel_install = (os.path.exists(os.path.join(settings.LOCAL_ROOT,
                                                     'manage.py')))
        siteconfig = None

        # Check if we can access a SiteConfiguration. There should always
        # be one, unless the user has erased stuff by hand.
        #
        # This also checks for any sort of errors in talking to the database.
        # This could be due to the database being down, or corrupt, or
        # tables locked, or an empty database, or other cases. We want to
        # catch this before getting the point where plain 500 Internal Server
        # Errors appear.
        try:
            siteconfig = SiteConfiguration.objects.get_current()
        except (DatabaseError, SiteConfiguration.DoesNotExist) as e:
            updates_required.append((
                'admin/manual-updates/database-error.html', {
                    'error': e,
                }
            ))

        # Check if the version running matches the last stored version.
        # Only do this for non-debug installs, as it's really annoying on
        # a developer install.
        cur_version = get_version_string()

        if siteconfig and siteconfig.version != cur_version:
            updates_required.append((
                'admin/manual-updates/version-mismatch.html', {
                    'current_version': cur_version,
                    'stored_version': siteconfig.version,
                    'site_dir': site_dir,
                    'devel_install': devel_install,
                }
            ))

        # Check if the site has moved and the old media directory no longer
        # exists.
        if siteconfig and not os.path.exists(settings.STATIC_ROOT):
            new_media_root = os.path.join(settings.HTDOCS_ROOT, "static")

            if os.path.exists(new_media_root):
                siteconfig.set("site_media_root", new_media_root)
                settings.STATIC_ROOT = new_media_root

        # Check if there's a media/uploaded/images directory. If not, this is
        # either a new install or is using the old-style media setup and needs
        # to be manually upgraded.
        uploaded_dir = os.path.join(settings.MEDIA_ROOT, "uploaded")

        if not os.path.isdir(uploaded_dir) or \
           not os.path.isdir(os.path.join(uploaded_dir, "images")):
            updates_required.append((
                "admin/manual-updates/media-upload-dir.html", {
                    'MEDIA_ROOT': settings.MEDIA_ROOT
                }
            ))

        try:
            username = getpass.getuser()
        except ImportError:
            # This will happen if running on Windows (which doesn't have
            # the pwd module) and if %LOGNAME%, %USER%, %LNAME% and
            # %USERNAME% are all undefined.
            username = "******"

        # Check if the data directory (should be $HOME) is writable by us.
        data_dir = os.environ.get('HOME', '')

        if (not data_dir or
            not os.path.isdir(data_dir) or
            not os.access(data_dir, os.W_OK)):
            # Note: username was already computed above; the old code
            # redundantly recomputed it here.
            updates_required.append((
                'admin/manual-updates/data-dir.html', {
                    'data_dir': data_dir,
                    'writable': os.access(data_dir, os.W_OK),
                    'server_user': username,
                    'expected_data_dir': os.path.join(site_dir, 'data'),
                }
            ))

        # Check if the htdocs/media/ext directory is writable by us.
        ext_dir = settings.EXTENSIONS_STATIC_ROOT

        if not os.path.isdir(ext_dir) or not os.access(ext_dir, os.W_OK):
            updates_required.append((
                'admin/manual-updates/ext-dir.html', {
                    'ext_dir': ext_dir,
                    'writable': os.access(ext_dir, os.W_OK),
                    'server_user': username,
                }
            ))

        if not is_exe_in_path('patch'):
            if sys.platform == 'win32':
                binaryname = 'patch.exe'
            else:
                binaryname = 'patch'

            updates_required.append((
                "admin/manual-updates/install-patch.html", {
                    'platform': sys.platform,
                    'binaryname': binaryname,
                    'search_path': os.getenv('PATH'),
                }
            ))

        #
        # NOTE: Add new checks above this.
        #

    _install_fine = not updates_required

    # Fixed: the computed list was previously discarded, making the whole
    # function useless to callers except for the _install_fine flag.
    return updates_required
#!/usr/bin/env python from __future__ import unicode_literals import os import sys from django.core.management.commands.compilemessages import compile_messages from djblets.util.filesystem import is_exe_in_path if __name__ == '__main__': if not is_exe_in_path('msgfmt'): raise RuntimeError('Could not find the "msgfmt" binary.') cwd = os.getcwd() os.chdir(os.path.realpath('djblets')) compile_messages(stderr=sys.stderr) os.chdir(cwd)
def check_dependencies():
    """Check for required and recommended dependencies.

    Missing hard requirements abort via ``dependency_error()``; missing
    optional SCM integrations only print a warning and increment the
    module-level ``warnings_found`` counter.
    """
    # Some of our checks require access to django.conf.settings, so
    # tell Django about our settings.
    #
    # This must go before the imports.
    setup_environ(settings)

    from django.template.defaultfilters import striptags
    from djblets.util.filesystem import is_exe_in_path

    from reviewboard.admin import checks

    from settings import dependency_error

    # Python 2.4
    if sys.version_info[0] < 2 or \
       (sys.version_info[0] == 2 and sys.version_info[1] < 4):
        dependency_error('Python 2.4 or newer is required.')

    # Django 1.0
    try:
        # Django 1.0 final has VERSION (1, 0, "final").
        # All subsequent versions have a 5-tuple, e.g. (1, 1, 0, "alpha", 0).
        import django

        # Fixed: the old condition tested django.VERSION[1] >= 0, which is
        # always true for any 5-tuple. The major version component is what
        # must be checked for "1.0 or newer".
        if not (django.VERSION == (1, 0, "final") or
                (len(django.VERSION) == 5 and django.VERSION[0] >= 1)):
            raise ImportError
    except ImportError:
        dependency_error("Django 1.0 or newer is required.")

    # django-evolution
    try:
        imp.find_module('django_evolution')
    except ImportError:
        dependency_error("django_evolution is required.\n"
                         "http://code.google.com/p/django-evolution/")

    # PIL
    try:
        imp.find_module('PIL')
    except ImportError:
        dependency_error('The Python Imaging Library (PIL) is required.')

    # ReCaptcha
    try:
        import recaptcha
    except ImportError:
        dependency_error('The recaptcha python module is required.')

    import subprocess

    # The following checks are non-fatal warnings, since these dependencies are
    # merely recommended, not required.
    def dependency_warning(string):
        sys.stderr.write('Warning: %s\n' % string)
        global warnings_found
        warnings_found += 1

    try:
        imp.find_module('pysvn')
    except ImportError:
        dependency_warning('pysvn not found. SVN integration will not work.')

    try:
        imp.find_module('P4')
        subprocess.call(['p4', '-h'],
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE)
    except ImportError:
        dependency_warning('p4python (>=07.3) not found. Perforce integration '
                           'will not work.')
    except OSError:
        # NOTE(review): this path is fatal while all sibling SCM checks only
        # warn — confirm whether dependency_warning was intended here.
        dependency_error('p4 command not found. Perforce integration '
                         'will not work.')

    try:
        imp.find_module('mercurial')
    except ImportError:
        dependency_warning('hg not found. Mercurial integration will not '
                           'work.')

    try:
        imp.find_module('bzrlib')
    except ImportError:
        dependency_warning('bzrlib not found. Bazaar integration will not '
                           'work.')

    for check_func in (checks.get_can_enable_search,
                       checks.get_can_enable_syntax_highlighting):
        success, reason = check_func()

        if not success:
            dependency_warning(striptags(reason))

    if not is_exe_in_path('cvs'):
        dependency_warning('cvs binary not found. CVS integration '
                           'will not work.')

    if not is_exe_in_path('git'):
        dependency_warning('git binary not found. Git integration '
                           'will not work.')

    if not is_exe_in_path('mtn'):
        dependency_warning('mtn binary not found. Monotone integration '
                           'will not work.')

    # Django will print warnings/errors for database backend modules and flup
    # if the configuration requires it.

    if warnings_found:
        sys.stderr.write(settings.install_help)
        sys.stderr.write('\n\n')
def check_dependencies():
    """Check for required and recommended dependencies.

    Hard requirements (Python version, django_evolution, PIL, recaptcha)
    abort via ``dependency_error()``; optional SCM integrations only print
    a warning and increment the module-level ``warnings_found`` counter.
    """
    # Some of our checks require access to django.conf.settings, so
    # tell Django about our settings.
    #
    # This must go before the imports.
    setup_environ(settings)

    from django.template.defaultfilters import striptags
    from djblets.util.filesystem import is_exe_in_path

    from reviewboard.admin import checks

    from settings import dependency_error

    # Python 2.4
    if sys.version_info[0] < 2 or \
       (sys.version_info[0] == 2 and sys.version_info[1] < 4):
        dependency_error('Python 2.4 or newer is required.')

    # django-evolution
    try:
        imp.find_module('django_evolution')
    except ImportError:
        dependency_error("django_evolution is required.\n"
                         "http://code.google.com/p/django-evolution/")

    # PIL
    try:
        imp.find_module('PIL')
    except ImportError:
        # Older PIL installs expose the top-level 'Image' module instead of
        # the 'PIL' package, so accept either.
        try:
            imp.find_module('Image')
        except ImportError:
            dependency_error('The Python Imaging Library (PIL) is required.')

    # ReCaptcha
    try:
        import recaptcha
    except ImportError:
        dependency_error('The recaptcha python module is required.')

    import subprocess

    # The following checks are non-fatal warnings, since these dependencies are
    # merely recommended, not required.
    def dependency_warning(string):
        sys.stderr.write('Warning: %s\n' % string)
        global warnings_found
        warnings_found += 1

    try:
        imp.find_module('pysvn')
    except ImportError:
        dependency_warning('pysvn not found. SVN integration will not work.')

    try:
        imp.find_module('P4')
        subprocess.call(['p4', '-h'],
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE)
    except ImportError:
        dependency_warning('p4python (>=07.3) not found. Perforce integration'
                           ' will not work.')
    except OSError:
        # NOTE(review): this path is fatal while the sibling SCM checks only
        # warn — confirm whether dependency_warning was intended here.
        dependency_error('p4 command not found. Perforce integration will not'
                         ' work.')

    try:
        imp.find_module('mercurial')
    except ImportError:
        dependency_warning('hg not found. Mercurial integration will not'
                           ' work.')

    try:
        imp.find_module('bzrlib')
    except ImportError:
        dependency_warning('bzrlib not found. '
                           'Bazaar integration will not work.')

    for check_func in (checks.get_can_enable_search,
                       checks.get_can_enable_syntax_highlighting):
        success, reason = check_func()

        if not success:
            dependency_warning(striptags(reason))

    if not is_exe_in_path('cvs'):
        dependency_warning('cvs binary not found. CVS integration '
                           'will not work.')

    if not is_exe_in_path('git'):
        dependency_warning('git binary not found. Git integration '
                           'will not work.')

    if not is_exe_in_path('mtn'):
        dependency_warning('mtn binary not found. Monotone integration '
                           'will not work.')

    # Django will print warnings/errors for database backend modules and flup
    # if the configuration requires it.

    if warnings_found:
        sys.stderr.write(settings.install_help)
        sys.stderr.write('\n\n')
def check_dependencies(settings):
    """Check that runtime dependencies are present.

    Hard requirements (Python version, NodeJS, node modules, pipeline
    binaries) register fatal dependency errors; optional SCM integrations
    only register warnings. Errors are reported at the end via
    ``fail_if_missing_dependencies()``.
    """
    # We're now safe to import anything that might touch Django settings,
    # such as code utilizing the database. Start importing what we need for
    # dependency checks.
    from djblets.util.filesystem import is_exe_in_path
    from reviewboard.admin.import_utils import has_module
    from reviewboard.dependencies import (dependency_error,
                                          dependency_warning,
                                          fail_if_missing_dependencies)

    # Make sure the correct version of Python is being used. This should be
    # covered by setup.py, but it's best to make sure here.
    if sys.version_info[0] != 2 or sys.version_info[1] != 7:
        dependency_error('Python 2.7 is required.')

    # Check for NodeJS and installed modules, to make sure these weren't
    # missed during installation.
    if not is_exe_in_path('node'):
        dependency_error('node (from NodeJS) was not found. It must be '
                         'installed from your package manager or from '
                         'https://nodejs.org/')

    if not os.path.exists('node_modules'):
        dependency_error('The node_modules directory is missing. Please '
                         're-run `./setup.py develop` to install all NodeJS '
                         'dependencies.')

    for key in ('UGLIFYJS_BINARY', 'LESS_BINARY', 'BABEL_BINARY'):
        path = settings.PIPELINE[key]

        if not os.path.exists(path):
            dependency_error('%s is missing. Please re-run `./setup.py '
                             'develop` to install all NodeJS dependencies.'
                             % os.path.abspath(path))

    # The following checks are non-fatal warnings, since these dependencies
    # are merely recommended, not required. These are primarily for SCM
    # support.
    if not has_module('pysvn') and not has_module('subvertpy'):
        dependency_warning('Neither the subvertpy nor pysvn Python modules '
                           'were found. Subversion integration will not work. '
                           'For pysvn, see your package manager for the '
                           'module or download from '
                           'http://pysvn.tigris.org/project_downloads.html. '
                           'For subvertpy, run `pip install subvertpy`. We '
                           'recommend pysvn for better compatibility.')

    if has_module('P4'):
        try:
            subprocess.call(['p4', '-h'],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
        except OSError:
            # Fixed message grammar ("was not found") for consistency with
            # the hg/bzr/cvs warnings below.
            dependency_warning('The p4 command was not found. Perforce '
                               'integration will not work. To enable support, '
                               'download p4 from '
                               'http://cdist2.perforce.com/perforce/ and '
                               'place it in your PATH.')
    else:
        dependency_warning('The p4python module was not found. Perforce '
                           'integration will not work. To enable support, '
                           'run `pip install p4python`')

    if not is_exe_in_path('hg'):
        dependency_warning('The hg command was not found. Mercurial '
                           'integration will not work. To enable support, '
                           'run `pip install mercurial`')

    if not is_exe_in_path('bzr'):
        dependency_warning('The bzr command was not found. Bazaar integration '
                           'will not work. To enable support, run '
                           '`pip install bzr`')

    if not is_exe_in_path('cvs'):
        dependency_warning('The cvs command was not found. CVS integration '
                           'will not work. To enable support, install cvs '
                           'from your package manager or from '
                           'http://www.nongnu.org/cvs/')

    if not is_exe_in_path('git'):
        # Fixed message grammar ("was not found") for consistency with the
        # other warnings above.
        dependency_warning('The git command was not found. Git integration '
                           'will not work. To enable support, install git '
                           'from your package manager or from '
                           'https://git-scm.com/downloads')

    # Along with all those, Django will print warnings/errors for database
    # backend modules if the configuration requires it.
    #
    # Now that that's all done, check if anything was missing and, if so,
    # fail with some helpful text.
    fail_if_missing_dependencies()
def check_dependencies(settings):
    """Check that runtime dependencies are present.

    Hard requirements (Python version, NodeJS, node modules, pipeline
    binaries) register fatal dependency errors; optional SCM integrations
    only register warnings. Errors are reported at the end via
    ``fail_if_missing_dependencies()``.
    """
    # We're now safe to import anything that might touch Django settings,
    # such as code utilizing the database. Start importing what we need for
    # dependency checks.
    from djblets.util.filesystem import is_exe_in_path
    from reviewboard.admin.import_utils import has_module
    from reviewboard.dependencies import (dependency_error,
                                          dependency_warning,
                                          fail_if_missing_dependencies)

    # Make sure the correct version of Python is being used. This should be
    # covered by setup.py, but it's best to make sure here.
    pyver = sys.version_info[:2]

    if pyver < PYTHON_2_MIN_VERSION or (3, 0) <= pyver < PYTHON_3_MIN_VERSION:
        dependency_error('Python %s or %s+ is required.'
                         % (PYTHON_2_MIN_VERSION_STR,
                            PYTHON_3_MIN_VERSION_STR))

    # Check for NodeJS and installed modules, to make sure these weren't
    # missed during installation.
    if not is_exe_in_path('node'):
        dependency_error('node (from NodeJS) was not found. It must be '
                         'installed from your package manager or from '
                         'https://nodejs.org/')

    if not os.path.exists('node_modules'):
        dependency_error('The node_modules directory is missing. Please '
                         're-run `./setup.py develop` to install all NodeJS '
                         'dependencies.')

    for key in ('UGLIFYJS_BINARY', 'LESS_BINARY', 'BABEL_BINARY'):
        path = settings.PIPELINE[key]

        if not os.path.exists(path):
            dependency_error('%s is missing. Please re-run `./setup.py '
                             'develop` to install all NodeJS dependencies.'
                             % os.path.abspath(path))

    # The following checks are non-fatal warnings, since these dependencies
    # are merely recommended, not required. These are primarily for SCM
    # support.
    if not has_module('pysvn') and not has_module('subvertpy'):
        dependency_warning('Neither the subvertpy nor pysvn Python modules '
                           'were found. Subversion integration will not work. '
                           'For pysvn, see your package manager for the '
                           'module or download from '
                           'http://pysvn.tigris.org/project_downloads.html. '
                           'For subvertpy, run `pip install subvertpy`. We '
                           'recommend pysvn for better compatibility.')

    if has_module('P4'):
        try:
            subprocess.call(['p4', '-h'],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
        except OSError:
            # Fixed message grammar ("was not found") for consistency with
            # the hg/bzr/cvs warnings below.
            dependency_warning('The p4 command was not found. Perforce '
                               'integration will not work. To enable support, '
                               'download p4 from '
                               'http://cdist2.perforce.com/perforce/ and '
                               'place it in your PATH.')
    else:
        dependency_warning('The p4python module was not found. Perforce '
                           'integration will not work. To enable support, '
                           'run `pip install p4python`')

    if not is_exe_in_path('hg'):
        dependency_warning('The hg command was not found. Mercurial '
                           'integration will not work. To enable support, '
                           'run `pip install mercurial`')

    if not is_exe_in_path('bzr'):
        dependency_warning('The bzr command was not found. Bazaar integration '
                           'will not work. To enable support, run '
                           '`pip install bzr`')

    if not is_exe_in_path('cvs'):
        dependency_warning('The cvs command was not found. CVS integration '
                           'will not work. To enable support, install cvs '
                           'from your package manager or from '
                           'http://www.nongnu.org/cvs/')

    if not is_exe_in_path('git'):
        # Fixed message grammar ("was not found") for consistency with the
        # other warnings above.
        dependency_warning('The git command was not found. Git integration '
                           'will not work. To enable support, install git '
                           'from your package manager or from '
                           'https://git-scm.com/downloads')

    # Along with all those, Django will print warnings/errors for database
    # backend modules if the configuration requires it.
    #
    # Now that that's all done, check if anything was missing and, if so,
    # fail with some helpful text.
    fail_if_missing_dependencies()
def check_dependencies(settings):
    """Check for required and recommended dependencies.

    Hard requirements (Python version, django_evolution, PIL/Pillow,
    recaptcha) abort via ``settings.dependency_error``; optional SCM
    integrations only print a warning and increment the module-level
    ``warnings_found`` counter.
    """
    # Some of our checks require access to django.conf.settings, so
    # tell Django about our settings.
    #
    from django.template.defaultfilters import striptags
    from djblets.util.filesystem import is_exe_in_path

    from reviewboard.admin import checks

    dependency_error = settings.dependency_error

    # Python 2.4
    if sys.version_info[0] < 2 or \
       (sys.version_info[0] == 2 and sys.version_info[1] < 4):
        dependency_error('Python 2.4 or newer is required.')

    # django-evolution
    try:
        imp.find_module('django_evolution')
    except ImportError:
        dependency_error("django_evolution is required.\n"
                         "http://code.google.com/p/django-evolution/")

    # PIL
    try:
        imp.find_module('PIL')
    except ImportError:
        # Older installs expose the top-level 'Image' module instead of the
        # 'PIL' package, so accept either.
        try:
            imp.find_module('Image')
        except ImportError:
            dependency_error('The Python Imaging Library (Pillow or PIL) '
                             'is required.')

    # ReCaptcha
    try:
        # For some reason, imp.find_module('recaptcha') doesn't always work.
        import recaptcha
    except ImportError:
        dependency_error('The recaptcha python module is required.')

    # The following checks are non-fatal warnings, since these dependencies are
    # merely recommended, not required.
    def dependency_warning(string):
        sys.stderr.write('Warning: %s\n' % string)
        global warnings_found
        warnings_found += 1

    try:
        imp.find_module('subvertpy')
    except ImportError:
        try:
            imp.find_module('pysvn')
        except ImportError:
            dependency_warning('Neither subvertpy nor pysvn found. '
                               'SVN integration will not work.')

    try:
        imp.find_module('P4')
        subprocess.call(['p4', '-h'],
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE)
    except ImportError:
        dependency_warning('p4python (>=07.3) not found. Perforce integration '
                           'will not work.')
    except OSError:
        # NOTE(review): this path is fatal while the sibling SCM checks only
        # warn — confirm whether dependency_warning was intended here.
        dependency_error('p4 command not found. Perforce integration will not '
                         'work.')

    try:
        imp.find_module('mercurial')
    except ImportError:
        dependency_warning('hg not found. Mercurial integration will not '
                           'work.')

    try:
        imp.find_module('bzrlib')
    except ImportError:
        dependency_warning('bzrlib not found. Bazaar integration will not '
                           'work.')

    success, reason = checks.get_can_enable_syntax_highlighting()

    if not success:
        dependency_warning(striptags(reason))

    if not is_exe_in_path('cvs'):
        dependency_warning('cvs binary not found. CVS integration '
                           'will not work.')

    if not is_exe_in_path('git'):
        dependency_warning('git binary not found. Git integration '
                           'will not work.')

    if not is_exe_in_path('mtn'):
        dependency_warning('mtn binary not found. Monotone integration '
                           'will not work.')

    # Django will print warnings/errors for database backend modules and flup
    # if the configuration requires it.

    if warnings_found:
        sys.stderr.write(settings.install_help)
        sys.stderr.write('\n\n')
def check_updates_required(): """Checks if there are manual updates required. Sometimes, especially in developer installs, some things need to be tweaked by hand before Review Board can be used on this server. """ global _updates_required global _install_fine if not _updates_required and not _install_fine: # Check if the site has moved and the old media directory no longer # exists. if not os.path.exists(settings.MEDIA_ROOT): new_media_root = os.path.join(settings.HTDOCS_ROOT, "media") if os.path.exists(new_media_root): from djblets.siteconfig.models import SiteConfiguration siteconfig = SiteConfiguration.objects.get_current() siteconfig.set("site_media_root", new_media_root) settings.MEDIA_ROOT = new_media_root # Check if there's a media/uploaded/images directory. If not, this is # either a new install or is using the old-style media setup and needs # to be manually upgraded. uploaded_dir = os.path.join(settings.MEDIA_ROOT, "uploaded") if not os.path.isdir(uploaded_dir) or \ not os.path.isdir(os.path.join(uploaded_dir, "images")): _updates_required.append( ("admin/manual-updates/media-upload-dir.html", { 'MEDIA_ROOT': settings.MEDIA_ROOT })) try: from reviewboard.changedescs.models import ChangeDescription ChangeDescription.objects.count() except: # We were unable to load this, so it's likely that the user # hasn't run syncdb yet. _updates_required.append( ("admin/manual-updates/run-syncdb.html", {})) if not is_exe_in_path('patch'): if sys.platform == 'win32': binaryname = 'patch.exe' else: binaryname = 'patch' _updates_required.append( ("admin/manual-updates/install-patch.html", { 'platform': sys.platform, 'binaryname': binaryname, })) # # NOTE: Add new checks above this. # _install_fine = not _updates_required return _updates_required
'absolute_paths': False, }, } BLESS_IMPORT_PATHS = ('rb/css/',) PIPELINE_CSS_COMPRESSOR = None PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.jsmin.JSMinCompressor' # On production (site-installed) builds, we always want to use the pre-compiled # versions. We want this regardless of the DEBUG setting (since they may # turn DEBUG on in order to get better error output). # # On a build running out of a source tree, for testing purposes, we want to # use the raw .less and JavaScript files when DEBUG is set. When DEBUG is # turned off in a non-production build, though, we want to be able to play # with the built output, so treat it like a production install. if PRODUCTION or not DEBUG or os.getenv('FORCE_BUILD_MEDIA', ''): if is_exe_in_path('lessc'): PIPELINE_COMPILERS = ['pipeline.compilers.less.LessCompiler'] else: PIPELINE_COMPILERS = ['djblets.pipeline.compilers.bless.BlessCompiler'] PIPELINE = True elif DEBUG: PIPELINE_COMPILERS = [] PIPELINE = False # Packages to unit test TEST_PACKAGES = ['reviewboard']
def check_updates_required():
    """Check if there are manual updates required.

    Sometimes, especially in developer installs, some things need to be
    tweaked by hand before Review Board can be used on this server.

    Returns:
        list:
        A list of ``(template_name, context_dict)`` pairs, one per manual
        update the administrator still needs to perform. Empty when the
        install is fine.
    """
    global _install_fine

    updates_required = []

    if not _install_fine:
        site_dir = os.path.dirname(settings.HTDOCS_ROOT)
        devel_install = (os.path.exists(
            os.path.join(settings.LOCAL_ROOT, 'manage.py')))
        siteconfig = None

        # Check if we can access a SiteConfiguration. There should always
        # be one, unless the user has erased stuff by hand.
        #
        # This also checks for any sort of errors in talking to the database.
        # This could be due to the database being down, or corrupt, or
        # tables locked, or an empty database, or other cases. We want to
        # catch this before getting the point where plain 500 Internal Server
        # Errors appear.
        try:
            siteconfig = SiteConfiguration.objects.get_current()
        except (DatabaseError, SiteConfiguration.DoesNotExist) as e:
            updates_required.append(
                ('admin/manual-updates/database-error.html', {
                    'error': e,
                }))

        # Check if the version running matches the last stored version.
        # Only do this for non-debug installs, as it's really annoying on
        # a developer install.
        cur_version = get_version_string()

        if siteconfig and siteconfig.version != cur_version:
            updates_required.append(
                ('admin/manual-updates/version-mismatch.html', {
                    'current_version': cur_version,
                    'stored_version': siteconfig.version,
                    'site_dir': site_dir,
                    'devel_install': devel_install,
                }))

        # Check if the site has moved and the old media directory no longer
        # exists.
        if siteconfig and not os.path.exists(settings.STATIC_ROOT):
            new_media_root = os.path.join(settings.HTDOCS_ROOT, "static")

            if os.path.exists(new_media_root):
                siteconfig.set("site_media_root", new_media_root)
                settings.STATIC_ROOT = new_media_root

        # Check if the user has any pending static media configuration
        # changes they need to make.
        if siteconfig and 'manual-updates' in siteconfig.settings:
            stored_updates = siteconfig.settings['manual-updates']

            if not stored_updates.get('static-media', False):
                updates_required.append(
                    ('admin/manual-updates/server-static-config.html', {
                        'STATIC_ROOT': settings.STATIC_ROOT,
                        'SITE_ROOT': settings.SITE_ROOT,
                        'SITE_DIR': settings.LOCAL_ROOT,
                    }))

        # Check if there's a media/uploaded/images directory. If not, this is
        # either a new install or is using the old-style media setup and needs
        # to be manually upgraded.
        uploaded_dir = os.path.join(settings.MEDIA_ROOT, "uploaded")

        if not os.path.isdir(uploaded_dir) or \
           not os.path.isdir(os.path.join(uploaded_dir, "images")):
            updates_required.append(
                ("admin/manual-updates/media-upload-dir.html", {
                    'MEDIA_ROOT': settings.MEDIA_ROOT
                }))

        try:
            username = getpass.getuser()
        except ImportError:
            # This will happen if running on Windows (which doesn't have
            # the pwd module) and if %LOGNAME%, %USER%, %LNAME% and
            # %USERNAME% are all undefined.
            username = "******"

        # Check if the data directory (should be $HOME) is writable by us.
        data_dir = os.environ.get('HOME', '')

        if (not data_dir or
            not os.path.isdir(data_dir) or
            not os.access(data_dir, os.W_OK)):
            # Note: username was already computed above; the old code
            # redundantly recomputed it here.
            updates_required.append(('admin/manual-updates/data-dir.html', {
                'data_dir': data_dir,
                'writable': os.access(data_dir, os.W_OK),
                'server_user': username,
                'expected_data_dir': os.path.join(site_dir, 'data'),
            }))

        # Check if the the legacy htdocs and modern static extension
        # directories exist and are writable by us.
        ext_roots = [settings.MEDIA_ROOT]

        if not settings.DEBUG:
            ext_roots.append(settings.STATIC_ROOT)

        for root in ext_roots:
            ext_dir = os.path.join(root, 'ext')

            if not os.path.isdir(ext_dir) or not os.access(ext_dir, os.W_OK):
                updates_required.append(('admin/manual-updates/ext-dir.html', {
                    'ext_dir': ext_dir,
                    'writable': os.access(ext_dir, os.W_OK),
                    'server_user': username,
                }))

        if not is_exe_in_path('patch'):
            if sys.platform == 'win32':
                binaryname = 'patch.exe'
            else:
                binaryname = 'patch'

            updates_required.append(
                ("admin/manual-updates/install-patch.html", {
                    'platform': sys.platform,
                    'binaryname': binaryname,
                    'search_path': os.getenv('PATH'),
                }))

        #
        # NOTE: Add new checks above this.
        #

    _install_fine = not updates_required

    return updates_required