def begin_site(self):
    jobs = self.site.content.node_from_relative_path('jobs/')
    with Log("Checking jobs metadata") as l:
        last_exc = None
        for resource in jobs.walk_resources():
            if not resource.is_processable:
                l.output("Skipping %s" % (resource.name, ))
                continue
            with Log(resource.name):
                # Ensure that all tags are lowercase
                resource.meta.tags = [
                    self.fix_tag(a) for a in resource.meta.tags
                ]
                for tester in self._get_testers():
                    assert tester.__doc__ is not None
                    with Log("Test %s" % (tester.__doc__.strip())) as l2:
                        try:
                            tester(resource)
                        except Exception as e:
                            for line in traceback.format_exc().strip().splitlines():
                                l2.output(line)
                            l2.ok_msg = l2.fail_msg
                            last_exc = e
        if last_exc is not None:
            raise last_exc
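# Every snippet in this file relies on a small ``Log`` helper that is not
# shown here. The class below is only a sketch of what it might look like,
# inferred from how it is used in these functions: a context manager that
# prints nested, indented sections, exposes ``output()`` for detail lines
# (called both on instances and on the class itself), and uses
# ``ok_msg``/``fail_msg`` as the status printed when a section closes.
# The real implementation almost certainly differs.
class Log(object):

    _depth = 0  # current nesting level, shared across all Log sections

    def __init__(self, message, ok_msg="ok", fail_msg="FAIL"):
        self.message = message
        self.ok_msg = ok_msg
        self.fail_msg = fail_msg

    @classmethod
    def output(cls, line):
        # Print a detail line indented under the currently open section.
        print("%s%s" % ("  " * cls._depth, line))

    def __enter__(self):
        print("%s%s..." % ("  " * Log._depth, self.message))
        Log._depth += 1
        return self

    def __exit__(self, exc_type, exc_value, tb):
        Log._depth -= 1
        status = self.fail_msg if exc_type is not None else self.ok_msg
        print("%s%s: %s" % ("  " * Log._depth, self.message, status))
        return False  # never swallow exceptions raised inside the section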
def main():
    with Log("Identifying paths") as l:
        commit = os.environ.get("TRAVIS_COMMIT", "manual")
        temp_dir = tempfile.mkdtemp()
        l.output("Temp dir: %s" % temp_dir)
        working_dir = os.path.join(temp_dir, 'build')
        checkout_dir = os.path.join(temp_dir, 'checkout')
        template_dir = os.path.abspath(os.path.dirname(__file__))
        hyde_root = os.path.join(template_dir, 'hyde')
        build_dir = os.path.join(hyde_root, 'deploy')
        gh_token = os.environ['GH_TOKEN']

    with Log("Checking out pythonjobs.github.io"):
        os.mkdir(checkout_dir)
        repo_url = 'https://%[email protected]/pythonjobs/pythonjobs.github.io.git' % gh_token
        subprocess.check_call(['git', 'clone', repo_url, checkout_dir],
                              cwd=checkout_dir)

    with Log("Setting up working dir"):
        with Log("Copy in built site"):
            shutil.copytree(build_dir, working_dir)
        with Log("Move in .git"):
            os.rename(os.path.join(checkout_dir, '.git'),
                      os.path.join(working_dir, '.git'))

    with Log("Committing"):
        with Log("Adding any new files"):
            subprocess.check_call(['git', 'add', '-A'], cwd=working_dir)
        with Log("Setting up git variables"):
            subprocess.check_call(
                ['git', 'config', 'user.email', '*****@*****.**'],
                cwd=working_dir)
            subprocess.check_call(
                ['git', 'config', 'user.name', 'Travis Job'],
                cwd=working_dir)
        with Log("Committing"):
            subprocess.check_call(
                ['git', 'commit', '-a', '-m', 'Site deploy for %s' % commit],
                cwd=working_dir)

    with Log("Pushing"):
        subprocess.check_call(['git', 'push', 'origin', 'master'],
                              cwd=working_dir)
def begin_site(self):
    jobs = self.site.content.node_from_relative_path('jobs/')
    with Log("Checking jobs metadata") as l:
        for resource in jobs.walk_resources():
            if not resource.is_processable:
                l.output("Skipping %s" % (resource.name, ))
                continue
            with Log(resource.name):
                # Ensure that all tags are lowercase
                resource.meta.tags = [
                    self.fix_tag(a) for a in resource.meta.tags
                ]
                for tester in self._get_testers():
                    docstring = tester.__doc__.strip()
                    assert docstring
                    with Log("Test %s" % (docstring, )):
                        tester(resource)

    if self.errors:
        proc = subprocess.Popen(
            [sys.executable, os.path.join(ROOT, "comment.py")],
            stdin=subprocess.PIPE)
        proc.communicate(self.get_pr_comment())
        with Log("Site Processing Errors"):
            for filename, errors in self.errors.items():
                with Log(filename, ok_msg="x") as log:
                    for error in errors:
                        log.output(error)
        raise HydeException("Some job listings failed validation")

    self.site.locations = json.dumps(self.location_finder.known_locations)
def report_error(msg):
    with Log("Error Found"):
        with Log("Commenting on PR"):
            proc = subprocess.Popen(
                [sys.executable, os.path.join(ROOT, "comment.py")],
                stdin=subprocess.PIPE)
            proc.communicate(COMMENT_TEMPLATE % msg)
    raise AssertionError(msg)
def get_preview(driver, filename, pull_request_num):
    basename, ext = os.path.splitext(filename)
    url = "http://localhost:8080/jobs/%s.html" % urllib.quote(basename)
    with Log(url):
        driver.get(url)
        while driver.execute_script(
                "return document.readyState") != "complete":
            time.sleep(0.5)
        png = driver.get_screenshot_as_png()
    with Log("Uploading Screenshot"):
        return upload_screenshot(png)
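# ``upload_screenshot`` is not defined in these snippets. Based on the image
# URL template used in get_file_previews() below (an S3 bucket named
# "pythonjobs-screenshots" holding "<id>.png" objects), a plausible helper
# could look like this sketch. The boto3 usage, the uuid-based key naming and
# the credential handling are assumptions, not the project's actual code.
import uuid

import boto3


def upload_screenshot(png_bytes):
    """Hypothetical helper: store a PNG in S3 and return its id (sketch only)."""
    image_id = uuid.uuid4().hex
    s3 = boto3.client('s3')  # assumes credentials come from the environment
    s3.put_object(
        Bucket='pythonjobs-screenshots',
        Key='%s.png' % image_id,
        Body=png_bytes,
        ContentType='image/png',
    )
    return image_id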
def find_location(self, location):
    if location in self.known_locations:
        return self.known_locations[location]
    with Log("Querying Google API for location"):
        result = self.query_location(location)
    if result:
        self.known_locations[location] = result
    return result
def add_comment(comment):
    with Log("PR comment"):
        if ("TRAVIS_PULL_REQUEST" not in os.environ
                or os.environ["TRAVIS_PULL_REQUEST"] == 'false'):
            Log.output("No TRAVIS_PULL_REQUEST found, not commenting")
            return
        pr_num = os.environ["TRAVIS_PULL_REQUEST"]
        url = "https://i2xwshcjfa.execute-api.eu-west-1.amazonaws.com/live/pythonjobs-commentbot/prcomment"
        with Log("Submitting request"):
            req = requests.post(
                url,
                json={
                    "pr": int(pr_num),
                    "msg": COMMENT_TEMPLATE % comment
                },
            )
            print(req.text)
            req.raise_for_status()
def _get_json(self, url):
    with Log('Getting %s' % url):
        res = urllib.urlopen(url)
        if res.code != 200:
            return {}
        try:
            return json.load(res)
        except Exception:
            # Malformed or empty response body
            return {}
def node_complete(self, node):
    if node.path != self.jobs_dir:
        return
    with Log("Building Full-Text index"):
        num_resources = len(self.resources)
        threshold = num_resources * self.INDEX_FREQUENCY_LIMIT
        tree = Tree()
        for word, by_resource in self.by_word.iteritems():
            tree.add(word, by_resource.items())
        self.write_index((self.resources, tree.collapse(threshold)))
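# ``Tree`` (used in node_complete above) is also not shown in these snippets.
# The stand-in below is only the simplest shape that satisfies the calls made
# there, namely add(word, postings) and collapse(threshold), assuming that
# collapse() drops words appearing in more resources than the threshold (too
# common to be useful for search) and returns something json.dumps can
# handle. It is a guess for illustration, not the project's real index
# structure.
class Tree(object):

    def __init__(self):
        self.words = {}  # word -> list of (resource index, hit info) pairs

    def add(self, word, postings):
        # ``postings`` mirrors ``by_resource.items()`` in node_complete().
        self.words[word] = list(postings)

    def collapse(self, threshold):
        # Keep only words occurring in at most ``threshold`` resources and
        # return a plain dict, ready to be embedded in the JSON index.
        return {
            word: postings
            for word, postings in self.words.items()
            if len(postings) <= threshold
        }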
def test_lookup_location(self, resource):
    """Get the latitude/longitude values for this location"""
    # This is NOT a test, but performs useful per-node things
    location = getattr(resource.meta.contact, 'address', None)
    if location is None:
        location = resource.meta.location
    with Log("Finding job location"):
        coords = self.location_finder.find_location(location)
        if coords is not None:
            resource.meta._coords = coords
def main(jobs_dir):
    with Log("Identifying paths"):
        jobs_root = os.path.abspath(jobs_dir)
        template_dir = os.path.abspath(os.path.dirname(__file__))
        hyde_root = os.path.join(template_dir, 'hyde')
        jobs_source = os.path.join(jobs_root, 'jobs')
        jobs_dest = os.path.join(hyde_root, 'content', 'jobs')
        deploy_dir = os.path.join(hyde_root, 'deploy')
        jobs_meta_path = os.path.join(jobs_dest, 'meta.yaml')

    with Log("Copy in jobs") as l:
        for filename in os.listdir(jobs_source):
            if not filename.endswith(".html"):
                l.output("Skipping: %s" % filename)
                continue
            with Log('Copying %s' % filename):
                src_path = '%s/%s' % (jobs_source, filename)  # This is safer than join()
                dest_path = '%s/%s' % (jobs_dest, filename)
                shutil.copyfile(src_path, dest_path)

    with Log("Building Site"):
        subprocess.check_call(['hyde', '-x', '-s', hyde_root, 'gen', '-r'])
def get_file_previews(pull_request_num):
    with Log("Generating Previews"):
        preview_ids = []
        driver = webdriver.PhantomJS()
        hyde_root = os.path.join(TEMPLATE_DIR, 'hyde')
        with Log("Starting Server"):
            server_proc = subprocess.Popen(['hyde', '-s', hyde_root, 'serve'])
            time.sleep(4)
        try:
            driver.set_window_size(800, 600)
            for filename in get_modified_files():
                with Log(filename):
                    image_id = get_preview(driver, filename, pull_request_num)
                    preview_ids.append(image_id)
        finally:
            driver.close()
            server_proc.terminate()
            server_proc.wait()

    if not preview_ids:
        return

    link_template = '![Job listing preview](https://s3-eu-west-1.amazonaws.com/pythonjobs-screenshots/%s.png)'
    image_links = [link_template % preview_id for preview_id in preview_ids]
    message = """Here are some screenshots of what the live listing should look like:

%s
""" % ("\n".join(image_links), )
    proc = subprocess.Popen(
        [sys.executable, os.path.join(TEMPLATE_DIR, "comment.py")],
        stdin=subprocess.PIPE)
    proc.communicate(message)
def main(jobs_dir):
    with Log("Identifying paths"):
        jobs_root = os.path.abspath(jobs_dir)
        template_dir = os.path.abspath(os.path.dirname(__file__))
        hyde_root = os.path.join(template_dir, 'hyde')
        jobs_source = os.path.join(jobs_root, 'jobs')
        jobs_dest = os.path.join(hyde_root, 'content', 'jobs')

    with Log("Checking for unexpected files"):
        unexpected_files = set()
        with Log(jobs_root) as l:
            for filename in os.listdir(jobs_root):
                if filename.endswith('.html') or filename.endswith('.md'):
                    if filename not in {"README.md", "jobtemplate.html"}:
                        unexpected_files.add((filename, jobs_root))
        with Log(jobs_source) as l:
            for filename in os.listdir(jobs_source):
                if not filename.endswith(".html"):
                    unexpected_files.add((filename, jobs_source))
        if unexpected_files:
            file_descs = ["%s (%s)" % i for i in unexpected_files]
            report_error("""
All job files must be added under the jobs/ directory, and must end in '.html'.

I found the following files that do not match this:

 * %s
""" % ("\n * ".join(file_descs)))

    with Log("Copy in jobs") as l:
        for job_file in os.listdir(jobs_source):
            with Log('Copying %s' % job_file):
                src_path = '%s/%s' % (jobs_source, job_file)  # This is safer than join()
                dest_path = '%s/%s' % (jobs_dest, job_file)
                validate(src_path)
                shutil.copyfile(src_path, dest_path)

    with Log("Building & Validating Site"):
        subprocess.check_call(['hyde', '-x', '-s', hyde_root, 'gen', '-r'])
def write_index(self, index):
    with Log("Encoding as JSON") as l:
        data = json.dumps(index, separators=(',', ':'))
        l.output("%s bytes" % len(data))
    self.deploy(data, "text_index.json")
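# Note: the compact separators above just drop json.dumps' default spaces,
# shrinking the shipped index a little. For reference (standard library
# behaviour):
#
#     json.dumps({"a": [1, 2]})                         -> '{"a": [1, 2]}'
#     json.dumps({"a": [1, 2]}, separators=(',', ':'))  -> '{"a":[1,2]}'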