def main():
    """Upload the current release's notes.html to the S3 release bucket."""
    logging.basicConfig(level=logging.INFO)
    c = boto.connect_s3()
    # validate=False skips boto's extra bucket-existence round trip (which
    # also requires list permission); matches the other uploader in this repo.
    b = c.get_bucket('files.projecthawkthorne.com', validate=False)
    path = os.path.join('releases', 'v' + version.current_version())
    upload.upload_path(b, path, 'notes.html')
def main():
    """Push notes.html for the current release up to S3."""
    logging.basicConfig(level=logging.INFO)
    connection = boto.connect_s3()
    bucket = connection.get_bucket('files.projecthawkthorne.com',
                                   validate=False)
    release_dir = os.path.join('releases',
                               'v' + version.current_version())
    upload.upload_path(bucket, release_dir, 'notes.html')
def post_content():
    """Render the release announcement from the last closed release PR.

    Returns the rendered templates/post.md text.
    Raises ValueError when no closed pull request targets the release branch.
    """
    resp = requests.get(pulls_url, params={'state': 'closed', 'base': 'release'})
    pulls = resp.json()
    if not pulls:
        # Bug fix: implicit concatenation of 'no' + 'post' used to read
        # "which means nopost" — a space was missing.
        raise ValueError('No pull request for this release, which means no post')
    # Context manager closes the template file instead of leaking the handle.
    with open('templates/post.md') as f:
        template = jinja2.Template(f.read())
    return template.render(pull=pulls[0], version=version.current_version())
def main():
    """Read the announcement body from the input file and post it."""
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument('input', type=argparse.FileType('r'))
    parser.add_argument('-d', '--debug', default=False, action='store_true')
    args = parser.parse_args()
    current = version.current_version()
    announcement = args.input.read()
    permalink = update_reddit(title.format(current), announcement)
    update_twitter(current, permalink)
def post_content():
    """Build the announcement post text for the current release.

    Looks up the most recent closed pull request against the release branch
    and renders templates/post.md with it.

    Raises ValueError when no such pull request exists.
    """
    resp = requests.get(pulls_url, params={
        'state': 'closed',
        'base': 'release',
    })
    pulls = resp.json()
    if not pulls:
        # Bug fix: the old implicitly-concatenated literal rendered as
        # "which means nopost" (missing space between 'no' and 'post').
        raise ValueError('No pull request for this release, which means no post')
    # with-block releases the template file handle deterministically.
    with open('templates/post.md') as f:
        template = jinja2.Template(f.read())
    return template.render(pull=pulls[0], version=version.current_version())
def main():
    """Format the release post from a template and publish it.

    Reads the post body from the CLI-supplied input file, interpolates it
    into templates/post.md, posts to reddit, then tweets the permalink.
    """
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument("input", type=argparse.FileType("r"))
    parser.add_argument("-d", "--debug", default=False, action="store_true")
    args = parser.parse_args()
    v = version.current_version()
    body = args.input.read()
    # Fix: open(...).read() leaked the file handle; close it promptly.
    with open("templates/post.md") as f:
        template = f.read()
    post = template.format(body=body, version=v)
    post_url = update_reddit(title.format(v), post,
                             community=version.is_release())
    update_twitter(v, post_url)
def main():
    """Render templates/post.md with the input body and announce the release.

    Posts to reddit (community selection depends on version.is_release())
    and then tweets the resulting post URL.
    """
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument('input', type=argparse.FileType('r'))
    parser.add_argument('-d', '--debug', default=False, action='store_true')
    args = parser.parse_args()
    v = version.current_version()
    body = args.input.read()
    # Fix: previously the template file handle was opened and never closed.
    with open('templates/post.md') as f:
        template = f.read()
    post = template.format(body=body, version=v)
    post_url = update_reddit(title.format(v), post,
                             community=version.is_release())
    update_twitter(v, post_url)
def main():
    """Announce the current version on social media.

    Skips posting entirely when the current version is not a release.
    """
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument('input', type=argparse.FileType('r'))
    parser.add_argument('-d', '--debug', default=False, action='store_true')
    args = parser.parse_args()
    current = version.current_version()
    post_body = args.input.read()
    # Guard clause: non-release builds never hit reddit or twitter.
    if not version.is_release():
        logging.info('[NO RELEASE] Not posting to social media')
        return
    permalink = update_reddit(title.format(current), post_body)
    update_twitter(current, permalink)
import os
import urllib
import version
import json
import sys

# Prints 'true' when the current version has not yet been announced by the
# Britta-bot reddit account, 'false' otherwise (consumed by shell tooling).
if __name__ == "__main__":
    current = version.current_version()
    feed = urllib.urlopen("http://www.reddit.com/user/Britta-bot.json")
    listing = json.load(feed)
    versions = set()
    for post in listing['data']['children']:
        # Only look at link submissions (kind t3), not comments.
        if post['kind'] != 't3':
            continue
        # Fix: the old two-target unpack (`_, v = title.split(...)`) raised
        # ValueError on any title that didn't contain exactly one
        # "Hawkthorne " marker, crashing the whole check; skip those instead.
        pieces = post['data']['title'].split("Hawkthorne ")
        if len(pieces) != 2:
            continue
        versions.add(pieces[1])
    if current not in versions:
        sys.stdout.write('true')
    else:
        sys.stdout.write('false')
    # sys.exit instead of the site-injected exit() builtin.
    sys.exit(0)
# NOTE(review): fragment — the statements before `return item` belong to a
# function whose `def` line is outside this view, and the __main__ section
# below looks truncated (it ends right after creating sparkle/releases).
# Code is left byte-identical; restyling an incomplete unit is unsafe.
#
# What the visible code shows: it fills in a Sparkle appcast <item> —
# pubDate from the current UTC time, an <enclosure> carrying the download
# URL, byte length, MIME type, sparkle:version and a DSA signature — then
# returns the item. The entry point derives the current version string,
# downloads sparkle/appcast.xml from CAST_URL if absent, parses it, and
# ensures sparkle/releases/ exists. Presumably this maintains an
# auto-update feed for the Mac build — TODO confirm against the full file.
date = etree.SubElement(item, 'pubDate') date.text = formatdate(timegm(datetime.datetime.now().utctimetuple())) full_zip = etree.SubElement(item, 'enclosure') full_zip.attrib['url'] = HAWK_URL.format(version) full_zip.attrib['length'] = unicode(os.path.getsize(zip_path)) full_zip.attrib['type'] = "application/octet-stream" full_zip.attrib['sparkle:version'] = sparkle_version full_zip.attrib['sparkle:dsaSignature'] = sign(zip_path) return item if __name__ == "__main__": current_version = "v" + version.current_version() sparkle_current_version = current_version.replace("v", "") current_dir = path.join("sparkle", "releases", current_version) if not path.exists("sparkle/appcast.xml"): urllib.urlretrieve(CAST_URL, "sparkle/appcast.xml") appcast = etree.parse("sparkle/appcast.xml") channel = appcast.find('channel') # Namespace bull root = appcast.getroot() if not path.exists("sparkle/releases"): os.makedirs("sparkle/releases")
import os
import urllib
import version
import json
import sys

# Emits 'true' when the current release's notes.html is not yet public
# (the bucket answers 403), 'false' once it exists.
if __name__ == "__main__":
    current = version.current_version()
    notes_url = "http://files.projecthawkthorne.com/releases/{}/notes.html"
    response = urllib.urlopen(notes_url.format(current))
    missing = response.getcode() == 403
    sys.stdout.write('true' if missing else 'false')
    exit(0)