def index(request):
    """Render a minimal HTML page embedding the org's Twitter timeline.

    Looks the org up (by ``get_owner()`` name) in the gci-leaders data
    dump; if a Twitter URL is found there, an embedded timeline widget is
    appended to the response.
    """
    logger = logging.getLogger(__name__)
    logger.info('this package is alive')
    s = []
    org_name = get_owner()
    # BUG FIX: this was previously initialised via
    # `org_twitter_handle = org_name = get_owner()`, so the
    # `is not None` guard below never suppressed the widget and the org
    # NAME was rendered as a Twitter handle when the org was missing
    # from the data dump. Start from None instead.
    org_twitter_handle = None
    s.append('<link rel="shortcut icon" type="image/png" '
             'href="../static/favicon.png"/>')
    api_data_dump = json.loads(
        requests.get('https://gci-leaders.netlify.com/data.json').content)
    for item in api_data_dump:
        if item['name'] == org_name:
            # Guard against a missing/null twitter_url, which previously
            # crashed with AttributeError on .split().
            twitter_url = item.get('twitter_url')
            if twitter_url:
                org_twitter_handle = twitter_url.split('twitter.com/')[-1]
    if org_twitter_handle is not None:
        s.append('<a class="twitter-timeline" data-height="1000" '
                 'data-link-color="#2B7BB9" '
                 'href="https://twitter.com/{twitter_handle}">'
                 'Tweets by {twitter_handle}</a> <script async '
                 'src="https://platform.twitter.com/widgets.js" '
                 'charset="utf-8"></script>'.format(
                     twitter_handle=org_twitter_handle))
    return HttpResponse('\n'.join(s))
def activity_json(request):
    """Return the org's scraped issue-activity data as a JSON response.

    Fetches the gh-board issue dump for the org's GitHub Pages site,
    runs it through ``Scraper``, and returns the result as JSON. Any
    malformed or schema-less payload degrades to an empty JSON object
    rather than a 500.
    """
    org_name = get_owner()
    # URL to grab all issues from
    issues_url = 'http://' + org_name + '.github.io/gh-board/issues.json'
    content = requests.get(issues_url)
    try:
        parsed_json = content.json()
    except ValueError:
        # requests raises ValueError on undecodable bodies
        # (json.JSONDecodeError is a subclass, so the previous narrower
        # except could miss decode failures on older requests versions).
        return HttpResponse('{}')
    # Degrade gracefully when the payload decodes but lacks the expected
    # 'issues' key, matching the decode-failure path above (previously a
    # KeyError would bubble up as a 500).
    if 'issues' not in parsed_json:
        return HttpResponse('{}')
    scraper = Scraper(parsed_json['issues'], datetime.datetime.today())
    real_data = scraper.get_data()
    return HttpResponse(json.dumps(real_data))
class LatestTasksFeed(Feed):
    """RSS feed of GCI tasks, most recently modified first."""

    title = 'GCI tasks feed'
    link = get_deploy_url() + '/gci/tasks/rss.xml'
    description = 'GCI tasks ordered by modification time.'
    author_name = get_owner()
    author_link = get_deploy_url()

    def items(self):
        # Load every task from the generated site data and order them
        # newest-modification-first.
        loader = YAML()
        tasks_path = os.path.join('_site', 'tasks.yaml')
        with open(tasks_path) as stream:
            tasks = loader.load(stream)
        return sorted(tasks.values(),
                      key=lambda task: task['last_modified'],
                      reverse=True)

    def item_title(self, item):
        return item['name']

    def item_description(self, item):
        # Render the markdown description, appending the external URL
        # (when present) as a markdown link.
        pieces = [item['description']]
        external = item['external_url']
        if external:
            pieces.append(
                '\n\nExternal URL: [{url}]({url})'.format(url=external))
        return markdown2.markdown(''.join(pieces))

    def item_link(self, item):
        return 'https://codein.withgoogle.com/tasks/' + str(item['id'])

    def item_pubdate(self, item):
        return dateutil.parser.parse(item['last_modified'])

    def item_updateddate(self, item):
        # Same timestamp as the pubdate: tasks only track one
        # modification time.
        return dateutil.parser.parse(item['last_modified'])

    def item_author_name(self):
        return self.author_name

    def item_categories(self, item):
        return tuple(item['tags'])
import scrapy
import string
import json
import logging
from ruamel.yaml import YAML
import os.path
from collections import OrderedDict
from community.git import get_owner
from gsoc.config import get_year
from gsoc.config import get_current_year

# Module-level logger scoped under this module's dotted path.
logger = logging.getLogger(__name__ + '.index')
org_name = get_owner()
year = get_year()
yaml = YAML()

# NOTE(review): this guard hard-codes 2018 instead of comparing against
# get_current_year() (imported above but never used) — presumably the
# intent was `int(year) >= int(get_current_year())`; confirm before
# changing. As written, any configured year >= 2018 aborts the process.
if int(year) >= 2018:
    print("Year should not be a present or future year\n")
    exit()


class GsocSpider(scrapy.Spider):
    # Scrapy spider over the GSoC archive's organization listing for the
    # configured (past) year.
    name = 'gsoc'
    start_urls = [
        'https://summerofcode.withgoogle.com/archive/{}/organizations'.format(
            year),
    ]

    def parse(self, response):
        # NOTE(review): this method is truncated in the visible source
        # (the line below continues past this chunk).
        home_url = 'https://summerofcode.withgoogle.com/'\