def get(self):
    result = requests.get(self.url)
    xml = BS(result.text, features="lxml")
    for entry in self.get_entries(xml):
        yield Entry(title=self.get_title(entry),
                    image=self.get_image(entry),
                    link=self.get_link(entry))
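# A minimal consumption sketch, assuming the method above lives on a
# hypothetical FeedScraper class that provides the `url` attribute and
# the get_entries/get_title/get_image/get_link helpers it calls:
#
#     scraper = FeedScraper("https://example.com/feed.xml")
#     for entry in scraper.get():
#         print(entry.title, entry.link)
#
# Because get() is a generator, nothing is fetched until iteration begins.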
def process_entries(entries: List[FeedParserDict], source: SourceModel) -> None:
    """Iterate through entries (presumably from a feed), add the new ones
    to the database.

    :param entries: The entries from the feed.
    :param source: The source that the entries should be associated with.
    """
    for entry in entries:
        # build UTC time
        updated_parsed: struct_time = entry.get('updated_parsed', None)
        updated: datetime = utc_timestamp_from_struct_time(updated_parsed)

        link: Optional[str] = entry.get('link', None)
        if not link:
            # no point to an entry without a link
            continue

        title: str = entry.get('title', link)
        summary: str = entry.get('summary', '')

        # check if this entry already exists
        check: Optional[EntryModel] = EntryModel.get(
            link=link, source=source, title=title, updated=updated)
        if check is not None:
            continue

        # unique entry, add it
        EntryModel(link=link, source=source, summary=summary,
                   title=title, updated=updated)
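# A minimal driver sketch for the function above, assuming feedparser is
# installed and that SourceModel rows expose the feed address in a
# hypothetical `url` field:
#
#     import feedparser
#
#     for source in SourceModel.select():
#         feed = feedparser.parse(source.url)
#         process_entries(feed.entries, source)
#
# feedparser.parse() returns an object whose .entries attribute is the
# List[FeedParserDict] this function expects.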
def test_entries_created(self):
    """Test that entries can be added to the database."""
    with patch('builtins.input', side_effect=[
            self.date, self.employee, self.task_name,
            self.minutes, self.notes]):
        self.entry.add_entry()
    # assert that the column Entry.name is not empty
    assert Entry.select().where(Entry.name.is_null(False)).exists()
def translate(self, phrase):
    r = requests.get(self.__api_url, params={
        'from': self.from_lang,
        'dest': self.dest_lang,
        'phrase': phrase,
        'tm': False,
        'format': 'json'
    })
    json = r.json()
    if json['result'] != 'ok':
        return None

    # No results found
    if len(json['tuc']) == 0:
        return None

    # Check if plural; if so, fetch the proper translation
    patterns = [
        r'[Pp]lural form of (\w+)',
        r'[Pp]resent participle of (\w+)',
        r'[Ss]imple past tense and past participle of (\w+)'
    ]
    for pattern in patterns:
        plural = re.search(pattern, r.text)
        if plural:
            return self.translate(plural.group(1))

    # Pick the best result: the candidate with the most authors that
    # actually carries a phrase
    best = None
    for t in sorted(json['tuc'], key=itemgetter('authors'), reverse=True):
        if t.get('phrase'):
            best = t
            break
    if best is None:
        return None

    # Prepare and format database entry
    entry = Entry()
    entry.searchstring = phrase
    entry.phrase = best['phrase']['text'].capitalize()
    if best.get('meanings'):
        entry.meaning = best['meanings'][0]['text'].capitalize()

    # Return database entry; NOT SAVED YET
    return entry
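# A usage sketch, assuming the surrounding class (called Translator here
# purely for illustration) is constructed with from_lang/dest_lang codes,
# and remembering that the method deliberately returns an unsaved Entry:
#
#     translator = Translator(from_lang="en", dest_lang="de")
#     entry = translator.translate("dictionary")
#     if entry is not None:
#         entry.save()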
def __init__(self, key: str, content: Optional[Union[str, bytes]],
             statistics: Optional[database.Statistics] = None):
    # Flatten the key into a single file name under /tmp/s3/,
    # e.g. "/tmp/a/b.txt" -> "/tmp/s3/a-b.txt"
    self.file_name = key.replace("/tmp/s3/", "")
    self.file_name = self.file_name.replace("/tmp/", "")
    self.file_name = self.file_name.replace("/", "-")
    self.file_name = "/tmp/s3/" + self.file_name

    if statistics is None:
        statistics = Statistics()
    Entry.__init__(self, key, None, statistics)

    if content is not None:
        # write the content out, matching the file mode to its type
        mode = "w+" if isinstance(content, str) else "wb+"
        with open(self.file_name, mode) as f:
            f.write(content)
        self.length = len(content)
    else:
        self.length = os.path.getsize(self.file_name)
    self.last_modified = time.time()
def award_entries(self, members):
    for member in members:
        # search database for discord ID
        entry = session.query(Entry).filter_by(discord_id=member.id).first()
        if entry is not None:
            # existing member: bump their score
            entry.score += 1
        else:
            # first award for this member
            entry = Entry(discord_id=member.id, score=1)
            session.add(entry)
        session.commit()
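# An invocation sketch, assuming this method belongs to a discord.py cog
# and `members` is any iterable of Member objects (the command name and
# reply text here are illustrative):
#
#     @commands.command()
#     async def award(self, ctx):
#         self.award_entries(ctx.message.mentions)
#         await ctx.send("Scores updated.")
#
# Commits happen per member, so a crash mid-loop loses at most the
# current member's update.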
def main():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(title='Action', dest='action')

    parser_import = subparsers.add_parser(
        'import', help='Import new words into database')
    parser_import.add_argument('-k', '--kobo',
                               help='Import from kobo database file')
    parser_import.add_argument(
        '-r', '--raw', nargs='+',
        help='Import list of words, separated with spaces')
    parser_import.add_argument('-i', '--stdin', action='store_true',
                               help='Import every line from stdin')

    parser_export = subparsers.add_parser('export',
                                          help='Export saved words to file')
    parser_export.add_argument('-o', '--output', help='Location to save file')
    parser_export.add_argument('-s', '--style', choices=['json', 'anki'],
                               default='json', help='The output style')

    args = parser.parse_args()
    if args.action == 'import':
        if args.raw:
            import_raw(args.raw)
        elif args.kobo:
            import_kobo(args.kobo)
        elif args.stdin:
            import_stdin()
    elif args.action == 'export':
        out = Entry.export(args.style)
        if args.output:
            with open(args.output, 'w') as f:
                f.write(out)
        else:
            print(out)
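# Example invocations, assuming the script above is saved as a
# hypothetical vocab.py:
#
#     python vocab.py import --raw ubiquitous ephemeral
#     python vocab.py import --kobo /path/to/KoboReader.sqlite
#     cat words.txt | python vocab.py import --stdin
#     python vocab.py export --style anki --output deck.txt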
def add_entries(self):
    Entry.create(username=self.task_name,
                 date=self.task_date,
                 title=self.task_title,
                 spent=self.task_time,
                 note=self.task_note)
import sys
import time

import textblob

# Fragment: presumably the tail of the polling loop behind
# poll_recent_comments(), remembering the newest comment id before
# sleeping between requests.
last = data['data'][0]['id']
time.sleep(1)


def parse_single(comment):
    """Return the mean sentence polarity of a comment, in [-1.0, 1.0]."""
    blob = textblob.TextBlob(comment)
    if not blob.sentences:
        return 0
    avg = 0
    for sentence in blob.sentences:
        avg += sentence.sentiment.polarity
    return avg / len(blob.sentences)


if __name__ == '__main__':
    parsed = 0
    for item in poll_recent_comments():
        parsed += 1
        e = Entry()
        e.sub = item['subreddit']
        e.val = parse_single(item['body'])
        e.cid = item['id']
        e.body = item['body']
        e.ups = item['ups']
        e.downs = item['downs']
        e.link_url = item['link_url']
        e.link_title = item['link_title']
        e.author = item['author']
        e.save()
        if parsed % 10 == 0:
            print("Parsed %s comments..." % parsed)
        if parsed >= 5000000:
            sys.exit()
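# A standalone check of the averaging in parse_single, assuming textblob
# and its corpora are installed; exact polarity values depend on the
# TextBlob lexicon, so treat the output as illustrative:
#
#     score = parse_single("I love this. It is terrible.")
#     print(score)  # a value in [-1.0, 1.0], blending the two sentences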