Example #1
def __init__(self):
    # Register this connection in the shared client list; the id is
    # assigned later, so the object starts out as temporary.
    self.clients.append(self)
    self.id = None
    makeTemporary(self)
    self.d = None
    self.doing = ""
    # Listen for events on the global MUD channel and on the new
    # character, then hand the character off to the persistence layer.
    self.channel = Mud.getInstance().channel
    self.channel.addListener(self)
    self.character = Character()
    self.character.addListener(self)
    persist.persist(self.character)
Example #2
def load_data():
    conf = config.Config(
        CONF_PATH,
        defaults={
            'base_name': 'comitup',
            'web_service': '',
        },
    )

    data = persist.persist(
        PERSIST_PATH,
        {'id': str(random.randrange(1000, 9999))},
    )

    return (conf, data)
Example #3
def load_data():
    conf = config.Config(
        CONF_PATH,
        defaults={
            'base_name': 'comitup',
            'web_service': '',
            'external_callback': '/usr/local/bin/comitup-callback',
        },
    )

    data = persist.persist(
        PERSIST_PATH,
        {'id': str(random.randrange(1000, 9999))},
    )

    return (conf, data)
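Examples #2 and #3 above (both from comitup) call persist.persist(PERSIST_PATH, {...}) and then treat the returned object like a dictionary. As a rough mental model, a helper with that call shape could be a dict backed by a JSON file that seeds missing keys from the supplied defaults and writes itself back on every assignment. The sketch below assumes exactly that interface; PersistentDict and the file name are hypothetical and not comitup's actual implementation.

import json
import os

class PersistentDict(dict):
    """Hypothetical dict backed by a JSON file (assumed interface, not comitup's code)."""

    def __init__(self, path, defaults=None):
        super().__init__()
        self._path = path
        # Reload any previously saved state.
        if os.path.exists(path):
            with open(path) as fp:
                self.update(json.load(fp))
        # Seed missing keys from the defaults, then write everything back out.
        for key, value in (defaults or {}).items():
            self.setdefault(key, value)
        self._save()

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        self._save()

    def _save(self):
        with open(self._path, 'w') as fp:
            json.dump(self, fp)

# Mirrors the call shape in the examples above; the path is a placeholder.
data = PersistentDict('persist.json', {'id': '1234'})
print(data['id'])

Writing through on every assignment keeps the file consistent with the in-memory dict, at the cost of one extra disk write per update.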
Example #4
def sync_players():
    results = extract_mls_fantasy_data()
    for r in results:
        persist(r['data'], r['name'], fieldnames=r['fieldnames'], project_name='mls-data')
Example #5
def saveDocument(self):
    self.getdocument()
    data = getDataFromCRDT(self.treedoc)
    persist(data)
Example #6
def sync_new_games():
    games = extract_game_logs(new=True)
    for g in games:
        persist(g, 'games', file_name=g['match']['slug'], project_name='mls-data',
                write_meta=False, write_csv=False, auto_add_file=True)
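Examples #4 through #6 use a different persist: a free function that takes the payload and a dataset name plus routing keywords (file_name, fieldnames, project_name, write_meta, write_csv, auto_add_file). The sketch below is only an assumption about what such a writer might do with those keywords; the directory layout, defaults, and file formats are guesses, not the actual helper used in the mls-data project.

import csv
import json
import os

def persist(data, name, file_name=None, fieldnames=None, project_name='default',
            write_meta=True, write_csv=True, auto_add_file=False):
    # Hypothetical sketch: store one dataset under <project_name>/<name>/.
    # auto_add_file is accepted for signature parity but not modelled here.
    out_dir = os.path.join(project_name, name)
    os.makedirs(out_dir, exist_ok=True)
    stem = file_name or name

    # Keep a raw JSON copy of the payload.
    with open(os.path.join(out_dir, stem + '.json'), 'w') as fp:
        json.dump(data, fp, indent=2)

    # Optionally flatten the payload (assumed to be a list of dicts) into a CSV.
    if write_csv and fieldnames:
        with open(os.path.join(out_dir, stem + '.csv'), 'w', newline='') as fp:
            writer = csv.DictWriter(fp, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(data)

    # Optionally record a small metadata entry alongside the data.
    if write_meta:
        with open(os.path.join(out_dir, stem + '.meta.json'), 'w') as fp:
            json.dump({'name': name, 'project': project_name}, fp)

Under this reading, Example #6 would write each game log to its own JSON file named after the match slug, with CSV and metadata output switched off.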
Example #7
# Prepare the database connection.
db = db_operations.db_operations()
if not db.connect():
    print('error opening database')
    exit(2)
print('database connected')

# Read the RSS links from the config file.
config_parser = ConfigParser()
config_parser.read(db_operations.CONFIG_FILE_PATH)
links = []
if config_parser.has_section(section_name):
    links = config_parser.items(section_name)

rss_links = links[0][1].split(',') if links else []
for rss_link in rss_links:
    feed = feedparser.parse(rss_link)
    if len(feed.entries) == 0:
        continue
    for post in feed.entries:
        title = post.title
        link = post.link
        # Skip posts that are already stored.
        if not db.found_duplicate(link, title):
            db.insert(rss_link, title, link)
            print('{}: {}'.format(rss_link, title))

read_content.read_content()
persist.persist()
read_price.read_persist_price()
print('done!')
'''
COMMAND LINE:
C:/Users/yongwei/AppData/Local/Programs/Python/Python38/python.exe  c:/Python/NLPredict/datafeeder/read_parse_save.py
'''