def get(self):
    """Reply with the current sync configuration and available scrapers.

    Response payload shape:
        {'sync': [{'database': <name>,
                   'vars': [{'key': <k>, 'value': <v>}, ...],
                   'is_active': <flag>   # only when present in the config
                  }, ...],
         'scrapers': <getScrapers(scraper_dir) result>}
    """
    res = Res()
    res.result = STATUS_OK[0]
    conf_ = getConf()
    sync = []
    for db_name, db_conf in conf_.items():
        entry = {'vars': [], 'database': db_name}
        for var, value in db_conf.items():
            if var == "is_active":
                # 'is_active' is promoted to a top-level flag rather than
                # being listed among the ordinary config vars.
                entry['is_active'] = value
            else:
                entry['vars'].append({'key': var, 'value': value})
        sync.append(entry)
    res.data = {
        'sync': sync,
        'scrapers': getScrapers(scraper_dir),
    }
    self.finish(res.emit())
def post(self, action):
    """Dispatch a POST by *action*.

    action == "sync": parse a JSON body of config vars, pop its 'database'
    key to select the target backend, and apply the remaining vars via
    updateConfig(). Malformed JSON (ValueError) or a missing 'database'
    key (KeyError) is logged and answered with the default (non-OK) result.

    any other action: treat it as a bulk scraper toggle — "start" activates
    every scraper, anything else deactivates them — and report which
    scraper ids changed state under res.data['started'] / ['stopped'].
    """
    res = Res()
    if action == "sync":
        try:
            s = json.loads(self.request.body)
            db_name = s['database']
            # Remove the selector so only real config vars reach the backend.
            del s['database']
        except (ValueError, KeyError) as e:
            # Bad JSON or missing 'database': log and answer with the
            # default result instead of raising.
            print(e)
            self.finish(res.emit())
            return
        db = None
        if db_name == "M2X":
            # Imported lazily so the handler loads even when the M2X
            # backend's dependencies are unavailable.
            from ISData.m2xdb import M2XDB
            db = M2XDB()
        if db is not None:
            db.updateConfig(s)
            res.result = STATUS_OK[0]
        # NOTE(review): an unrecognized database name falls through silently
        # with a non-OK result -- confirm this is the intended behavior.
    else:
        activate = (action == "start")
        res.data = {"started": [], "stopped": []}
        for scraper in getScrapers(scraper_dir):
            s = Schema(scraper['url'])
            # Only touch scrapers whose state actually needs to change.
            if s.is_active != activate:
                status = "started" if activate else "stopped"
                res.data[status].append(s._id)
                s.activate(activate=activate)
        res.result = STATUS_OK[0]
    self.finish(res.emit())