def dictFromJsonFile(filename, error=True):
    """ Open a JSON file and return its content as a dict """

    def checking_for_unique_keys(pairs):
        """ Check if there are duplicate keys defined; this is useful for any hand-edited file
            This SO answer was useful here:
              http://stackoverflow.com/questions/16172011/json-in-python-receive-check-duplicate-key-error
        """
        result = dict()
        for key, value in pairs:
            if key in result:
                msg.error("duplicate key ('%s') specified in %s" % (key, filename), KeyError)
            result[key] = value
        return result

    try:
        with open(filename, 'rb') as f:
            json_data = json.load(f, object_pairs_hook=checking_for_unique_keys)
    except (IOError, OSError):
        if error:
            msg.error("Couldn't open JSON file: %s" % filename, IOError)
        else:
            msg.info("Couldn't open JSON file: %s" % filename, IOError)

    return json_data
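# A minimal, standalone sketch of the duplicate-key check used by
# dictFromJsonFile above, assuming only the standard-library json module.
# json.load/json.loads pass every (key, value) pair of a JSON object to
# object_pairs_hook in document order, so a repeated key can be caught
# before the later value silently overwrites the earlier one.
import json

def _reject_duplicate_keys(pairs):
    result = {}
    for key, value in pairs:
        if key in result:
            raise KeyError("duplicate key %r in JSON input" % key)
        result[key] = value
    return result

# Example: json.loads('{"a": 1, "a": 2}', object_pairs_hook=_reject_duplicate_keys)
# raises KeyError for the repeated "a".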
def msg(lang):  # str ->
    dic = DICS[lang]
    import messages
    messages.info(
        name="JMDict (%s)" % lang,
        location="Caches/Dictionaries/JMDict/%s.fpw" % lang,
        size=dic['size'],
        urls=["http://ftp.monash.edu.au/pub/nihongo/" + dic['file']],
    )
def msg(dic):  # str ->
    d = DICS[dic]
    import messages
    messages.info(
        name="StarDict (%s)" % dic,
        location="Caches/Dictionaries/" + d['path'],
        size=d['size'],
        urls=[DIC_URL],
    )
def msg(lang):  # str ->
    dic = DICS[lang]
    url = DIC_URL % dic
    import messages
    messages.info(
        name="KANJIDIC (%s)" % lang,
        location="Caches/Dictionaries/KanjiDic/%s" % dic,
        size=MIN_DIC_SIZE,
        urls=[HP_URL, url],
    )
def msg():
    import messages
    messages.info(
        name="Wadoku",
        location="Caches/Dictionaries/Wadoku",
        size=WADOKU_FILESIZE,
        urls=[
            'http://www.wadoku.de/wiki/display/WAD/Downloads+und+Links',
            WADOKU_URL,
        ],
    )
def msg():
    import messages
    messages.info(
        name="UniDicMLJ",
        location="Caches/Dictionaries/UniDicMLJ",
        size=UNIDIC_FILESIZE,
        urls=[
            'http://www2.ninjal.ac.jp/lrc/index.php?UniDic%2F%B6%E1%C2%E5%CA%B8%B8%ECUniDic',
            'https://docs.google.com/file/d/0ByoM4WpH84qIS2Q1UU9tbnRDVk0',
            'http://sakuradite.org/pub/unidic-mlj/unidic-mlj-1.3.tar.xz',
        ],
    )
def msg():
    import messages
    messages.info(
        name="UniDic",
        location="Caches/Dictionaries/UniDic",
        size=UNIDIC_MIN_FILESIZE,
        urls=[
            #'http://sourceforge.jp/projects/unidic',
            #'http://jaist.dl.sourceforge.jp/unidic/58338/unidic-mecab-2.1.2_bin.zip',
            #'http://osdn.dl.sourceforge.jp/unidic/58338/unidic-mecab-2.1.2_bin.zip',
            #'http://distfiles.macports.org/mecab/unidic-mecab-2.1.2_bin.zip',
            #'http://mse.uk.distfiles.macports.org/sites/distfiles.macports.org/mecab/unidic-mecab-2.1.2_bin.zip',
            initdefs.DOWNLOAD_MAINLAND_URL,
            'http://sakuradite.org/pub/unidic/unidic-2.1.2.tar.xz',
        ])
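# The messages module called by the msg() helpers above is not shown here.
# As an illustration only (the real interface may differ), a compatible
# info() helper that simply prints the download metadata could look like
# this; the field names mirror the keyword arguments used in the calls above.
def info(name, location, size, urls):
    # Report where a dictionary will be cached, its approximate size,
    # and the URLs it can be fetched from.
    print("%s -> %s (about %s bytes)" % (name, location, size))
    for url in urls:
        print("  source: %s" % url)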
def redirect_unauthenticated(redirect_url=None):
    redirect_url = redirect_url or reverse("index")
    no_auth = bottle.default_app().config.get("no_auth", False)
    if not no_auth and not is_user_authenticated():
        from foris.core import ugettext as _
        import messages
        messages.info(_("You have been logged out due to longer inactivity."))
        if bottle.request.is_xhr:
            # "raise" JSON response if requested by XHR
            res = bottle.response.copy(cls=bottle.HTTPResponse)
            res.content_type = 'application/json'
            res.body = json.dumps(dict(success=False, loggedOut=True, loginUrl=redirect_url))
            raise res
        # "raise" standard bottle redirect
        login_url = "%s?next=%s" % (redirect_url, bottle.request.fullpath)
        bottle.redirect(login_url)
def show_timer():
    global _start_time
    info("total time:", end=' ')
    seconds = time.time() - _start_time
    hours = int(seconds / 3600)
    if hours > 0:
        info(str(hours) + " hours,", end=' ')
        seconds = seconds - 3600 * hours
    minutes = int(seconds / 60)
    if minutes > 0:
        info(str(minutes) + " minutes,", end=' ')
        seconds = seconds - 60 * minutes
    info("%0.3g seconds" % seconds)
def extract(extract, extract_refdefs):
    """ Extract routing, component, documentation, and refdef data from the board SVG """
    svg_in = utils.openBoardSVG()

    if extract:
        msg.info("Extracting routing and vias")
        extractRouting(svg_in)
        msg.info("Extracting components info")
        extractComponents(svg_in)
        msg.info("Extracting documentation and indices locations")
        extractDocs(svg_in)

    if extract_refdefs:
        msg.info("Extracting refdefs info")
        extractRefdefs(svg_in)

    return
def extract():
    """ Extract routing, component, and documentation data from the board SVG """
    svg_in = utils.openBoardSVG()

    msg.info("Extracting routing and vias")
    extractRouting(svg_in)
    msg.info("Extracting components info")
    extractComponents(svg_in)
    msg.info("Extracting documentation and indices locations")
    extractDocs(svg_in)

    return
def main():
    messages.info("spider test")
    spdr.findimages("http://page.art.pl")
flags.DEFINE_string("model", None, "Name of the model to train")
FLAGS(sys.argv)

if not FLAGS.model:
    m.error("Error: unspecified model for training")
    exit()

img_size = 128
num_channels = 3

dataset_train = "%s/dataset_train" % (FLAGS.model)
classes = []

m.info("Reading dataset: %s" % (dataset_train))
dirs = os.listdir(dataset_train)
for dir_name in dirs:
    if dir_name != '.' and dir_name != '..':
        classes.append(dir_name)
num_classes = len(classes)

if num_classes == 0:
    m.error("Error: empty dataset")
    exit()

model_checkpoint = '%s/%s.ckpt' % (FLAGS.model, FLAGS.model)