def convert_nlp_test(pred):

    global test_cnt

    # print "% ", unicode(pred)

    lang    = pred.args[0].name
    ivr_in  = pred.args[1].args[0].args[0]
    ivr_out = pred.args[1].args[1].args[0]

    head = Predicate(name='nlp_test',
                     args=[StringLiteral(MODULE_NAME),
                           Predicate(name=lang),
                           StringLiteral('t%04d' % test_cnt),
                           Predicate(name='FIXME'),
                           ListLiteral([ivr_in, ivr_out, ListLiteral([])])])
    test_cnt += 1

    clause = Clause(head=head)

    print unicode(clause)
def _setup_context(self, user, lang, inp, prev_context, prev_res):

    cur_context = Predicate(do_gensym(self.rt, 'context'))
    res         = {}

    if ASSERT_OVERLAY_VAR_NAME in prev_res:
        res[ASSERT_OVERLAY_VAR_NAME] = prev_res[ASSERT_OVERLAY_VAR_NAME].clone()

    res = do_assertz({}, Clause(Predicate('user', [cur_context, Predicate(user)]),
                                location=self.dummyloc), res=res)
    res = do_assertz({}, Clause(Predicate('lang', [cur_context, Predicate(lang)]),
                                location=self.dummyloc), res=res)

    token_literal = ListLiteral(list(map(lambda x: StringLiteral(x), inp)))
    res = do_assertz({}, Clause(Predicate('tokens', [cur_context, token_literal]),
                                location=self.dummyloc), res=res)

    currentTime = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC).isoformat()
    res = do_assertz({}, Clause(Predicate('time', [cur_context, StringLiteral(currentTime)]),
                                location=self.dummyloc), res=res)

    if prev_context:

        res = do_assertz({}, Clause(Predicate('prev', [cur_context, prev_context]),
                                    location=self.dummyloc), res=res)

        # copy over all previous context statements to the new one
        s1s = self.rt.search_predicate('context', [prev_context, '_1', '_2'], env=res)
        for s1 in s1s:
            res = do_assertz({}, Clause(Predicate('context', [cur_context, s1['_1'], s1['_2']]),
                                        location=self.dummyloc), res=res)

        # copy over all previous mem statements to the new one
        s1s = self.rt.search_predicate('mem', [prev_context, '_1', '_2'], env=res)
        for s1 in s1s:
            res = do_assertz({}, Clause(Predicate('mem', [cur_context, s1['_1'], s1['_2']]),
                                        location=self.dummyloc), res=res)

    # import pdb; pdb.set_trace()

    res['C'] = cur_context

    return res, cur_context
def convert_answerz(c):

    pred = c.head

    lang = pred.args[1].name
    n    = pred.args[2].name

    pred = c.body
    s    = pred.args[2].s

    head = Predicate(name='nlp_%s_r' % MODULE_NAME,
                     args=[Predicate(name=lang), Predicate(name=n), Variable(name='R')])
    body = Predicate(name='says',
                     args=[Predicate(name=lang), Variable(name='R'), StringLiteral(s)])

    clause = Clause(head=head, body=body)

    print unicode(clause)
def builtin_tokenize(g, pe):

    """ tokenize (+Lang, +Str, -Tokens) """

    pe._trace('CALLED BUILTIN tokenize', g)

    pred = g.terms[g.inx]
    args = pred.args
    if len(args) != 3:
        raise PrologRuntimeError('tokenize: 3 args expected.', g.location)

    arg_lang = pe.prolog_eval(args[0], g.env, g.location)
    if not isinstance(arg_lang, Predicate) or len(arg_lang.args) > 0:
        raise PrologRuntimeError('tokenize: first argument: constant expected, %s found instead.' % repr(args[0]),
                                 g.location)

    arg_str    = pe.prolog_get_string(args[1], g.env, g.location)
    arg_tokens = pe.prolog_get_variable(args[2], g.env, g.location)

    tokens = list(map(lambda s: StringLiteral(s), tokenize(arg_str, lang=arg_lang.name)))

    g.env[arg_tokens] = ListLiteral(tokens)

    return True
def builtin_transcribe_number(g, pe):

    """ transcribe_number (+Lang, +Case, +N, -N_SCRIPT) """

    pe._trace('CALLED BUILTIN transcribe_number', g)

    # import pdb; pdb.set_trace()

    pred = g.terms[g.inx]
    args = pred.args
    if len(args) != 4:
        raise PrologRuntimeError('transcribe_number: 4 args expected.', g.location)

    arg_Lang = pe.prolog_get_constant(args[0], g.env, g.location)
    arg_Case = pe.prolog_get_constant(args[1], g.env, g.location)
    arg_N    = pe.prolog_get_int(args[2], g.env, g.location)
    arg_NSCR = pe.prolog_get_variable(args[3], g.env, g.location)

    if arg_Case == 'nominative':
        res = num2words(arg_N, ordinal=False, lang=arg_Lang)
    elif arg_Case == 'ordinal':
        res = num2words(arg_N, ordinal=True, lang=arg_Lang)
    elif arg_Case == 'ordgen':
        res = num2words(arg_N, ordinal=True, lang=arg_Lang)
        if arg_Lang == 'de':
            res += u'n'
    else:
        raise PrologRuntimeError('transcribe_number: case "%s" not recognized.' % arg_Case, g.location)

    g.env[arg_NSCR] = StringLiteral(res)

    return True
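# Illustrative sketch (not part of the original module): the case handling in
# builtin_transcribe_number boils down to three num2words calls. The helper below
# is hypothetical and only shows those calls in isolation; it assumes the
# num2words package is installed.
def _example_transcribe_number(n=3, lang='de'):
    from num2words import num2words
    nominative = num2words(n, ordinal=False, lang=lang)       # cardinal form, e.g. u'drei'
    ordinal    = num2words(n, ordinal=True, lang=lang)        # ordinal form, e.g. u'dritte'
    ordgen     = ordinal + u'n' if lang == 'de' else ordinal  # German 'ordgen' variant
    return nominative, ordinal, ordgen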
def convert_macro_string(ms):

    """ convert a pattern string such as u'(hello|hi) @MACRO text' into a
        ListLiteral of StringLiterals; (a|b) groups become nested ListLiterals
        of choices, @MACRO tokens are consumed by STATE_MACRO """

    pos   = 0
    state = STATE_NORMAL
    curs  = u''
    res   = []

    while pos < len(ms):

        c = ms[pos]

        if state == STATE_NORMAL:
            if c == '@':
                if len(curs.strip()) > 0:
                    res.append(StringLiteral(curs.strip()))
                    curs = u''
                state = STATE_MACRO
            elif c == '(':
                if len(curs.strip()) > 0:
                    res.append(StringLiteral(curs.strip()))
                    curs = u''
                choices = []
                state = STATE_CHOICE
            else:
                curs += c

        elif state == STATE_MACRO:
            if c == ' ':
                state = STATE_NORMAL

        elif state == STATE_CHOICE:
            if c == ')':
                state = STATE_NORMAL
                choices.append(StringLiteral(curs))
                curs = u''
                res.append(ListLiteral(choices))
            elif c == '|':
                choices.append(StringLiteral(curs))
                curs = u''
            else:
                curs += c

        pos += 1

    if state == STATE_NORMAL:
        if len(curs.strip()) > 0:
            res.append(StringLiteral(curs.strip()))

    return ListLiteral(res)
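# Illustrative sketch (not part of the original module): what convert_macro_string
# yields for a typical pattern line. The sample string and the function below are
# hypothetical; the exact Literal reprs depend on the zamiaprolog logic classes.
def _example_convert_macro_string():
    lst = convert_macro_string(u'(hello|hi) @NAME how are you')
    # lst is roughly:
    #   ListLiteral([ ListLiteral([StringLiteral(u'hello'), StringLiteral(u'hi')]),
    #                 StringLiteral(u'how are you') ])
    # note that the @NAME macro token itself is skipped by the state machine.
    return lst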
def _prolog_from_json(o):

    if o['pt'] == 'Constant':
        return Predicate(o['name'])
    if o['pt'] == 'StringLiteral':
        return StringLiteral(o['s'])
    if o['pt'] == 'NumberLiteral':
        return NumberLiteral(o['f'])
    if o['pt'] == 'ListLiteral':
        return ListLiteral(o['l'])

    raise PrologRuntimeError('cannot convert from json: %s .' % repr(o))
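# Illustrative sketch (not part of the original module): _prolog_from_json is used
# as a json object_hook, as in the DT_LIST branch of rdf_to_pl below. The sample
# JSON string and this helper are hypothetical.
def _example_prolog_from_json():
    import json
    s = u'{"pt": "NumberLiteral", "f": 42.0}'
    return json.JSONDecoder(object_hook=_prolog_from_json).decode(s)  # -> NumberLiteral(42.0)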
def rdf_to_pl(l):

    value = unicode(l)

    if isinstance(l, rdflib.Literal):

        if l.datatype:

            datatype = str(l.datatype)

            if datatype == 'http://www.w3.org/2001/XMLSchema#decimal':
                value = NumberLiteral(float(value))
            elif datatype == 'http://www.w3.org/2001/XMLSchema#float':
                value = NumberLiteral(float(value))
            elif datatype == 'http://www.w3.org/2001/XMLSchema#integer':
                value = NumberLiteral(float(value))
            elif datatype == 'http://www.w3.org/2001/XMLSchema#dateTime':
                dt = dateutil.parser.parse(value)
                value = NumberLiteral(time.mktime(dt.timetuple()))
            elif datatype == 'http://www.w3.org/2001/XMLSchema#date':
                dt = dateutil.parser.parse(value)
                value = NumberLiteral(time.mktime(dt.timetuple()))
            elif datatype == DT_LIST:
                value = json.JSONDecoder(object_hook=_prolog_from_json).decode(value)
            elif datatype == DT_CONSTANT:
                value = Predicate(value)
            else:
                raise PrologRuntimeError('sparql_query: unknown datatype %s .' % datatype)

        else:
            if l.value is None:
                value = ListLiteral([])
            else:
                value = StringLiteral(value)

    else:
        value = StringLiteral(value)

    return value
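# Illustrative sketch (not part of the original module): how typed and untyped
# rdflib literals map to prolog literals via rdf_to_pl. Assumes rdflib is
# installed; the helper and sample values are hypothetical.
def _example_rdf_to_pl():
    import rdflib
    num = rdf_to_pl(rdflib.Literal('3.14', datatype=rdflib.XSD.decimal))  # -> NumberLiteral(3.14)
    txt = rdf_to_pl(rdflib.Literal(u'hello'))                             # -> StringLiteral(u'hello')
    return num, txt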
def convert_nlp_gens(pred):

    # print "gens ", pred.args

    lang = pred.args[0].name
    ms   = pred.args[1].s
    resp = pred.args[2]

    res = convert_macro_string(ms)

    res = Predicate(name='nlp_gens',
                    args=[StringLiteral(MODULE_NAME), Predicate(name=lang), res, resp])

    print unicode(res) + u'.'
def builtin_r_sayv(g, pe):

    """ r_sayv (+Context, +Var, +Fmt) """

    pe._trace('CALLED BUILTIN r_sayv', g)

    pred = g.terms[g.inx]
    args = pred.args
    if len(args) != 3:
        raise PrologRuntimeError('r_sayv: 3 args (+Context, +Var, +Fmt) expected.', g.location)

    arg_context = pe.prolog_eval(args[0], g.env, g.location)
    arg_var     = pe.prolog_eval(args[1], g.env, g.location)
    arg_fmt     = pe.prolog_get_constant(args[2], g.env, g.location)

    if not isinstance(arg_var, Literal):
        raise PrologRuntimeError(u'r_sayv: failed to eval "%s"' % unicode(args[1]), g.location)

    # import pdb; pdb.set_trace()

    res = {}

    if isinstance(arg_var, StringLiteral):
        v = arg_var.s
    else:
        v = unicode(arg_var)

    if arg_fmt == 'd':
        v = unicode(int(float(v)))
    elif arg_fmt == 'f':
        v = unicode(float(v))

    res = do_assertz(g.env, Clause(Predicate('c_say', [arg_context, StringLiteral(v)]),
                                   location=g.location), res=res)

    return [res]
def builtin_uriref(g, pe):

    pe._trace('CALLED BUILTIN uriref', g)

    pred = g.terms[g.inx]
    args = pred.args
    if len(args) != 2:
        raise PrologRuntimeError('uriref: 2 args expected.')

    if not isinstance(args[0], Predicate):
        raise PrologRuntimeError('uriref: first argument: predicate expected, %s found instead.' % repr(args[0]))
    if not isinstance(args[1], Variable):
        raise PrologRuntimeError('uriref: second argument: variable expected, %s found instead.' % repr(args[1]))

    g.env[args[1].name] = StringLiteral(pe.kb.resolve_aliases_prefixes(args[0].name))

    return True
def fetch_weather_forecast(kernal):

    api_key = kernal.config.get("weather", "api_key")

    logging.debug('fetch_weather_forecast cron job, api key: %s' % api_key)

    sl = SourceLocation(fn='__internet__', col=0, line=0)

    #
    # resolve city ids, timezones
    #

    locations = {}

    # owmCityId(wdeLosAngeles, 5368361).
    solutions = kernal.rt.search_predicate('owmCityId', ['_1', '_2'])

    for s in solutions:

        location = s['_1'].name
        city_id  = int(s['_2'].f)

        # aiTimezone(wdeNewYorkCity, "America/New_York").
        solutions2 = kernal.rt.search_predicate('aiTimezone', [location, '_1'])
        if len(solutions2) < 1:
            continue
        timezone = solutions2[0]['_1'].s

        solutions2 = kernal.rt.search_predicate('rdfsLabel', [location, 'en', '_1'])
        if len(solutions2) < 1:
            continue
        label = solutions2[0]['_1'].s

        # wdpdCoordinateLocation(wdeBerlin, "Point(13.383333333 52.516666666)").
        solutions2 = kernal.rt.search_predicate('wdpdCoordinateLocation', [location, '_1'])
        if len(solutions2) < 1:
            continue
        m = coord_matcher.match(solutions2[0]['_1'].s)
        if not m:
            continue
        geo_lat  = float(m.group(2))
        geo_long = float(m.group(1))

        if not location in locations:
            locations[location] = {}

        locations[location]['city_id']  = city_id
        locations[location]['timezone'] = timezone
        locations[location]['label']    = label
        locations[location]['long']     = geo_long
        locations[location]['lat']      = geo_lat

    def mangle_label(label):
        return ''.join(map(lambda c: c if c.isalnum() else '', label))

    #
    # generate triples of weather and astronomical data
    #

    env = {}

    for location in locations:

        city_id   = locations[location]['city_id']
        timezone  = locations[location]['timezone']
        loc_label = mangle_label(locations[location]['label'])
        geo_lat   = locations[location]['lat']
        geo_long  = locations[location]['long']

        tz = pytz.timezone(timezone)
        ref_dt = datetime.now(tz).replace(hour=0, minute=0, second=0, microsecond=0)

        logging.debug("%s %s" % (location, ref_dt))

        #
        # sunrise / sunset
        #

        l = astral.Location()
        l.name      = 'name'
        l.region    = 'region'
        l.latitude  = geo_lat
        l.longitude = geo_long
        l.timezone  = timezone
        l.elevation = 0

        for day_offset in range(7):

            cur_date = (ref_dt + timedelta(days=day_offset)).date()

            sun = l.sun(date=cur_date, local=True)

            sun_const = u'aiUnlabeledSun%s%s' % (loc_label, cur_date.strftime('%Y%m%d'))

            env = do_retract(env, build_predicate('aiLocation', [sun_const, '_']))
            env = do_retract(env, build_predicate('aiDate',     [sun_const, '_']))
            env = do_retract(env, build_predicate('aiDawn',     [sun_const, '_']))
            env = do_retract(env, build_predicate('aiSunrise',  [sun_const, '_']))
            env = do_retract(env, build_predicate('aiNoon',     [sun_const, '_']))
            env = do_retract(env, build_predicate('aiSunset',   [sun_const, '_']))
            env = do_retract(env, build_predicate('aiDusk',     [sun_const, '_']))

            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiLocation', [sun_const, location])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiDate',     [sun_const, StringLiteral(cur_date.isoformat())])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiDawn',     [sun_const, StringLiteral(sun['dawn'].isoformat())])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiSunrise',  [sun_const, StringLiteral(sun['sunrise'].isoformat())])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiNoon',     [sun_const, StringLiteral(sun['noon'].isoformat())])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiSunset',   [sun_const, StringLiteral(sun['sunset'].isoformat())])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiDusk',     [sun_const, StringLiteral(sun['dusk'].isoformat())])))

            logging.debug("%s %s %s -> %s" % (sun_const, cur_date, sun['sunrise'], sun['sunset']))

        #
        # fetch json forecast data from OpenWeatherMap
        #

        url = 'http://api.openweathermap.org/data/2.5/forecast?id=%s&APPID=%s' % (city_id, api_key)

        data = json.load(urllib2.urlopen(url))

        if not 'list' in data:
            logging.error('failed to fetch weather data for %s, got: %s' % (location, repr(data)))
            continue

        # print repr(data['list'])

        for fc in data['list']:

            dt_to   = datetime.strptime(fc['dt_txt'], '%Y-%m-%d %H:%M:%S')
            dt_to   = dt_to.replace(tzinfo=pytz.utc)
            dt_from = dt_to - timedelta(hours=3)

            temp_min      = fc['main']['temp_min'] - KELVIN
            temp_max      = fc['main']['temp_max'] - KELVIN
            code          = fc['weather'][0]['id']
            precipitation = float(fc['rain']['3h']) if 'rain' in fc and '3h' in fc['rain'] else 0.0
            icon          = fc['weather'][0]['icon']
            description   = fc['weather'][0]['description']
            clouds        = float(fc['clouds']['all'])

            fc_const = 'aiUnlabeledFc%s%s' % (loc_label, dt_from.strftime('%Y%m%d%H%M%S'))

            logging.debug("%s on %s-%s city_id=%s" % (fc_const, dt_from, dt_to, city_id))

            # aiDescription(aiUnlabeledFcFreudental20161205180000, "clear sky").
            # aiDtEnd(aiUnlabeledFcFreudental20161205180000, "2016-12-05T21:00:00+00:00").
            # aiTempMin(aiUnlabeledFcFreudental20161205180000, -6.666).
            # aiIcon(aiUnlabeledFcFreudental20161205180000, "01n").
            # aiLocation(aiUnlabeledFcFreudental20161205180000, wdeFreudental).
            # aiDtStart(aiUnlabeledFcFreudental20161205180000, "2016-12-05T18:00:00+00:00").
            # aiClouds(aiUnlabeledFcFreudental20161205180000, 0.0).
            # aiPrecipitation(aiUnlabeledFcFreudental20161205180000, 0.0).
            # aiTempMax(aiUnlabeledFcFreudental20161205180000, -6.45).

            env = do_retract(env, build_predicate('aiDescription',   [fc_const, '_']))
            env = do_retract(env, build_predicate('aiDtEnd',         [fc_const, '_']))
            env = do_retract(env, build_predicate('aiTempMin',       [fc_const, '_']))
            env = do_retract(env, build_predicate('aiIcon',          [fc_const, '_']))
            env = do_retract(env, build_predicate('aiLocation',      [fc_const, '_']))
            env = do_retract(env, build_predicate('aiDtStart',       [fc_const, '_']))
            env = do_retract(env, build_predicate('aiClouds',        [fc_const, '_']))
            env = do_retract(env, build_predicate('aiPrecipitation', [fc_const, '_']))
            env = do_retract(env, build_predicate('aiTempMax',       [fc_const, '_']))

            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiLocation',      [fc_const, location])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiTempMin',       [fc_const, temp_min])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiTempMax',       [fc_const, temp_max])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiPrecipitation', [fc_const, precipitation])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiClouds',        [fc_const, clouds])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiIcon',          [fc_const, StringLiteral(icon)])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiDescription',   [fc_const, StringLiteral(description)])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiDtStart',       [fc_const, StringLiteral(dt_from.isoformat())])))
            env = do_assertz(env, Clause(location=sl, head=build_predicate('aiDtEnd',         [fc_const, StringLiteral(dt_to.isoformat())])))

    kernal.rt.apply_overlay(WEATHER_DATA_MODULE, env)
def builtin_weather_data(g, pe):

    """ weather_data (PLACE, TSTART, TEND, CODE, PREC, TEMP_MIN, TEMP_MAX, CLOUDS) """

    pe._trace('CALLED BUILTIN weather_data', g)
    # import pdb; pdb.set_trace()

    pred = g.terms[g.inx]
    args = pred.args
    if len(args) != 8:
        raise PrologRuntimeError('weather_data: expected 8 args, %d args found.' % len(args), g.location)

    arg_Place  = pe.prolog_eval(args[0], g.env, g.location)
    arg_TStart = pe.prolog_get_string(args[1], g.env, g.location)
    arg_TEnd   = pe.prolog_get_string(args[2], g.env, g.location)

    tstart = dateutil.parser.parse(arg_TStart)
    tend   = dateutil.parser.parse(arg_TEnd)

    arg_code     = pe.prolog_get_variable(args[3], g.env, g.location)
    arg_prec     = pe.prolog_get_variable(args[4], g.env, g.location)
    arg_temp_min = pe.prolog_get_variable(args[5], g.env, g.location)
    arg_temp_max = pe.prolog_get_variable(args[6], g.env, g.location)
    arg_clouds   = pe.prolog_get_variable(args[7], g.env, g.location)

    wevs = pe.search_predicate('weather_events', [arg_Place, '_1', '_2', '_3', '_4', '_5', '_6', '_7'])

    cnt      = 0
    code     = ''
    prec     = 0.0
    temp_min = 10000.0
    temp_max = -10000.0
    clouds   = 0.0

    for wev in wevs:

        logging.debug(repr(wev))

        unbound_values = False
        for k in ['_1', '_2', '_3', '_4', '_5', '_6', '_7']:
            if not k in wev:
                unbound_values = True
                break
        if unbound_values:
            logging.debug("skipping: unbound values found.")
            continue

        wev_tstart = dateutil.parser.parse(wev['_1'].s)
        wev_tend   = dateutil.parser.parse(wev['_2'].s)

        if (wev_tstart > tend) or (wev_tend < tstart):
            # logging.info ('ignoring wev %s' % repr(wev))
            # import pdb; pdb.set_trace()
            continue

        wev_code     = wev['_3'].s[:2]
        wev_prec     = wev['_4'].f
        wev_temp_min = wev['_5'].f
        wev_temp_max = wev['_6'].f
        wev_clouds   = wev['_7'].f

        if wev_temp_min < temp_min:
            temp_min = wev_temp_min
        if wev_temp_max > temp_max:
            temp_max = wev_temp_max
        if wev_code > code:
            code = wev_code

        prec   += wev_prec
        clouds += wev_clouds
        cnt    += 1

    if cnt == 0:
        raise PrologRuntimeError('weather_data: no data found.', g.location)

    prec   /= float(cnt)
    clouds /= float(cnt)

    g.env[arg_code]     = StringLiteral(code)
    g.env[arg_prec]     = NumberLiteral(prec)
    g.env[arg_temp_min] = NumberLiteral(temp_min)
    g.env[arg_temp_max] = NumberLiteral(temp_max)
    g.env[arg_clouds]   = NumberLiteral(clouds)

    return True