def save_to_db(identifier, provider, lang, meta):
    """
    save item's meta info to the database
    Args:
        identifier: identifier of the item
        provider: metadata provider to save info for
        lang: language to save info for
        meta: metadata
    """
    import time
    import pickle
    import koding

    koding.Remove_From_Table("meta", {
        "identifier": identifier,
        "provider": provider,
        "lang": lang
    })
    koding.Add_To_Table(
        "meta", {
            "identifier": identifier,
            "provider": provider,
            "lang": lang,
            "meta": pickle.dumps(meta).replace("\"", "'"),
            "created": time.time()
        })
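All of these examples rely on the same replace-row idiom: there is no update helper, so any existing row is deleted and a fresh one inserted. Below is a minimal sketch of that idiom, assuming only the koding Create_Table / Remove_From_Table / Add_To_Table calls shown in these snippets and a hypothetical "settings" table.

def upsert_setting(key, value):
    # Sketch only, not from the original listing: the table name and columns are assumptions
    import time
    import koding

    settings_spec = {
        "columns": {
            "key": "TEXT",
            "value": "TEXT",
            "created": "TEXT"
        },
        "constraints": {
            "unique": "key"
        }
    }
    koding.Create_Table("settings", settings_spec)
    # Delete any old row for this key, then insert the new value
    koding.Remove_From_Table("settings", {"key": key})
    koding.Add_To_Table("settings", {
        "key": key,
        "value": value,
        "created": time.time()
    })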
Example #2
def get_xml(link):
    """Fetch XML for a link, caching it and revalidating via the Last-Modified header."""
    import time
    xml_cache_spec = {
        "columns": {
            "xml": "TEXT",
            "link": "TEXT",
            "created": "TEXT",
            "changed": "TEXT"
        },
        "constraints": {
            "unique": "link"
        }
    }
    koding.Create_Table("xml_cache", xml_cache_spec)

    url = replace_url(link)
    req = requests.get(url, verify=False)
    changed = req.headers["Last-Modified"]
    result = koding.Get_From_Table("xml_cache", {"link": link})
    if result:
        # Serve the cached copy if the server's Last-Modified header is unchanged
        if result[0]["changed"] == changed:
            return result[0]["xml"]
        else:
            # Content changed upstream, so drop the stale cache row
            koding.Remove_From_Table("xml_cache", {"link": link})
    xml = req.content
    # Store the fresh copy together with its Last-Modified stamp
    koding.Add_To_Table("xml_cache", {
        "xml": xml,
        "link": link,
        "created": time.time(),
        "changed": changed
    })
    return xml
Example #3
 def onPlayBackStopped(self):
     # xbmc.Player callback fired when playback stops; record the watched position
     koding.dolog("playback stopped")
     if self.identifier == "0":
         return
     # Ignore stops within the first second of playback
     if not self.currentTime > 1:
         return
     koding.Create_Table("watched", self.tablespec)
     try:
         koding.Remove_From_Table(
             "watched", {
                 "identifier": self.identifier,
                 "season": self.season,
                 "episode": self.episode
             })
     except Exception:
         # Row may not exist yet; safe to ignore
         pass
     koding.Add_To_Table(
         "watched", {
             "identifier": self.identifier,
             "season": self.season,
             "episode": self.episode,
             "watched": "0",
             "currentTime": self.currentTime
         })
     return True
Example #4
def save_to_db(item, url):
    if not item or not url:
        return False
    item = remove_non_ascii(item)
    koding.reset_db()
    koding.Remove_From_Table("trakt_plugin", {"url": url})

    koding.Add_To_Table("trakt_plugin", {
        "url": url,
        "item": pickle.dumps(item).replace("\"", "'"),
        "created": time.time()
    })
Example #5
def save_view_mode(content):
    viewid = get_view_id()
    skin = xbmc.getSkinDir()
    koding.Create_Table("addonviews", view_spec)
    koding.Remove_From_Table("addonviews", {"skin": skin, "content": content})
    koding.Add_To_Table("addonviews", {
        "skin": skin,
        "content": content,
        "viewid": viewid,
    })
    icon = xbmcaddon.Addon().getAddonInfo('icon')
    xbmcgui.Dialog().notification(xbmcaddon.Addon().getAddonInfo('name'),
                                  "View set for %s" % content, icon)
Example #6
def remove_search(term):
    koding.Remove_From_Table("search", {"term": term})
    xbmc.executebuiltin("Container.Refresh")
Example #7
 def get_cached(self, url, cached=True):
     if not url.startswith("http"):
         return
     if __builtin__.BOB_BASE_DOMAIN not in url and "norestrictions" not in url:
         # External urls are fetched directly and never cached
         return requests.get(url).content
     xml_cache_spec = {
         "columns": {
             "url": "TEXT",
             "xml": "TEXT",
             "cache_time": "TEXT",
             "created": "TEXT"
         },
         "constraints": {
             "unique": "url"
         }
     }
     koding.Create_Table("xml_cache", xml_cache_spec)
     if not cached:
         # Caller explicitly asked for a fresh, uncached fetch
         koding.dolog("uncached requested")
         response = requests.get(url, verify=False)
         xml = response.content
         response.close()
     else:
         match = koding.Get_From_Table("xml_cache", {"url": url})
         if match:
             koding.dolog("match: " + repr(match))
             match = match[0]
             created_time = float(match["created"])
             cache_time = int(match["cache_time"])
             koding.dolog("expire time: " + repr(created_time + cache_time))
             koding.dolog("created_time: " + repr(created_time))
             koding.dolog("now: " + repr(time.mktime(time.gmtime())))
             if time.mktime(time.gmtime()) <= created_time + cache_time:
                 # Cache entry is still inside its cache_time window; serve it
                 koding.dolog("loading from cache, cache time not reached")
                 return pickle.loads(match["xml"])
             else:
                 try:
                     response = requests.get(url, verify=False, timeout=10)
                     changed = response.headers["Last-Modified"]
                     changed_struct = time.strptime(
                         changed, "%a, %d %b %Y %H:%M:%S GMT")
                     epoch_changed = int(time.mktime(changed_struct))
                     if epoch_changed < created_time:
                         koding.dolog(
                             "loading from cache, list not changed")
                         #xml = pickle.loads(match["xml"])
                         xml = response.content
                         response.close()
                     else:
                         koding.dolog("refreshing content")
                         xml = response.content
                         response.close()
                 except Exception as e:
                     koding.dolog("cache error: " + repr(e))
                     return pickle.loads(match["xml"])
         else:
             koding.dolog("initial load")
             response = requests.get(url, verify=False)
             xml = response.content
             response.close()
     if not xml:
         xbmcgui.Dialog().notification(ADDON.getAddonInfo("name"),
                                       "Server under high load, try again")
         return ""
     info = JenItem(xml.split('<item>')[0].split('<dir>')[0])
     cache_time = int(info.get("cache", 21600))
     koding.dolog("cache_time: " + repr(cache_time))
     created_time = time.mktime(time.gmtime())
     try:
         koding.Remove_From_Table("xml_cache", {
             "url": url,
         })
     except Exception as e:
         koding.dolog("Database error: " + repr(e))