def __insmod__(path, r):
  """Load and execute a module's source file in-process.

  path -- filesystem path of the module source to execute.
  r    -- human-readable module reference, used only in error messages.

  NOTE(security): exec() runs arbitrary code from *path*; only ever call
  this on trusted, local module files.
  """
  try:
    # 'with' guarantees the handle is closed on success AND on error
    # (the original closed it manually on both paths).
    with open(path, "r") as mod:
      source = mod.read()
    exec(source)
  except Exception as e:
    Logger.err("Module insertion failed: " + r)
    Logger.fatal(e)
Example #2
0
 def get(self, data):
   """Download one file entry from modthesims.info, using a local cache.

   data -- (thread_id, dest_key, archive_type, file_name) tuple.

   Side effects: writes the archive into the cache directory, then
   decompresses it into the destination directory via self.decompress.
   Respects two command-line flags: "ignore_cached" forces a re-download,
   "mts_ignore_errors" downgrades a missing file from fatal to an error.
   """
   item_id, dest, ftype, fname = data
   Logger.info("Downloading informations... ("+str(item_id)+")")
   useragent = self.scope['UserAgent']
   dest = self.scope['Directories'][dest]
   cache_path = self.scope['Directories']['Cache'] + "/" + fname
   # Serve from cache unless the user explicitly opted out.
   if os.path.exists(cache_path):
     if "ignore_cached" not in sys.argv:
       Logger.warn("Using from cache: " + fname)
       self.decompress(fname, dest, ftype)
     return
   url = "http://modthesims.info/download.php?t=" + item_id + "#actualtab1"
   req = urllib.request.Request(url, data=None, headers={"User-Agent": useragent})
   response = urllib.request.urlopen(req)
   # Read the page in fixed-size chunks; always release the connection.
   html = b''
   try:
     while True:
       chunk = response.read(1024)
       if not chunk:
         break
       html += chunk
   finally:
     response.close()
   html = BeautifulSoup(html, "html.parser")
   title = html.body.find('div', attrs={'class': "well profilepic well-small well-inline"}).find("h2").text.strip()
   author = html.body.find('div', attrs={'class': "well profilepic well-small well-inline"}).find("div", attrs={'class': "pull-left"}).text.strip().replace("\n", " ")
   try:
     # The author line looks like "By <name> ..."; keep the second token.
     author = author.split(" ", 2)[1]
   except Exception:
     author = "(unknown)"
   Logger.warn("Downloading: "+"\""+title+"\" ("+fname+") by " + author +" ("+url+")")
   # The downloads tab lists one <tr> per file; match on the link text.
   files = html.body.find('div', attrs={'id': 'actualtab1'}).find('tbody').find_all('tr')
   furl = None
   try:
     for row in files:
       a = row.find("a")
       if a.text.strip() == fname:
         furl = a['href']
         break
   except Exception:
     furl = None
   if not furl:
     if "mts_ignore_errors" not in sys.argv:
       Logger.fatal("No such file.")
     else:
       Logger.err("No such file.")
     # Nothing to download: the original fell through here and crashed
     # concatenating None into the "Found URL" message below.
     return
   Logger.info("Found URL: " + furl)
   req = urllib.request.Request(furl, data=None, headers={"User-Agent": useragent})
   response = urllib.request.urlopen(req)
   try:
     payload = response.read()
   finally:
     response.close()
   # 'with' ensures the cache file is flushed and closed even on error.
   with open(cache_path, "wb") as out:
     out.write(payload)
   self.decompress(fname, dest, ftype)
Example #3
0
def read(fname):
  """Parse a fog file into metadata and download entries.

  Line syntax (leading/trailing whitespace ignored):
    #...         comment, skipped
    @key value   metadata, stored under the "super" key
    - a b c d    data row, split into at most 4 fields

  Blank lines are skipped (the original indexed l[0] on them and crashed).
  Returns {"super": {key: value, ...}, "data": [[field, ...], ...]}.
  """
  meta = {}
  data = []
  try:
    # Context manager closes the file; the original leaked the handle.
    with open(fname, "r") as f:
      for line in f:
        line = line.strip()
        if not line or line[0] == "#":
          continue
        if line[0] == "@":
          # partition() tolerates "@key" with no value ("" instead of a crash).
          key, _, value = line[1:].partition(" ")
          meta[key.strip()] = value.strip()
        elif line.startswith("- "):
          data.append(line[2:].strip().split(" ", 3))
  except Exception as e:
    Logger.err("Parse error.")
    Logger.fatal(e)
  return {"super": meta, "data": data}
Logger.success("Hooks initialized.")

# Parse every fog file listed by the settings module (SM) into a dict with
# "super" (metadata) and "data" (download entries) keys -- see Fog.Parser.read.
Logger.info("Processing fog file(s)...")
Fogs = []
for fog in SM.Fogs:
  Fogs.append(Fog.Parser.read(fog))
Logger.success("Processed "+str(len(Fogs))+" fog file(s).")
if "debug" in sys.argv: print(Fogs)

# Fail fast when a fog file names an "upstream" downloader that has no
# registered handler.  NOTE(review): Logger.fatal presumably aborts the
# run -- confirm; otherwise invalid files still reach the download loop.
Logger.info("Validation...")
index = 0
for fog in Fogs:
  if not fog["super"]["upstream"] in SM.Handlers:
    Logger.fatal("Unknown downloader: " + fog["super"]["upstream"])
  index = index+1  # NOTE(review): counter is never read in this view
Logger.success("All fog files use valid downloaders.")

# Run user-registered pre-download hooks against the global scope.
Logger.info("Running pre-hooks...")
for hook in SM.Hooks['Pre']:
  hook(SM.Scope)
  
# Hand each data entry to the handler its fog file requested, sleeping
# SM.Pause seconds between downloads to stay polite to the remote server.
Logger.info("Running downloaders...")
for entry in Fogs:
  handler = SM.Handlers[entry['super']['upstream']]
  for d in entry['data']:
    handler.get(d)
    time.sleep(SM.Pause)

Logger.info("Running post-hooks...")