Example #1
def __insmod__(path, r):
  # Read a module file and exec() its contents in the current namespace.
  try:
    with open(path, "r") as mod:
      exec(mod.read())
  except Exception as e:
    Logger.err("Module insertion failed: " + r)
    Logger.fatal(e)
Example #2
 def get(self, data):
   id, dest, type, url = data
   fname = url.split("/")[-1]
   useragent = self.scope['UserAgent']
   dest = self.scope['Directories'][dest]
   # Reuse a previously downloaded archive if it is already in the cache.
   if os.path.exists(self.scope['Directories']['Cache'] + "/" + fname):
     Logger.warn("Using from cache: " + fname)
     self.decompress(fname, dest, type)
     return
   Logger.warn("Downloading: " + url)
   req = urllib.request.Request(url, data=None, headers={"User-Agent": useragent})
   response = urllib.request.urlopen(req)
   # Store the download in the cache directory, then extract it.
   with open(self.scope['Directories']['Cache'] + "/" + fname, "wb") as o_:
     o_.write(response.read())
   self.decompress(fname, dest, type)
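
For orientation, a minimal sketch of how a downloader like this might be driven. Only the scope keys and the shape of the data tuple come from the code above; the class name DirectDownload and all concrete values are hypothetical.

# Hypothetical driver for the get() method above; DirectDownload and every
# concrete value here are assumptions, only the keys mirror the code.
scope = {
  'UserAgent': "Mozilla/5.0",
  'Directories': {'Cache': "cache", 'Downloads': "downloads"},
  'FileTypes': {'package': "*.package"},
  'Hooks': {},
}
handler = DirectDownload(scope)  # assumed to store the dict as self.scope
handler.get(("42", "Downloads", "package", "https://example.com/files/some_mod.zip"))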
Example #3
def read(fname):
  super = {}
  data = []
  try:
    with open(fname, "r") as f:
      for l in f:
        l = l.strip()
        # Skip blank lines and "#" comments.
        if not l or l[0] == "#":
          continue
        if l[0] == "@":
          # "@key value" lines populate the metadata ("super") section.
          l = l[1:].split(" ", 1)
          super[l[0].strip()] = l[1].strip()
        elif l.startswith("- "):
          # "- ..." lines become data rows of up to four space-separated fields.
          data.append(l[2:].strip().split(" ", 3))
  except Exception as e:
    Logger.err("Parse error.")
    Logger.fatal(e)
  return {"super": super, "data": data}
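
To make the accepted format concrete, here is a small, hypothetical listing and a usage sketch, assuming the read() function above is in scope. The file name and the meaning of the four data fields are guesses (modelled on the (id, dest, type, fname) tuples used by the downloaders), not something the original states.

# Hypothetical listing file and usage of read(); names and field meanings are assumptions.
sample = """\
# lines starting with "#" are comments
@Name Example listing
@Expansion BaseGame
- 12345 Downloads package some_mod.zip
"""
with open("listing.txt", "w") as f:
  f.write(sample)

result = read("listing.txt")
print(result["super"])  # {'Name': 'Example listing', 'Expansion': 'BaseGame'}
print(result["data"])   # [['12345', 'Downloads', 'package', 'some_mod.zip']]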
Example #4
 def __call__(self, fname, dest):
   Logger.info("Moving zzzInTeenimater_G.package to AL overrides")
   target = self.scope['Directories']['ALOverrides'] + "/zzzInTeenimater_G.package"
   # Replace any existing copy before moving the freshly extracted file over.
   if os.path.exists(target):
     os.remove(target)
   os.rename(dest + "/zzzInTeenimater_G.package", target)
Example #5
 def decompress(self, fname, dest, type):
   Logger.info("Decompressing...")
   # Extract the cached archive with 7-Zip into the destination directory,
   # recursively, limited to the pattern registered for this file type.
   subprocess.call(["7z", "e", self.scope['Directories']['Cache'] + "/" + fname, "-o" + dest, self.scope['FileTypes'][type], "-r"])
   try:
     # Call the post-extraction hook for this file type, if one is registered.
     self.scope['Hooks'][type]['Extracted'](fname, dest)
   except KeyError:
     pass
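
The handler in Example #4 is exactly the kind of callable this hook lookup expects: decompress() fetches scope['Hooks'][type]['Extracted'] and calls it with (fname, dest). A minimal registration sketch, assuming scope is the same configuration dictionary the handlers receive and InTeenimaterMover is a hypothetical name for the Example #4 class:

# Hedged sketch: register a post-extraction hook for the "package" type.
# InTeenimaterMover is hypothetical; any callable taking (fname, dest) works.
scope['Hooks']['package'] = {'Extracted': InTeenimaterMover(scope)}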
Example #6
""" This module is a compatibility binding to Basilisk.Taszka.Background and Basilisk.Taszka.Foreground. """
from Basilisk.Taszka.Background import *
from Basilisk.Taszka.Foreground import *
from Basilisk import Logger

Logger.warn("Basilisk.ANSIColors imported! Import Basilisk.Taszka.Foreground and/or Basilisk.Taszka.Background instead.")
Logger.info("Because FOUR files (Logger, ANSIColors and the colors from the Taszka module) need to be parsed, you may experience a performance loss or higher resource consumption.")
Example #7
def insmod(fname):
  # Load a named module from the Modules/ directory.
  __insmod__("Modules/"+fname+".py", fname)

class SimsModderConfigScope:
  # Shared configuration and state, populated by config.py and the loaded modules.
  Handlers = {}
  Fogs = []
  Temp = "out"
  Pause = 1
  Scope = {'Directories':{}}
  Hooks = {'Pre':[],'Post':[]}
SM = SimsModderConfigScope()

__insmod__("config.py", "config")
  
Logger.info("Sims Modder\t" + SIMSMODDER_VERSION_STR)
Logger.warn("Compatible with The Sims 2. Some listings may require additional expansion packs.")
Logger.info("Using user-agent: " + SM.Scope['UserAgent'])
time.sleep(1)

Logger.info("Creating directories...")
# Create every directory declared in the configuration.
for k, dir in SM.Scope['Directories'].items():
  if not os.path.exists(dir):
    os.mkdir(dir)
Logger.success("Done.")

Logger.info("Initializing downloaders...")
for k, v in SM.Handlers.items():
  # Replace each registered handler class with an instance bound to the shared scope.
  SM.Handlers[k] = v(SM)
Logger.success("Downloaders initialized.")

Logger.info("Initializing hooks...")
Example #8
 def get(self, data):
   id, dest, type, fname = data
   Logger.info("Downloading information... (" + str(id) + ")")
   useragent = self.scope['UserAgent']
   dest = self.scope['Directories'][dest]
   if os.path.exists(self.scope['Directories']['Cache'] + "/" + fname):
     # Reuse the cached archive unless "ignore_cached" was passed on the
     # command line, in which case fall through and download it again.
     if "ignore_cached" not in sys.argv:
       Logger.warn("Using from cache: " + fname)
       self.decompress(fname, dest, type)
       return
   url = "http://modthesims.info/download.php?t=" + str(id) + "#actualtab1"
   req = urllib.request.Request(url, data=None, headers={"User-Agent": useragent})
   response = urllib.request.urlopen(req)
   # Read the page in chunks and parse it to locate the download links.
   html = b''
   chunk = True
   while chunk:
     chunk = response.read(1024)
     html += chunk
   html = BeautifulSoup(html, "html.parser")
   # Pull the mod title and author name out of the profile box on the page.
   title = html.body.find('div', attrs={'class': "well profilepic well-small well-inline"}).find("h2").text.strip()
   author = html.body.find('div', attrs={'class': "well profilepic well-small well-inline"}).find("div", attrs={'class': "pull-left"}).text.strip().replace("\n", " ")
   try:
     author = author.split(" ", 2)[1]
   except Exception:
     author = "(unknown)"
   Logger.warn("Downloading: "+"\""+title+"\" ("+fname+") by " + author +" ("+url+")")
   # The downloads tab lists one table row per file; match on the file name.
   files = html.body.find('div', attrs={'id': 'actualtab1'}).find('tbody').find_all('tr')
   furl = None
   try:
     for file in files:
       a = file.find("a")
       if a.text.strip() == fname:
         furl = a['href']
         break
   except Exception:
     furl = None
   if not furl:
     if "mts_ignore_errors" not in sys.argv:
       Logger.fatal("No such file.")
     else:
       Logger.err("No such file.")
       return
   Logger.info("Found URL: " + furl)
   req = urllib.request.Request(furl, data=None, headers={"User-Agent": useragent})
   response = urllib.request.urlopen(req)
   # Cache the download, then extract it.
   with open(self.scope['Directories']['Cache'] + "/" + fname, "wb") as o_:
     o_.write(response.read())
   self.decompress(fname, dest, type)