Example #1
def mono_amplitude_to_audio(channel_amplitude,
                            fft_window_size,
                            phase_iterations,
                            phase=None):
    audio = None
    if phase_iterations < 1:
        console.warn("phase iterations must >= 1")
    # phase reconstruction with successive approximation
    # credit to https://dsp.stackexchange.com/questions/3406/reconstruction-of-audio-signal-from-its-absolute-spectrogram/3410#3410
    # undo log1p
    amplitude = np.exp(channel_amplitude) - 1
    if phase is None:
        # if phase isn't given, make random phase
        phase_shape = channel_amplitude.shape
        phase = 2 * np.pi * np.random.random_sample(phase_shape) - np.pi
    for i in range(phase_iterations):
        # combine target amplitude and current phase to get the next audio iteration
        complex_spectrogram = amplitude * np.exp(1j * phase)
        audio = librosa.istft(complex_spectrogram)

        # at each step, create a new spectrogram using the current audio
        reconstruction = librosa.stft(audio, n_fft=fft_window_size)
        # amplitude = np.abs(reconstruction)
        phase = np.angle(reconstruction)
    return audio
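Recent librosa versions ship this successive-approximation loop as a built-in (Griffin-Lim). A minimal stand-alone sketch, with an illustrative window size rather than a value taken from the example:

import numpy as np
import librosa

# Rebuild audio from a magnitude spectrogram alone, which is what the loop
# above does by hand. librosa.example() fetches a small bundled demo clip.
y, sr = librosa.load(librosa.example("trumpet"))
magnitude = np.abs(librosa.stft(y, n_fft=1536))
reconstructed = librosa.griffinlim(magnitude, n_iter=10)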
Example #2
def execute(envcopy, command, path = "", preexec = "", silent = False):
  if path:
    envcopy.append('cd "%s"' % path)
  if preexec:
    envcopy.append(preexec)
# envcopy.append('entryenv=`env`')
# envcopy.append('if %s; then exitenv=`env`; if [[ "$entryenv" == "$exitenv" ]]; then exit 0; fi; echo "Env change!" >&2; echo $entryenv >&2; echo $exitenv >&2; fi; exit 1' % command)
  envcopy.append('if %s; then exit 0; fi; exit 1' % command)

  std = puke.Std()

  console.info('Running command:')
  for i in envcopy:
    console.info(i)
  puke.sh(envcopy, std=std, output = False)
  if std.code == 1:
    if silent:
      raise RuntimeError(std.err)
    console.debug("Monade shell stdout:", std.out)
    console.fail("Monade shell stderr: %s" % std.err)

  console.debug("Monade shell stdout:", std.out)
  if std.err:
    console.warn("Monade shell stderr:", std.err)
  return std
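For readers without puke installed, the assembled script amounts to the following standard-library sketch; the directory, preexec line, and command are illustrative, not taken from the example:

import subprocess

# Build the same kind of script the example concatenates, then run it in a
# single shell so the cd and env setup apply to the wrapped command.
script = "\n".join([
    'cd "/tmp/build"',                    # the optional `path` step
    'export CFLAGS="-O2"',                # the optional `preexec` step
    'if make; then exit 0; fi; exit 1',   # the wrapped command
])
result = subprocess.run(["sh", "-c", script], capture_output=True, text=True)
print(result.returncode, result.stdout, result.stderr)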
Example #3
 def load_settings(self):
     try:
         self.settings = io_utils.ini_to_dict("config/settings.ini")
     except IOError:
         self.settings = config.defaults['settings']
         console.warn("Failed to load game settings. Defaults loaded from " 
             "config.py.")
Example #4
 def train(self, data, epochs, batch=8):
     xTrain, yTrain = data.train()
     xValid, yValid = data.valid()
     while epochs > 0:
         console.log("Training for", epochs, "epochs on", len(xTrain),
                     "examples")
         self.model.fit(xTrain,
                        yTrain,
                        batch_size=batch,
                        epochs=epochs,
                        validation_data=(xValid, yValid))
         console.notify(
             str(epochs) + " Epochs Complete!", "Training on", data.inPath,
             "with size", batch)
         while True:
             try:
                 epochs = int(
                     input("How many more epochs should we train for? "))
                 break
             except ValueError:
                 console.warn(
                     "Oops, number parse failed. Try again, I guess?")
         if epochs > 0:
             save = input("Should we save intermediate weights [y/n]? ")
             if not save.lower().startswith("n"):
                 weightPath = ''.join(
                     random.choice(string.digits)
                     for _ in range(16)) + ".h5"
                 console.log("Saving intermediate weights to", weightPath)
                 self.saveWeights(weightPath)
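The prompt-and-retry loop in the middle of this method recurs in several examples below; pulled out on its own (with console.warn swapped for print so it runs without the console package) it is just:

def ask_epochs(prompt_text="How many more epochs should we train for? "):
    # Keep asking until the user types something int() accepts.
    while True:
        try:
            return int(input(prompt_text))
        except ValueError:
            print("Oops, number parse failed. Try again, I guess?")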
Example #5
def amplitude_to_audio(channel_amplitudes,
                       fft_window_size,
                       phase_iterations=10,
                       phase=None):
    audio = None
    num_channels = 1 if channel_amplitudes.ndim == 2 else channel_amplitudes.shape[-1]
    if num_channels == 1:
        audio = mono_amplitude_to_audio(channel_amplitudes,
                                        fft_window_size,
                                        phase_iterations,
                                        phase=phase)
    elif num_channels == 2:
        audio = []
        for channel in range(2):
            channel_amplitude = channel_amplitudes[:, :, channel]
            channel_phase = None if phase is None else phase[:, :, channel]
            audio.append(
                mono_amplitude_to_audio(channel_amplitude,
                                        fft_window_size,
                                        phase_iterations,
                                        phase=channel_phase))
        audio = np.array(audio)
    else:
        console.warn("cannot parse spectrogram with num_channels:",
                     num_channels)
    return np.clip(audio, -1, 1)
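A hypothetical call, assuming both functions above live in one module with numpy and librosa imported; the input shape and FFT size are illustrative and chosen to be consistent (n_fft=1024 gives 513 frequency bins):

import numpy as np

# Fake stereo log1p-magnitude spectrogram: (freq_bins, frames, channels).
stereo_amplitudes = np.log1p(np.abs(np.random.randn(513, 200, 2)))
audio = amplitude_to_audio(stereo_amplitudes, fft_window_size=1024, phase_iterations=5)
print(audio.shape)  # (2, n_samples): one reconstructed waveform per channel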
Example #6
def loadSpectrogram(filePath):
    fileName = basename(filePath)
    if "sampleRate=" not in fileName:
        console.warn("Sample rate should be specified in file name", filePath)
        sampleRate = 22050
    else:
        sampleRate = int(fileName[fileName.index("sampleRate=") + 11:fileName.index(").png")])
    console.info("Using sample rate : " + str(sampleRate))
    image = io.imread(filePath, as_grey=True)
    return image / np.max(image), sampleRate
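The slicing in the else branch assumes a naming convention like "<title> (sampleRate=22050).png"; a tiny worked example of that parse with a made-up file name:

name = "mysong (sampleRate=22050).png"
rate = int(name[name.index("sampleRate=") + 11:name.index(").png")])
print(rate)  # 22050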
Example #7
def get_lrc(name, song_id):
    lrc_url = 'http://music.163.com/api/song/lyric?' + \
        'id=' + str(song_id) + '&lv=1&kv=1&tv=-1'
    lyric = requests.get(lrc_url, headers=headers)
    j = json.loads(lyric.text)
    if 'lrc' not in j:
        console.warn(name + " has no lyrics!")
        console.info(str(j))
        return
    with open(save_path + ".lyric/" + name + '.lrc', 'w') as f:
        f.write(j['lrc']['lyric'])
Example #8
  def __init__(self, name):
    yawn = puke.FileSystem.join('technicolor', '%s.yaml' % name)
    if not puke.FileSystem.exists(yawn):
      console.fail('The requested yawn (%s, at path %s) doesn\'t exist!' % (name, yawn))
    data = yaml.safe_load(puke.FileSystem.readfile(yawn))

    self.name = name
    # Extend this so that multiple targets can be supported
    self.done = {}
    if 'LICENSE' in data:
      self.license = data['LICENSE']
    else:
      console.error('We are missing license information!')
      self.license = 'UNKNOWN'

    if 'VERSION' in data:
      self.version = data['VERSION']
    else:
      console.warn('We are missing version information')
      self.version = 'No version specified!'

    if 'URL' in data:
      self.url = data['URL'].replace('{VERSION}', self.version)
      self.__checksum = data['CHECKSUM']
      if 'LOCAL' in data:
        self.local = data['LOCAL'].replace('{VERSION}', self.version)
      else:
        self.local = self.url.split('/').pop().split('.')[0]
      self.production = data['PRODUCTION']
    else:
      console.info('This is a dummy package')
      self.url = ''
      self.__checksum = ''
      self.local = ''
      self.production = ''

    self.hack = False
    if 'HACK' in data:
      self.hack = data['HACK']

    if 'DEPENDS' in data:
      self.depends = data['DEPENDS']
      if isinstance(self.depends, str) and self.depends:
        self.depends = [self.depends]
    else:
      self.depends = []

    if 'TOOLCHAIN' in data:
      self.toolchain = data['TOOLCHAIN']
      if isinstance(self.toolchain, str) and self.toolchain:
        self.toolchain = [self.toolchain]
    else:
      self.toolchain = []
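For orientation, this is the kind of mapping yaml.safe_load has to return for the reads above to succeed; every value is a made-up placeholder, not taken from a real yawn file:

data = {
    'LICENSE': 'zlib',
    'VERSION': '1.2.13',
    'URL': 'https://example.org/zlib-{VERSION}.tar.gz',  # {VERSION} is substituted
    'CHECKSUM': 'deadbeef',                              # placeholder digest
    'LOCAL': 'zlib-{VERSION}',                           # optional; derived from URL otherwise
    'PRODUCTION': 'lib/libz.a',
    'HACK': False,
    'DEPENDS': ['pkg-config'],
    'TOOLCHAIN': [],
}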
Example #9
 def get_config(self):
     if self._config_exist():
         config = configparser2.ConfigParser(delimiters=('='))
         config.read(self.get_file(".config.cfg"))
         dict_config = dict(config._sections)
         for key in dict_config:
             dict_config[key] = dict(config._defaults, **dict_config[key])
             dict_config[key].pop('__name__', None)
         return dict_config
     else:
         console.warn("Config file not found...")
         sys.exit(2)
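The same result can be had without touching ConfigParser's private _sections/_defaults attributes; a stand-alone sketch using only public APIs:

import configparser

def config_to_dict(path):
    # Returns {section: {option: value}}; values from [DEFAULT] are already
    # merged into each section by the public mapping interface.
    parser = configparser.ConfigParser(delimiters=('=',))
    if not parser.read(path):
        raise FileNotFoundError(path)
    return {section: dict(parser[section]) for section in parser.sections()}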
Example #11
 def train(self, train, epochs, validation_data=None, test_file=None):
     losses = []
     cumulative_epochs = 0
     while epochs > 0:
         for epoch in range(epochs):
             console.info("epoch", cumulative_epochs)
             loss = self.train_epoch(train, validation_data, test_file)
             losses.append(loss)
             cumulative_epochs += 1
         while True:
             try:
                 epochs = int(console.prompt("How many more epochs should we train for? "))
                 break
             except ValueError:
                 console.warn("Oops, number parse failed. Try again, I guess?")
Example #12
 def create_pause_ui(self):
     """Create the pause menu UI."""
     pause_ui_path = 'resources/ui/pause_ui.ini'
     w, h = self.game.settings['width'], self.game.settings['height']
     
     self.pause_ui = load_ui(pause_ui_path, (w, h))
     menu_scale = 0.8
     pad_scale = (1 - menu_scale) / 2.0
     menu_x, menu_y = w*pad_scale, h*pad_scale
     menu_width, menu_height = w*menu_scale, h*menu_scale
     pause_bg_bounds = menu_x, menu_y, menu_width, menu_height
     
     try:
         self.pause_ui['bg'].bounds = pause_bg_bounds
     except AttributeError:
         console.warn("Couldn't find `bg` in `" + pause_ui_path + "`.")
Example #13
    def run(self):
        f, axarr = plt.subplots(2)
        self.plt_loss, self.plt_val_loss = axarr

        self.data = self.read_config(self.config_path)
        self.repeat = self.data.pop("repeat", None)
        combinations = 1
        for values in self.data.values():
            combinations *= len(values)
        console.warn("Running on ", combinations, " combinations.")

        with open(self.outfile, "w") as self.csvfile:
            self.resultwriter = csv.writer(self.csvfile,
                                           delimiter='|',
                                           quotechar='"',
                                           quoting=csv.QUOTE_MINIMAL)

            self.metric_names = ["loss"] + self.config.metrics.split(",")
            headers = sorted(list(self.data.keys()))
            headers += ["min_loss"] + self.metric_names
            headers = ["id"] + headers
            self.resultwriter.writerow(headers)
            lines = ["-" * len(head) for head in headers]
            self.resultwriter.writerow(lines)
            self.csvfile.flush()

            self.train_data = Data()
            for current_config in self.create_config(list(self.data.keys())):
                self.train(current_config)

        self.plt_loss.set_title("loss")
        self.plt_val_loss.set_title("val_loss")

        self.plt_loss.legend(self.ids, loc='upper right')
        self.plt_val_loss.legend(self.ids, loc='upper right')
        plt.savefig(os.path.join(self.config.log_base, 'benchmark-loss.png'))
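The combination count computed at the top corresponds to a full grid over the config values, which is presumably what self.create_config iterates; a self-contained sketch of that expansion with made-up values:

import itertools

data = {"lr": [1e-3, 1e-4], "batch_size": [8, 16, 32]}
keys = sorted(data.keys())
grid = [dict(zip(keys, combo)) for combo in itertools.product(*(data[k] for k in keys))]
print(len(grid))  # 6, i.e. the product of the value-list lengths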
Example #14
 def load_keybindings(self):
     try:
         kb = io_utils.ini_to_dict("config/keybinds.ini")
     except IOError:
         kb = config.defaults['keybindings']
         console.warn("Failed to load keybindings. Defaults loaded from "
             "config.py.")
     else: 
         # No error, but we still must convert dict values from str -> list
         for action in kb:
             key_list = []
             key_list_as_str = kb[action].split(",")
             
             for key in key_list_as_str:
                 # Fix issues with caps (a user manually edits keybinds.ini?)
                 key = key.upper() if len(key) > 1 else key.lower()
                 key = "K_" + key
                 
                 if key in globals():
                     key_list.append(globals()[key])
             
             kb[action] = key_list
                 
     self.keybindings = kb
Example #15
import console
console.log("Does not run on IDLE.")
console.warn("Does not run on IDLE.")
console.error("Does not run on IDLE.")
console.info("Does not run on IDLE.")
num_ok = 0
num_total = 0
db = pickle.load(open(Config.db_path, 'rb'))
for pid, j in db.items():
    pdfs = [x['href'] for x in j['links'] if x['type'] == 'application/pdf']
    assert len(pdfs) == 1
    pdf_url = pdfs[0] + '.pdf'
    pdf_url = pdf_url.replace("http:","https:") # ??
    basename = pdf_url.split('/')[-1]
    fname = os.path.join(Config.pdf_dir, basename)

    # try retrieve the pdf
    num_total += 1
    try:
        if basename not in have:
            console.log('fetching %s into %s' % (pdf_url, fname))
            req = urlopen(pdf_url, None, timeout_secs)
            with open(fname, 'wb') as fp:
                shutil.copyfileobj(req, fp)
            time.sleep(0.05 + random.uniform(0, 0.1))
        else:
            console.info('%s exists, skipping' % (fname, ))
            num_ok += 1
    except Exception as e:
        console.warn('error downloading: ', pdf_url)
        console.log(e)

    console.info('%d/%d of %d downloaded ok.' % (num_ok, num_total, len(db)))

console.h1('Final number of papers downloaded okay: %d/%d' % (num_ok, len(db)))
Example #16
    def train(self, data, epochs, batch=8, start_epoch=0):
        x_train, y_train = data.train()
        x_valid, y_valid = data.valid()
        self.x_valid, self.y_valid = x_valid, y_valid
        checkpointer = Checkpointer(self)
        checkpoints = checkpointer.get()
        if self.config.batch_generator != "keras":
            batch_generator = Batch().get()
        if self.config.epoch_steps:
            epoch_steps = self.config.epoch_steps
        else:
            epoch_steps = remove_track_boundaries(x_train).shape[0]
        epoch_steps = epoch_steps // batch
        while epochs > 0:
            end_epoch = start_epoch + epochs
            console.log("Training for", epochs, "epochs on",
                        epoch_steps * batch, "examples")
            console.log("Validate on", len(x_valid), "examples")
            if self.config.batch_generator == "keras":
                x_train = remove_track_boundaries(x_train)
                y_train = remove_track_boundaries(y_train)
                history = self.model.fit(x_train,
                                         y_train,
                                         batch_size=batch,
                                         initial_epoch=start_epoch,
                                         epochs=end_epoch,
                                         validation_data=(x_valid, y_valid),
                                         callbacks=checkpoints)
            else:
                history = self.model.fit_generator(
                    batch_generator(x_train, y_train, batch_size=batch),
                    initial_epoch=start_epoch,
                    epochs=end_epoch,
                    steps_per_epoch=epoch_steps,
                    validation_data=(x_valid, y_valid),
                    callbacks=checkpoints)
            console.notify(
                str(epochs) + " Epochs Complete!", "Training on", data.in_path,
                "with size", batch)

            start_epoch += epochs
            if self.config.quit:
                break
            else:
                while True:
                    try:
                        epochs = int(
                            input("How many more epochs should we train for?"))
                        break
                    except ValueError:
                        console.warn(
                            "Oops, number parse failed. Try again, I guess?")
                if epochs > 0:
                    save = input("Should we save intermediate weights [y/n]? ")
                    if not save.lower().startswith("n"):
                        weight_path = ''.join(
                            random.choice(string.digits)
                            for _ in range(16)) + ".h5"
                        weight_path = os.path.join(
                            os.path.dirname(config.weights), weight_path)
                        console.log("Saving intermediate weights to",
                                    weight_path)
                        self.save_weights(weight_path)
        return history
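Checkpointer and Batch are project classes not shown here; a rough stand-in for the checkpoint list using a stock Keras callback (the file name pattern is illustrative):

from tensorflow.keras.callbacks import ModelCheckpoint

# Save weights whenever validation loss improves, similar in spirit to
# whatever Checkpointer.get() returns in the project above.
checkpoints = [
    ModelCheckpoint("weights-{epoch:02d}-{val_loss:.4f}.h5",
                    monitor="val_loss", save_best_only=True)
]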
Example #18
for pid, j in db.items():
    n += 1
    idvv = "%sv%d" % (j["_rawid"], j["_version"])
    txt_path = os.path.join("data", "txt", idvv) + ".pdf.txt"
    if os.path.isfile(txt_path):  # some pdfs dont translate to txt
        with open(txt_path, "r") as f:
            txt = f.read()
        if len(txt) > 1000 and len(txt) < 200000:  # filter out the theses
            txt_paths.append(
                txt_path
            )  # todo later: maybe filter or something some of them
            pids.append(idvv)
            # print("read %d/%d (%s) with %d chars" % (n, len(db), idvv, len(txt)))
        else:
            console.warn(
                "skipped %d/%d (%s) with %d chars" % (n, len(db), idvv, len(txt))
            )
    # not important; some ids on arxiv don't have pdfs
    # else:
    # print("could not find %s in txt folder." % (txt_path, ))
print(
    "in total read in %d text files out of %d db entries." % (len(txt_paths), len(db))
)

# compute tfidf vectors with scikits
v = TfidfVectorizer(
    input="content",
    encoding="utf-8",
    decode_error="replace",
    strip_accents="unicode",
    lowercase=True,
Example #19
def key_of_file(fileName):
    try:
        first_token = int(fileName.split()[0])
    except (ValueError, IndexError):
        first_token = None
    if first_token is not None and 0 < first_token <= NUMBER_OF_KEYS:
        return first_token
    console.warn("File", fileName, "doesn't specify its key, ignoring..")
    return None
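A hypothetical usage; NUMBER_OF_KEYS is set to an illustrative value and the file names are made up:

NUMBER_OF_KEYS = 24  # illustrative; the real constant comes from the project

print(key_of_file("7 C_minor_sample.wav"))  # -> 7
print(key_of_file("ambient_take3.wav"))     # warns and returns None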
Example #20
def install(rootsrc, roottmp, deploy, target, build, name, toolchain = False, clean = False, cleanarchive = False, debug = False, static = False):
  globals()['technicolor'] = yawn.get(name)
  # If already done, for this target, during *this* session, silently pass - avoid verbose dependency checking and recursive references
  # (yeah, I'm looking at you goddamn pkgconfig)
  if (not toolchain) and (target in technicolor.done):
    return

  console.header("Target: %s - Build: %s - Yawn: %s (version %s - license %s)" % (target, build, technicolor.name, technicolor.version, technicolor.license), 1)

  # Start with clean envs
  env.set(target, build)
  # Setup the prefixes
  # XXX this totally sucks ass. Toolchain of a package may in turn depend on a specific tool, which makes it compiled into the Toolchain/Darwin directory
  # anyway...

  debug = not (not debug or (debug == "false"))
  static = not (not static or (static == "false"))

  env.native.prefix = fs.join(deploy, 'Toolchain', toolchain if toolchain else target)
  env.current.prefix = fs.join(deploy, target, 'debug' if debug else 'release', 'static' if static else 'dynamic')
  # And tmp dirs
  env.native.tmp = fs.join(roottmp, 'Toolchain', toolchain if toolchain else target)
  env.current.tmp = fs.join(roottmp, target, 'debug' if debug else 'release', 'static' if static else 'dynamic')
  # And src dirs
  env.native.src = fs.join(rootsrc, 'Toolchain', toolchain if toolchain else target)
  env.current.src = fs.join(rootsrc, target)

  console.header('Searching for it')
  if technicolor.production and __find(technicolor.production, env.native if toolchain else env.current, target != build):
    console.info("The requested monade seems to be already compiled and installed. Will do nothing.")
    technicolor.done[target] = True
    return
  else:
    console.info("Will build")

  console.header("Building tools")

  # Walk tool dependencies first and build them if need be
  for depname in technicolor.toolchain:
    # If we already are compiling a tool, forward the original toolchain value. If not, that will be the target
    forwardtoolchaintarget = toolchain if toolchain else target
    install(rootsrc, roottmp, deploy, build, build, depname, forwardtoolchaintarget, clean, cleanarchive)

  console.info('Done with tools')

  console.header('Building dependencies')

  # Get back to the original yawn now - XXX EXTREMELY bad design
  globals()['technicolor'] = yawn.get(name)


  # Walk dependencies and build them if need be
  for depname in technicolor.depends:
    # If we already are compiling a tool, forward the original toolchain value. If not, that will be the target
    forwardtoolchaintarget = toolchain if toolchain else False
    install(rootsrc, roottmp, deploy, target, build, depname, forwardtoolchaintarget, clean, cleanarchive, debug, static)

  console.info('Done with dependencies')

  # Get back to the original yawn now - XXX EXTREMELY bad design
  globals()['technicolor'] = yawn.get(name)
  env.set(target, build)
  env.current.debug = True if debug else False
  env.current.static = True if static else False
  env.native.prefix = fs.join(deploy, 'Toolchain', toolchain if toolchain else target)
  env.current.prefix = fs.join(deploy, target, 'debug' if debug else 'release', 'static' if static else 'dynamic')
  env.native.tmp = fs.join(roottmp, 'Toolchain', toolchain if toolchain else target)
  env.current.tmp = fs.join(roottmp, target, 'debug' if debug else 'release', 'static' if static else 'dynamic')
  env.native.src = fs.join(rootsrc, 'Toolchain', toolchain if toolchain else target)
  env.current.src = fs.join(rootsrc, target)

  console.header("Back onto Target: %s - Build: %s - Yawn: %s (version %s - license %s)" % (target, build, technicolor.name, technicolor.version, technicolor.license), 1)

  if not technicolor.url:
    console.warn('This is a dummy package with no url. Just passing by.')
    return

  # Load the python script file
  # sys.path.insert(0, 'monades')
  # currentjob = __import__('%s' % name)
  import importlib
  currentjob = importlib.import_module('technicolor.%s' % technicolor.name)


  # Ensure the package is here
  console.header('Getting remote stuff here')
  globals()['environ'] = env.native if toolchain else env.current

  # Set current directory now to source directory
  environ.pwd = fs.join(environ.src, technicolor.local)

  if technicolor.fetch(environ.src, cleanarchive):
    console.header("Got remote package, applying patches now.")
    technicolor.patch(environ.pwd, target)
    technicolor.copy(environ.pwd)

    if 'preprocess' in dir(currentjob):
      console.header('Preprocessing')
      currentjob.preprocess()
    else:
      console.warn('NO preprocessing specified for this monade. Going on.')
  else:
    console.warn("Output directory already exist. NOT getting package again and NOT running pre hook. If this is not acceptable, rm %s" % environ.pwd)

  # Ensure target dir are there to avoid complaints
  fs.makedir(fs.join(environ.prefix, 'lib'))
  fs.makedir(fs.join(environ.prefix, 'bin'))
  fs.makedir(fs.join(environ.prefix, 'include'))


  # Switch current directory now to temp directory
  environ.pwd = fs.join(environ.tmp, technicolor.local)
  if not fs.exists(environ.pwd):
    fs.makedir(environ.pwd)
  if 'configure' in dir(currentjob):
    # Exec
    console.header('Configure')
    currentjob.configure()
  else:
    console.warn('NO configure instructions specified for this monade! Going on.')

  if clean:
    if 'clean' in dir(currentjob):
      # Exec
      console.header('Clean')
      currentjob.clean()
    else:
      console.warn('NO clean instructions specified for this monade! Going on.')

  if 'make' in dir(currentjob):
    # Exec
    console.header('Make')
    currentjob.make()
  else:
    console.warn('NO make instructions specified for this monade! Going on.')

  # XXX switch this to deploy directory?
  console.header('Post-processing!', 1)
  if 'postprocess' in dir(currentjob):
    currentjob.postprocess()
  else:
    console.warn('NO post-process instructions specified for this monade! Going on.')

  technicolor.done[target] = True
Example #21
def __find(production, envir, ignoresystem = False):
  # Initialize the environment for the given target
  # env.sethost(puke.Yak.XCC)
  # # Specify our deploy path

  # Currently, packages that are both compiled in a toolchain AND as native dependency override each other when it comes to pc files.
  # The only solution would be to maintain two different PKG_CONFIG...
  # Try pkg-config first
  # XXX This is massively broken when cross-compiling
  # if fs.exists(fs.join(env.native.prefix, 'bin', 'pkg-config')) and production != 'pkg-config':
  #   command = 'pkg-config --modversion %s' % production
  #   try:
  #     console.info(' * Trying to find through pkg-config')
  #     # First try our pkg-config
  #     ret = shell.execute(envir.flush(), command, silent = True)
  #     ret = shell.execute(envir.flush(), 'echo $PKG_CONFIG_PATH/%s.pc' % production)
  #     console.info('  + Found package at %s' % ret.out.strip())
  #     return ret.out.strip()
  #   except Exception as e:
  #     console.info('  - Failed!')
  # else:
  #   console.info(' * You don\'t have pkg-config (yet?)!')

  # Otherwise, try the production name, literally
  verif = fs.join(envir.prefix, production)
  console.info(' * Trying to find the given filename %s' % verif)
  if fs.exists(verif):
    if verif == production:
      # Should NEVER happen - this is bad every inch
      console.warn('  + Found but... BEWARE! This is a system path and might not be portable (that being said, *you* specified it!): %s' % verif)
    else:
      console.info('  + Found package in: %s' % verif)
    return verif
  else:
    console.info('  - Failed!')
    console.info(' * Trying to find the package as a binary somewhere')
    # Finally, that may be a binary
    # XXX this first because on cross compiling, the path is not set to the local bin
    test = fs.join(envir.prefix, 'bin', production)
    if fs.exists(test):
      console.info('  + Found a binary here: %s' % test)
      return test
    # Dirty windows trick
    test = fs.join(envir.prefix, 'bin', '%s.exe' % production)
    if fs.exists(test):
      console.info('  + Found a binary here: %s' % test)
      return test
    command = 'which %s' % production
    try:
      ret = shell.execute(envir.flush(), command, silent = True)
      if ret.out.startswith(envir.prefix):
        console.info('  + Found a binary here: %s' % ret.out)
      else:
        if ignoresystem:
          return False
        console.error('  + Found a binary on the system (might cause compatibility problems!): %s' % ret.out)
        q = prompt('Do you want to build your own, or use the system package (type "system")?')
        if q != 'system':
          return False
      return ret.out.strip()
    except Exception:
      pass

  console.info('  - Failed!')
  console.warn('Package production not found')
  return False