def test_update_config_file_to_version5(self):
        config_file = "yarss2_v4.conf"
        filename = yarss2.util.common.get_resource(config_file, path="tests/data/")
        tmp_dir = common.get_tmp_dir()
        shutil.copy(filename, tmp_dir)
        self.config = common.get_test_config(config_filename=config_file, config_dir=tmp_dir, verify_config=False)

        # Call the function that makes the changes
        self.config.config.run_converter((4, 4), 5, self.config.update_config_to_version5)

        # Test changes for "add_torrents_in_paused_state_to_GeneralSubsConf"
        self.assertEquals(self.config.config["subscriptions"]["0"]["add_torrents_in_paused_state"], GeneralSubsConf.DISABLED)
        self.assertEquals(self.config.config["subscriptions"]["1"]["add_torrents_in_paused_state"], GeneralSubsConf.ENABLED)

        for key in self.config.config["subscriptions"].keys():
            # last_update replaced with last_match
            self.assertFalse("last_update" in self.config.config["subscriptions"][key])
            self.assertTrue("last_match" in self.config.config["subscriptions"][key])

            # Add in paused state should be unicode
            self.assertEquals(type(self.config.config["subscriptions"][key]["add_torrents_in_paused_state"]), unicode)

            self.assertTrue("max_upload_slots" in self.config.config["subscriptions"][key])
            self.assertTrue("max_connections" in self.config.config["subscriptions"][key])
            self.assertTrue("max_upload_speed" in self.config.config["subscriptions"][key])
            self.assertTrue("prioritize_first_last_pieces" in self.config.config["subscriptions"][key])
            self.assertTrue("auto_managed" in self.config.config["subscriptions"][key])
            self.assertTrue("sequential_download" in self.config.config["subscriptions"][key])

        # Test changes for "change_value_from_list_to_dict"
        for cookie in self.config.config["cookies"].values():
            self.assertEquals(type(cookie["value"]), dict)
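
# The assertions above pin down what the version-5 converter has to do. Below is a
# minimal sketch of a converter that would satisfy them; it is not the YaRSS2
# implementation, and the default values, the "key=value" cookie layout and the
# unicode paused-state constants (standing in for GeneralSubsConf) are assumptions.
def update_config_to_version5_sketch(config):
    for subscription in config["subscriptions"].values():
        # 'last_update' is replaced by 'last_match', keeping the old value
        if "last_update" in subscription:
            subscription["last_match"] = subscription.pop("last_update")
        # the boolean paused flag becomes a unicode GeneralSubsConf-style value
        paused = subscription.pop("add_torrents_in_paused_state", False)
        subscription["add_torrents_in_paused_state"] = u"True" if paused else u"False"
        # new per-subscription torrent options (defaults are assumptions)
        defaults = {"max_upload_slots": -1, "max_connections": -1,
                    "max_upload_speed": -1.0, "prioritize_first_last_pieces": False,
                    "auto_managed": True, "sequential_download": False}
        for field, default in defaults.items():
            subscription.setdefault(field, default)
    # cookie values change from a list of "key=value" strings to a dict
    for cookie in config["cookies"].values():
        if isinstance(cookie["value"], list):
            cookie["value"] = dict(item.split("=", 1) for item in cookie["value"])
    return config
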
    def test_update_config_file_from_1_2_beta(self):
        tmp_dir = common.get_tmp_dir()
        config_file = "yarss2_v1.2.beta.conf"
        filename = yarss2.util.common.get_resource(config_file, path="tests/data/")
        shutil.copy(filename, tmp_dir)
        self.config = common.get_test_config(config_filename=config_file, config_dir=tmp_dir, verify_config=False)

        self.assertEquals(self.config.config._Config__version["format"], 1)
        self.assertEquals(self.config.config._Config__version["file"], 2)

        # Verify that the old values are what we expect
        for key in self.config.config["cookies"].keys():
            self.assertEquals(type(self.config.config["cookies"][key]["value"]), dict)

        # Update the config
        self.config._verify_config()
        config_dict = self.config.config.config

        config_file = "yarss2_v5.conf"
        filename = yarss2.util.common.get_resource(config_file, path="tests/data/")
        shutil.copy(filename, tmp_dir)
        self.config = common.get_test_config(config_filename=config_file, config_dir=tmp_dir, verify_config=False)
        config_dict_v5 = self.config.config.config

        # Verify that the 1.2 beta config equals the config updated from earlier versions
        self.assertTrue(yarss2.util.common.dicts_equals(config_dict_v5, config_dict))
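
# yarss2.util.common.dicts_equals compares the upgraded config against the v5
# reference above. A hypothetical, minimal equivalent (not the yarss2 helper)
# would recurse through nested dicts and compare everything else with ==:
def dicts_equal_sketch(d1, d2):
    if set(d1.keys()) != set(d2.keys()):
        return False
    for key, value in d1.items():
        other = d2[key]
        if isinstance(value, dict) and isinstance(other, dict):
            if not dicts_equal_sketch(value, other):
                return False
        elif value != other:
            return False
    return True
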
Example #3
 def initialize(self):
     self.info("[US1608][BI] : Allow RSA and DSA SSH keys - CLI")
     self.user_email = self.config.OPENSHIFT_user_email
     self.user_passwd = self.config.OPENSHIFT_user_passwd
     self.app_type = common.app_types["php"]
     self.app_name = "php"+common.getRandomString(7)
     self.backup_dir = os.path.join(common.get_tmp_dir(),common.getRandomString(10))
     self.ssh_keyname="id_rsa"
     self.ssh_key = os.path.join(os.path.expanduser("~"),".ssh", self.ssh_keyname)
     common.env_setup()
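
# initialize() above only records paths; before gen_dsa_key() (below) overwrites
# ~/.ssh/id_rsa, the test presumably saves the user's real key pair into
# self.backup_dir. That step is not shown here, so the helper below is only an
# assumed illustration of it:
import os
import shutil

def backup_existing_key_sketch(ssh_key, backup_dir):
    # copy an existing key pair into backup_dir so it can be restored afterwards
    if not os.path.isdir(backup_dir):
        os.makedirs(backup_dir)
    for path in (ssh_key, ssh_key + ".pub"):
        if os.path.exists(path):
            shutil.copy(path, backup_dir)
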
Example #4
 def gen_dsa_key(self):
     key_path = os.path.join(common.get_tmp_dir(), self.ssh_keyname)
     try:
         os.remove(key_path)
     except OSError:
         pass
     cmd = "ssh-keygen -t dsa -N '' -f %s" % (key_path)
     r = common.cmd_get_status(cmd, quiet=True)
     if r == 0:
         os.rename(key_path, self.ssh_key)
         os.rename(key_path+".pub", self.ssh_key+".pub")
         return 0
     else:
         self.error("Unable to generate new DSA key")
         return 1
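
# gen_dsa_key() covers the DSA half of "Allow RSA and DSA SSH keys"; the RSA half
# would differ only in the '-t rsa' flag. A standalone sketch by analogy (the
# helper name, use of subprocess/tempfile and the return codes are illustrative,
# and ssh-keygen must be on PATH):
import os
import subprocess
import tempfile

def gen_rsa_key_sketch(target_key_path):
    key_path = os.path.join(tempfile.mkdtemp(), "id_rsa")
    ret = subprocess.call(["ssh-keygen", "-t", "rsa", "-N", "", "-f", key_path])
    if ret != 0:
        return 1
    os.rename(key_path, target_key_path)
    os.rename(key_path + ".pub", target_key_path + ".pub")
    return 0
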
    def test_update_config_file_to_version2(self):
        config_file = "yarss2_v1.conf"
        filename = yarss2.util.common.get_resource(config_file, path="tests/data/")
        tmp_dir = common.get_tmp_dir()
        shutil.copy(filename, tmp_dir)
        self.config = common.get_test_config(config_filename=config_file, config_dir=tmp_dir, verify_config=False)

        # Call the function that makes the changes
        self.config.config.run_converter((0, 1), 2, self.config.update_config_to_version2)

        # 1 - search field from subscription was removed
        # 2 - Added field 'custom_text_lines'
        subscriptions = self.config.config["subscriptions"]
        for key in subscriptions:
            self.assertFalse(subscriptions[key].has_key("search"), "Field 'search still exists'")
            self.assertTrue(subscriptions[key].has_key("custom_text_lines"), "Field 'custom_text_lines' does not exist!")
    def test_update_config_file_to_version4(self):
        config_file = "yarss2_v3.conf"
        filename = yarss2.util.common.get_resource(config_file, path="tests/data/")
        tmp_dir = common.get_tmp_dir()
        shutil.copy(filename, tmp_dir)
        self.config = common.get_test_config(config_filename=config_file, config_dir=tmp_dir, verify_config=False)

        subscription_keys = self.config.config["subscriptions"].keys()
        last_update_values = [self.config.config["subscriptions"][key]["last_update"] for key in subscription_keys]

        # Call the function that makes the changes
        self.config.config.run_converter((3, 3), 4, self.config.update_config_to_version4)

        for i, key in enumerate(subscription_keys):
            # Test changes for "replace_last_update_with_last_match"
            self.assertTrue(self.config.config["subscriptions"][key].has_key("last_match"))
            self.assertFalse(self.config.config["subscriptions"][key].has_key("last_update"))
            self.assertEquals(self.config.config["subscriptions"][key]["last_match"], last_update_values[i])
Example #7
    def run_ase(self, job):
        (mystr, _) = self.coords2xyz(job.v)

        tmp_dir = common.get_tmp_dir()

        job_base_name = "asejob" + str(self.__get_job_counter())
        mol_geom_file = os.path.join(tmp_dir, job_base_name + ".xyz")
        ase_stdout_file = os.path.join(tmp_dir, job_base_name + ".stdout")
        results_file = os.path.join(tmp_dir,
                                    job_base_name + common.LOGFILE_EXT)

        # write input file as xyz format
        f = open(mol_geom_file, "w")
        f.write(mystr)
        f.close()

        #        p = Popen(["./aseisolator.py", self.ase_settings_file, mol_geom_file], stdout=open(ase_stdout_file, "w"))
        cmd = [
            "python", "-m", "aseisolator", self.ase_settings_file,
            mol_geom_file
        ]
        p = Popen(cmd, stdout=open(ase_stdout_file, "w"))

        (_, ret_val) = os.waitpid(p.pid, 0)
        if ret_val != 0:
            #            raise MolInterfaceException("aseisolator.py returned with " +
            #                str(ret_val) + " when attempting: " + "./aseisolator.py " +
            #                self.ase_settings_file + " " +  mol_geom_file)

            # additions from Alexei
            raise MolInterfaceException("aseisolator.py returned with " +
                                        str(ret_val) +
                                        "\nwhen attempting to run " +
                                        ' '.join(cmd) +
                                        "\nMake sure $PYTHONPATH contains " +
                                        sys.path[0])

        # load results from file
        (e, g) = pickle.load(open(results_file, "r"))

        grads_opt = self.__transform(g, job.v, "dummy")

        return common.Result(job.v, e, grads_opt)
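
# run_ase() hands the geometry to an external 'aseisolator' run and reads an
# (energy, gradient) pickle back. The same round trip written with
# subprocess.call and binary pickle modes is sketched below; the module name and
# file roles come from the snippet, everything else is illustrative:
import pickle
import subprocess

def run_isolated_job_sketch(settings_file, xyz_path, results_path):
    with open(xyz_path + ".stdout", "w") as stdout_file:
        ret = subprocess.call(["python", "-m", "aseisolator", settings_file, xyz_path],
                              stdout=stdout_file)
    if ret != 0:
        raise RuntimeError("aseisolator failed with exit code %d" % ret)
    with open(results_path, "rb") as results_file:
        return pickle.load(results_file)  # (energy, gradient) tuple
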
    def test_update_config_file_to_version3(self):
        config_file = "yarss2_v2.conf"
        filename = yarss2.util.common.get_resource(config_file, path="tests/data/")
        tmp_dir = common.get_tmp_dir()
        shutil.copy(filename, tmp_dir)
        self.config = common.get_test_config(config_filename=config_file, config_dir=tmp_dir, verify_config=False)

        # Call the function that makes the changes
        self.config.config.run_converter((2, 2), 3, self.config.update_config_to_version3)

        # Added field 'download_location'
        for key in self.config.config["subscriptions"]:
            self.assertTrue(self.config.config["subscriptions"][key].has_key("download_location"), "Field 'download_location' does not exist!")

        for key in self.config.config["rssfeeds"]:
            self.assertTrue(self.config.config["rssfeeds"][key].has_key("obey_ttl"), "Field 'obey_ttl' does not exist!")

        for key in self.config.config["email_configurations"].keys():
            self.assertTrue(not type(self.config.config["email_configurations"][key]) is str, "Field in str!")
    def test_update_config_file_from_1_0(self):
        tmp_dir = common.get_tmp_dir()
        # Copy the yarss2_v1.conf file to test dir to avoid changes to the file.
        config_file = "yarss2_v1.conf"
        filename = yarss2.util.common.get_resource(config_file, path="tests/data/")
        shutil.copy(filename, tmp_dir)
        self.config = common.get_test_config(config_filename=config_file, config_dir=tmp_dir, verify_config=False)

        self.assertEquals(self.config.config._Config__version["format"], 1)
        self.assertEquals(self.config.config._Config__version["file"], 1)

        # Verify that the old values are what we expect
        for key in self.config.config["cookies"].keys():
            self.assertEquals(type(self.config.config["cookies"][key]["value"]), list)

        # Update the config
        self.config._verify_config()

        for key in self.config.config["cookies"].keys():
            self.assertEquals(type(self.config.config["cookies"][key]["value"]), dict)

        for key in self.config.config["subscriptions"].keys():
            # last_update replaced with last_match
            self.assertFalse("last_update" in self.config.config["subscriptions"][key])
            self.assertTrue("last_match" in self.config.config["subscriptions"][key])

            # Add in paused state should be unicode
            self.assertEquals(type(self.config.config["subscriptions"][key]["add_torrents_in_paused_state"]), unicode)

            self.assertTrue("max_upload_slots" in self.config.config["subscriptions"][key])
            self.assertTrue("max_connections" in self.config.config["subscriptions"][key])
            self.assertTrue("max_upload_speed" in self.config.config["subscriptions"][key])
            self.assertTrue("prioritize_first_last_pieces" in self.config.config["subscriptions"][key])
            self.assertTrue("auto_managed" in self.config.config["subscriptions"][key])
            self.assertTrue("sequential_download" in self.config.config["subscriptions"][key])

        # Test cookie type
        for cookie in self.config.config["cookies"].values():
            self.assertEquals(type(cookie["value"]), dict)

        self.assertEquals(self.config.config._Config__version["format"], 1)
        self.assertEquals(self.config.config._Config__version["file"], 5)
Example #11
def main(argv=None):
    if argv is None:
        argv = sys.argv
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "ho", ["help", "optimise"])
        except getopt.error, msg:
             raise ASEIsolatorException(msg)
        
        mode = "calc_eg"
        for o, a in opts:
            if o in ("-h", "--help"):
                usage()
                return 0
            if o in ("-o", "--optimise"):
                mode = "optimise"
            else:
                usage()
                return -1

        if len(args) != 2:
            raise ASEIsolatorException("Exactly two input files must be given.")

        ase_job_settings = os.path.abspath(args[0])
        molecule = os.path.abspath(args[1])


        # create atoms object based on molecular geometry in file
        atoms = ase.io.read(molecule)

        jobname =  os.path.splitext(molecule)[0]

        # setup directories, filenames
        isolation_dir = os.path.basename(jobname)
        print isolation_dir

        old_dir = os.getcwd()


        # if a tmp directory is specified, then use it
        tmp_dir = common.get_tmp_dir()
        os.chdir(tmp_dir)

        # Create/change into isolation directory. This directory holds temporary files
        # specific to a computation, not including input and output files.
        if not os.path.exists(isolation_dir):
            os.mkdir(isolation_dir)
        os.chdir(isolation_dir)

        # set up calculators, etc.
        exec open(ase_job_settings).read()

        # Based on what was found in ase_job_settings, perform further
        # setup for 'atoms'
        if 'mycell' in locals():
            atoms.set_cell(mycell)
        if 'mypbc' in locals():
            atoms.set_pbc(mypbc)

        if not 'mycalc' in locals():
            raise ASEIsolatorException("'mycalc' not defined in " + ase_job_settings)

        atoms.set_calculator(mycalc)

        result_file = os.path.join(tmp_dir, jobname + common.LOGFILE_EXT)

        if mode == "calc_eg":
            # run job using ASE calculator
            g = atoms.get_forces().flatten()
            e = atoms.get_potential_energy()

            os.chdir(old_dir)

            result = (e, g)

            pickle.dump(result, open(result_file, "w"), protocol=2)

            # just for testing...
            #print pickle.load(open(result_file, "r"))

        elif mode == "optimise":
            optim = ase.LBFGS(atoms, trajectory='opt.traj')
            optim.run()
            os.chdir(old_dir)

            ase.io.write(result_file, atoms, format="traj")
        else:
            raise ASEIsolatorException("Unrecognised mode: " + mode)