def test3(self):
        """Exact-date filter: only 3VCO was released on 2013-03-06.

        Filters self.pdb with an inclusive single-day ReleaseDate window and
        checks membership of four known PDB IDs in the surviving keys.
        """
        pdb_3 = self.pdb.filter(ReleaseDate("2013-03-06", "2013-03-06"))
        results_3 = pdb_3.keys().collect()

        # assertIn/assertNotIn report the container contents on failure,
        # unlike assertTrue/assertFalse on a bare containment expression.
        self.assertNotIn('1O6Y', results_3)
        self.assertNotIn('4MYA', results_3)
        self.assertIn('3VCO', results_3)
        self.assertNotIn('5N0Y', results_3)
    def test4(self):
        """Exact-date filter: only 5N0Y was released on 2017-05-24.

        Filters self.pdb with an inclusive single-day ReleaseDate window and
        checks membership of four known PDB IDs in the surviving keys.
        """
        pdb_4 = self.pdb.filter(ReleaseDate("2017-05-24", "2017-05-24"))
        results_4 = pdb_4.keys().collect()

        # assertIn/assertNotIn report the container contents on failure,
        # unlike assertTrue/assertFalse on a bare containment expression.
        self.assertNotIn('1O6Y', results_4)
        self.assertNotIn('4MYA', results_4)
        self.assertNotIn('3VCO', results_4)
        self.assertIn('5N0Y', results_4)
    def test2(self):
        """Decade-wide filter (2010-01-01 .. 2020-01-01) keeps all but 1O6Y.

        Filters self.pdb with an inclusive ten-year ReleaseDate window and
        checks membership of four known PDB IDs in the surviving keys.
        """
        pdb_2 = self.pdb.filter(ReleaseDate("2010-01-01", "2020-01-01"))
        results_2 = pdb_2.keys().collect()

        # assertIn/assertNotIn report the container contents on failure,
        # unlike assertTrue/assertFalse on a bare containment expression.
        self.assertNotIn('1O6Y', results_2)
        self.assertIn('4MYA', results_2)
        self.assertIn('3VCO', results_2)
        self.assertIn('5N0Y', results_2)
Example #4
0
# ## Configure Spark

# In[2]:

# "local[*]" runs Spark locally using all available cores.
conf = SparkConf().setMaster("local[*]").setAppName("FilterByReleaseDate")
sc = SparkContext(conf=conf)

# ## Read in MMTF Files, filter and count

# In[3]:

# NOTE(review): relative path — assumes the notebook is run from its own
# directory so the sample data resolves; verify against the repo layout.
path = "../../resources/mmtf_reduced_sample/"

# Read the Hadoop sequence file of MMTF structures and keep only entries
# whose release date falls in the inclusive window 2000-01-28..2017-02-28.
structures = mmtfReader.read_sequence_file(path, sc).filter(
    ReleaseDate("2000-01-28", "2017-02-28"))

# Report how many structures survived the release-date filter.
# Fixed grammar in the user-facing message ("structure" -> "structures",
# "released" -> "were released").
print(
    f"Number of structures released between 2000-01-28 and 2017-02-28 is: {structures.count()}"
)

# ## Visualize Structures

# In[4]:

# Collect the PDB IDs of the filtered structures to the driver and render
# them with the notebook viewer widget using a line representation.
structure_names = structures.keys().collect()
view_structure(structure_names, style='line')

# ## Terminate Spark

# In[5]: