def test_user(self):
    """Smoke-test the ``user`` CLI subcommand against a mocked profile feed."""
    with contexter.Contexter() as ctx:
        # Replace the network-bound page iterator with canned fixture pages.
        ctx << mock.patch('instalooter.cli.ProfileLooter.pages', MockPages('nintendo'))
        exit_code = main(["user", "nintendo", self.tmpdir, "-q", '-n', '10'])
        self.assertEqual(exit_code, 0)
        self.assertEqual(len(self.destfs.listdir('/')), 10)
def test_issue_012(self):
    """Feature request by @paramjitrohit.

    Allows downloading pictures and videos only within a timeframe.
    """
    profile_looter = ProfileLooter("nintendo", session=self.session)
    target_day = datetime.date(2018, 3, 16)
    with contexter.Contexter() as ctx:
        # Serve fixture pages instead of hitting the network.
        ctx << mock.patch.object(profile_looter, 'pages', MockPages('nintendo'))
        # A one-day window: both bounds are the same date.
        selected = list(profile_looter.medias(timeframe=[target_day, target_day]))
        self.assertEqual(len(selected), 2)
def test_issue_041(self):
    """Feature request by @liorlior

    Allow downloading only videos.
    """
    video_looter = ProfileLooter("nintendo", videos_only=True, session=self.session)
    target_day = datetime.date(2017, 3, 10)
    with contexter.Contexter() as ctx:
        # Serve fixture pages instead of hitting the network.
        ctx << mock.patch.object(video_looter, 'pages', MockPages('nintendo'))
        video_looter.download(self.destfs, timeframe=[target_day, target_day])
        # Only the single video posted on that day should have been saved.
        self.assertEqual(self.destfs.listdir("/"), ["1467639884243493431.mp4"])
def __call__(self, semaphores=None):
    """Run the task while holding its semaphores; track consecutive failures.

    Arguments:
        semaphores: Optional extra semaphores to acquire in addition to
            the task's own ``task_semaphore`` for the duration of the run.

    Returns:
        The truthy/falsy result of the underlying ``_do`` call.
    """
    extra_semaphores = semaphores or []
    with contexter.Contexter(self.task_semaphore, *extra_semaphores):
        outcome = self._do()
        # Reset the failure streak on success, extend it otherwise.
        if outcome:
            self.failures = 0
        else:
            self.failures += 1
        return outcome
def test_issue_014(self):
    """Feature request by @JFLarsen.

    Allows customizing filenames using a template following Python
    `.format()` minilanguage.
    """
    templated_looter = ProfileLooter(
        "nintendo", template="{username}.{id}", session=self.session)
    with contexter.Contexter() as ctx:
        # Serve fixture pages instead of hitting the network.
        ctx << mock.patch.object(templated_looter, 'pages', MockPages('nintendo'))
        templated_looter.download(self.destfs, media_count=5)
        # Every downloaded file must follow the "<username>.<id>" template.
        for entry in self.destfs.scandir("/"):
            self.assertTrue(entry.name.startswith('nintendo.'))
def test_issue_022(self):
    """ Thanks to @kuchenmitsahne for reporting this bug.

    Checks that using ``{datetime}`` in the template does not put
    a Windows forbidden character in the filename.
    """
    # FIX: use a raw string — in a plain literal '\|' is an invalid escape
    # sequence (DeprecationWarning, and a SyntaxWarning/SyntaxError in newer
    # Pythons). The resulting character set is unchanged.
    FORBIDDEN = set(r'<>:"/\|?*')
    looter = ProfileLooter("nintendo", template="{datetime}", session=self.session)
    with contexter.Contexter() as ctx:
        # Serve fixture pages instead of hitting the network.
        ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
        looter.download(self.destfs, media_count=5)
        # No downloaded filename may contain a Windows-forbidden character.
        for f in self.destfs.scandir("/"):
            self.assertFalse(FORBIDDEN.intersection(f.name))
def test_issue_009(self):
    """ Thanks to @kurtmaia for reporting this bug.

    Checks that adding metadata to pictures downloaded from a hashtag
    works as well.
    """
    hashtag_looter = HashtagLooter(
        "fluoxetine", add_metadata=True, session=self.session)
    with contexter.Contexter() as ctx:
        # Serve fixture pages instead of hitting the network.
        ctx << mock.patch.object(hashtag_looter, 'pages', MockPages('fluoxetine'))
        hashtag_looter.download(self.destfs, media_count=10)
        # Every file must carry EXIF metadata written by the looter.
        for filename in self.destfs.listdir("/"):
            exif_data = piexif.load(self.destfs.getbytes(filename))
            self.assertTrue(exif_data['Exif'])  # Date & Caption
            self.assertTrue(exif_data['0th'])  # Image creator
def test_pr_122_download_videos(self):
    """Feature implemented by @susundberg.

    Set the access time and modification time of a downloaded media
    according to its IG date.
    """
    # Test download_videos
    looter = self._pr_122_looter()
    with contexter.Contexter() as ctx:
        # Serve fixture pages instead of hitting the network.
        ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
        video_media = next(m for m in looter.medias() if m['is_video'])
        looter.download_videos(self.destfs, media_count=1)
        details = self.destfs.getdetails('{}.mp4'.format(video_media['shortcode']))
        # Both filesystem timestamps must equal the media's IG timestamp.
        expected_ts = video_media['taken_at_timestamp']
        self.assertEqual(details.raw["details"]["accessed"], expected_ts)
        self.assertEqual(details.raw["details"]["modified"], expected_ts)
def test_issue_066(self):
    """Thanks to @douglasrizzo for reporting this bug.

    Check that likescount and commentscount can be used in filename
    templates without causing the program to crash.
    """
    looter = ProfileLooter(
        "nintendo",
        get_videos=True,
        add_metadata=True,
        template='{id}-{likescount}-{commentscount}',
        session=self.session)
    with contexter.Contexter() as ctx:
        # Serve fixture pages instead of hitting the network.
        ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
        looter.download(self.destfs, media_count=10)
        for image in self.destfs.listdir("/"):
            # FIX: raw string and escaped dot — the previous unescaped '.'
            # matched any character before the extension.
            self.assertRegex(image, r'[a-zA-Z0-9]*-[0-9]*-[0-9]*\.(jpg|mp4)')
def test_issue_015(self):
    """ Feature request by @MohamedIM.

    Checks that videos are not downloaded several times if present
    already in the destination directory.
    """
    looter = ProfileLooter("nintendo", session=self.session)
    with contexter.Contexter() as ctx:
        # Serve fixture pages instead of hitting the network.
        ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
        looter.download_videos(self.destfs, media_count=1)
        video_file = next(self.destfs.filterdir("/", ["*.mp4"]))
        # Remember the timestamp, download again, and check it is untouched.
        first_access = self.destfs.getdetails(video_file.name).accessed
        looter.download_videos(self.destfs, media_count=1)
        self.assertEqual(
            first_access, self.destfs.getdetails(video_file.name).accessed)
def test_issue_019(self):
    """ Thanks to @emijawdo for reporting this bug.

    Checks that instalooter does not crash when not given a destination
    directory and uses the current directory.
    """
    previous_cwd = os.getcwd()
    os.chdir(self.tmpdir)
    try:
        with contexter.Contexter() as ctx:
            # Serve fixture pages instead of hitting the network.
            ctx << mock.patch(
                'instalooter.looters.InstaLooter.pages', MockPages('nintendo'))
            # No destination argument: the CLI must fall back to the cwd.
            main(["user", "nintendo", "-n", "3", "-q"])
            self.assertGreaterEqual(len(self.destfs.listdir("/")), 3)
    finally:
        # Always restore the working directory for subsequent tests.
        os.chdir(previous_cwd)
def load_data(self, name):
    """Load the result, vector and module records for the named test case.

    Skips the test when the case archive is missing from ``DATAFS``.
    """
    archive_path = "cases/{}.tar.xz".format(name)
    if not DATAFS.exists(archive_path):
        raise unittest.SkipTest("no test case found")
    with contexter.Contexter() as ctx:
        # open FASTA files
        case_fs = ctx << fs.archive.open_archive(DATAFS, archive_path)
        result_handle = ctx << case_fs.open("result.fa")
        vector_handle = ctx << case_fs.open("vector.fa")
        modules_handle = ctx << case_fs.open("modules.fa")
        # load records from FASTA handles
        result = CircularRecord(Bio.SeqIO.read(result_handle, "fasta"))
        vector = CircularRecord(Bio.SeqIO.read(vector_handle, "fasta"))
        modules = {}
        for record in Bio.SeqIO.parse(modules_handle, "fasta"):
            modules[record.id] = CircularRecord(record)
        return result, vector, modules
def new_func(self, *args, **kwargs):
    # Hand the wrapped function a context stack that is unwound on exit.
    with contexter.Contexter() as stack:
        return func(self, stack, *args, **kwargs)
def __init__(
    self,
    handle: Union[BinaryIO, str, None] = None,
    import_depth: int = -1,
    timeout: int = 5,
):
    """Create a new `Ontology` instance.

    Arguments:
        handle (str, ~typing.BinaryIO, or None): Either the path to a file
            or a binary file handle that contains a serialized version of
            the ontology. If `None` is given, an empty `Ontology` is
            returned and can be populated manually.
        import_depth (int): The maximum depth of imports to resolve in the
            ontology tree. *Note that the library may not behave correctly
            when not importing the complete dependency tree, so you should
            probably use the default value and import everything*.
        timeout (int): The timeout in seconds to use when performing
            network I/O, for instance when connecting to the OBO library
            to download imports.

    Raises:
        TypeError: When the given ``handle`` could not be used to parse
            an ontology.
        ValueError: When the given ``handle`` contains a serialized
            ontology not supported by any of the builtin parsers.
    """
    from .parsers import BaseParser

    with contexter.Contexter() as ctx:
        self.import_depth = import_depth
        self.timeout = timeout
        self.imports = dict()
        self._terms: Dict[str, TermData] = {}
        self._relationships: Dict[str, RelationshipData] = {}

        # Creating an ontology from scratch is supported
        if handle is None:
            self.metadata = Metadata()
            self.path = self.handle = None
            return

        # Get the path and the handle from arguments
        if isinstance(handle, str):
            self.path: str = handle
            # Register the handles with the context so they are closed
            # automatically once parsing is done.
            self.handle = ctx << get_handle(handle, timeout)
            _handle = ctx << decompress(self.handle)
        elif hasattr(handle, "read"):
            self.path: str = get_location(handle)
            self.handle = handle
            _handle = decompress(self.handle)
        else:
            raise TypeError(f"could not parse ontology from {handle!r}")

        # Parse the ontology using the appropriate parser, probing each
        # builtin parser against the buffered head of the stream.
        buffer = _handle.peek(io.DEFAULT_BUFFER_SIZE)
        for cls in BaseParser.__subclasses__():
            if cls.can_parse(typing.cast(str, self.path), buffer):
                cls(self).parse_from(_handle)
                break
        else:
            # FIX: the f-string literal was broken across a physical line
            # (a syntax error); it is rejoined into a single literal here.
            raise ValueError(f"could not find a parser to parse {handle!r}")