# Example 1
    def execute(self, job):
        """Process a single segment file described by *job*.

        Reads the segment at ``job.path``, ensures the destination file
        exists at its full size, then writes the decoded body into the
        byte range named by the segment's part header.
        """
        source = job.path
        self.status_update(state=FileWorker.STATE_LOAD,
                           msg='Reading File',
                           file=source)

        try:
            header, part, body, trail = read_segment(source)
        except InvalidSegmentException:
            # Unreadable segment -- deliberately skipped, not an error.
            return

        # The header names the target file and its total size. Create the
        # destination pre-sized on first sight; the lock ensures only one
        # worker performs this one-time setup.
        destination = os.path.join(job.dest, header['name'])
        with self.lock:
            if not os.path.isfile(destination):
                with open(destination, 'ab') as fp:
                    fp.truncate(int(header['size']))

        self.status_update(state=FileWorker.STATE_PROC,
                           msg='Reading File',
                           file=source,
                           sbyte=part['begin'],
                           ebyte=part['end'],
                           dest=destination)

        # Part offsets are 1-based, hence the -1 when seeking.
        offset = int(part['begin']) - 1
        with open(destination, 'r+b') as fp:
            fp.seek(offset)
            fp.write(body)
            fp.flush()
# Example 2
    def execute(self, job):
        """Write one segment's payload into its place in the destination file."""
        filename = job.path
        self.status_update(state=FileWorker.STATE_LOAD, msg='Reading File',
                           file=filename)

        try:
            segment = read_segment(filename)
        except InvalidSegmentException:
            return  # Skip File
        header, part, body, trail = segment

        # Lazily create the destination at its final size; the lock makes
        # sure exactly one worker does the setup.
        destination = os.path.join(job.dest, header['name'])
        with self.lock:
            if not os.path.isfile(destination):
                fp = open(destination, 'ab')
                try:
                    fp.truncate(int(header['size']))
                finally:
                    fp.close()

        self.status_update(state=FileWorker.STATE_PROC, msg='Reading File',
                           file=filename, sbyte=part['begin'],
                           ebyte=part['end'], dest=destination)

        # 'begin' is a 1-based offset into the destination file.
        with open(destination, 'r+b') as fp:
            fp.seek(int(part['begin']) - 1)
            fp.write(body)
            fp.flush()
# Example 3
    def start(self, args, config):
        """
        1. Verify any files that exist in the destination that are sourced in the NZB.
        2. Queue segments for files that are remaining.
        3. Spin up download threads and allow them to empty the job queue.
        """
        # Init thread safe structures
        stat = Event()
        jobs = Queue()

        # For fancy printing.
        term = Terminal()

        # Parse specified segments out of NZB file.
        needed = set()
        with open(args.nzb) as fp:
            parsed = nzb_parser.parse(fp.read())
            subject = parsed.subject or args.nzb
        for file in parsed.files:
            for segment in file.segments:
                needed.add(NZBSegment(segment))

        # Check to see if destination exists, if not create.
        destination = args.dest
        if not destination:
            destination = os.path.join(os.path.dirname(os.path.realpath(args.nzb)), subject)
        if not os.path.isdir(destination):
            os.makedirs(destination)

        print 'Files will be downloaded to "{}"'.format(destination)

        print 'There are {} parts required for "{}"'.format(len(needed), args.nzb)

        # Scan destination directory for files we are looking for from the NZB.
        existing = (
            set([FileSegment(f) for f in os.listdir(destination)]) & needed
        )  # Adding the union of needed filters to just what we want.
        existing_count = len(existing)

        print "{} of these parts exist in the destination.".format(existing_count)

        if args.verify:
            failed = set()
            loop_count = 1
            for segment in existing:
                with term.location():
                    print "Checking {:<50} [{:05d}/{:05d}]{}".format(
                        segment.id, loop_count, existing_count, term.clear_eol
                    )
                try:
                    header, part, body, trail = read_segment(os.path.join(destination, segment.id))
                except InvalidSegmentException:
                    failed.add(segment)
                    # print segment.id
                    # raise
                if not check_crc(trail, body):
                    print "failed"
                    failed.add(segment)
                loop_count += 1
            print "{} of the existing parts are invalid and will be retried".format(len(failed))
            existing -= failed

        segments_to_fetch = needed - existing

        # Add files to fetch into the queue.
        # jobs.put(FetchJob(segments_to_fetch.pop(), destination))
        for segment in segments_to_fetch:
            jobs.put(FetchJob(segment, destination))

        master = TaskMaster(jobs, FetchWorker, max_threads=args.connections, status=stat, config=config["USENET"])

        thread = Thread(name="Task Master", target=master)
        thread.start()

        def status_output_control():
            # It's nice to have the total bytes.
            total_bytes = sum([s.bytes for s in segments_to_fetch])
            total_jobs = jobs.qsize()
            while thread.is_alive():
                stat.wait(timeout=1)
                self.print_status(master.threads, total_jobs, jobs.qsize(), total_bytes)
                stat.clear()
            print "Stat Exit"

        status = Thread(name="Status Printer", target=status_output_control)
        status.start()

        while jobs.qsize() > 0:
            try:
                thread.join(0.1)
            except KeyboardInterrupt:
                print "Waiting on Threads to Exit"
                master.stop()
                thread.join()
                break
        print "All Exiting"
# Example 4
    def start(self, args, config):
        """
        1. Verify any files that exist in the destination that are sourced in the NZB.
        2. Queue segments for files that are remaining.
        3. Spin up download threads and allow them to empty the job queue.
        """
        # Init thread safe structures
        stat = Event()
        jobs = Queue()

        # For fancy printing.
        term = Terminal()

        # Parse specified segments out of NZB file.
        needed = set()
        with open(args.nzb) as fp:
            parsed = nzb_parser.parse(fp.read())
            subject = parsed.subject or args.nzb
        for file in parsed.files:
            for segment in file.segments:
                needed.add(NZBSegment(segment))

        # Check to see if destination exists, if not create.
        destination = args.dest
        if not destination:
            destination = os.path.join(
                os.path.dirname(os.path.realpath(args.nzb)), subject)
        if not os.path.isdir(destination):
            os.makedirs(destination)

        print 'Files will be downloaded to "{}"'.format(destination)

        print 'There are {} parts required for "{}"'.format(
            len(needed), args.nzb)

        # Scan destination directory for files we are looking for from the NZB.
        existing = set([
            FileSegment(f) for f in os.listdir(destination)
        ]) & needed  # Adding the union of needed filters to just what we want.
        existing_count = len(existing)

        print '{} of these parts exist in the destination.'.format(
            existing_count)

        if args.verify:
            failed = set()
            loop_count = 1
            for segment in existing:
                with term.location():
                    print 'Checking {:<50} [{:05d}/{:05d}]{}'.format(
                        segment.id, loop_count, existing_count, term.clear_eol)
                try:
                    header, part, body, trail = read_segment(
                        os.path.join(destination, segment.id))
                except InvalidSegmentException:
                    failed.add(segment)
                    #print segment.id
                    #raise
                if not check_crc(trail, body):
                    print 'failed'
                    failed.add(segment)
                loop_count += 1
            print '{} of the existing parts are invalid and will be retried'.format(
                len(failed))
            existing -= failed

        segments_to_fetch = needed - existing

        # Add files to fetch into the queue.
        #jobs.put(FetchJob(segments_to_fetch.pop(), destination))
        for segment in segments_to_fetch:
            jobs.put(FetchJob(segment, destination))

        master = TaskMaster(jobs,
                            FetchWorker,
                            max_threads=args.connections,
                            status=stat,
                            config=config['USENET'])

        thread = Thread(name='Task Master', target=master)
        thread.start()

        def status_output_control():
            # It's nice to have the total bytes.
            total_bytes = sum([s.bytes for s in segments_to_fetch])
            total_jobs = jobs.qsize()
            while thread.is_alive():
                stat.wait(timeout=1)
                self.print_status(master.threads, total_jobs, jobs.qsize(),
                                  total_bytes)
                stat.clear()
            print 'Stat Exit'

        status = Thread(name='Status Printer', target=status_output_control)
        status.start()

        while jobs.qsize() > 0:
            try:
                thread.join(0.1)
            except KeyboardInterrupt:
                print 'Waiting on Threads to Exit'
                master.stop()
                thread.join()
                break
        print 'All Exiting'