Example #1
def fail_to_history(nzo: NzbObject, url: str, msg="", content=False):
    """Create History entry for failed URL Fetch
    msg: message to be logged
    content: report in history that cause is a bad NZB file
    """
    # Remove the "Trying to fetch" part
    if url:
        nzo.filename = url
        nzo.final_name = url.strip()

    if content:
        # Bad content
        msg = T("Unusable NZB file")
    else:
        # Failed fetch
        msg = T("URL Fetching failed; %s") % msg

    # Mark as failed
    nzo.set_unpack_info("Source", msg)
    nzo.fail_msg = msg

    notifier.send_notification(
        T("URL Fetching failed; %s") % "", "%s\n%s" % (msg, url), "failed", nzo.cat
    )
    if cfg.email_endjob() > 0:
        emailer.badfetch_mail(msg, url)

    # Parse category to make sure script is set correctly after a grab
    nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script)

    # Add to history and run script if desired
    sabnzbd.NzbQueue.remove(nzo.nzo_id)
    sabnzbd.PostProcessor.process(nzo)
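
A rough, self-contained sketch of the bookkeeping this function performs; FakeNzo and fake_fail are hypothetical stand-ins used only for illustration (SABnzbd's NzbObject and the T() translation helper are not involved):

class FakeNzo:
    """Minimal stand-in that records the same fields fail_to_history() touches."""
    def __init__(self):
        self.filename = self.final_name = self.fail_msg = ""
        self.unpack_info = {}

    def set_unpack_info(self, key, msg):
        self.unpack_info.setdefault(key, []).append(msg)


def fake_fail(nzo, url, msg="", content=False):
    # Same decision logic as above: bad content wins over the fetch error text
    nzo.filename, nzo.final_name = url, url.strip()
    nzo.fail_msg = "Unusable NZB file" if content else "URL Fetching failed; %s" % msg
    nzo.set_unpack_info("Source", nzo.fail_msg)


nzo = FakeNzo()
fake_fail(nzo, "https://example.com/some.nzb ", msg="404 Not Found")
print(nzo.final_name, "->", nzo.fail_msg)
# https://example.com/some.nzb -> URL Fetching failed; 404 Not Found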
Example #2
def try_sfv_check(nzo: NzbObject, workdir):
    """Attempt to verify set using SFV file
    Return None if no SFV-sets, True/False based on verification
    """
    # Get list of SFV names
    sfvs = globber_full(workdir, "*.sfv")

    # If no files named *.sfv, let's search for obfuscated SFV files
    if not sfvs:
        files = globber_full(workdir, "*")
        for file in files:
            if is_sfv_file(file):
                logging.debug("Found and will use obfuscated SFV file: %s",
                              file)
                sfvs.append(file)
        if not sfvs:
            # still no SFV, so:
            return None

    result = sfv_check(sfvs, nzo, workdir)
    if not result:
        print_sfv = [os.path.basename(sfv) for sfv in sfvs]
        fail_msg = T('Some files failed to verify against "%s"') % "; ".join(
            print_sfv)
        nzo.set_unpack_info("Repair", fail_msg)
        nzo.status = Status.FAILED
        nzo.fail_msg = fail_msg
        return False

    # Success
    nzo.set_unpack_info("Repair", T("Verified successfully using SFV files"))
    return True
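
sfv_check() itself is not part of this listing. As a minimal sketch of the format it verifies against, assuming standard SFV lines of the form "filename CRC32-in-hex" (an illustration, not SABnzbd's implementation):

import os
import tempfile
import zlib


def crc32_of_file(path):
    """Return the CRC32 of a file as an 8-digit uppercase hex string."""
    crc = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            crc = zlib.crc32(chunk, crc)
    return "%08X" % (crc & 0xFFFFFFFF)


# Build a tiny demo file plus a matching SFV line, then verify it
workdir = tempfile.mkdtemp()
demo_file = os.path.join(workdir, "linux.iso")
with open(demo_file, "wb") as f:
    f.write(b"not really an iso")
sfv_line = "linux.iso %s" % crc32_of_file(demo_file)

name, expected_crc = sfv_line.rsplit(None, 1)
print(crc32_of_file(os.path.join(workdir, name)) == expected_crc)  # True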
Example #3
def try_rar_check(nzo: NzbObject, rars):
    """Attempt to verify set using the RARs
    Return True if verified, False when failed
    When setname is '', all RAR files will be used, otherwise only the matching one
    If no RAR's are found, returns True
    """
    # Sort for better processing
    rars.sort(key=functools.cmp_to_key(rar_sort))

    # Test
    if rars:
        setname = setname_from_path(rars[0])
        nzo.status = Status.VERIFYING
        nzo.set_unpack_info("Repair", T("Trying RAR-based verification"),
                            setname)
        nzo.set_action_line(T("Trying RAR-based verification"), "...")
        try:
            # Set path to unrar and open the file
            # Requires de-unicode for RarFile to work!
            rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
            zf = rarfile.RarFile(rars[0])

            # Skip the test if it's encrypted; we cannot verify without the password
            if zf.needs_password():
                msg = T("[%s] RAR-based verification failed: %s") % (
                    setname, T("Passworded"))
                nzo.set_unpack_info("Repair", msg)
                return True

            # Will throw exception if something is wrong
            zf.testrar()
            # Success!
            msg = T("RAR files verified successfully")
            nzo.set_unpack_info("Repair", msg, setname)
            logging.info(msg)
            return True
        except rarfile.Error as e:
            nzo.fail_msg = T("RAR files failed to verify")
            msg = T("[%s] RAR-based verification failed: %s") % (setname, e)
            nzo.set_unpack_info("Repair", msg, setname)
            logging.info(msg)
            return False
    else:
        # No rar-files, so just continue
        return True
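
The verification above goes through SABnzbd's bundled rarfile module. The publicly available rarfile package exposes the same calls used here (RarFile, needs_password(), testrar()), so a stripped-down standalone check might look like the sketch below; the archive path is an assumption and an unrar tool must be available for testrar() to work:

import rarfile  # the PyPI "rarfile" package


def quick_rar_test(path):
    """Return True when the archive tests clean, False on CRC/structure errors,
    None when it is password protected and cannot be tested."""
    rf = rarfile.RarFile(path)
    if rf.needs_password():
        return None
    try:
        rf.testrar()  # raises rarfile.Error on a broken archive
        return True
    except rarfile.Error:
        return False


# print(quick_rar_test("example.part001.rar"))  # hypothetical path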
Example #4
def rar_renamer(nzo: NzbObject, workdir):
    """ Deobfuscate rar file names: Use header and content information to give RAR-files decent names """
    nzo.status = Status.VERIFYING
    nzo.set_unpack_info("Repair", T("Trying RAR renamer"))
    nzo.set_action_line(T("Trying RAR renamer"), "...")

    renamed_files = 0

    # This is the most important data structure (in case of mixed obfuscated rar sets)
    rarvolnr = {}
    # rarvolnr maps each rar volume number to the rar file names and their respective contents.
    # For example: rarvolnr[6]['somerandomfilename.rar'] = ['readme.txt', 'linux.iso'],
    # which means 'somerandomfilename.rar' is volume number 6 and contains 'readme.txt' and 'linux.iso'.
    # If we then find a rar file with volume number 7 that also contains 'linux.iso', we have a match!

    # The volume number and real extension of an (obfuscated) rar file,
    # so volnrext['dfakjldfalkjdfl.blabla'] = (14, 'part014.rar') or (2, 'r000')
    # Not strictly needed, but handy to avoid a second lookup when renaming
    volnrext = {}

    # Scan rar files in workdir, but not subdirs
    workdir_files = os.listdir(workdir)
    for file_to_check in workdir_files:
        file_to_check = os.path.join(workdir, file_to_check)
        # We only want files:
        if not os.path.isfile(file_to_check):
            continue
        # The function will check if it's a RAR-file
        # We do a sanity-check for the returned number
        rar_vol, new_extension = rarvolinfo.get_rar_extension(file_to_check)
        if 0 < rar_vol < 1000:
            logging.debug("Detected volume-number %s from RAR-header: %s", rar_vol, file_to_check)
            volnrext[file_to_check] = (rar_vol, new_extension)
            # The files inside the rar file
            rar_contents = rarfile.RarFile(file_to_check, single_file_check=True).filelist()
            # Create the entry for this volume number if it does not yet exist
            if rar_vol not in rarvolnr:
                rarvolnr[rar_vol] = {}
            # Store the contents for matching (if needed)
            rarvolnr[rar_vol][file_to_check] = rar_contents
        else:
            logging.debug("No RAR-volume-number found in %s", file_to_check)

    logging.debug("Deobfuscate: rarvolnr is: %s", rarvolnr)
    logging.debug("Deobfuscate: volnrext is: %s", volnrext)

    # Could be that there are no rar files at all, in which case we stop
    if not rarvolnr:
        return renamed_files

    # The number of mixed rar sets is the largest number of rar files seen for any volume number
    numberofrarsets = max(len(files) for files in rarvolnr.values())
    logging.debug("Number of rarsets is %s", numberofrarsets)

    if numberofrarsets == 1:
        # Just one obfuscated rarset ... that's easy
        logging.debug("Deobfuscate: Just one obfuscated rarset")
        for filename in volnrext:
            new_rar_name = "%s.%s" % (nzo.final_name, volnrext[filename][1])
            new_rar_name = os.path.join(workdir, new_rar_name)
            new_rar_name = get_unique_filename(new_rar_name)
            logging.debug("Deobfuscate: Renaming %s to %s" %
                          (filename, new_rar_name))
            renamer(filename, new_rar_name)
            renamed_files += 1
        return renamed_files

    # numberofrarsets is bigger than 1, so we have mixed rar sets and need pre-checking

    # Sanity check of the rar sets
    # Get the highest rar part number (that's the upper limit):
    highest_rar = max(rarvolnr)
    # A staircase check: the number of rar sets should not go up, but stay the same or go down
    how_many_previous = 1000  # 1000 mixed rar sets ... should be enough ... typical is 1, 2 or maybe 3
    # Start at part001.rar and go up to the highest
    for rar_set_number in range(1, highest_rar + 1):
        if rar_set_number not in rarvolnr:
            # This volume number does not exist at all
            logging.warning("rarset %s is missing completely, so I can't deobfuscate.", rar_set_number)
            return 0
        how_many_here = len(rarvolnr[rar_set_number])
        # OK, it exists, now let's check the count didn't go up
        if how_many_here > how_many_previous:
            # This should not happen: more rar files at this volume number than at the previous one
            logging.warning("no staircase! rarset %s is higher than previous, so I can't deobfuscate.", rar_set_number)
            return 0
        how_many_previous = how_many_here

    # OK, that looked OK (a declining staircase), so we can safely proceed
    # More than one obfuscated rarset, so we must do matching based on the files inside the rar files

    # Assign (arbitrary) rar set names on a first come, first served basis
    rarsetname = {}  # which rar set each file belongs to, so rar set 'A', or 'B', or ...
    mychar = "A"
    # First things first: assign a rarsetname to the rar files which have volume number 1
    for base_obfuscated_filename in rarvolnr[1]:
        rarsetname[base_obfuscated_filename] = mychar + "--" + nzo.final_name
        mychar = chr(ord(mychar) + 1)
    logging.debug("Deobfuscate: rarsetname %s", rarsetname)

    # Do the matching, layer by layer (read: rarvolnumber)
    # So, all rar files with rarvolnr 1, find the contents (files inside the rar),
    # and match with rarfiles with rarvolnr 2, and put them in the correct rarset.
    # And so on, until the highest rarvolnr minus 1 matched against highest rarvolnr
    for n in range(1, len(rarvolnr)):
        logging.debug("Deobfuscate: Finding matches between rar sets %s and %s", n, n + 1)
        for base_obfuscated_filename in rarvolnr[n]:
            matchcounter = 0
            for next_obfuscated_filename in rarvolnr[n + 1]:
                # A less strict alternative would be a set intersection:
                # set(rarvolnr[n][base_obfuscated_filename]).intersection(set(rarvolnr[n + 1][next_obfuscated_filename]))
                # Check if the last filename inside the existing rar matches the first filename in the following rar
                if rarvolnr[n][base_obfuscated_filename][-1] == rarvolnr[n + 1][next_obfuscated_filename][0]:
                    try:
                        rarsetname[next_obfuscated_filename] = rarsetname[base_obfuscated_filename]
                        matchcounter += 1
                    except KeyError:
                        logging.warning(T("No matching earlier rar file for %s"), next_obfuscated_filename)
            if matchcounter > 1:
                logging.info("Deobfuscate: more than one match, so risk of false positive matching.")

    # Do the renaming:
    for filename in rarsetname:
        new_rar_name = "%s.%s" % (rarsetname[filename], volnrext[filename][1])
        new_rar_name = os.path.join(workdir, new_rar_name)
        new_rar_name = get_unique_filename(new_rar_name)
        logging.debug("Deobfuscate: Renaming %s to %s" %
                      (filename, new_rar_name))
        renamer(filename, new_rar_name)
        renamed_files += 1

    # Done: The obfuscated rar files have now been renamed to regular formatted filenames
    return renamed_files
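
The heart of the matching above is the volume-chaining rule: a file that spans a volume boundary appears as the last entry of volume n and the first entry of volume n+1. A toy, self-contained illustration of that rule with invented data (not SABnzbd output):

# Per volume number: obfuscated filename -> ordered list of files stored inside it
rarvolnr = {
    1: {"aaa.bin": ["readme.txt", "linux.iso"], "zzz.bin": ["notes.txt", "bsd.img"]},
    2: {"qqq.bin": ["linux.iso"], "rrr.bin": ["bsd.img"]},
}

# Seed the set names from volume 1, then chain each following volume onto a known set
rarsetname = {name: chr(ord("A") + i) for i, name in enumerate(rarvolnr[1])}
for vol in range(1, len(rarvolnr)):
    for prev_name, prev_contents in rarvolnr[vol].items():
        for next_name, next_contents in rarvolnr[vol + 1].items():
            if prev_contents[-1] == next_contents[0]:  # a file spans the volume boundary
                rarsetname[next_name] = rarsetname[prev_name]

print(rarsetname)  # {'aaa.bin': 'A', 'zzz.bin': 'B', 'qqq.bin': 'A', 'rrr.bin': 'B'}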
Example #5
def parring(nzo: NzbObject, workdir: str):
    """ Perform par processing. Returns: (par_error, re_add) """
    logging.info("Starting verification and repair of %s", nzo.final_name)
    par_error = False
    re_add = False

    # Get verification status of sets
    verified = sabnzbd.load_data(VERIFIED_FILE, nzo.admin_path,
                                 remove=False) or {}

    # If all were verified successfully, we skip the rest of the checks
    if verified and all(verified.values()):
        logging.info("Skipping repair, all sets previously verified: %s",
                     verified)
        return par_error, re_add

    if nzo.extrapars:
        # Need to make a copy because it can change during iteration
        single = len(nzo.extrapars) == 1
        for setname in list(nzo.extrapars):
            if cfg.ignore_samples() and RE_SAMPLE.search(setname.lower()):
                continue
            # Skip sets that were already tried
            if not verified.get(setname, False):
                logging.info("Running verification and repair on set %s",
                             setname)
                parfile_nzf = nzo.partable[setname]

                # Check if the par2 file wasn't deleted and if we have more files in the parset
                if os.path.exists(os.path.join(nzo.download_path, parfile_nzf.filename)) or nzo.extrapars[setname]:
                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
                    re_add = re_add or need_re_add
                    verified[setname] = res
                else:
                    continue
                par_error = par_error or not res

    elif not verified.get("", False):
        # No par2-sets found, skipped if already tried before
        logging.info("No par2 sets for %s", nzo.final_name)
        nzo.set_unpack_info("Repair", T("[%s] No par2 sets") % nzo.final_name)

        # Try SFV-based verification and rename
        sfv_check_result = None
        if cfg.sfv_check() and not verified.get("", False):
            sfv_check_result = try_sfv_check(nzo, workdir)
            par_error = sfv_check_result is False

        # If no luck with SFV, do RAR-check or RAR-rename
        if sfv_check_result is None and cfg.enable_unrar():
            # Check for RAR's with a sensible extension
            _, _, rars, _, _ = build_filelists(workdir, check_rar=False)
            # If there's no RAR's, they might be super-obfuscated
            if not rars:
                # Returns number of renamed RAR's
                if rar_renamer(nzo, workdir):
                    # Re-parse the files so we can do RAR-check
                    _, _, rars, _, _ = build_filelists(workdir)
            if rars:
                par_error = not try_rar_check(nzo, rars)

        # Save that we already tried SFV/RAR-verification
        verified[""] = not par_error

    if re_add:
        logging.info("Re-added %s to queue", nzo.final_name)
        if nzo.priority != FORCE_PRIORITY:
            nzo.priority = REPAIR_PRIORITY
        nzo.status = Status.FETCHING
        sabnzbd.NzbQueue.add(nzo)
        sabnzbd.Downloader.resume_from_postproc()

    sabnzbd.save_data(verified, VERIFIED_FILE, nzo.admin_path)

    logging.info("Verification and repair finished for %s", nzo.final_name)
    return par_error, re_add
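
parring() keeps a per-job cache of which sets already verified, so a later post-processing attempt only re-runs the failed sets. A toy illustration of that gate with invented set names (SABnzbd persists the real dict via save_data/load_data, which are not shown here):

# setname -> verification result; "" is used for the SFV/RAR fallback path
verified = {"MyShow.S01E01": True, "MyShow.S01E01.subs": False}

if verified and all(verified.values()):
    print("all sets verified earlier, skip repair")
else:
    to_retry = [name for name, ok in verified.items() if not ok]
    print("re-verify only:", to_retry)  # ['MyShow.S01E01.subs']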
Example #6
def process_job(nzo: NzbObject):
    """ Process one job """
    start = time.time()

    # keep track of whether we can continue
    all_ok = True
    # keep track of par problems
    par_error = False
    # keep track of any unpacking errors
    unpack_error = False
    # Signal empty download, for when 'empty_postproc' is enabled
    empty = False
    nzb_list = []
    # These need to be initialized in case of a crash
    workdir_complete = ""
    script_log = ""
    script_line = ""

    # Get the job flags
    nzo.save_attribs()
    flag_repair, flag_unpack, flag_delete = nzo.repair_opts
    # Normalize PP
    if flag_delete:
        flag_unpack = True
    if flag_unpack:
        flag_repair = True

    # Get the NZB name
    filename = nzo.final_name

    # Download-processes can mark job as failed, skip all steps
    if nzo.fail_msg:
        all_ok = False
        par_error = True
        unpack_error = 1

    try:
        # Get the folder containing the download result
        workdir = nzo.download_path
        tmp_workdir_complete = None

        # if no files are present (except __admin__), fail the job
        if all_ok and len(globber(workdir)) < 2:
            if nzo.precheck:
                _, ratio = nzo.check_availability_ratio()
                emsg = T(
                    "Download might fail, only %s of required %s available"
                ) % (ratio, cfg.req_completion_rate())
            else:
                emsg = T("Download failed - Not on your server(s)")
                empty = True
            emsg += " - https://sabnzbd.org/not-complete"
            nzo.fail_msg = emsg
            nzo.set_unpack_info("Download", emsg)
            nzo.status = Status.FAILED
            # do not run unpacking or parity verification
            flag_repair = flag_unpack = False
            all_ok = cfg.empty_postproc() and empty
            if not all_ok:
                par_error = True
                unpack_error = 1

        script = nzo.script
        logging.info(
            "Starting Post-Processing on %s => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s",
            filename,
            flag_repair,
            flag_unpack,
            flag_delete,
            script,
            nzo.cat,
        )

        # Set complete dir to workdir in case we need to abort
        workdir_complete = workdir

        # Send post-processing notification
        notifier.send_notification(T("Post-processing"), nzo.final_name, "pp",
                                   nzo.cat)

        # Par processing, if enabled
        if all_ok and flag_repair:
            par_error, re_add = parring(nzo, workdir)
            if re_add:
                # Try to get more par files
                return False

        # If we don't need extra par2, we can disconnect
        if sabnzbd.NzbQueue.actives(grabs=False) == 0 and cfg.autodisconnect():
            # This was the last job, close server connections
            sabnzbd.Downloader.disconnect()

        # Sanitize the resulting files
        if sabnzbd.WIN32:
            sanitize_files_in_folder(workdir)

        # Check if user allows unsafe post-processing
        if flag_repair and cfg.safe_postproc():
            all_ok = all_ok and not par_error

        if all_ok:
            # Fix encodings
            fix_unix_encoding(workdir)

            # Use dirs generated by direct-unpacker
            if nzo.direct_unpacker and nzo.direct_unpacker.unpack_dir_info:
                (
                    tmp_workdir_complete,
                    workdir_complete,
                    file_sorter,
                    one_folder,
                    marker_file,
                ) = nzo.direct_unpacker.unpack_dir_info
            else:
                # Generate extraction path
                tmp_workdir_complete, workdir_complete, file_sorter, one_folder, marker_file = prepare_extraction_path(nzo)

            newfiles = []
            # Run Stage 2: Unpack
            if flag_unpack:
                # Set the current nzo status to "Extracting...". Used in History
                nzo.status = Status.EXTRACTING
                logging.info("Running unpack_magic on %s", filename)
                unpack_error, newfiles = unpack_magic(nzo, workdir,
                                                      tmp_workdir_complete,
                                                      flag_delete, one_folder,
                                                      (), (), (), (), ())
                logging.info("Unpacked files %s", newfiles)

                if sabnzbd.WIN32:
                    # Sanitize the resulting files
                    newfiles = sanitize_files_in_folder(tmp_workdir_complete)
                logging.info("Finished unpack_magic on %s", filename)

            if cfg.safe_postproc():
                all_ok = all_ok and not unpack_error

            if all_ok:
                # Move any (left-over) files to destination
                nzo.status = Status.MOVING
                nzo.set_action_line(T("Moving"), "...")
                for root, _dirs, files in os.walk(workdir):
                    if not root.endswith(JOB_ADMIN):
                        for file_ in files:
                            path = os.path.join(root, file_)
                            new_path = path.replace(workdir,
                                                    tmp_workdir_complete)
                            ok, new_path = move_to_path(path, new_path)
                            if new_path:
                                newfiles.append(new_path)
                            if not ok:
                                nzo.set_unpack_info(
                                    "Unpack",
                                    T("Failed moving %s to %s") %
                                    (path, new_path))
                                all_ok = False
                                break

            # Set permissions right
            set_permissions(tmp_workdir_complete)

            if all_ok and marker_file:
                del_marker(os.path.join(tmp_workdir_complete, marker_file))
                remove_from_list(marker_file, newfiles)

            if all_ok:
                # Remove files matching the cleanup list
                cleanup_list(tmp_workdir_complete, skip_nzb=True)

                # Check if this is an NZB-only download, if so redirect to queue
                # except when PP was Download-only
                if flag_repair:
                    nzb_list = nzb_redirect(tmp_workdir_complete,
                                            nzo.final_name, nzo.pp, script,
                                            nzo.cat, nzo.priority)
                else:
                    nzb_list = None
                if nzb_list:
                    nzo.set_unpack_info("Download",
                                        T("Sent %s to queue") % nzb_list)
                    cleanup_empty_directories(tmp_workdir_complete)
                else:
                    # Full cleanup including nzb's
                    cleanup_list(tmp_workdir_complete, skip_nzb=False)

        script_output = ""
        script_ret = 0
        if not nzb_list:
            # Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if not all_ok:
                    # Rename failed folders so they are easy to recognize
                    workdir_complete = tmp_workdir_complete.replace(
                        "_UNPACK_", "_FAILED_")
                    workdir_complete = get_unique_path(workdir_complete,
                                                       create_dir=False)

                try:
                    newfiles = rename_and_collapse_folder(
                        tmp_workdir_complete, workdir_complete, newfiles)
                except:
                    logging.error(
                        T('Error renaming "%s" to "%s"'),
                        clip_path(tmp_workdir_complete),
                        clip_path(workdir_complete),
                    )
                    logging.info("Traceback: ", exc_info=True)
                    # Better disable sorting because filenames are all off now
                    file_sorter.sort_file = None

            if empty:
                job_result = -1
            else:
                job_result = int(par_error) + int(bool(unpack_error)) * 2

            if cfg.ignore_samples():
                remove_samples(workdir_complete)

            # TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
            if all_ok and file_sorter.sort_file:
                if newfiles:
                    file_sorter.rename(newfiles, workdir_complete)
                    workdir_complete, ok = file_sorter.move(workdir_complete)
                else:
                    workdir_complete, ok = file_sorter.rename_with_ext(
                        workdir_complete)
                if not ok:
                    nzo.set_unpack_info("Unpack", T("Failed to move files"))
                    all_ok = False

            if cfg.deobfuscate_final_filenames() and all_ok and not nzb_list:
                # Deobfuscate the filenames
                logging.info("Running deobfuscate")
                deobfuscate.deobfuscate_list(newfiles, nzo.final_name)

            # Run the user script
            script_path = make_script_path(script)
            if (all_ok or not cfg.safe_postproc()) and (
                    not nzb_list) and script_path:
                # Set the current nzo status to "Ext Script...". Used in History
                nzo.status = Status.RUNNING
                nzo.set_action_line(T("Running script"), script)
                nzo.set_unpack_info("Script",
                                    T("Running user script %s") % script,
                                    unique=True)
                script_log, script_ret = external_processing(
                    script_path, nzo, clip_path(workdir_complete),
                    nzo.final_name, job_result)
                script_line = get_last_line(script_log)
                if script_log:
                    script_output = nzo.nzo_id
                if script_line:
                    nzo.set_unpack_info("Script", script_line, unique=True)
                else:
                    nzo.set_unpack_info("Script",
                                        T("Ran %s") % script,
                                        unique=True)
            else:
                script = ""
                script_line = ""
                script_ret = 0

        # Maybe bad script result should fail job
        if script_ret and cfg.script_can_fail():
            script_error = True
            all_ok = False
            nzo.fail_msg = T("Script exit code is %s") % script_ret
        else:
            script_error = False

        # Email the results
        if (not nzb_list) and cfg.email_endjob():
            if (cfg.email_endjob()
                    == 1) or (cfg.email_endjob() == 2 and
                              (unpack_error or par_error or script_error)):
                emailer.endjob(
                    nzo.final_name,
                    nzo.cat,
                    all_ok,
                    workdir_complete,
                    nzo.bytes_downloaded,
                    nzo.fail_msg,
                    nzo.unpack_info,
                    script,
                    script_log,
                    script_ret,
                )

        if script_output:
            # Can do this only now, otherwise it would show up in the email
            if script_ret:
                script_ret = "Exit(%s) " % script_ret
            else:
                script_ret = ""
            if len(script_log.rstrip().split("\n")) > 1:
                nzo.set_unpack_info(
                    "Script",
                    '%s%s <a href="./scriptlog?name=%s">(%s)</a>' %
                    (script_ret, script_line, encoding.xml_name(script_output),
                     T("More")),
                    unique=True,
                )
            else:
                # No '(more)' button needed
                nzo.set_unpack_info("Script",
                                    "%s%s " % (script_ret, script_line),
                                    unique=True)

        # Cleanup again, including NZB files
        if all_ok:
            cleanup_list(workdir_complete, False)

        # Force error for empty result
        all_ok = all_ok and not empty

        # Update indexer with results
        if cfg.rating_enable():
            if nzo.encrypted > 0:
                sabnzbd.Rating.update_auto_flag(nzo.nzo_id,
                                                sabnzbd.Rating.FLAG_ENCRYPTED)
            if empty:
                hosts = [s.host for s in sabnzbd.Downloader.nzo_servers(nzo)]
                if not hosts:
                    hosts = [None]
                for host in hosts:
                    sabnzbd.Rating.update_auto_flag(
                        nzo.nzo_id, sabnzbd.Rating.FLAG_EXPIRED, host)

    except:
        logging.error(T("Post Processing Failed for %s (%s)"), filename,
                      T("see logfile"))
        logging.info("Traceback: ", exc_info=True)

        nzo.fail_msg = T("Post-processing was aborted")
        notifier.send_notification(T("Download Failed"), filename, "failed",
                                   nzo.cat)
        nzo.status = Status.FAILED
        par_error = True
        all_ok = False

        if cfg.email_endjob():
            emailer.endjob(
                nzo.final_name,
                nzo.cat,
                all_ok,
                clip_path(workdir_complete),
                nzo.bytes_downloaded,
                nzo.fail_msg,
                nzo.unpack_info,
                "",
                "",
                0,
            )

    if all_ok:
        # If the folder only contains one file OR folder, have that as the path
        # Be aware that series/generic/date sorting may move a single file into a folder containing other files
        workdir_complete = one_file_or_folder(workdir_complete)
        workdir_complete = os.path.normpath(workdir_complete)

    # Clean up the NZO data
    try:
        nzo.purge_data(delete_all_data=all_ok)
    except:
        logging.error(T("Cleanup of %s failed."), nzo.final_name)
        logging.info("Traceback: ", exc_info=True)

    # Use automatic retry link on par2 errors and encrypted/bad RARs
    if par_error or unpack_error in (2, 3):
        try_alt_nzb(nzo)

    # Check if it was aborted
    if not nzo.pp_active:
        nzo.fail_msg = T("Post-processing was aborted")
        all_ok = False

    # Show final status in history
    if all_ok:
        notifier.send_notification(T("Download Completed"), filename,
                                   "complete", nzo.cat)
        nzo.status = Status.COMPLETED
    else:
        notifier.send_notification(T("Download Failed"), filename, "failed",
                                   nzo.cat)
        nzo.status = Status.FAILED

    # Log the overall time taken for postprocessing
    postproc_time = int(time.time() - start)

    with database.HistoryDB() as history_db:
        # Add the nzo to the database. Only the path, script and time taken are passed
        # Other information is obtained from the nzo
        history_db.add_history_db(nzo, workdir_complete, postproc_time,
                                  script_log, script_line)
        # Purge items
        history_db.auto_history_purge()

    sabnzbd.history_updated()
    return True
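
process_job() passes job_result to the user script: -1 for an empty download, otherwise bit 0 is set for a par/verification error and bit 1 for an unpack error, so 0 means success and 3 means both failed. A hypothetical helper that decodes the value the same way:

def describe_job_result(job_result):
    """Decode the job_result value computed in process_job() above."""
    if job_result == -1:
        return "empty download"
    parts = []
    if job_result & 1:
        parts.append("par/verification error")
    if job_result & 2:
        parts.append("unpack error")
    return ", ".join(parts) or "success"


for value in (-1, 0, 1, 2, 3):
    print(value, describe_job_result(value))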