Example #1
    def _copyChunks(self):
        init = fs_encode(self.info.getChunkName(0))  #initial chunk name

        if self.info.getCount() > 1:
            fo = open(init, "rb+")  #first chunkfile
            for i in range(1, self.info.getCount()):
                #input file
                fo.seek(
                    self.info.getChunkRange(i - 1)[1] + 1
                )  #seek to beginning of chunk, to get rid of overlapping chunks
                fname = fs_encode("%s.chunk%d" % (self.filename, i))
                fi = open(fname, "rb")
                buf = 32 * 1024
                while True:  #copy in chunks, consumes less memory
                    data = fi.read(buf)
                    if not data:
                        break
                    fo.write(data)
                fi.close()
                if fo.tell() < self.info.getChunkRange(i)[1]:
                    fo.close()
                    remove(init)
                    self.info.remove()  #there are probably invalid chunks
                    raise Exception(
                        "Downloaded content was smaller than expected. Try to reduce download connections."
                    )
                remove(fname)  #remove chunk
            fo.close()

        if self.nameDisposition and self.disposition:
            self.filename = save_join(dirname(self.filename),
                                      self.nameDisposition)

        move(init, fs_encode(self.filename))
        self.info.remove()  #remove info file
Example #2
    def _copyChunks(self):
        init = fs_encode(self.info.getChunkName(0)) #initial chunk name

        if self.info.getCount() > 1:
            fo = open(init, "rb+") #first chunkfile
            for i in range(1, self.info.getCount()):
                #input file
                fo.seek(
                    self.info.getChunkRange(i - 1)[1] + 1) #seek to beginning of chunk, to get rid of overlapping chunks
                fname = fs_encode("%s.chunk%d" % (self.filename, i))
                fi = open(fname, "rb")
                buf = 32 * 1024
                while True: #copy in chunks, consumes less memory
                    data = fi.read(buf)
                    if not data:
                        break
                    fo.write(data)
                fi.close()
                if fo.tell() < self.info.getChunkRange(i)[1]:
                    fo.close()
                    remove(init)
                    self.info.remove() #there are probably invalid chunks
                    raise Exception("Downloaded content was smaller than expected. Try to reduce download connections.")
                remove(fname) #remove chunk
            fo.close()

        if self.nameDisposition and self.disposition:
            self.filename = save_join(dirname(self.filename), self.nameDisposition)

        move(init, fs_encode(self.filename))
        self.info.remove() #remove info file
Example #3
    def packageFinished(self, pypack):
        download_folder = save_join(self.config['general']['download_folder'], pypack.folder, "")

        for link in pypack.getChildren().itervalues():
            file_type = splitext(link["name"])[1][1:].lower()
            #self.logDebug(link, file_type)

            if file_type not in self.formats:
                continue

            hash_file = fs_encode(save_join(download_folder, link["name"]))
            if not isfile(hash_file):
                self.logWarning("File not found: %s" % link["name"])
                continue

            with open(hash_file) as f:
                text = f.read()

            for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
                data = m.groupdict()
                self.logDebug(link["name"], data)

                local_file = fs_encode(save_join(download_folder, data["name"]))
                algorithm = self.methods.get(file_type, file_type)
                checksum = computeChecksum(local_file, algorithm)
                if checksum == data["hash"]:
                    self.logInfo('File integrity of "%s" verified by %s checksum (%s).' % (data["name"],
                                                                                           algorithm,
                                                                                           checksum))
                else:
                    self.logWarning("%s checksum for file %s does not match (%s != %s)" % (algorithm,
                                                                                           data["name"],
                                                                                           checksum,
                                                                                           data["hash"]))
Example #4
 def findDuplicates(self, pyfile):
     """ Search all packages for duplicate links to "pyfile".
         Duplicates are links that would overwrite "pyfile".
         To test on duplicity the package-folder and link-name
         of twolinks are compared (basename(link.name)).
         So this method returns a list of all links with equal
         package-folders and filenames as "pyfile", but except
         the data for "pyfile" iotselöf.
         It does MOT check the link's status.
     """
     dups = []
     pyfile_name = fs_encode(basename(pyfile.name))
     # get packages (w/o files, as most file data is useless here)
     queue = self.core.api.getQueue()
     for package in queue:
         # check if package-folder equals pyfile's package folder
         if fs_encode(package.folder) == fs_encode(pyfile.package().folder):
             # now get packaged data w/ files/links
             pdata = self.core.api.getPackageData(package.pid)
             if pdata.links:
                 for link in pdata.links:
                     link_name = fs_encode(basename(link.name))
                     # check if link name collides with pdata's name
                     if link_name == pyfile_name:
                         # at last check if it is not pyfile itself
                         if link.fid != pyfile.id:
                             dups.append(link)
     return dups
Example #5
 def findDuplicates(self, pyfile):
     """ Search all packages for duplicate links to "pyfile".
         Duplicates are links that would overwrite "pyfile".
         To test for duplicates, the package folder and link name
         of two links are compared (basename(link.name)).
         So this method returns a list of all links with the same
         package folder and filename as "pyfile", excluding
         the data for "pyfile" itself.
         It does NOT check the link's status.
     """
     dups = []
     pyfile_name = fs_encode(basename(pyfile.name))
     # get packages (w/o files, as most file data is useless here)
     queue = self.core.api.getQueue()
     for package in queue:
         # check if package-folder equals pyfile's package folder
         if fs_encode(package.folder) == fs_encode(pyfile.package().folder):
             # now get packaged data w/ files/links
             pdata = self.core.api.getPackageData(package.pid)
             if pdata.links:
                 for link in pdata.links:
                     link_name = fs_encode(basename(link.name))
                     # check if link name collides with pdata's name
                     if link_name == pyfile_name:
                         # at last check if it is not pyfile itself
                         if link.fid != pyfile.id:
                             dups.append(link)
     return dups
Example #6
    def package_finished(self, pypack):
        download_folder = fs_join(self.pyload.config.get("general", "download_folder"), pypack.folder, "")

        for link in pypack.getChildren().values():
            file_type = os.path.splitext(link['name'])[1][1:].lower()

            if file_type not in self.formats:
                continue

            hash_file = fs_encode(fs_join(download_folder, link['name']))
            if not os.path.isfile(hash_file):
                self.log_warning(_("File not found"), link['name'])
                continue

            with open(hash_file) as f:
                text = f.read()

            for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
                data = m.groupdict()
                self.log_debug(link['name'], data)

                local_file = fs_encode(fs_join(download_folder, data['NAME']))
                algorithm = self.methods.get(file_type, file_type)
                checksum = computeChecksum(local_file, algorithm)
                if checksum == data['HASH']:
                    self.log_info(_('File integrity of "%s" verified by %s checksum (%s)') %
                                (data['NAME'], algorithm, checksum))
                else:
                    self.log_warning(_("%s checksum for file %s does not match (%s != %s)") %
                                   (algorithm, data['NAME'], checksum, data['HASH']))
Example #7
    def packageFinished(self, pypack):
        download_folder = save_join(self.config['general']['download_folder'], pypack.folder, "")

        for link in pypack.getChildren().itervalues():
            file_type = splitext(link["name"])[1][1:].lower()
            #self.logDebug(link, file_type)

            if file_type not in self.formats:
                continue

            hash_file = fs_encode(save_join(download_folder, link["name"]))
            if not isfile(hash_file):
                self.logWarning("File not found: %s" % link["name"])
                continue

            with open(hash_file) as f:
                text = f.read()

            for m in re.finditer(self.regexps.get(file_type, self.regexps['default']), text):
                data = m.groupdict()
                self.logDebug(link["name"], data)

                local_file = fs_encode(save_join(download_folder, data["name"]))
                algorithm = self.methods.get(file_type, file_type)
                checksum = computeChecksum(local_file, algorithm)
                if checksum == data["hash"]:
                    self.logInfo('File integrity of "%s" verified by %s checksum (%s).' % (data["name"],
                                                                                           algorithm,
                                                                                           checksum))
                else:
                    self.logWarning("%s checksum for file %s does not match (%s != %s)" % (algorithm,
                                                                                           data["name"],
                                                                                           checksum,
                                                                                           data["hash"]))
Example #8
def get_download(path):
    path = unquote(path).decode("utf8")
    #@TODO some files can not be downloaded

    root = PYLOAD.getConfigValue("general", "download_folder")

    path = path.replace("..", "")
    return static_file(fs_encode(path), fs_encode(root))
Example #9
def get_download(path):
    path = unquote(path).decode("utf8")
    # @TODO some files can not be downloaded

    root = PYLOAD.getConfigValue("general", "download_folder")

    path = path.replace("..", "")
    return static_file(fs_encode(path), fs_encode(root))
Example #10
    def scan(self, pyfile, thread):
        file     = fs_encode(pyfile.plugin.last_download)
        filename = os.path.basename(pyfile.plugin.last_download)
        cmdfile  = fs_encode(self.get_config('cmdfile'))
        cmdargs  = fs_encode(self.get_config('cmdargs').strip())

        if not os.path.isfile(file) or not os.path.isfile(cmdfile):
            return

        thread.addActive(pyfile)
        pyfile.setCustomStatus(_("virus scanning"))
        pyfile.setProgress(0)

        try:
            p = subprocess.Popen([cmdfile, cmdargs, file], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

            out, err = map(str.strip, p.communicate())

            if out:
                self.log_info(filename, out)

            if err:
                self.log_warning(filename, err)
                if not self.get_config('ignore-err'):
                    self.log_debug("Delete/Quarantine task is aborted")
                    return

            if p.returncode:
                pyfile.error = _("Infected file")
                action = self.get_config('action')
                try:
                    if action == "Delete":
                        if not self.get_config('deltotrash'):
                            os.remove(file)

                        else:
                            try:
                                send2trash.send2trash(file)

                            except NameError:
                                self.log_warning(_("Send2Trash lib not found, moving to quarantine instead"))
                                pyfile.setCustomStatus(_("file moving"))
                                shutil.move(file, self.get_config('quardir'))

                            except Exception, e:
                                self.log_warning(_("Unable to move file to trash: %s, moving to quarantine instead") % e.message)
                                pyfile.setCustomStatus(_("file moving"))
                                shutil.move(file, self.get_config('quardir'))

                            else:
                                self.log_debug("Successfully moved file to trash")

                    elif action == "Quarantine":
                        pyfile.setCustomStatus(_("file moving"))
                        shutil.move(file, self.get_config('quardir'))

                except (IOError, shutil.Error), e:
                    self.log_error(filename, action + " action failed!", e)
Example #11
def get_download(path):
    path = unquote(path).decode("utf8")
    #@TODO some files can not be downloaded

    root = PYLOAD.getConfigValue("general", "download_folder")

    path = path.replace("..", "")
    try:
        return static_file(fs_encode(path), fs_encode(root))

    except Exception, e:
        print e
        return HTTPError(404, "File not Found.")
Example #12
def get_download(path):
    path = unquote(path).decode("utf8")
    #@TODO some files can not be downloaded

    root = PYLOAD.getConfigValue("general", "download_folder")

    path = path.replace("..", "")
    try:
        return static_file(fs_encode(path), fs_encode(root), download=True)

    except Exception, e:
        print e
        return HTTPError(404, "File not Found.")
Example #13
def downloads():
    root = PYLOAD.getConfigValue("general", "download_folder")

    if not isdir(root):
        return base([_("Download directory not found.")])
    data = {"folder": [], "files": []}

    items = listdir(fs_encode(root))

    for item in sorted([fs_decode(x) for x in items]):
        if isdir(safe_join(root, item)):
            folder = {"name": item, "path": item, "files": []}
            files = listdir(safe_join(root, item))
            for file in sorted([fs_decode(x) for x in files]):
                try:
                    if isfile(safe_join(root, item, file)):
                        folder["files"].append(file)
                except:
                    pass

            data["folder"].append(folder)
        elif isfile(join(root, item)):
            data["files"].append(item)

    return render_to_response("downloads.html", {"files": data}, [pre_processor])
Example #14
def downloads():
    root = PYLOAD.getConfigValue("general", "download_folder")

    if not isdir(root):
        return base([_('Download directory not found.')])
    data = {'folder': [], 'files': []}

    items = listdir(fs_encode(root))

    for item in sorted([fs_decode(x) for x in items]):
        if isdir(save_join(root, item)):
            folder = {'name': item, 'path': item, 'files': []}
            files = listdir(save_join(root, item))
            for file in sorted([fs_decode(x) for x in files]):
                try:
                    if isfile(save_join(root, item, file)):
                        folder['files'].append(file)
                except:
                    pass

            data['folder'].append(folder)
        elif isfile(join(root, item)):
            data['files'].append(item)

    return render_to_response('downloads.html', {'files': data},
                              [pre_processor])
Example #15
    def extract(self, password=None):
        command = "x" if self.fullpath else "e"

        p = self.call_cmd(command,
                          fs_encode(self.filename),
                          self.out,
                          password=password)

        renice(p.pid, self.renice)

        # communicate and retrieve stderr
        self._progress(p)
        err = p.stderr.read().strip()

        if err:
            if self.re_wrongpwd.search(err):
                raise PasswordError

            elif self.re_wrongcrc.search(err):
                raise CRCError(err)

            else:  #: raise error if anything is on stderr
                raise ArchiveError(err)

        if p.returncode:
            raise ArchiveError(_("Process return code: %d") % p.returncode)

        self.files = self.list(password)
Example #16
    def process(self, pyfile):
        if not self.account:
            self.logError(_("Please enter your premium4.me account or deactivate this plugin"))
            self.fail("No premium4.me account provided")

        self.logDebug("premium4.me: Old URL: %s" % pyfile.url)

        tra = self.getTraffic()
        
        #raise timeout to 2min
        self.req.setOption("timeout", 120)
        
        self.download("http://premium4.me/api/getfile.php?authcode=%s&link=%s" % (self.account.authcode, quote(pyfile.url, "")), disposition=True)
        
        err = ''       
        if self.req.http.code == '420':
            # Custom error code sent - fail
            lastDownload = fs_encode(self.lastDownload)
            
            if exists(lastDownload): 
                f = open(lastDownload, "rb")
                err = f.read(256).strip()
                f.close()
                remove(lastDownload)
            else:
                err = 'File does not exist'
        
        trb = self.getTraffic()
        self.logInfo("Filesize: %d, Traffic used %d, traffic left %d" % (pyfile.size, tra-trb, trb))
                    
        if err: self.fail(err)
Example #17
    def decrypt(self, pyfile):
        fs_filename = fs_encode(pyfile.url.strip())
        opener = urllib2.build_opener(
            MultipartPostHandler.MultipartPostHandler)

        dlc_content = opener.open(
            'http://service.jdownloader.net/dlcrypt/getDLC.php', {
                'src': "ccf",
                'filename': "test.ccf",
                'upload': open(fs_filename, "rb")
            }).read()

        download_folder = self.config['general']['download_folder']
        dlc_file = save_join(download_folder, "tmp_%s.dlc" % pyfile.name)

        try:
            dlc = re.search(r'<dlc>(.+)</dlc>', dlc_content,
                            re.S).group(1).decode('base64')

        except AttributeError:
            self.fail(_("Container is corrupted"))

        with open(dlc_file, "w") as tempdlc:
            tempdlc.write(dlc)

        self.urls = [dlc_file]
Example #18
    def extract(self, password=None):
        command = "x" if self.fullpath else "e"

        p = self.call_cmd(command, fs_encode(self.filename), self.out, password=password)

        renice(p.pid, self.renice)

        #: Communicate and retrieve stderr
        self._progress(p)
        err = p.stderr.read().strip()

        if err:
            if self.re_wrongpwd.search(err):
                raise PasswordError

            elif self.re_wrongcrc.search(err):
                raise CRCError(err)

            else:  #: Raise error if anything is on stderr
                raise ArchiveError(err)

        if p.returncode:
            raise ArchiveError(_("Process return code: %d") % p.returncode)

        self.files = self.list(password)
Example #19
    def load(name):
        fs_name = fs_encode("%s.chunks" % name)
        if not exists(fs_name):
            raise IOError()
        fh = codecs.open(fs_name, "r", "utf_8")
        name = fh.readline()[:-1]
        size = fh.readline()[:-1]
        if name.startswith("name:") and size.startswith("size:"):
            name = name[5:]
            size = size[5:]
        else:
            fh.close()
            raise WrongFormat()
        ci = ChunkInfo(name)
        ci.loaded = True
        ci.setSize(size)
        while True:
            if not fh.readline(): #skip line
                break
            name = fh.readline()[1:-1]
            range = fh.readline()[1:-1]
            if name.startswith("name:") and range.startswith("range:"):
                name = name[5:]
                range = range[6:].split("-")
            else:
                raise WrongFormat()

            ci.addChunk(name, (long(range[0]), long(range[1])))
        fh.close()
        return ci
Example #20
    def list(self, password=None):
        command = "vb" if self.fullpath else "lb"

        p = self.call_cmd(command,
                          "-v",
                          fs_encode(self.filename),
                          password=password)
        out, err = p.communicate()

        if "Cannot open" in err:
            raise ArchiveError(_("Cannot open file"))

        if err.strip():  #: only log error at this point
            self.manager.logError(err.strip())

        result = set()
        if not self.fullpath and self.VERSION.startswith('5'):
            # NOTE: Unrar 5 always lists the full path
            for f in fs_decode(out).splitlines():
                f = save_join(self.out, os.path.basename(f.strip()))
                if os.path.isfile(f):
                    result.add(save_join(self.out, os.path.basename(f)))
        else:
            for f in fs_decode(out).splitlines():
                f = f.strip()
                result.add(save_join(self.out, f))

        return list(result)
Example #21
    def list(self, password=None):
        command = "vb" if self.fullpath else "lb"

        p = self.call_cmd(command, "-v", fs_encode(self.filename), password=password)
        out, err = p.communicate()

        if "Cannot open" in err:
            raise ArchiveError(_("Cannot open file"))

        if err.strip():  #: only log error at this point
            self.manager.logError(err.strip())

        result = set()
        if not self.fullpath and self.VERSION.startswith('5'):
            # NOTE: Unrar 5 always lists the full path
            for f in fs_decode(out).splitlines():
                f = save_join(self.out, os.path.basename(f.strip()))
                if os.path.isfile(f):
                    result.add(save_join(self.out, os.path.basename(f)))
        else:
            for f in fs_decode(out).splitlines():
                f = f.strip()
                result.add(save_join(self.out, f))

        return list(result)
Example #22
    def load(name):
        fs_name = fs_encode("%s.chunks" % name)
        if not exists(fs_name):
            raise IOError()
        fh = codecs.open(fs_name, "r", "utf_8")
        name = fh.readline()[:-1]
        size = fh.readline()[:-1]
        if name.startswith("name:") and size.startswith("size:"):
            name = name[5:]
            size = size[5:]
        else:
            fh.close()
            raise WrongFormat()
        ci = ChunkInfo(name)
        ci.loaded = True
        ci.setSize(size)
        while True:
            if not fh.readline():  #skip line
                break
            name = fh.readline()[1:-1]
            range = fh.readline()[1:-1]
            if name.startswith("name:") and range.startswith("range:"):
                name = name[5:]
                range = range[6:].split("-")
            else:
                raise WrongFormat()

            ci.addChunk(name, (long(range[0]), long(range[1])))
        fh.close()
        return ci
Example #23
def downloads():
    root = PYLOAD.getConfigValue("general", "download_folder")

    if not isdir(root):
        return base([_('Download directory not found.')])
    data = {
        'folder': [],
        'files': []
    }

    items = listdir(fs_encode(root))

    for item in sorted([fs_decode(x) for x in items]):
        if isdir(save_join(root, item)):
            folder = {
                'name': item,
                'path': item,
                'files': []
            }
            files = listdir(save_join(root, item))
            for file in sorted([fs_decode(x) for x in files]):
                try:
                    if isfile(save_join(root, item, file)):
                        folder['files'].append(file)
                except:
                    pass

            data['folder'].append(folder)
        elif isfile(join(root, item)):
            data['files'].append(item)

    return render_to_response('downloads.html', {'files': data}, [pre_processor])
Example #24
    def downloadFinished(self, pyfile):
        """ 
        Compute checksum for the downloaded file and compare it with the hash provided by the hoster.
        pyfile.plugin.check_data should be a dictionary which can contain:
        a) if known, the exact filesize in bytes (e.g. "size": 123456789)
        b) hexadecimal hash string with algorithm name as key (e.g. "md5": "d76505d0869f9f928a17d42d66326307")    
        """
        if hasattr(pyfile.plugin, "check_data") and (isinstance(pyfile.plugin.check_data, dict)):
            data = pyfile.plugin.check_data.copy()
        elif hasattr(pyfile.plugin, "api_data") and (isinstance(pyfile.plugin.api_data, dict)):
            data = pyfile.plugin.api_data.copy()
        else:
            return

        self.logDebug(data)

        if not pyfile.plugin.lastDownload:
            self.checkFailed(pyfile, None, "No file downloaded")

        local_file = fs_encode(pyfile.plugin.lastDownload)
        #download_folder = self.config['general']['download_folder']
        #local_file = fs_encode(save_join(download_folder, pyfile.package().folder, pyfile.name))

        if not isfile(local_file):
            self.checkFailed(pyfile, None, "File does not exist")

        # validate file size
        if "size" in data:
            api_size = int(data['size'])
            file_size = getsize(local_file)
            if api_size != file_size:
                self.logWarning("File %s has incorrect size: %d B (%d expected)" % (pyfile.name, file_size, api_size))
                self.checkFailed(pyfile, local_file, "Incorrect file size")
            del data['size']

        # validate checksum
        if data and self.config['general']['checksum']:
            if "checksum" in data:
                data['md5'] = data['checksum']

            for key in self.algorithms:
                if key in data:
                    checksum = computeChecksum(local_file, key.replace("-", "").lower())
                    if checksum:
                        if checksum == data[key].lower():
                            self.logInfo('File integrity of "%s" verified by %s checksum (%s).' % (pyfile.name,
                                                                                                   key.upper(),
                                                                                                   checksum))
                            return
                        else:
                            self.logWarning("%s checksum for file %s does not match (%s != %s)" % (key.upper(),
                                                                                                   pyfile.name,
                                                                                                   checksum,
                                                                                                   data[key]))
                            self.checkFailed(pyfile, local_file, "Checksums do not match")
                    else:
                        self.logWarning("Unsupported hashing algorithm: %s" % key.upper())
            else:
                self.logWarning("Unable to validate checksum for file %s" % pyfile.name)
Example #25
    def downloadFinished(self, pyfile):
        """ 
        Compute checksum for the downloaded file and compare it with the hash provided by the hoster.
        pyfile.plugin.check_data should be a dictionary which can contain:
        a) if known, the exact filesize in bytes (e.g. "size": 123456789)
        b) hexadecimal hash string with algorithm name as key (e.g. "md5": "d76505d0869f9f928a17d42d66326307")    
        """
        if hasattr(pyfile.plugin, "check_data") and (isinstance(pyfile.plugin.check_data, dict)):
            data = pyfile.plugin.check_data.copy()
        elif hasattr(pyfile.plugin, "api_data") and (isinstance(pyfile.plugin.api_data, dict)):
            data = pyfile.plugin.api_data.copy()
        else:
            return

        self.logDebug(data)

        if not pyfile.plugin.lastDownload:
            self.checkFailed(pyfile, None, "No file downloaded")

        local_file = fs_encode(pyfile.plugin.lastDownload)
        #download_folder = self.config['general']['download_folder']
        #local_file = fs_encode(save_join(download_folder, pyfile.package().folder, pyfile.name))

        if not isfile(local_file):
            self.checkFailed(pyfile, None, "File does not exist")

        # validate file size
        if "size" in data:
            api_size = int(data['size'])
            file_size = getsize(local_file)
            if api_size != file_size:
                self.logWarning("File %s has incorrect size: %d B (%d expected)" % (pyfile.name, file_size, api_size))
                self.checkFailed(pyfile, local_file, "Incorrect file size")
            del data['size']

        # validate checksum
        if data and self.config['general']['checksum']:
            if "checksum" in data:
                data['md5'] = data['checksum']

            for key in self.algorithms:
                if key in data:
                    checksum = computeChecksum(local_file, key.replace("-", "").lower())
                    if checksum:
                        if checksum == data[key].lower():
                            self.logInfo('File integrity of "%s" verified by %s checksum (%s).' % (pyfile.name,
                                                                                                   key.upper(),
                                                                                                   checksum))
                            return
                        else:
                            self.logWarning("%s checksum for file %s does not match (%s != %s)" % (key.upper(),
                                                                                                   pyfile.name,
                                                                                                   checksum,
                                                                                                   data[key]))
                            self.checkFailed(pyfile, local_file, "Checksums do not match")
                    else:
                        self.logWarning("Unsupported hashing algorithm: %s" % key.upper())
            else:
                self.logWarning("Unable to validate checksum for file %s" % pyfile.name)
Example #26
    def verify(self):
        with zipfile.ZipFile(fs_encode(self.filename), 'r', allowZip64=True) as z:
            badfile = z.testzip()

            if badfile:
                raise CRCError(badfile)
            else:
                raise PasswordError
Example #27
    def repair(self):
        p = self.call_cmd("rc", fs_encode(self.filename))

        # communicate and retrieve stderr
        self._progress(p)
        err = p.stderr.read().strip()
        if err or p.returncode:
            return False
        return True
Example #28
    def verify(self):
        with zipfile.ZipFile(fs_encode(self.filename), 'r',
                             allowZip64=True) as z:
            badfile = z.testzip()

            if badfile:
                raise CRCError(badfile)
            else:
                raise PasswordError
Example #29
    def repair(self):
        p = self.call_cmd("rc", fs_encode(self.filename))

        #: Communicate and retrieve stderr
        self._progress(p)
        err = p.stderr.read().strip()
        if err or p.returncode:
            return False
        return True
Example #30
    def check(self, password):
        p = self.call_cmd("l", "-slt", fs_encode(self.filename))
        out, err = p.communicate()

        # check if output or error matches the 'wrong password' regexp
        if self.re_wrongpwd.search(out):
            raise PasswordError

        if self.re_wrongcrc.search(out):
            raise CRCError(_("Header protected"))
Example #31
 def save(self):
     fs_name = fs_encode("%s.chunks" % self.name)
     fh = codecs.open(fs_name, "w", "utf_8")
     fh.write("name:%s\n" % self.name)
     fh.write("size:%s\n" % self.size)
     for i, c in enumerate(self.chunks):
         fh.write("#%d:\n" % i)
         fh.write("\tname:%s\n" % c[0])
         fh.write("\trange:%i-%i\n" % c[1])
     fh.close()
Example #32
    def check(self, password):
        p = self.call_cmd("l", "-slt", fs_encode(self.filename))
        out, err = p.communicate()

        # check if output or error matches the 'wrong password' regexp
        if self.re_wrongpwd.search(out):
            raise PasswordError

        if self.re_wrongcrc.search(out):
            raise CRCError(_("Header protected"))
Example #33
 def save(self):
     fs_name = fs_encode("%s.chunks" % self.name)
     fh = codecs.open(fs_name, "w", "utf_8")
     fh.write("name:%s\n" % self.name)
     fh.write("size:%s\n" % self.size)
     for i, c in enumerate(self.chunks):
         fh.write("#%d:\n" % i)
         fh.write("\tname:%s\n" % c[0])
         fh.write("\trange:%i-%i\n" % c[1])
     fh.close()
Example #34
    def verify(self, password):
        p = self.call_cmd("t", "-v", fs_encode(self.filename), password=password)
        self._progress(p)
        err = p.stderr.read().strip()

        if self.re_wrongpwd.search(err):
            raise PasswordError

        if self.re_wrongcrc.search(err):
            raise CRCError(err)
Example #35
    def periodical(self):
        folder = fs_encode(self.getConfig('folder'))
        file = fs_encode(self.getConfig('file'))

        try:
            if not os.path.isdir(os.path.join(folder, "finished")):
                os.makedirs(os.path.join(folder, "finished"))

            if self.getConfig('watch_file'):
                with open(file, "a+") as f:
                    f.seek(0)
                    content = f.read().strip()

                if content:
                    f = open(file, "wb")
                    f.close()

                    name = "%s_%s.txt" % (file,
                                          time.strftime("%H-%M-%S_%d%b%Y"))

                    with open(save_join(folder, "finished", name), "wb") as f:
                        f.write(content)

                    self.core.api.addPackage(f.name, [f.name], 1)

            for f in os.listdir(folder):
                path = os.path.join(folder, f)

                if not os.path.isfile(path) or f.endswith("~") or f.startswith(
                        "#") or f.startswith("."):
                    continue

                newpath = os.path.join(
                    folder, "finished",
                    f if self.getConfig('keep') else "tmp_" + f)
                move(path, newpath)

                self.logInfo(_("Added %s from HotFolder") % f)
                self.core.api.addPackage(f, [newpath], 1)

        except (IOError, OSError), e:
            self.logError(e)
Example #36
    def reloadPasswords(self):
        try:
            passwords = []

            file = fs_encode(self.getConfig('passwordfile'))
            with open(file) as f:
                for pw in f.read().splitlines():
                    passwords.append(pw)

        except IOError, e:
            self.logError(e)
Example #37
    def reload_passwords(self):
        try:
            passwords = []

            file = fs_encode(self.get_config('passwordfile'))
            with open(file) as f:
                for pw in f.read().splitlines():
                    passwords.append(pw)

        except IOError, e:
            self.log_error(e)
Example #38
    def addPassword(self, password):
        """  Adds a password to saved list"""
        try:
            self.passwords = uniqify([password] + self.passwords)

            file = fs_encode(self.getConfig('passwordfile'))
            with open(file, "wb") as f:
                for pw in self.passwords:
                    f.write(pw + '\n')

        except IOError, e:
            self.logError(e)
Example #39
    def addPassword(self, password):
        """  Adds a password to saved list"""
        try:
            self.passwords = uniqify([password] + self.passwords)

            file = fs_encode(self.getConfig('passwordfile'))
            with open(file, "wb") as f:
                for pw in self.passwords:
                    f.write(pw + '\n')

        except IOError, e:
            self.logError(e)
Example #40
    def decrypt(self, pyfile):
        try:
            file_enc = codecs.lookup(self.getConfig("encoding")).name
        except:
            file_enc = "utf-8"

        print repr(pyfile.url)
        print pyfile.url

        file_name = fs_encode(pyfile.url)

        txt = codecs.open(file_name, 'r', file_enc)
        links = txt.readlines()
        curPack = "Parsed links from %s" % pyfile.name

        packages = {
            curPack: [],
        }

        for link in links:
            link = link.strip()
            if not link: continue

            if link.startswith(";"):
                continue
            if link.startswith("[") and link.endswith("]"):
                # new package
                curPack = link[1:-1]
                packages[curPack] = []
                continue
            packages[curPack].append(link)
        txt.close()

        # empty packages fix

        delete = []

        for key, value in packages.iteritems():
            if not value:
                delete.append(key)

        for key in delete:
            del packages[key]

        if self.getConfig("clear"):
            try:
                txt = open(file_name, 'wb')
                txt.close()
            except:
                self.log.warning(_("LinkList could not be cleared."))

        for name, links in packages.iteritems():
            self.packages.append((name, links, name))
Example #41
    def decrypt(self, pyfile):
        try:
            file_enc = codecs.lookup(self.getConfig("encoding")).name
        except:
            file_enc = "utf-8"

        print repr(pyfile.url)
        print pyfile.url

        file_name = fs_encode(pyfile.url)

        txt = codecs.open(file_name, "r", file_enc)
        links = txt.readlines()
        curPack = "Parsed links from %s" % pyfile.name

        packages = {curPack: []}

        for link in links:
            link = link.strip()
            if not link:
                continue

            if link.startswith(";"):
                continue
            if link.startswith("[") and link.endswith("]"):
                # new package
                curPack = link[1:-1]
                packages[curPack] = []
                continue
            packages[curPack].append(link)
        txt.close()

        # empty packages fix

        delete = []

        for key, value in packages.iteritems():
            if not value:
                delete.append(key)

        for key in delete:
            del packages[key]

        if self.getConfig("clear"):
            try:
                txt = open(file_name, "wb")
                txt.close()
            except:
                self.log.warning(_("LinkList could not be cleared."))

        for name, links in packages.iteritems():
            self.packages.append((name, links, name))
Example #42
    def downloadPreparing(self, pyfile):
        uar = self.getConfig('uar')
        uaf = fs_encode(self.getConfig('uaf'))

        if uar and os.path.isfile(uaf):
            with open(uaf) as f:
                uas = random.choice([ua for ua in f.read().splitlines()])
        else:
            uas = self.getConfig('uas')

        if uas:
            self.logDebug("Use custom user-agent string: " + uas)
            pyfile.plugin.req.http.c.setopt(pycurl.USERAGENT, uas.encode('utf-8'))
Example #43
    def verify(self, password):
        # 7z can't distinguish CRC and password errors in test mode
        p = self.call_cmd("l", "-slt", fs_encode(self.filename))
        out, err = p.communicate()

        if self.re_wrongpwd.search(out):
            raise PasswordError

        if self.re_wrongpwd.search(err):
            raise PasswordError

        if self.re_wrongcrc.search(err):
            raise CRCError(err)
Example #44
    def verify(self, password):
        p = self.call_cmd("t",
                          "-v",
                          fs_encode(self.filename),
                          password=password)
        self._progress(p)
        err = p.stderr.read().strip()

        if self.re_wrongpwd.search(err):
            raise PasswordError

        if self.re_wrongcrc.search(err):
            raise CRCError(err)
Example #45
    def verify(self, password):
        # 7z can't distinguish CRC and password errors in test mode
        p = self.call_cmd("l", "-slt", fs_encode(self.filename))
        out, err = p.communicate()

        if self.re_wrongpwd.search(out):
            raise PasswordError

        if self.re_wrongpwd.search(err):
            raise PasswordError

        if self.re_wrongcrc.search(err):
            raise CRCError(err)
Example #46
 def _log(self, level, args):
     log = getattr(self.core.log, level)
     msg = " | ".join((
         fs_encode(a) if isinstance(a, unicode)
         else  #@NOTE: `fs_encode` -> `encode` in 0.4.10
         str(a)).strip() for a in args if a)
     log(
         "%(plugin)s%(id)s: %(msg)s" % {
             'plugin': self.__name__,
             'id':
             ("[%s]" % self.pyfile.id) if hasattr(self, 'pyfile') else "",
             'msg': msg or _(level.upper() + " MARK")
         })
Example #47
    def periodical(self):
        folder = fs_encode(self.get_config('folder'))
        file   = fs_encode(self.get_config('file'))

        try:
            if not os.path.isdir(os.path.join(folder, "finished")):
                os.makedirs(os.path.join(folder, "finished"))

            if self.get_config('watch_file'):
                with open(file, "a+") as f:
                    f.seek(0)
                    content = f.read().strip()

                if content:
                    f = open(file, "wb")
                    f.close()

                    name = "%s_%s.txt" % (file, time.strftime("%H-%M-%S_%d%b%Y"))

                    with open(fs_join(folder, "finished", name), "wb") as f:
                        f.write(content)

                    self.pyload.api.addPackage(f.name, [f.name], 1)

            for f in os.listdir(folder):
                path = os.path.join(folder, f)

                if not os.path.isfile(path) or f.endswith("~") or f.startswith("#") or f.startswith("."):
                    continue

                newpath = os.path.join(folder, "finished", f if self.get_config('keep') else "tmp_" + f)
                move(path, newpath)

                self.log_info(_("Added %s from HotFolder") % f)
                self.pyload.api.addPackage(f, [newpath], 1)

        except (IOError, OSError), e:
            self.log_error(e)
Example #48
    def getHandle(self):
        """ Returns a Curl handle ready to use for perform/multiperform."""

        self.setRequestContext(self.p.url, self.p.get, self.p.post,
                               self.p.referer, self.p.cj)
        self.c.setopt(pycurl.WRITEFUNCTION, self.writeBody)
        self.c.setopt(pycurl.HEADERFUNCTION, self.writeHeader)

        # Request all bytes, since some servers in Russia seem to have a defective arithmetic unit

        fs_name = fs_encode(self.p.info.getChunkName(self.id))
        if self.resume:
            self.fp = open(fs_name, "ab")
            self.arrived = self.fp.tell()
            if not self.arrived:
                self.arrived = stat(fs_name).st_size

            if self.range:
                # Do nothing if chunk already finished
                if self.arrived + self.range[0] >= self.range[1]:
                    return None

                # For the last chunk, don't set an end range, so we get everything
                if self.id == len(self.p.info.chunks) - 1:
                    range = "%i-" % (self.arrived + self.range[0])
                else:
                    range = "%i-%i" % (self.arrived + self.range[0],
                                       min(self.range[1] + 1, self.p.size - 1))

                self.log.debug("Chunked resume with range %s" % range)
                self.c.setopt(pycurl.RANGE, range)
            else:
                self.log.debug("Resume File from %i" % self.arrived)
                self.c.setopt(pycurl.RESUME_FROM, self.arrived)

        else:
            if self.range:
                # See above
                if self.id == len(self.p.info.chunks) - 1:
                    range = "%i-" % self.range[0]
                else:
                    range = "%i-%i" % (self.range[0],
                                       min(self.range[1] + 1, self.p.size - 1))

                self.log.debug("Chunked with range %s" % range)
                self.c.setopt(pycurl.RANGE, range)

            self.fp = open(fs_name, "wb")

        return self.c
Example #49
    def decrypt(self, pyfile):
        KEY = binascii.unhexlify(self.KEY)
        IV  = binascii.unhexlify(self.IV)

        iv     = AES.new(KEY, AES.MODE_ECB).encrypt(IV)
        cipher = AES.new(KEY, AES.MODE_CFB, iv)

        try:
            fs_filename = fs_encode(pyfile.url.strip())
            with open(fs_filename, 'r') as rsdf:
                data = rsdf.read()

        except IOError, e:
            self.fail(e)
Example #50
    def extract(self, password=None):
        try:
            with zipfile.ZipFile(fs_encode(self.filename), 'r', allowZip64=True) as z:
                z.setpassword(password)

                badfile = z.testzip()

                if badfile:
                    raise CRCError(badfile)
                else:
                    z.extractall(self.out)

        except (zipfile.BadZipfile, zipfile.LargeZipFile), e:
            raise ArchiveError(e)
Example #51
    def decrypt(self, pyfile):
        KEY = binascii.unhexlify(self.KEY)
        IV = binascii.unhexlify(self.IV)

        iv = AES.new(KEY, AES.MODE_ECB).encrypt(IV)
        cipher = AES.new(KEY, AES.MODE_CFB, iv)

        try:
            fs_filename = fs_encode(pyfile.url.strip())
            with open(fs_filename, 'r') as rsdf:
                data = rsdf.read()

        except IOError, e:
            self.fail(e)
Example #52
    def check(self, password):
        p = self.call_cmd("l", "-v", fs_encode(self.filename), password=password)
        out, err = p.communicate()

        if self.re_wrongpwd.search(err):
            raise PasswordError

        if self.re_wrongcrc.search(err):
            raise CRCError(err)

        #: Output only used to check if passworded files are present
        for attr in self.re_filelist.findall(out):
            if attr[0].startswith("*"):
                raise PasswordError
Example #53
    def decrypt(self, pyfile):
        try:
            encoding = codecs.lookup(self.getConfig('encoding')).name

        except Exception:
            encoding = "utf-8"

        fs_filename = fs_encode(pyfile.url.strip())
        txt = codecs.open(fs_filename, 'r', encoding)
        curPack = "Parsed links from %s" % pyfile.name
        packages = {
            curPack: [],
        }

        for link in txt.readlines():
            link = link.strip()

            if not link:
                continue

            if link.startswith(";"):
                continue

            if link.startswith("[") and link.endswith("]"):
                # new package
                curPack = link[1:-1]
                packages[curPack] = []
                continue

            packages[curPack].append(link)

        txt.close()

        # Remove empty packages; collect the keys first, since deleting
        # entries while iterating over the dict would raise a RuntimeError
        for key in [k for k, v in packages.iteritems() if not v]:
            packages.pop(key, None)

        if self.getConfig('flush'):
            try:
                txt = open(fs_filename, 'wb')
                txt.close()

            except IOError:
                self.logWarning(_("Failed to flush list"))

        for name, links in packages.iteritems():
            self.packages.append((name, links, name))
Example #54
    def check_file(self):
        if self.check_download({'nopremium': "No premium account available"}):
            self.retry(60, 5 * 60, "No premium account available")

        err = ""
        if self.req.http.code == "420":
            #: Custom error code sent - fail
            file = fs_encode(self.last_download)
            with open(file, "rb") as f:
                err = f.read(256).strip()
            os.remove(file)

        if err:
            self.fail(err)

        return super(PremiumTo, self).check_file()
Example #55
    def checkFile(self):
        if self.checkDownload({'nopremium': "No premium account available"}):
            self.retry(60, 5 * 60, "No premium account available")

        err = ''
        if self.req.http.code == '420':
            # Custom error code sent - fail
            file = fs_encode(self.lastDownload)
            with open(file, "rb") as f:
                err = f.read(256).strip()
            os.remove(file)

        if err:
            self.fail(err)

        return super(PremiumTo, self).checkFile()
Example #56
    def checkFile(self, rules={}):
        if self.checkDownload({'nopremium': "No premium account available"}):
            self.retry(60, 5 * 60, "No premium account available")

        err = ''
        if self.req.http.code == '420':
            # Custom error code sent - fail
            file = fs_encode(self.lastDownload)
            with open(file, "rb") as f:
                err = f.read(256).strip()
            remove(file)

        if err:
            self.fail(err)

        return super(PremiumTo, self).checkFile(rules)
Example #57
    def callScript(self, script, *args):
        try:
            cmd_args = [fs_encode(str(x) if not isinstance(x, basestring) else x) for x in args]
            cmd      = [script] + cmd_args

            self.logDebug("Executing: %s" % os.path.abspath(script), "Args: " + ' '.join(cmd_args))

            p = subprocess.Popen(cmd, bufsize=-1)  #@NOTE: output goes to pyload
            if self.getConfig('waitend'):
                p.communicate()

        except Exception, e:
            try:
                self.logError(_("Runtime error: %s") % os.path.abspath(script), e)
            except Exception:
                self.logError(_("Runtime error: %s") % os.path.abspath(script), _("Unknown error"))
Example #58
    def extract(self, password=None):
        try:
            with zipfile.ZipFile(fs_encode(self.filename),
                                 'r',
                                 allowZip64=True) as z:
                z.setpassword(password)

                badfile = z.testzip()

                if badfile:
                    raise CRCError(badfile)
                else:
                    z.extractall(self.out)

        except (zipfile.BadZipfile, zipfile.LargeZipFile), e:
            raise ArchiveError(e)
Example #59
    def packageFinished(self, pack):
        files = {}
        fid_dict = {}
        for fid, data in pack.getChildren().iteritems():
            if re.search("\.[0-9]{3}$", data["name"]):
                if data["name"][:-4] not in files:
                    files[data["name"][:-4]] = []
                files[data["name"][:-4]].append(data["name"])
                files[data["name"][:-4]].sort()
                fid_dict[data["name"]] = fid

        download_folder = self.core.config['general']['download_folder']

        if self.core.config['general']['folder_per_package']:
            download_folder = save_join(download_folder, pack.folder)

        for name, file_list in files.iteritems():
            self.core.log.info("Starting merging of %s" % name)
            final_file = open(join(download_folder, fs_encode(name)), "wb")

            for splitted_file in file_list:
                self.core.log.debug("Merging part %s" % splitted_file)
                pyfile = self.core.files.getFile(fid_dict[splitted_file])
                pyfile.setStatus("processing")
                try:
                    s_file = open(os.path.join(download_folder, splitted_file),
                                  "rb")
                    size_written = 0
                    s_file_size = int(
                        os.path.getsize(
                            os.path.join(download_folder, splitted_file)))
                    while True:
                        f_buffer = s_file.read(BUFFER_SIZE)
                        if f_buffer:
                            final_file.write(f_buffer)
                            size_written += BUFFER_SIZE
                            pyfile.setProgress(
                                (size_written * 100) / s_file_size)
                        else:
                            break
                    s_file.close()
                    self.core.log.debug("Finished merging part %s" %
                                        splitted_file)
                except Exception, e:
                    traceback.print_exc()
                finally:
                    pyfile.setProgress(100)
Example #60
    def check(self, password):
        p = self.call_cmd("l",
                          "-v",
                          fs_encode(self.filename),
                          password=password)
        out, err = p.communicate()

        if self.re_wrongpwd.search(err):
            raise PasswordError

        if self.re_wrongcrc.search(err):
            raise CRCError(err)

        # output only used to check if passworded files are present
        for attr in self.re_filelist.findall(out):
            if attr[0].startswith("*"):
                raise PasswordError