Example #1
    def initialSetUp(self):
        global app, md5StdNum, appSSL, appSSLData, canRelay
        # download eicar and trojan files before installing virus blocker
        self.ftp_user_name, self.ftp_password = global_functions.get_live_account_info(
            "ftp")
        remote_control.run_command(
            "rm -f /tmp/eicar /tmp/std_022_ftpVirusBlocked_file /tmp/temp_022_ftpVirusPassSite_file"
        )
        result = remote_control.run_command(
            "wget --user=" + self.ftp_user_name + " --password='" + self.ftp_password + "' -q -O /tmp/eicar http://test.untangle.com/virus/eicar.com")
        assert (result == 0)
        result = remote_control.run_command(
            "wget --user=" + self.ftp_user_name + " --password='" + self.ftp_password + "' -q -O /tmp/std_022_ftpVirusBlocked_file ftp://" +
            global_functions.ftp_server + "/virus/fedexvirus.zip")
        assert (result == 0)
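        # record the MD5 of the downloaded reference file so it can be compared later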
        md5StdNum = remote_control.run_command(
            "\"md5sum /tmp/std_022_ftpVirusBlocked_file | awk '{print $1}'\"",
            stdout=True)
        self.md5StdNum = md5StdNum
        # print("md5StdNum <%s>" % md5StdNum)
        assert (result == 0)

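        # check whether a test email can be relayed through the test mail host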
        try:
            canRelay = global_functions.send_test_email(mailhost=testsiteIP)
        except Exception as e:
            canRelay = False
Example #2
    def test_100_account_login(self):
        untangleEmail, untanglePassword = global_functions.get_live_account_info("Untangle")
        if untangleEmail == "message":
            raise unittest2.SkipTest('Skipping, no account found: ' + str(untanglePassword))

        result = uvmContext.cloudManager().accountLogin( untangleEmail, untanglePassword )
        assert result.get('success')
Example #3
    def test_025_protoRule_Ftp(self):
        touchProtoRule("FTP",False,False)
        pingResult = subprocess.call(["ping","-c","1",global_functions.ftp_server],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
        if pingResult:
            raise unittest2.SkipTest(global_functions.ftp_server + " not reachable")
        ftpUserName, ftpPassword = global_functions.get_live_account_info("ftp")
        # with the FTP protocol rule disabled, the download is expected to succeed
        result1 = remote_control.run_command("wget --user=" + ftpUserName + " --password='" + ftpPassword + "' -q -O /dev/null -4 -t 2 -o /dev/null --timeout=5 ftp://" + global_functions.ftp_server + "/")
        # with the rule enabled and set to block, the same download is expected to fail
        touchProtoRule("FTP",True,True)
        result2 = remote_control.run_command("wget --user=" + ftpUserName + " --password='" + ftpPassword + "' -q -O /dev/null -4 -t 2 -o /dev/null --timeout=5 ftp://" + global_functions.ftp_server + "/")
        touchProtoRule("FTP",False,False)
        assert (result1 == 0)
        assert (result2 != 0)
    def test_140_compare_cloud_backup(self):
        """Compare a cloud backup with a local backup"""
        raise unittest2.SkipTest(
            "dependent on python3-requests, skipping for now")
        global app
        boxUID = uvmContext.getServerUID()
        #get authentication url and api key
        authUrl, authKey = global_functions.get_live_account_info(
            "UntangleAuth")
        boxBackupUrl = global_functions.get_live_account_info("BoxBackup")
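        # upload a fresh backup to the cloud before listing and downloading the latest one below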
        app.sendBackup()
        #remove previous backups/backup directories
        subprocess.call("rm -rf /tmp/localBackup*", shell=True)
        subprocess.call("rm -rf /tmp/cloudBackup*", shell=True)

        #download local backup
        subprocess.call(
            "wget -o /dev/null -O '/tmp/localBackup.backup' -t 2 --timeout 3 --post-data 'type=backup' http://localhost/admin/download",
            shell=True)
        #extract backup
        subprocess.call("mkdir /tmp/localBackup", shell=True)
        subprocess.call("tar -xf /tmp/localBackup.backup -C /tmp/localBackup",
                        shell=True)
        subprocess.call(
            "tar -xf " + glob.glob("/tmp/localBackup/files*.tar.gz")[0] +
            " -C /tmp/localBackup",
            shell=True)  #use glob since extracted file has timestamp
        localBackupPath = "/tmp/localBackup/usr"

        #set Token for boxbackup access
        authenticationUrl = authUrl
        authPayload = "{\n  \"token\": 123,\n  \"resourceIds\": [\"%s\"],\n  \"timeoutOverride\": \"5\"\n}" % (
            boxUID)
        authHeaders = {
            'Content-Type': "application/json",
            'AuthRequest': authKey,
            'Cache-Control': "no-cache"
        }
        requests.request("POST",
                         authenticationUrl,
                         data=authPayload,
                         headers=authHeaders)

        #get list of backups for the UID above
        bbUrl = boxBackupUrl
        bbQueryString = {"action": "list", "uid": boxUID, "token": "123"}
        bbHeaders = {'Cache-Control': 'no-cache'}
        bbResponse = requests.request("GET",
                                      bbUrl,
                                      headers=bbHeaders,
                                      params=bbQueryString)

        #convert response text to literal list
        backupList = ast.literal_eval(bbResponse.text)
        #grab the latest cloud backup from the list
        latestBackup = backupList[-1]
        print("latest backup from cloud: %s" % latestBackup)

        #download the latest backup and save it to /tmp
        dlUrl = boxBackupUrl
        dlQueryString = {
            "action": "get",
            "uid": boxUID,
            "token": "123",
            "filename": latestBackup
        }
        dlHeaders = {'Cache-Control': 'no-cache'}
        dlResponse = requests.request("GET",
                                      dlUrl,
                                      headers=dlHeaders,
                                      params=dlQueryString)
        with open("/tmp/cloudBackup.backup", "wb") as f:
            f.write(dlResponse.content)
        #extract cloud backup
        subprocess.call("mkdir /tmp/cloudBackup", shell=True)
        subprocess.call("tar -xf /tmp/cloudBackup.backup -C /tmp/cloudBackup",
                        shell=True)
        subprocess.call(
            "tar -xf " + glob.glob("/tmp/cloudBackup/files*.tar.gz")[0] +
            " -C /tmp/cloudBackup",
            shell=True)  #use glob since extracted file has timestamp
        cloudBackupPath = "/tmp/cloudBackup/usr"

        #compare directories
        def is_same(dir1, dir2):
            compared = filecmp.dircmp(dir1, dir2)
            if (compared.left_only or compared.right_only
                    or compared.diff_files or compared.funny_files):
                return False
            for subdir in compared.common_dirs:
                if not is_same(os.path.join(dir1, subdir),
                               os.path.join(dir2, subdir)):
                    return False
            return True

        assert (is_same(localBackupPath, cloudBackupPath))