def test_1(settings):
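    ## SAMPLE DATA: A STABLE SERIES ENDING IN A SPIKE (reject=1) THAT IS EXPECTED TO RAISE AN ALERT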
    test_data1 = struct.wrap({
        "header": ("date", "count", "mean-std", "mean", "mean+std", "reject"),
        "rows": [
            ("2013-Apr-05 13:55:00", "23", "655.048136994614", "668.5652173913044", "682.0822977879948"),
            ("2013-Apr-05 13:59:00", "23", "657.8717192954238", "673.3478260869565", "688.8239328784892"),
            ("2013-Apr-05 14:05:00", "23", "658.3247270429598", "673", "687.6752729570402"),
            ("2013-Apr-05 14:08:00", "23", "658.5476631609771", "673.6521739130435", "688.7566846651099"),
            ("2013-Apr-05 14:16:00", "23", "653.2311994952266", "666.1739130434783", "679.1166265917299"),
            ("2013-Apr-05 14:26:00", "23", "659.5613845589426", "671.8260869565217", "684.0907893541009"),
            ("2013-Apr-05 14:42:00", "23", "662.3517791831357", "677.1739130434783", "691.9960469038208"),
            ("2013-Apr-05 15:26:00", "23", "659.8270045518033", "672", "684.1729954481967"),
            ("2013-Apr-05 15:30:00", "23", "659.4023663187861", "674", "688.5976336812139"),
            ("2013-Apr-05 15:32:00", "23", "652.8643631817508", "666.9565217391304", "681.0486802965099"),
            ("2013-Apr-05 15:35:00", "23", "661.6037178485499", "675.1739130434783", "688.7441082384066"),
            ("2013-Apr-05 15:39:00", "23", "658.0124378440726", "670.1304347826087", "682.2484317211449"),
            ("2013-Apr-05 16:20:00", "46", "655.9645219644624", "667.4782608695652", "678.9919997746681"),
            ("2013-Apr-05 16:30:00", "23", "660.2572506418051", "671.8695652173913", "683.4818797929775"),
            ("2013-Apr-05 16:31:00", "23", "661.011102554583", "673.4347826086956", "685.8584626628083"),
            ("2013-Apr-05 16:55:00", "23", "655.9407699325201", "671.304347826087", "686.6679257196539"),
            ("2013-Apr-05 17:07:00", "23", "657.6412277100247", "667.5217391304348", "677.4022505508448"),
            #        ("2013-Apr-05 17:12:00", "23", "598.3432138277318", "617.7391304347826", "637.1350470418334"),   # <--DIP IN DATA
            ("2013-Apr-05 17:23:00", "23", "801.0537973113723", "822.1739130434783", "843.2940287755843", 1)  # <--SPIKE IN DATA
        ]
    })
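    ## CONVERT EACH ROW TO A RECORD WITH UNIX TIMESTAMP, PARSED DATETIME, AND VARIANCE DERIVED FROM THE (mean+std) - mean SPREAD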
    test_data1 = [
        struct.wrap({
            "timestamp": CNV.datetime2unix(CNV.string2datetime(t.date, "%Y-%b-%d %H:%M:%S")),
            "datetime": CNV.string2datetime(t.date, "%Y-%b-%d %H:%M:%S"),
            "count": int(t.count),
            "mean": float(t.mean),
            "variance": pow(float(t["mean+std"]) - float(t.mean), 2),
            "reject": t.reject
        })
        for t in CNV.table2list(test_data1.header, test_data1.rows)
    ]

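    ## RUN THE EXCEPTION-ALERT TEST AGAINST THE perftest DATABASE GIVEN IN settings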
    with DB(settings.perftest) as db:
        tester = test_alert_exception(db)
        tester.test_alert_generated(settings, test_data1)


    def _setup(self):
        uid = self.db.query("SELECT util.newid() uid FROM DUAL")[0].uid

        ## VERIFY THE alert_reason EXISTS
        exists = self.db.query("""
            SELECT
                count(1) num
            FROM
                reasons
            WHERE
                code={{reason}}
            """,
                               {"reason": REASON}
        )[0].num
        if exists == 0:
            Log.error("Expecting the database to have an alert_reason={{reason}}", {"reason": REASON})

        ## MAKE A 'PAGE' TO TEST
        self.db.execute("DELETE FROM pages")
        self.db.insert("pages", {
            "test_id": 0,
            "url": self.url
        })
        self.page_id = self.db.query("SELECT id FROM pages")[0].id

        ## ADD A THRESHOLD TO TEST WITH
        self.db.execute("""
            INSERT INTO alert_page_thresholds (
                id,
                page,
                threshold,
                severity,
                reason,
                time_added,
                contact
            ) VALUES (
                {{uid}},
                {{page_id}},
                {{threshold}},
                {{severity}},
                concat("(", {{url}}, ") for test"),
                now(),
                "*****@*****.**"
            )
            """, {
            "uid": uid,
            "url": self.url,
            "page_id": self.page_id,
            "severity": self.severity,
            "threshold": 800
        })

        ## ENSURE THERE ARE NO ALERTS IN DB
        self.db.execute("DELETE FROM alerts WHERE reason={{reason}}", {"reason": REASON})

        ## diff_time IS REQUIRED TO TRANSLATE THE TEST DATA DATES TO SOMETHING MORE CURRENT
        now_time = CNV.datetime2unix(datetime.utcnow())
        max_time = max([CNV.datetime2unix(CNV.string2datetime(t.date, "%Y-%b-%d %H:%M:%S")) for t in CNV.table2list(self.test_data.header, self.test_data.rows)])
        diff_time = now_time - max_time

        ## INSERT THE TEST RESULTS
        for t in CNV.table2list(self.test_data.header, self.test_data.rows):
            time = CNV.datetime2unix(CNV.string2datetime(t.date, "%Y-%b-%d %H:%M:%S"))
            time += diff_time

            self.db.insert("test_data_all_dimensions", {
                "id": SQL("util.newid()"),
                "test_run_id": SQL("util.newid()"),
                "product_id": 0,
                "operating_system_id": 0,
                "test_id": 0,
                "page_id": self.page_id,
                "date_received": time,
                "revision": "ba928cbd5191",
                "product": "Firefox",
                "branch": "Mozilla-Inbound",
                "branch_version": "23.0a1",
                "operating_system_name": "mac",
                "operating_system_version": "OS X 10.8",
                "processor": "x86_64",
                "build_type": "opt",
                "machine_name": "talos-mtnlion-r5-049",
                "pushlog_id": 19998363,
                "push_date": time,
                "test_name": "tp5o",
                "page_url": self.url,
                "mean": float(t.mean),
                "std": float(t["mean+std"]) - float(t.mean),
                "h0_rejected": 0,
                "p": None,
                "n_replicates": t.count,
                "fdr": 0,
                "trend_mean": None,
                "trend_std": None,
                "test_evaluation": 0,
                "status": 1
            })


def get_changesets(date_range=None, revision_range=None, repo=None):
    if date_range is not None:
        if date_range.max == None:
            if date_range.min == None:
                drange = ">0 0"
            else:
                drange = ">" + unicode(CNV.datetime2unix(date_range.min)) + " 0"
        else:
            if date_range.min == None:
                drange = "<" + unicode(CNV.datetime2unix(date_range.max) - 1) + " 0"
            else:
                drange = unicode(CNV.datetime2unix(date_range.min)) + " 0 to " + unicode(
                    CNV.datetime2unix(date_range.max) - 1) + " 0"


    # GET ALL CHANGESET INFO
    args = [
        "hg",
        "log",
        "--cwd",
        File(repo.directory).filename,
        "-v",
        # "-p",   #TO GET PATCH CONTENTS
        "--style",
        TEMPLATE_FILE.filename
    ]

    if date_range is not None:
        args.extend(["--date", drange])
    elif revision_range is not None:
        args.extend(["-r", str(revision_range.min)+":"+str(revision_range.max)])

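    # LAUNCH hg AS A SUBPROCESS; stderr IS MERGED INTO stdout SO ERRORS SHOW UP IN THE SAME STREAM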
    proc = subprocess.Popen(
        args,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        bufsize=-1
    )

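    # STREAM THE hg OUTPUT LINE-BY-LINE, PARSING EACH TAB-DELIMITED RECORD PRODUCED BY THE --style TEMPLATE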
    def iterator():
        try:
            while True:
                try:
                    line = proc.stdout.readline()
                    if line == '':
                        proc.wait()
                        if proc.returncode:
                            Log.error("Unable to pull hg log: return code {{return_code}}", {
                                "return_code": proc.returncode
                            })
                        return
                except Exception, e:
                    Log.error("Problem getting another line", e)

                if line.strip() == "":
                    continue
                Log.note(line)


                # changeset = "{date|hgdate|urlescape}\t{node}\t{rev}\t{author|urlescape}\t{branches}\t\t\t\t{p1rev}\t{p1node}\t{parents}\t{children}\t{tags}\t{desc|urlescape}\n"
                # branch = "{branch}%0A"
                # parent = "{parent}%0A"
                # tag = "{tag}%0A"
                # child = "{child}%0A"
                (
                    date,
                    node,
                    rev,
                    author,
                    branches,
                    files,
                    file_adds,
                    file_dels,
                    p1rev,
                    p1node,
                    parents,
                    children,
                    tags,
                    desc
                ) = (CNV.latin12unicode(urllib.unquote(c)) for c in line.split("\t"))

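                # THE TEMPLATE EMITS NEWLINE-SEPARATED LISTS; SPLIT THEM INTO SETS AND DROP EMPTY ENTRIES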
                file_adds = set(file_adds.split("\n")) - {""}
                file_dels = set(file_dels.split("\n")) - {""}
                files = set(files.split("\n")) - {""}
                doc = {
                    "repo": repo.name,
                    "date": CNV.unix2datetime(CNV.value2number(date.split(" ")[0])),
                    "node": node,
                    "revision": rev,
                    "author": author,
                    "branches": set(branches.split("\n")) - {""},
                    "file_changes": files - file_adds - file_dels - {""},
                    "file_adds": file_adds,
                    "file_dels": file_dels,
                    "parents": set(parents.split("\n")) - {""} | {p1rev+":"+p1node},
                    "children": set(children.split("\n")) - {""},
                    "tags": set(tags.split("\n")) - {""},
                    "description": desc
                }
                doc = ElasticSearch.scrub(doc)
                yield doc
        except Exception, e:
            if isinstance(e, ValueError) and e.message.startswith("need more than "):
                Log.error("Problem iterating through log ({{message}})", {
                    "message": line
                }, e)


            Log.error("Problem iterating through log", e)
    def insert_test_results(self, test_data):
        ## diff_time IS REQUIRED TO TRANSLATE THE TEST DATA DATES TO SOMETHING MORE CURRENT
        now_time = CNV.datetime2unix(datetime.utcnow())
        max_time = max(Q.select(test_data, "timestamp"))
        diff_time = now_time - max_time

        ## INSERT THE TEST RESULTS
        for t in test_data:
            time = t.timestamp
            time += diff_time

            self.db.insert("test_data_all_dimensions", {
                "id": SQL("util.newid()"),
                "test_run_id": SQL("util.newid()"),
                "product_id": 0,
                "operating_system_id": 0,
                "test_id": 0,
                "page_id": self.page_id,
                "date_received": time,
                "revision": "ba928cbd5191",
                "product": "Firefox",
                "branch": "Mozilla-Inbound",
                "branch_version": "23.0a1",
                "operating_system_name": "mac",
                "operating_system_version": "OS X 10.8",
                "processor": "x86_64",
                "build_type": "opt",
                "machine_name": "talos-mtnlion-r5-049",
                "pushlog_id": 19998363,
                "push_date": time,
                "test_name": "tp5o",
                "page_url": self.url,
                "mean": float(t.mean),
                "std": sqrt(t.variance),
                "h0_rejected": 0,
                "p": None,
                "n_replicates": t.count,
                "fdr": 0,
                "trend_mean": None,
                "trend_std": None,
                "test_evaluation": 0,
                "status": 1
            })

        self.db.flush()
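        ## ADD ONE objectstore ROW PER test_run_id, MARKED processed_exception='complete' AND GIVEN AN EMPTY json_blob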
        self.db.execute("""
            INSERT INTO objectstore (id, test_run_id, date_loaded, processed_exception, branch, json_blob)
            SELECT
                {{id}},
                test_run_id,
                {{now}},
                'complete',
                branch,
                '{}'
            FROM
                test_data_all_dimensions
            GROUP BY
                test_run_id
        """, {
            "id": SQL("util.newid()"),
            "now": CNV.datetime2unix(datetime.utcnow())
        })