Code example #1
0
    def test_put_campaign(self):
        """PUT new data to the campaign endpoint and verify the stored record."""
        from flask_restful.inputs import datetime_from_iso8601
        updated = {
            'name': 'New Campaign Name',
            'paused': True,
            'account_id': 2,
            'channel_id': 2,
            'countries': ['CA', 'FR'],
            'start_date': '2015-10-30T14:04:55+00:00',
            'end_date': '2015-12-30T14:04:55+00:00',
        }

        # Seed the database with a campaign to update.
        with session_scope() as session:
            campaign_id = insert_campaign(session, self.campaign_data)['id']

        # Issue the PUT with the replacement payload.
        endpoint = url_for('api.campaign.campaign', campaign_id=campaign_id)
        response = self.client.put(endpoint,
                                   data=json.dumps(updated),
                                   content_type='application/json')
        assert_equal(response.status_code, 200)

        # Re-read the campaign and compare field by field; the date fields
        # are parsed back from ISO-8601 before comparing.
        campaign = get_campaign(campaign_id)
        plain_fields = ('name', 'account_id', 'channel_id', 'paused',
                        'countries')
        for field in plain_fields:
            assert_equal(campaign[field], updated[field])
        for field in ('start_date', 'end_date'):
            assert_equal(campaign[field],
                         datetime_from_iso8601(updated[field]))
Code example #2
0
File: test_campaign.py  Project: mozilla/splice
    def test_put_campaign(self):
        """Exercise the campaign PUT API and check the update was persisted."""
        from flask_restful.inputs import datetime_from_iso8601
        replacement = {
            'name': 'New Campaign Name',
            'paused': True,
            'account_id': 2,
            'channel_id': 2,
            'countries': ['CA', 'FR'],
            'start_date': '2015-10-30T14:04:55+00:00',
            'end_date': '2015-12-30T14:04:55+00:00',
        }

        # Insert a campaign that we can then modify.
        with session_scope() as session:
            campaign_id = insert_campaign(session, self.campaign_data)['id']

        # Send the replacement payload to the campaign resource.
        response = self.client.put(
            url_for('api.campaign.campaign', campaign_id=campaign_id),
            data=json.dumps(replacement),
            content_type='application/json')
        assert_equal(response.status_code, 200)

        # Confirm every field round-tripped; dates are compared as parsed
        # datetimes rather than as raw ISO strings.
        campaign = get_campaign(campaign_id)
        for key in ('name', 'account_id', 'channel_id', 'paused', 'countries'):
            assert_equal(campaign[key], replacement[key])
        for key in ('start_date', 'end_date'):
            assert_equal(campaign[key],
                         datetime_from_iso8601(replacement[key]))
Code example #3
0
def test_reverse_iso8601_datetime():
    """Check that ISO-8601 strings parse to the expected UTC datetimes."""
    cases = [
        ("2011-01-01T00:00:00+00:00", datetime(2011, 1, 1, tzinfo=UTC())),
        ("2011-01-01T23:59:59+00:00",
         datetime(2011, 1, 1, 23, 59, 59, tzinfo=UTC())),
        # A +02:00 offset should normalize two hours back toward UTC.
        ("2011-01-01T23:59:59+02:00",
         datetime(2011, 1, 1, 21, 59, 59, tzinfo=UTC())),
    ]

    for raw, parsed in cases:
        yield assert_equal, inputs.datetime_from_iso8601(raw), parsed
Code example #4
0
File: query.py  Project: nizox/docket
 def find(cls, fields):
     """Return the JSON form of every unexpired Query matching *fields*.

     Each key in *fields* acts as a filter that can disqualify a query:
       - 'after-ago'/'after'/'before-ago'/'before': a duration string or
         ISO-8601 timestamp compared against the query's `queried` time.
       - 'sensors': the query's sensor set must match exactly.
       - 'limit-packets'/'limit-bytes': ignored for matching purposes.
       - any other key: must appear in the stored query string; its value
         (string or sequence of strings) must be found in that string too.

     Returns [] when redis is unavailable.
     """
     r = Config.redis()
     if not r:
         # No backing store — nothing can match.
         return []
     results = []
     ids = Query.get_unexpired()
     for i in ids:
         q = Query(q_id=i)
         if not q.query:
             # sometimes query meta data is incomplete, usually when I'm break^H^H^H^H^Htesting.
             continue
         for k, v in fields.items():
             if k in ('after-ago', 'after', 'before-ago', 'before'):
                 # Time filters: try a relative duration first, then fall
                 # back to parsing the value as an ISO-8601 timestamp.
                 dur = parse_duration(v)
                 if dur:
                     v = (datetime.utcnow() - dur)
                 else:
                     v = inputs.datetime_from_iso8601(v)
                     pass
                 # Disqualify queries issued before the 'after' cutoff...
                 if (q.queried < v) and k in ('after-ago', 'after'):
                     q = None
                     break
                 # ...or after the 'before' cutoff.
                 elif (q.queried > v) and k in ('before-ago', 'before'):
                     q = None
                     break
                 pass
             elif k in ('sensors', ):
                 # Sensor sets must match exactly (order-insensitive).
                 if frozenset(q.sensors) != frozenset(v):
                     q = None
                     break
             elif k in ('limit-packets', 'limit-bytes'):
                 # Limits don't affect which stored queries match.
                 continue
             elif k not in q.query:
                 Config.logger.info("Skipping: {} - {}".format(q.query, k))
                 q = None
                 break
             else:
                 # For sequence values, every element must occur in the
                 # stored query string; for strings, a substring test.
                 if is_sequence(v) and v != [
                         vi for vi in v if q.query.find(vi) >= 0
                 ]:
                     Config.logger.info("Skipping: {} - {}".format(
                         q.query, v))
                     q = None
                     break
                 elif is_str(v) and v not in q.query:
                     Config.logger.info("Skipping: {} - {}".format(
                         q.query, v))
                     q = None
                     break
         if q:
             # Survived every filter — include it in the result set.
             results.append(q.json())
     return results
Code example #5
0
File: test_inputs.py  Project: NDevox/flask-restful
def test_reverse_iso8601_datetime():
    """Verify ISO-8601 parsing, including fractional seconds and offsets."""
    utc = pytz.utc
    expectations = [
        ("2011-01-01T00:00:00+00:00", datetime(2011, 1, 1, tzinfo=utc)),
        ("2011-01-01T23:59:59+00:00",
         datetime(2011, 1, 1, 23, 59, 59, tzinfo=utc)),
        # Fractional seconds become microseconds on the parsed datetime.
        ("2011-01-01T23:59:59.001000+00:00",
         datetime(2011, 1, 1, 23, 59, 59, 1000, tzinfo=utc)),
        # A +02:00 offset normalizes two hours back toward UTC.
        ("2011-01-01T23:59:59+02:00",
         datetime(2011, 1, 1, 21, 59, 59, tzinfo=utc)),
    ]

    for raw, parsed in expectations:
        yield assert_equal, inputs.datetime_from_iso8601(raw), parsed
Code example #6
0
from datetime import datetime
from flask_restful import inputs
import pytz


if __name__ == '__main__':
    # Demo: parse an ISO-8601 timestamp (trailing 'Z' = UTC), look up a
    # timezone object, then dump the full list of known timezone names.
    parsed = inputs.datetime_from_iso8601("2019-07-14T02:08:07.018Z")
    tz = pytz.timezone('Asia/Chongqing')
    print(pytz.all_timezones)
Code example #7
0
def type_datetime(value):
    """Parse *value* as an ISO-8601 string and pin the result to UTC."""
    parsed = inputs.datetime_from_iso8601(value)
    return parsed.replace(tzinfo=UTC)
Code example #8
0
    def _build_query(self, fields):
        """Build a BPF-style query string from *fields* and store it on self.

        Validates hosts, nets, ports and protocol filters, resolves the
        after/before time window (durations or ISO-8601 timestamps), and
        rejects requests whose estimated 'weight' exceeds the configured
        limit. Sets ``self.query`` and returns ``self.id`` on success, or
        ``None`` when no query string could be built.

        Raises:
            BadRequest: on an out-of-range port/protocol, an unknown
                protocol name, an unparseable duration, or an over-weight
                request.
        """
        # Fix: debug output previously used bare Python-2 `print`
        # statements; route it through Config.logger like the rest of
        # this method (also restores Python 3 compatibility).
        Config.logger.debug("_build_query {}".format(str(fields)))

        q_fields = {
            'host': [],
            'net': [],
            'port': [],
            'proto': None,
            'proto-name': None,
            'after-ago': None,
            'before-ago': None,
            'after': None,
            'before': None,
        }
        q_fields.update(fields)
        self.progress(Query.CREATED, state=Query.CREATED)

        # Default time window: LONG_AGO up to one minute past query time.
        start = self.query_time + self.LONG_AGO
        end = self.query_time + timedelta(minutes=1)

        qry_str = []
        # Per-category counts used later for the request-weight estimate.
        weights = {'ip': 0, 'net': 0, 'port': 0}
        for host in sorted(q_fields['host']):
            Config.logger.debug("Parsing host: %s", host)
            if len(host) == 0:
                continue

            validate_ip(host)
            qry_str.append('host {}'.format(host))
            weights['ip'] += 1
        for net in sorted(q_fields['net']):
            Config.logger.debug("Parsing net: %s", net)
            if len(net) == 0:
                continue

            validate_net(net)
            qry_str.append('net {}'.format(net))
            weights['net'] += 1
        for port in sorted(q_fields['port']):
            Config.logger.debug("Parsing port: %s", port)
            try:
                # A non-numeric port raises ValueError from int() and is
                # reported the same way as an out-of-range one.
                if 0 < int(port) < 2**16:
                    qry_str.append('port {}'.format(int(port)))
                    weights['port'] += 1
                else:
                    raise ValueError()
            except ValueError:
                raise BadRequest("Port {} out of range: 1-65535".format(port))
        if q_fields['proto']:
            try:
                if 0 < int(q_fields['proto']) < 2**8:
                    qry_str.append('ip proto {}'.format(q_fields['proto']))
                else:
                    raise ValueError()
            except ValueError:
                raise BadRequest(
                    "protocol number {} out of range 1-255".format(
                        q_fields['proto']))
        if q_fields['proto-name']:
            if q_fields['proto-name'].upper() not in ['TCP', 'UDP', 'ICMP']:
                raise BadRequest(description="Bad proto-name: {}".format(
                    q_fields['proto-name']))
            qry_str.append(q_fields['proto-name'].lower())
        # Relative window bounds: 'after-ago'/'before-ago' accept only
        # durations, while 'after'/'before' below also accept ISO-8601.
        if q_fields['after-ago']:
            dur = parse_duration(q_fields['after-ago'])
            if not dur:
                raise BadRequest("can't parse duration: {}".format(
                    q_fields['after-ago']))
            start = enforce_time_window(self.query_time - dur)
        if q_fields['before-ago']:
            dur = parse_duration(q_fields['before-ago'])
            if not dur:
                raise BadRequest("can't parse duration: {}".format(
                    q_fields['before-ago']))
            end = enforce_time_window(self.query_time - dur)
        if q_fields['after']:
            Config.logger.debug("Processing 'after': %s", q_fields['after'])
            dur = parse_duration(q_fields['after'])
            Config.logger.debug("Duration %s", dur)
            if dur:
                start = enforce_time_window(self.query_time - dur)
                Config.logger.debug("Start w/ duration: %s", start)
            else:
                # Not a duration — treat the value as an ISO-8601 timestamp
                # and strip the tzinfo to keep the window naive.
                start = enforce_time_window(
                    inputs.datetime_from_iso8601(
                        q_fields['after']).replace(tzinfo=None))
                Config.logger.debug("Start w/o duration: %s", start)

        if q_fields['before']:
            dur = parse_duration(q_fields['before'])
            if dur:
                end = enforce_time_window(self.query_time - dur)
            else:
                end = enforce_time_window(
                    inputs.datetime_from_iso8601(
                        q_fields['before']).replace(tzinfo=None))
            # Pad the end of the window by the configured slack.
            end += timedelta(seconds=Config.get('TIME_WINDOW'))

        # Check the request's 'weight': wide time windows with few
        # narrowing terms are expensive, so estimate cost and reject
        # anything over the limit (unless explicitly ignored).
        if Query.WEIGHTS['enabled'] and not q_fields.get('ignore-weight'):
            req_weight = (Query.WEIGHTS['total'] *
                          ((end - start).total_seconds() /
                           (Query.WEIGHTS['hour'] * 3600)) / (sum(
                               (val * Query.WEIGHTS[k]
                                for k, val in weights.items())) or 1))
            if req_weight > Query.WEIGHTS['limit']:
                self.error(
                    'build_query', "Request is too heavy: {}/{}:\t{}".format(
                        req_weight, Query.WEIGHTS['limit'], jsonify(q_fields)))
                raise BadRequest("Request parameters exceed weight: %d/%d" %
                                 (req_weight, Query.WEIGHTS['limit']))

        qry_str.append('after {}'.format(start.strftime(ISOFORMAT)))
        qry_str.append('before {}'.format(end.strftime(ISOFORMAT)))

        self.query = " and ".join(qry_str)
        if not self.query:
            Config.logger.info("Bad request: {}".format(jsonify(q_fields)))
            return None

        Config.logger.debug("build_query: <{}>".format(self.query))

        # if we want to support limiting the query, it would require rethinking our duplicate detection
        #if q_fields['sensors']:
        #    self.sensors = q_fields['sensors']
        return self.id