def date_to_string(date_object=None):
    """ rfc3339 conform string represenation of a date
        can also be given as str YYYY-mm-dd HH:MM:SS """
    # Numeric-looking input (including numeric strings) is normalised to a
    # float epoch timestamp; anything else passes through unchanged.
    if date_object is not None:
        try:
            date_object = float(date_object)
        except ValueError:
            pass
        except TypeError:
            pass

    # NOTE(review): the float() conversion above also converts real ints, so
    # for non-None input this int branch appears unreachable and the
    # "small int = days offset" interpretation may be dead code — confirm.
    if type(date_object) == int:
        if date_object < 365:
            date_object = datetime.utcnow() + timedelta(days=date_object)
        else:
            date_object = datetime.utcfromtimestamp(date_object)
    if type(date_object) == float:
        date_object = datetime.utcfromtimestamp(date_object)
    if type(date_object) == str:
        try:
            # Parse "YYYY-mm-dd HH:MM:SS" as UTC by appending a zero offset.
            date_object = datetime.strptime(date_object + "+0000",
                                            '%Y-%m-%d %H:%M:%S%z')
        except ValueError:
            # Fall back to the project's generic string parser.
            date_object = string_to_date(date_object)
    if not date_object:
        # Nothing usable was supplied: report the current time.
        return strict_rfc3339.now_to_rfc3339_utcoffset()
    else:
        if date_object.tzinfo is None:
            # Naive datetimes are pinned to UTC before taking the timestamp.
            return strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                pytz.UTC.localize(date_object).timestamp())
        else:
            return strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                date_object.timestamp())
Beispiel #2
0
    def serialize_items(self, request, language, queryset):
        """Serialize *queryset* of posts into an Atom ``<feed>`` element.

        Returns the root lxml element; the caller is responsible for
        serializing it to bytes.
        """
        default_host = settings.XMPP_HOSTS[settings.DEFAULT_XMPP_HOST]
        base_url = default_host['CANONICAL_BASE_URL']
        # The feed id is the canonical absolute URL of the requested page.
        feed_id = '%s%s' % (base_url, request.get_full_path())

        root = etree.Element("feed",
                             nsmap={
                                 None: self.atom_ns,
                                 'dc': 'http://purl.org/dc/elements/1.1/',
                             })
        self.sub(root, 'title', self.get_feed_title(request))
        self.sub(root, 'id', feed_id)

        try:
            # Feed-level <updated> is the newest post's timestamp.
            updated = max([q.updated for q in queryset])
        except ValueError:
            # max() raises ValueError on an empty queryset; use "now".
            updated = timezone.now()
        self.sub(root, 'updated',
                 timestamp_to_rfc3339_utcoffset(int(updated.timestamp())))

        self.sub(root, 'link', href=feed_id, rel='self')
        self.sub(root, 'icon', static('feed/atom_icon.png'))
        self.sub(root, 'logo', static('feed/atom.png'))
        self.sub(root, 'rights', '© 2010-%s, jabber.at' % updated.year)

        for post in queryset:
            canonical_url = post.get_canonical_url()
            # Rewrite relative links to absolute ones for feed readers.
            content = absolutify_html(post.render_from_request(request),
                                      base_url)
            summary = absolutify_html(post.get_html_summary(request), base_url)

            entry = self.sub(root, 'entry')
            self.sub(entry, 'id', canonical_url)
            self.sub(entry, 'title', post.title.current)
            self.sub(
                entry, 'updated',
                timestamp_to_rfc3339_utcoffset(int(post.updated.timestamp())))
            self.sub(
                entry, 'published',
                timestamp_to_rfc3339_utcoffset(int(post.created.timestamp())))
            self.sub(entry, 'link', href=canonical_url)
            self.sub(entry, 'content', content, type="html")
            self.sub(entry, 'summary', summary, type="html")

            author = self.sub(entry, 'author')
            self.sub(author, 'name', post.author.node)

        return root
    def test_serialize_date(self):
        """serialize() must emit a date as a quoted RFC 3339 string."""
        sample = date(2016, 8, 15)  # in UTC
        expected = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
            calendar.timegm(sample.timetuple()))
        self.assertEqual('"%s"' % expected, json.serialize(sample))
    def test_serialize_datetime(self):
        """serialize() must emit a datetime as a quoted RFC 3339 string."""
        moment = datetime(2016, 8, 15, 12, 0, 1, 999999)  # in UTC
        expected = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
            calendar.timegm(moment.timetuple()))
        self.assertEqual('"%s"' % expected, json.serialize(moment))
def local_to_rfc3339(date_to_transform):
    """Convert a local-timezone datetime into an RFC 3339 string."""
    # mktime() interprets the struct_time in the local timezone.
    epoch_seconds = time.mktime(date_to_transform.timetuple())
    return strict_rfc3339.timestamp_to_rfc3339_utcoffset(epoch_seconds)
Beispiel #6
0
def to_rfc3339(timestamp):
    """Converts ``timestamp`` to an RFC 3339 date string format.

    ``timestamp`` can be either a ``datetime.datetime`` or a
    ``datetime.timedelta``.  Instances of the latter are assumed to be a delta
    with the beginning of the unix epoch, 1st of January, 1970

    The returned string is always Z-normalized.  Examples of the return format:
    '1972-01-01T10:00:20.021Z'

    Args:
      timestamp (datetime|timedelta): represents the timestamp to convert

    Returns:
      string: timestamp converted to a rfc3339 compliant string as above

    Raises:
      ValueError: if timestamp is not a datetime.datetime or datetime.timedelta

    """
    if isinstance(timestamp, datetime.datetime):
        # Interpret a datetime as its offset from the unix epoch.
        timestamp = timestamp - _EPOCH_START
    if not isinstance(timestamp, datetime.timedelta):
        # Fixed: log message previously ended with a stray comma.
        logger.error('Could not convert %s to a rfc3339 time.', timestamp)
        raise ValueError('Invalid timestamp type')
    return strict_rfc3339.timestamp_to_rfc3339_utcoffset(
        timestamp.total_seconds())
Beispiel #7
0
def get_most_recent_transaction(user, account_id):
    """Return the newest transaction of the last 24 hours, or None.

    Queries the transactions endpoint for *account_id* (merchant expanded)
    and picks the transaction with the latest ``created`` value.
    """
    # Use an aware UTC datetime: naive utcnow().timestamp() would be
    # interpreted in the machine's local timezone and skew the 'since'
    # filter by the UTC offset.
    last_day = (datetime.datetime.now(datetime.timezone.utc)
                - datetime.timedelta(days=1))

    transactions_response = requests.get(
        URL_TRANSACTIONS,
        params={
            'account_id': account_id,
            'expand[]': 'merchant',
            'since': strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                last_day.timestamp(),
            ),
        },
        headers=auth_header(user),
    )

    transactions = transactions_response.json()['transactions']

    if not transactions:
        return None

    # Newest first, by the API's 'created' timestamp string.
    transactions = sorted(
        transactions,
        key=lambda x: x['created'],
        reverse=True,
    )

    return transactions[0]
def to_rfc3339(timestamp):
    """Converts ``timestamp`` to an RFC 3339 date string format.

    ``timestamp`` can be either a ``datetime.datetime`` or a
    ``datetime.timedelta``.  Instances of the later are assumed to be a delta
    with the beginining of the unix epoch, 1st of January, 1970

    The returned string is always Z-normalized.  Examples of the return format:
    '1972-01-01T10:00:20.021Z'

    Args:
      timestamp (datetime|timedelta): represents the timestamp to convert

    Returns:
      string: timestamp converted to a rfc3339 compliant string as above

    Raises:
      ValueError: if timestamp is not a datetime.datetime or datetime.timedelta

    """
    # Reduce a datetime to its delta from the unix epoch.
    if isinstance(timestamp, datetime.datetime):
        timestamp -= _EPOCH_START
    # By now anything valid is a timedelta; convert and return early.
    if isinstance(timestamp, datetime.timedelta):
        return strict_rfc3339.timestamp_to_rfc3339_utcoffset(
            timestamp.total_seconds())
    logger.error(u'Could not convert %s to a rfc3339 time,', timestamp)
    raise ValueError(u'Invalid timestamp type')
Beispiel #9
0
def publish_typing(conversation, evt, at):
    """
    Fan a typing event out to every participant of *conversation*.

    Each participant receives a payload shaped like:

    {
      'userid': {
        'event': 'begin',
        'at': '20161116T78:44:00Z'
      },
      'userid2': {
        'event': 'begin',
        'at': '20161116T78:44:00Z'
      }
    }
    """
    user_id = current_user_id()
    channels = conversation['participant_ids']
    data = {}
    # Key the event by the sender; timestamp is RFC 3339 formatted.
    data['user/' + user_id] = {
        'event': evt,
        'at': timestamp_to_rfc3339_utcoffset(at.timestamp())
    }
    encoder = _RecordEncoder()
    # Reuses the name user_id as the loop variable; the sender's id was
    # already captured in `data` above, so this is safe.
    for user_id in channels:
        _publish_event(user_id, 'typing',
                       {encoder.encode_id(conversation.id): data})
    return {'status': 'OK'}
Beispiel #10
0
def publish_typing(conversation, evt, at):
    """
    Fan a typing event out to every participant of *conversation*.

    Each participant receives a payload shaped like:

    {
      'userid': {
        'event': 'begin',
        'at': '20161116T78:44:00Z'
      },
      'userid2': {
        'event': 'begin',
        'at': '20161116T78:44:00Z'
      }
    }
    """
    user_id = current_user_id()
    channels = conversation['participant_ids']
    data = {}
    # Key the event by the sender; timestamp is RFC 3339 formatted.
    data['user/' + user_id] = {
        'event': evt,
        'at': timestamp_to_rfc3339_utcoffset(at.timestamp())
    }
    # Reuses the name user_id as the loop variable; the sender's id was
    # already captured in `data` above, so this is safe.
    for user_id in channels:
        _publish_event(user_id, 'typing', {
            conversation['_id']: data
        })
    return {'status': 'OK'}
    def test_deserialize_date(self):
        """deserialize() must turn a quoted RFC 3339 string into ISO form."""
        moment = datetime(2016, 8, 15, 12, 0, 1, 999999)  # in UTC
        encoded = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
            calendar.timegm(moment.timetuple()))
        self.assertEqual('2016-08-15T12:00:01Z',
                         json.deserialize('"%s"' % encoded))
    def test_serialize_object(self):
        """Test serialize() function on a nested object graph."""
        # Local fixture type: x/y are plain values, d is a validated date
        # property, o is an optional nested A (accepting dicts too).
        class A(Object):
            def __init__(self, x=None, y=None, d=None, o=None):
                self.x = x
                self.y = y
                self.d = d
                self.o = o

            @property
            def d(self):
                return self._d

            @d.setter
            def d(self, value):
                # Strings are parsed from RFC 3339 into a timestamp;
                # date instances are stored as-is.
                if isinstance(value, string_types):
                    self._d = strict_rfc3339.rfc3339_to_timestamp(value)
                elif isinstance(value, date):
                    self._d = value
                else:
                    raise ValueError('Property d does not allow %s' %
                                     type(value))

            @property
            def o(self):
                return self._o

            @o.setter
            def o(self, value):
                # Dicts are promoted to A instances (keyword expansion).
                if value is None:
                    self._o = None
                elif isinstance(value, dict):
                    self._o = A(**value)
                elif isinstance(value, A):
                    self._o = value
                else:
                    raise ValueError('Property o does not allow %s' %
                                     type(value))

        now = datetime.now()
        now_rfc3339 = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
            calendar.timegm(now.timetuple()))
        # Serialize a two-level structure, then round-trip it back to a dict.
        o = A(1, 2, now, None)
        result = json.serialize(A(1, 2, now, o))
        result_dict = json.deserialize(result)

        assert len(result_dict) == 4
        assert 'x' in result_dict
        assert result_dict['x'] == 1
        assert 'y' in result_dict
        assert result_dict['y'] == 2
        assert 'd' in result_dict
        assert result_dict['d'] == now_rfc3339
        assert 'o' in result_dict
        assert result_dict['o']['x'] == o.x
        assert result_dict['o']['y'] == o.y
        assert result_dict['o']['d'] == now_rfc3339
        assert result_dict['o']['o'] == o.o
Beispiel #13
0
    def test_signer_availability(self):
        """available() reflects whether the fetched token has expired."""
        # Token expiring 10 minutes in the future: signer is available.
        self.mock_request.return_value.text = json.dumps({
            'value': 'mock-token-value',
            'expired_at': timestamp_to_rfc3339_utcoffset(
                (datetime.now() + timedelta(minutes=10)).timestamp()),
            'extra': 'mock-token-extra'
            })
        signer1 = CloudAssetSigner.create(self.mock_options())
        assert signer1.available() is True

        # Token that expired 10 minutes ago: signer is not available.
        self.mock_request.return_value.text = json.dumps({
            'value': 'mock-token-value',
            'expired_at': timestamp_to_rfc3339_utcoffset(
                (datetime.now() - timedelta(minutes=10)).timestamp()),
            'extra': 'mock-token-extra'
            })
        signer2 = CloudAssetSigner.create(self.mock_options())
        assert signer2.available() is False
Beispiel #14
0
def query_wait_time(api_client, project_id, branch=None, service=None):
    """Query the last hour of data for a branch and service.

    Builds a Cloud Monitoring filter on the wait-time custom metric,
    optionally narrowed by *branch* and *service* labels.

    Returns:
        dict: the raw ``timeSeries.list`` API response.
    """
    query_filter = 'resource.type="global" AND metric.type="%s"' % (
        MA_RMV_WAIT_TIME_CUSTOM_METRIC_TYPE)
    if branch is not None:
        query_filter = '%s AND metric.label.branch="%s"' % (query_filter, branch)
    if service is not None:
        query_filter = '%s AND metric.label.service="%s"' % (query_filter, service)

    # Interval [now - 1h, now], RFC 3339 as the monitoring API requires.
    now = int(time.time())
    end_rfc3339 = strict_rfc3339.timestamp_to_rfc3339_utcoffset(now)
    start_rfc3339 = strict_rfc3339.timestamp_to_rfc3339_utcoffset(now - 60 * 60)

    request = api_client.projects().timeSeries().list(
        name="projects/%s" % project_id, filter=query_filter,
        interval_endTime=end_rfc3339, interval_startTime=start_rfc3339)
    response = request.execute()
    print("Query result: %s" % response)
    # Fixed: the response was previously printed but never returned,
    # making the query result unusable by callers.
    return response
Beispiel #15
0
    def test_signer_availability(self):
        """available() reflects whether the fetched token has expired."""
        # Token expiring 10 minutes in the future: signer is available.
        self.mock_request.return_value.text = json.dumps({
            'value': 'mock-token-value',
            'expired_at': timestamp_to_rfc3339_utcoffset(
                (datetime.now() + timedelta(minutes=10)).timestamp()),
            'extra': 'mock-token-extra'
            })
        signer1 = CloudAssetSigner.create(self.mock_options())
        assert signer1.available() is True

        # Token that expired 10 minutes ago: signer is not available.
        self.mock_request.return_value.text = json.dumps({
            'value': 'mock-token-value',
            'expired_at': timestamp_to_rfc3339_utcoffset(
                (datetime.now() - timedelta(minutes=10)).timestamp()),
            'extra': 'mock-token-extra'
            })
        signer2 = CloudAssetSigner.create(self.mock_options())
        assert signer2.available() is False
Beispiel #16
0
    def serialize_items(self, request, language, queryset):
        """Serialize *queryset* of posts into an Atom ``<feed>`` element.

        Returns the root lxml element; the caller is responsible for
        serializing it to bytes.
        """
        default_host = settings.XMPP_HOSTS[settings.DEFAULT_XMPP_HOST]
        base_url = default_host['CANONICAL_BASE_URL']
        # The feed id is the canonical absolute URL of the requested page.
        feed_id = '%s%s' % (base_url, request.get_full_path())

        root = etree.Element("feed", nsmap={
            None: self.atom_ns,
            'dc': 'http://purl.org/dc/elements/1.1/',
        })
        self.sub(root, 'title', self.get_feed_title(request))
        self.sub(root, 'id', feed_id)

        try:
            # Feed-level <updated> is the newest post's timestamp.
            updated = max([q.updated for q in queryset])
        except ValueError:
            # max() raises ValueError on an empty queryset; use "now".
            updated = timezone.now()
        self.sub(root, 'updated', timestamp_to_rfc3339_utcoffset(int(updated.timestamp())))

        self.sub(root, 'link', href=feed_id, rel='self')
        self.sub(root, 'icon', static('feed/atom_icon.png'))
        self.sub(root, 'logo', static('feed/atom.png'))
        self.sub(root, 'rights', '© 2010-%s, jabber.at' % updated.year)

        for post in queryset:
            canonical_url = post.get_canonical_url()
            # Rewrite relative links to absolute ones for feed readers.
            content = absolutify_html(post.render_from_request(request), base_url)
            summary = absolutify_html(post.get_html_summary(request), base_url)

            entry = self.sub(root, 'entry')
            self.sub(entry, 'id', canonical_url)
            self.sub(entry, 'title', post.title.current)
            self.sub(entry, 'updated', timestamp_to_rfc3339_utcoffset(
                int(post.updated.timestamp())))
            self.sub(entry, 'published', timestamp_to_rfc3339_utcoffset(
                int(post.created.timestamp())))
            self.sub(entry, 'link', href=canonical_url)
            self.sub(entry, 'content', content, type="html")
            self.sub(entry, 'summary', summary, type="html")

            author = self.sub(entry, 'author')
            self.sub(author, 'name', post.author.node)

        return root
Beispiel #17
0
    def _generate_csv_body(self, data: dict, event_id: int):  # no headers
        """Render one event as a joined CSV row body (no header line).

        Mutates ``data['date']`` in place, converting the epoch value to
        an RFC 3339 string before joining.
        """
        data['date'] = timestamp_to_rfc3339_utcoffset(data['date'])

        # list(data.values()) replaces the redundant identity comprehension.
        new_data = [
            event_id,
            EventHandler._join_list(list(data.values())),
            self.prefix,
            self.prefix + f"/{event_id}",
        ]

        return EventHandler._join_list(new_data)
 def get_hours_by_date(self,date):
     # Fetch the Google Calendar events for the given day and reduce them
     # to the site's opening-hours dictionary.
     # not a fan of this method but it does the job..
     registry = getUtility(IRegistry)

     #date = date.replace(hour=2, minute=0, second=0, microsecond=0)

     # API endpoint, calendar id, key and extra query options all come
     # from the Plone registry.
     url = registry['polklibrary.google.hours.api']
     cal_id = registry['polklibrary.google.hours.calendar']
     key = registry['polklibrary.google.hours.key']
     options = registry['polklibrary.google.hours.options']

     # Query window: 02:00 on the given day until 02:00 the next day,
     # as RFC 3339 timestamps (local time via mktime).
     date = date.replace(hour=2, minute=0, second=0)
     start = time.mktime(date.timetuple())
     end =  time.mktime((date + datetime.timedelta(days=1)).timetuple())
     start_min = 'timeMin=' + urllib.quote(strict_rfc3339.timestamp_to_rfc3339_utcoffset(start))
     start_max = 'timeMax=' + urllib.quote(strict_rfc3339.timestamp_to_rfc3339_utcoffset(end))
     target = url + '/' + cal_id + '/events?key=' + key + '&' + options + '&' + start_min + '&' + start_max

     # NOTE(review): SSL certificate validation is disabled here — confirm
     # this is intentional before reusing elsewhere.
     h = httplib2.Http(disable_ssl_certificate_validation=True)
     resp, content = h.request(target, "GET")
     feed = json.loads(content)

     return self.make_clean_google_hours_dictionary(feed)
Beispiel #19
0
    def _generate_dict_body(self, data: dict, event_id: int):
        """Build a JSON-ready dict for one event with HAL-style links.

        Mutates ``data['date']`` in place, converting it to RFC 3339.
        """
        data['date'] = timestamp_to_rfc3339_utcoffset(data['date'])

        body = {"eventId": event_id}
        body.update(data)
        body["_links"] = {
            "parent": {
                "href": self.prefix
            },
            "self": {
                "href": self.prefix + f"/{event_id}"
            }
        }

        return body
Beispiel #20
0
def as_date(dat):
    """Return the RFC3339 UTC string representation of the given date and time.

    Args:
        dat (:py:class:`datetime.date`): the object/type to be serialized.

    Raises:
        TypeError:
            when ``dat`` is not an instance of ``datetime.date``.

    Returns:
        (str) JSON serializable type for the given object.
    """
    LOGGER.debug('as_date(%s)', dat)

    # timegm() treats the struct_time as UTC (no local-timezone shift).
    return strict_rfc3339.timestamp_to_rfc3339_utcoffset(
        calendar.timegm(dat.timetuple()))
Beispiel #21
0
def get_recurring_merchants(transactions):
    """Detect recurring payments among transactions with a merchant group.

    Converts each transaction into the plain-dict shape expected by
    ``detect_recurring_payments.process_transactions`` and delegates to it.
    Returns an empty list when no grouped transactions exist.
    """
    transactions = transactions.filter(merchant_group__isnull=False)
    if not transactions:
        return []

    dictified_txs = []
    for tx in transactions:
        merchant = tx.merchant_group
        dictified_txs.append(
            {
                "amount": tx.amount,
                "category": tx.category,
                # 'created' is serialized as RFC 3339 from the stored datetime.
                "created": strict_rfc3339.timestamp_to_rfc3339_utcoffset(tx.mondo_created.timestamp()),
                "merchant": {"id": merchant.mondo_group_id, "group_id": merchant.mondo_group_id, "name": merchant.name},
            }
        )

    return detect_recurring_payments.process_transactions({"transactions": dictified_txs})
Beispiel #22
0
def date_to_string(date_object=None):
    """ creates a rfc3339 conform string represenation of a date,
        can also be given as str YYYY-mm-dd HH:MM:SS, assuming local
        machine timezone

        :param date_object: the date to be converted
        :type date_object: date object, str, or int/float epoch timestamp
    """
    # isinstance replaces the type()== anti-pattern; bool is excluded
    # explicitly because it is a subclass of int and must not be treated
    # as an epoch timestamp.  int and float were handled identically, so
    # the two branches are merged.
    if (isinstance(date_object, (int, float))
            and not isinstance(date_object, bool)):
        date_object = datetime.datetime.fromtimestamp(date_object)
    if isinstance(date_object, str):
        # Naive "YYYY-mm-dd HH:MM:SS" strings are pinned to UTC.
        date_object = datetime.datetime.strptime(date_object + "+0000",
                                                 '%Y-%m-%d %H:%M:%S%z')
    if not date_object:
        # Nothing usable supplied: report the current time.
        return strict_rfc3339.now_to_rfc3339_utcoffset()
    else:
        return strict_rfc3339.timestamp_to_rfc3339_utcoffset(
            date_object.timestamp())
Beispiel #23
0
def create_primitive_token(value, multiline_strings_allowed=True):
    """
    Creates and returns a single token for the given primitive atomic value.

    Raises NotPrimitiveError when the given value is not a primitive atomic value
    """
    if value is None:
        # None is encoded as an empty string token.
        return create_primitive_token('')
    elif isinstance(value, bool):
        # bool must be tested before int (bool is an int subclass).
        return tokens.Token(tokens.TYPE_BOOLEAN, u'true' if value else u'false')
    elif isinstance(value, int):
        return tokens.Token(tokens.TYPE_INTEGER, u'{}'.format(value))
    elif isinstance(value, float):
        return tokens.Token(tokens.TYPE_FLOAT, u'{}'.format(value))
    elif isinstance(value, (datetime.datetime, datetime.date, datetime.time)):
        # timestamp() yields milliseconds here; convert to whole seconds.
        ts = timestamp(value) // 1000
        return tokens.Token(tokens.TYPE_DATE, strict_rfc3339.timestamp_to_rfc3339_utcoffset(ts))
    elif isinstance(value, six.string_types):
        return create_string_token(value, multiline_strings_allowed=multiline_strings_allowed)

    raise NotPrimitiveError("{} of type {}".format(value, type(value)))
Beispiel #24
0
def get_transactions(user, account_id, start_date=datetime.datetime(2000, 1, 1)):
    """Fetch up to 100 transactions since *start_date*, oldest first.

    *start_date* may be an already-formatted RFC 3339 string or a datetime.
    """
    # NOTE(review): the default datetime is naive, so .timestamp() is
    # interpreted in the local timezone — confirm UTC is not required here.
    params = {
        'account_id': account_id,
        'limit': 100,
        'expand[]': 'merchant',
        'since': start_date if isinstance(start_date, str) else strict_rfc3339.timestamp_to_rfc3339_utcoffset(
            start_date.timestamp(),
        )
    }

    transactions_response = requests.get(
        URL_TRANSACTIONS,
        headers=auth_header(user),
        params=params,
    )

    transactions = transactions_response.json()['transactions']

    # Oldest first, by the API's 'created' timestamp string.
    return sorted(
        transactions,
        key=operator.itemgetter('created'),
    )
Beispiel #25
0
 def to_file(self,
             file: Union[str, pathlib.Path, _FileLike],
             *,
             content_type: Optional[str] = None) -> None:
     """Write this model to an HDF5 file (path or file-like object).

     Raises ValueError when no version is set, and FileTypeError when the
     content type or file extension is not HDF5.
     """
     if self.version is None:
         raise ValueError('Version must be set before writing file')
     if content_type is not None:
         # Explicit content type overrides the extension check.
         if content_type != 'application/x-hdf5':
             raise FileTypeError(
                 f'Expected application/x-hdf5, not {content_type}')
     else:
         # No content type given: validate the file extension instead.
         if isinstance(file, (str, pathlib.Path)):
             path = pathlib.Path(file)
         else:
             path = pathlib.Path(file.name)  # type: ignore
         if path.suffix not in {'.h5', '.hdf5'}:
             raise FileTypeError(
                 f'Expected extension of .h5 or .hdf5, not {path.suffix} '
                 '(use content_type to override if necessary)')
     if isinstance(file, pathlib.Path):
         # h5py needs a plain string path, not a Path object.
         hdf5 = h5py.File(str(file), 'w')
     else:
         hdf5 = h5py.File(file, 'w')
     with hdf5:
         # Model metadata is stored as root attributes; optional fields
         # are only written when present.
         hdf5.attrs['model_version'] = self.version
         hdf5.attrs['model_type'] = self.model_type
         hdf5.attrs['model_format'] = self.model_format
         if self.comment is not None:
             hdf5.attrs['model_comment'] = self.comment
         if self.author is not None:
             hdf5.attrs['model_author'] = self.author
         if self.target is not None:
             hdf5.attrs['model_target'] = self.target
         if self.created is not None:
             hdf5.attrs[
                 'model_created'] = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                     self.created.timestamp())
         self.to_hdf5(hdf5)
Beispiel #26
0
def get_latest_videos_of_channel(channel_id, cap=10, since_minutes_ago=300):
    """Return the YouTube Data API search JSON for a channel's latest videos.

    Exits the process with status 1 when the API request fails.
    """
    assert cap < 51  # 50 is the highest authorized value in 2017
    t = datetime.datetime.now() - datetime.timedelta(minutes=since_minutes_ago)
    # Fixed: strftime("%s") is a non-portable glibc extension (absent on
    # Windows, undocumented in Python); timestamp() is the supported way
    # to obtain the epoch value for a naive local datetime.
    t = strict_rfc3339.timestamp_to_rfc3339_utcoffset(int(t.timestamp()))
    url = 'https://www.googleapis.com/youtube/v3/search'
    parameters = {
        'key': YOUTUBE_API_KEY,
        'channelId': channel_id,
        'type': 'video',
        'part': 'snippet',
        'order': 'date',
        'publishedAfter': t,  # RFC 3339 with trailing Z, or it will not work
        'maxResults': '%d' % cap,
    }

    response = get(url, params=parameters)

    if not response.ok:
        cprint("Request to Youtube API failed with response :", "red")
        print(response.text)
        exit(1)

    return response.json()
Beispiel #27
0
def create_primitive_token(value, multiline_strings_allowed=True):
    """
    Creates and returns a single token for the given primitive atomic value.

    Raises NotPrimitiveError when the given value is not a primitive atomic value
    """
    if value is None:
        # None is encoded as an empty string token.
        return create_primitive_token('')
    elif isinstance(value, bool):
        # bool must be tested before int (bool is an int subclass).
        return tokens.Token(tokens.TYPE_BOOLEAN,
                            u'true' if value else u'false')
    elif isinstance(value, int):
        return tokens.Token(tokens.TYPE_INTEGER, u'{}'.format(value))
    elif isinstance(value, float):
        return tokens.Token(tokens.TYPE_FLOAT, u'{}'.format(value))
    elif isinstance(value, (datetime.datetime, datetime.date, datetime.time)):
        # timestamp() yields milliseconds here; convert to whole seconds.
        ts = timestamp(value) // 1000
        return tokens.Token(tokens.TYPE_DATE,
                            strict_rfc3339.timestamp_to_rfc3339_utcoffset(ts))
    elif isinstance(value, six.string_types):
        return create_string_token(
            value, multiline_strings_allowed=multiline_strings_allowed)

    raise NotPrimitiveError("{} of type {}".format(value, type(value)))
Beispiel #28
0
def http_post_update(doc, req):
    """
    Update function: ``payload_telemetry/_update/http_post``

    Creates a new payload_telemetry document with all keys present in the HTTP
    POST form data available in ``doc.data._fallbacks`` and the ``from`` HTTP
    querystring key as the receiver callsign if available. The ``data`` field
    will be base64 encoded and used as ``doc.data._raw``.

    This function has additional functionality specific to RockBLOCKs: if all
    of the keys ``imei``, ``momsn``, ``transmit_time``, ``iridium_latitude``,
    ``iridium_longitude``, ``iridium_cep`` and ``data`` are present in the form
    data, then:
    * ``imei`` will be copied to ``doc.data._fallbacks.payload`` so it can be
      used as a payload callsign.
    * ``iridium_latitude`` and ``iridium_longitude`` will be copied to
      ``doc.data._fallbacks.latitude`` and ``longitude`` respectively.
    * ``data`` will be hex decoded before base64 encoding so it can be directly
      used by the binary parser module.
    * ``transmit_time`` will be decoded into an RFC3339 timestamp and used for
      the ``time_created`` field in the receiver section.
    * ``transmit_time`` will be decoded into hours, minutes and seconds and
      copied to ``doc.data._fallbacks.time``.

    Usage::

        POST /habitat/_design/payload_telemetry/_update/http_post?from=callsign
        
        data=hello&imei=whatever&so=forth

    This update handler may not currently be used on existing documents or
    with a PUT request; such requests will fail.

    Returns "OK" if everything was fine, otherwise CouchDB will return a
    (hopefully instructive) error.
    """
    # Reject PUTs / updates of existing documents outright.
    if doc is not None:
        resp = {"headers": {"code": 405,
                            "body": "This update function may only be used to "
                                    "create new documents via POST, not with  "
                                    "an existing document ID on a PUT request."
                           }
        }
        return doc, resp

    form = req["form"]
    # Default both created and uploaded times to "now"; tc may be replaced
    # by the RockBLOCK transmit_time below.
    tc = ts = now_to_rfc3339_utcoffset()
    rawdata = base64.b64encode(form["data"])
    # RockBLOCK-specific handling: only when every expected key is present.
    if set(("imei", "momsn", "transmit_time", "iridium_latitude",
           "iridium_longitude", "iridium_cep", "data")) <= set(form.keys()):
        form["payload"] = form["imei"]
        form["latitude"] = float(form["iridium_latitude"])
        form["longitude"] = float(form["iridium_longitude"])
        # RockBLOCK payloads arrive hex-encoded (Python 2 str.decode).
        rawdata = base64.b64encode(form["data"].decode("hex"))
        fmt = "%y-%m-%d %H:%M:%S"
        tc = datetime.datetime.strptime(form["transmit_time"], fmt)
        form["time"] = tc.strftime("%H:%M:%S")
        # timegm() treats the parsed struct_time as UTC.
        tc = timestamp_to_rfc3339_utcoffset(calendar.timegm(tc.timetuple()))
    receiver = req["query"]["from"] if "from" in req["query"] else "HTTP POST"
    # Document id is the SHA-256 of the raw payload (deduplicates uploads).
    doc_id = hashlib.sha256(rawdata).hexdigest()
    doc = {"_id": doc_id, "type": "payload_telemetry",
            "data": {"_raw": rawdata, "_fallbacks": form}, "receivers": {}}
    doc["receivers"][receiver] = {"time_created": tc, "time_uploaded": ts,
                                  "time_server": ts}
    return doc, "OK"
Beispiel #29
0
    try:
        db_connection = sqlite3.connect(
            '/home/mayank/Desktop/precog/youtube/create-database/youtube.db')
        print "Opened database successfully"
        db = db_connection.cursor()

        youtube = build(YOUTUBE_API_SERVICE_NAME,
                        YOUTUBE_API_VERSION,
                        developerKey=DEVELOPER_KEY)

        while True:
            idx = 1
            pageToken = ""
            videoIds = []

            publishedAfter = strict_rfc3339.timestamp_to_rfc3339_utcoffset(t)
            publishedBefore = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                t + (60 * 60))
            print("> %s - %s" % (publishedAfter, publishedBefore))

            while True:
                search = youtube.search().list(
                    part="snippet",
                    type="video",
                    order="viewCount",
                    publishedAfter=publishedAfter,
                    publishedBefore=publishedBefore,
                    maxResults=50,
                    pageToken=pageToken,
                    safeSearch="none",
                ).execute()
Beispiel #30
0
def timeString(ts):
    """Format the epoch timestamp *ts* as an RFC 3339 UTC string."""
    formatted = strict_rfc3339.timestamp_to_rfc3339_utcoffset(ts)
    return formatted
Beispiel #31
0
def http_post_update(doc, req):
    """
    Update function: ``payload_telemetry/_update/http_post``

    Creates a new payload_telemetry document with all keys present in the HTTP
    POST form data available in ``doc.data._fallbacks`` and the ``from`` HTTP
    querystring key as the receiver callsign if available. The ``data`` field
    will be base64 encoded and used as ``doc.data._raw``.

    This function has additional functionality specific to RockBLOCKs: if all
    of the keys ``imei``, ``momsn``, ``transmit_time``, ``iridium_latitude``,
    ``iridium_longitude``, ``iridium_cep`` and ``data`` are present in the form
    data, then:
    * ``imei`` will be copied to ``doc.data._fallbacks.payload`` so it can be
      used as a payload callsign.
    * ``iridium_latitude`` and ``iridium_longitude`` will be copied to
      ``doc.data._fallbacks.latitude`` and ``longitude`` respectively.
    * ``data`` will be hex decoded before base64 encoding so it can be directly
      used by the binary parser module.
    * ``transmit_time`` will be decoded into an RFC3339 timestamp and used for
      the ``time_created`` field in the receiver section.
    * ``transmit_time`` will be decoded into hours, minutes and seconds and
      copied to ``doc.data._fallbacks.time``.

    Usage::

        POST /habitat/_design/payload_telemetry/_update/http_post?from=callsign
        
        data=hello&imei=whatever&so=forth

    This update handler may not currently be used on existing documents or
    with a PUT request; such requests will fail.

    Returns "OK" if everything was fine, otherwise CouchDB will return a
    (hopefully instructive) error.
    """
    # Reject PUTs / updates of existing documents outright.
    if doc is not None:
        resp = {
            "headers": {
                "code":
                405,
                "body":
                "This update function may only be used to "
                "create new documents via POST, not with  "
                "an existing document ID on a PUT request."
            }
        }
        return doc, resp

    form = req["form"]
    # Default both created and uploaded times to "now"; tc may be replaced
    # by the RockBLOCK transmit_time below.
    tc = ts = now_to_rfc3339_utcoffset()
    rawdata = base64.b64encode(form["data"])
    # RockBLOCK-specific handling: only when every expected key is present.
    if set(("imei", "momsn", "transmit_time", "iridium_latitude",
            "iridium_longitude", "iridium_cep", "data")) <= set(form.keys()):
        form["payload"] = form["imei"]
        form["latitude"] = float(form["iridium_latitude"])
        form["longitude"] = float(form["iridium_longitude"])
        # RockBLOCK payloads arrive hex-encoded (Python 2 str.decode).
        rawdata = base64.b64encode(form["data"].decode("hex"))
        fmt = "%y-%m-%d %H:%M:%S"
        tc = datetime.datetime.strptime(form["transmit_time"], fmt)
        form["time"] = tc.strftime("%H:%M:%S")
        # timegm() treats the parsed struct_time as UTC.
        tc = timestamp_to_rfc3339_utcoffset(calendar.timegm(tc.timetuple()))
    receiver = req["query"]["from"] if "from" in req["query"] else "HTTP POST"
    # Document id is the SHA-256 of the raw payload (deduplicates uploads).
    doc_id = hashlib.sha256(rawdata).hexdigest()
    doc = {
        "_id": doc_id,
        "type": "payload_telemetry",
        "data": {
            "_raw": rawdata,
            "_fallbacks": form
        },
        "receivers": {}
    }
    doc["receivers"][receiver] = {
        "time_created": tc,
        "time_uploaded": ts,
        "time_server": ts
    }
    return doc, "OK"
Beispiel #32
0
 def _encode_datetime(self, dt):
     """Serialize the datetime *dt* to an RFC 3339 string."""
     return strict_rfc3339.timestamp_to_rfc3339_utcoffset(dt.timestamp())
Beispiel #33
0
def _timestamp_to_rfc3339(dt):
    """Render the UNIX timestamp *dt* as an RFC 3339 string."""
    rendered = strict_rfc3339.timestamp_to_rfc3339_utcoffset(dt)
    return rendered
Beispiel #34
0
 def encode_datetime(self, dt):
     """Encode *dt* as a tagged date dict with an RFC 3339 '$date' value."""
     stamp = strict_rfc3339.timestamp_to_rfc3339_utcoffset(dt.timestamp())
     return {
         '$type': 'date',
         '$date': stamp,
     }
Beispiel #35
0
def timeString(ts):
    """Return the RFC 3339 rendering of the UNIX timestamp *ts*."""
    rendered = strict_rfc3339.timestamp_to_rfc3339_utcoffset(ts)
    return rendered
Beispiel #36
0
def rfc3339(dt):
  """Format the naive datetime *dt* as an RFC 3339 string.

  NOTE(review): time.mktime interprets *dt* as *local* time — confirm
  callers do not expect UTC semantics here.
  """
  seconds = time.mktime(dt.timetuple())
  return strict_rfc3339.timestamp_to_rfc3339_utcoffset(seconds)
Beispiel #37
0
def _timestamp_to_rfc3339(dt):
    """Format the UNIX timestamp *dt* as its RFC 3339 representation."""
    rfc = strict_rfc3339.timestamp_to_rfc3339_utcoffset(dt)
    return rfc
Beispiel #38
0
def to_rfc3339_or_none(dt):
    """Return *dt* as an RFC 3339 string, or None when *dt* is falsy."""
    if dt:
        return timestamp_to_rfc3339_utcoffset(dt.timestamp())
    return None
Beispiel #39
0
 def _encode_datetime(self, dt):
     """Render *dt* (a datetime) in RFC 3339 form via its POSIX timestamp."""
     posix = dt.timestamp()
     return strict_rfc3339.timestamp_to_rfc3339_utcoffset(posix)
Beispiel #40
0
 def get_last_pet(self):
     """Return the cached watchdog pet time, or the epoch when unset.

     Falls back to the RFC 3339 rendering of timestamp 0 when the cache
     holds no 'watchdog_datetime' entry.
     """
     cached = self.cache.get('watchdog_datetime')
     if cached is not None:
         return cached
     return strict_rfc3339.timestamp_to_rfc3339_utcoffset(0)
Beispiel #41
0
def utc_timestamp(seconds):
    """Render *seconds* since the epoch as RFC 3339; falsy input gives None."""
    if seconds:
        return strict_rfc3339.timestamp_to_rfc3339_utcoffset(seconds)
    return None
Beispiel #42
0
def utc_timestamp(seconds):
    """RFC 3339 string for the epoch offset *seconds*, or None when falsy."""
    return strict_rfc3339.timestamp_to_rfc3339_utcoffset(seconds) if seconds else None
Beispiel #43
0
 def default(self, obj):
     """JSON-encoder hook: render datetime objects as RFC 3339 strings.

     Any non-datetime value falls through and yields None, matching the
     original's implicit return.
     """
     if not isinstance(obj, datetime.datetime):
         return None
     return strict_rfc3339.timestamp_to_rfc3339_utcoffset(obj.timestamp())
Beispiel #44
0
def sync_with_google_drive(user_id):
    """Two-way sync of a user's Playground sections with Google Drive.

    For each Playground section, compares local file modification times with
    the modifiedTime of files in the user's mapped Drive folder (allowing a
    3-second tolerance) and copies in whichever direction is newer; Drive-side
    deletions are propagated locally unless the local copy is newer, in which
    case the Drive file is undeleted and updated.

    NOTE(review): uses ``unicode(...)`` — this is Python 2 code.
    Returns a ``worker_controller.functions.ReturnValue`` describing success,
    a human-readable summary, and whether a restart is needed.
    """
    sys.stderr.write("sync_with_google_drive: starting\n")
    if worker_controller is None:
        initialize_db()
    sys.stderr.write("sync_with_google_drive: continuing\n")
    # Load the user's stored Google OAuth credentials from Redis.
    storage = RedisCredStorage(worker_controller.r, user_id, app='googledrive')
    credentials = storage.get()
    if not credentials or credentials.invalid:
        sys.stderr.write("sync_with_google_drive: credentials failed\n")
        return worker_controller.functions.ReturnValue(
            ok=False, error="credentials expired", restart=False)
    try:
        with worker_controller.flaskapp.app_context():
            http = credentials.authorize(httplib2.Http())
            service = worker_controller.apiclient.discovery.build('drive',
                                                                  'v3',
                                                                  http=http)
            # The Drive folder mapped to this user was stored in Redis at setup.
            key = 'da:googledrive:mapping:userid:' + str(user_id)
            the_folder = worker_controller.r.get(key)
            response = service.files().get(
                fileId=the_folder,
                fields="mimeType, id, name, trashed").execute()
            the_mime_type = response.get('mimeType', None)
            trashed = response.get('trashed', False)
            # Abort if the mapped id is trashed or is not actually a folder.
            if trashed is True or the_mime_type != "application/vnd.google-apps.folder":
                return worker_controller.functions.ReturnValue(
                    ok=False,
                    error="error accessing Google Drive",
                    restart=False)
            # Bookkeeping for the comparison: names, ids and mtimes per side.
            local_files = dict()
            local_modtimes = dict()
            gd_files = dict()
            gd_ids = dict()
            gd_modtimes = dict()
            gd_deleted = dict()
            sections_modified = set()
            commentary = ''
            # Each Playground section has its own SavedFile area and a Drive
            # subfolder with the section's name.
            for section in [
                    'static', 'templates', 'questions', 'modules', 'sources'
            ]:
                local_files[section] = set()
                local_modtimes[section] = dict()
                if section == 'questions':
                    the_section = 'playground'
                elif section == 'templates':
                    the_section = 'playgroundtemplate'
                else:
                    the_section = 'playground' + section
                area = SavedFile(user_id, fix=True, section=the_section)
                # Record every local file and its filesystem mtime.
                for f in os.listdir(area.directory):
                    local_files[section].add(f)
                    local_modtimes[section][f] = os.path.getmtime(
                        os.path.join(area.directory, f))
                subdirs = list()
                page_token = None
                # Find the Drive subfolder named after this section.
                # NOTE(review): page_token is read but never passed back into
                # list(), so results beyond the first page are never fetched —
                # confirm whether pagination was intended here.
                while True:
                    response = service.files().list(
                        spaces="drive",
                        fields="nextPageToken, files(id, name)",
                        q="mimeType='application/vnd.google-apps.folder' and trashed=false and name='"
                        + section + "' and '" + str(the_folder) +
                        "' in parents").execute()
                    for the_file in response.get('files', []):
                        if 'id' in the_file:
                            subdirs.append(the_file['id'])
                    page_token = response.get('nextPageToken', None)
                    if page_token is None:
                        break
                if len(subdirs) == 0:
                    return worker_controller.functions.ReturnValue(
                        ok=False,
                        error="error accessing " + section +
                        " in Google Drive",
                        restart=False)
                subdir = subdirs[0]
                gd_files[section] = set()
                gd_ids[section] = dict()
                gd_modtimes[section] = dict()
                gd_deleted[section] = set()
                page_token = None
                # List every non-folder file in the section subfolder,
                # including trashed entries (trashed ones feed gd_deleted).
                while True:
                    response = service.files().list(
                        spaces="drive",
                        fields=
                        "nextPageToken, files(id, name, modifiedTime, trashed)",
                        q="mimeType!='application/vnd.google-apps.folder' and '"
                        + str(subdir) + "' in parents").execute()
                    for the_file in response.get('files', []):
                        # Skip temporary and Google-Docs-format artifacts.
                        if re.search(r'(\.tmp|\.gdoc)$', the_file['name']):
                            continue
                        if re.search(r'^\~', the_file['name']):
                            continue
                        gd_ids[section][the_file['name']] = the_file['id']
                        gd_modtimes[section][the_file[
                            'name']] = strict_rfc3339.rfc3339_to_timestamp(
                                the_file['modifiedTime'])
                        sys.stderr.write("Google says modtime on " +
                                         unicode(the_file) + " is " +
                                         the_file['modifiedTime'] + "\n")
                        if the_file['trashed']:
                            gd_deleted[section].add(the_file['name'])
                            continue
                        gd_files[section].add(the_file['name'])
                    page_token = response.get('nextPageToken', None)
                    if page_token is None:
                        break
                # A name both live and trashed on Drive counts as live.
                gd_deleted[section] = gd_deleted[section] - gd_files[section]
                # Drive -> local: copy files that are new, or more than 3
                # seconds newer on Drive than the local copy.
                for f in gd_files[section]:
                    sys.stderr.write("Considering " + f + " on GD\n")
                    if f not in local_files[section] or gd_modtimes[section][
                            f] - local_modtimes[section][f] > 3:
                        sys.stderr.write("Considering " + f +
                                         " to copy to local\n")
                        sections_modified.add(section)
                        commentary += "Copied " + f + " from Google Drive.\n"
                        the_path = os.path.join(area.directory, f)
                        with open(the_path, 'wb') as fh:
                            response = service.files().get_media(
                                fileId=gd_ids[section][f])
                            downloader = worker_controller.apiclient.http.MediaIoBaseDownload(
                                fh, response)
                            done = False
                            while done is False:
                                status, done = downloader.next_chunk()
                                #sys.stderr.write("Download %d%%." % int(status.progress() * 100) + "\n")
                        # Stamp the local file with Drive's mtime so future
                        # comparisons don't see a spurious difference.
                        os.utime(
                            the_path,
                            (gd_modtimes[section][f], gd_modtimes[section][f]))
                # Local -> Drive: upload files missing on Drive, or update
                # Drive copies more than 3 seconds older than local.
                for f in local_files[section]:
                    sys.stderr.write("Considering " + f +
                                     ", which is a local file\n")
                    if f not in gd_deleted[section]:
                        sys.stderr.write("Considering " + f +
                                         " is not in Google Drive deleted\n")
                        if f not in gd_files[section]:
                            sys.stderr.write("Considering " + f +
                                             " is not in Google Drive\n")
                            the_path = os.path.join(area.directory, f)
                            # Never upload empty files.
                            if os.path.getsize(the_path) == 0:
                                sys.stderr.write("Found zero byte file: " +
                                                 the_path + "\n")
                                continue
                            sys.stderr.write("Copying " + f +
                                             " to Google Drive.\n")
                            commentary += "Copied " + f + " to Google Drive.\n"
                            extension, mimetype = worker_controller.get_ext_and_mimetype(
                                the_path)
                            the_modtime = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                                local_modtimes[section][f])
                            sys.stderr.write(
                                "Setting GD modtime on new file " +
                                unicode(f) + " to " + unicode(the_modtime) +
                                "\n")
                            file_metadata = {
                                'name': f,
                                'parents': [subdir],
                                'modifiedTime': the_modtime,
                                'createdTime': the_modtime
                            }
                            media = worker_controller.apiclient.http.MediaFileUpload(
                                the_path, mimetype=mimetype)
                            the_new_file = service.files().create(
                                body=file_metadata,
                                media_body=media,
                                fields='id').execute()
                            new_id = the_new_file.get('id')
                        elif local_modtimes[section][f] - gd_modtimes[section][
                                f] > 3:
                            sys.stderr.write(
                                "Considering " + f +
                                " is in Google Drive but local is more recent\n"
                            )
                            the_path = os.path.join(area.directory, f)
                            if os.path.getsize(the_path) == 0:
                                sys.stderr.write(
                                    "Found zero byte file during update: " +
                                    the_path + "\n")
                                continue
                            commentary += "Updated " + f + " on Google Drive.\n"
                            extension, mimetype = worker_controller.get_ext_and_mimetype(
                                the_path)
                            the_modtime = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                                local_modtimes[section][f])
                            sys.stderr.write(
                                "Setting GD modtime on modified " +
                                unicode(f) + " to " + unicode(the_modtime) +
                                "\n")
                            file_metadata = {'modifiedTime': the_modtime}
                            media = worker_controller.apiclient.http.MediaFileUpload(
                                the_path, mimetype=mimetype)
                            service.files().update(fileId=gd_ids[section][f],
                                                   body=file_metadata,
                                                   media_body=media).execute()
                # Drive-side deletions: undelete on Drive when the local copy
                # is newer; otherwise delete the local copy.
                for f in gd_deleted[section]:
                    sys.stderr.write("Considering " + f +
                                     " is deleted on Google Drive\n")
                    if f in local_files[section]:
                        sys.stderr.write(
                            "Considering " + f +
                            " is deleted on Google Drive but exists locally\n")
                        if local_modtimes[section][f] - gd_modtimes[section][
                                f] > 3:
                            sys.stderr.write(
                                "Considering " + f +
                                " is deleted on Google Drive but exists locally and needs to be undeleted on GD\n"
                            )
                            commentary += "Undeleted and updated " + f + " on Google Drive.\n"
                            the_path = os.path.join(area.directory, f)
                            extension, mimetype = worker_controller.get_ext_and_mimetype(
                                the_path)
                            the_modtime = strict_rfc3339.timestamp_to_rfc3339_utcoffset(
                                local_modtimes[section][f])
                            sys.stderr.write(
                                "Setting GD modtime on undeleted file " +
                                unicode(f) + " to " + unicode(the_modtime) +
                                "\n")
                            file_metadata = {
                                'modifiedTime': the_modtime,
                                'trashed': False
                            }
                            media = worker_controller.apiclient.http.MediaFileUpload(
                                the_path, mimetype=mimetype)
                            service.files().update(fileId=gd_ids[section][f],
                                                   body=file_metadata,
                                                   media_body=media).execute()
                        else:
                            sys.stderr.write(
                                "Considering " + f +
                                " is deleted on Google Drive but exists locally and needs to deleted locally\n"
                            )
                            sections_modified.add(section)
                            commentary += "Deleted " + f + " from Playground.\n"
                            the_path = os.path.join(area.directory, f)
                            if os.path.isfile(the_path):
                                area.delete_file(f)
                area.finalize()
            # Bump interview-source cache keys so changed files are reloaded.
            for key in worker_controller.r.keys(
                    'da:interviewsource:docassemble.playground' +
                    str(user_id) + ':*'):
                worker_controller.r.incr(key)
            if commentary != '':
                sys.stderr.write(commentary + "\n")
        # A changed module requires a server restart to take effect.
        if 'modules' in sections_modified:
            do_restart = True
        else:
            do_restart = False
        return worker_controller.functions.ReturnValue(ok=True,
                                                       summary=commentary,
                                                       restart=do_restart)
    except Exception as e:
        return worker_controller.functions.ReturnValue(
            ok=False,
            error="Error syncing with Google Drive: " + str(e),
            restart=False)
Beispiel #45
0
def to_rfc3339(date: dt):
    """Convert *date* to an RFC 3339 string via the project's to_ts helper."""
    seconds = to_ts(date)
    return timestamp_to_rfc3339_utcoffset(seconds)
Beispiel #46
0
 def default(self, obj):
     """Serialize datetime instances to RFC 3339; other values yield None."""
     if isinstance(obj, datetime.datetime):
         posix = obj.timestamp()
         return strict_rfc3339.timestamp_to_rfc3339_utcoffset(posix)
     return None
Beispiel #47
0
def rfc3339(dt):
  """RFC 3339 rendering of *dt*.

  NOTE(review): time.mktime treats *dt* as local time — confirm callers
  expect local-time rather than UTC semantics.
  """
  return strict_rfc3339.timestamp_to_rfc3339_utcoffset(time.mktime(dt.timetuple()))
Beispiel #48
0
def utc_timestamp(seconds: Union[float, int]) -> Union[str, None]:
    """RFC 3339 string for *seconds* since the epoch; None for falsy input."""
    return strict_rfc3339.timestamp_to_rfc3339_utcoffset(seconds) if seconds else None
Beispiel #49
0
def utc_timestamp(seconds: Union[float, int]) -> Union[str, None]:
    """Convert an epoch offset to RFC 3339; falsy values map to None."""
    if seconds:
        return strict_rfc3339.timestamp_to_rfc3339_utcoffset(seconds)
    return None