def get_all_pull_requests(r):
    '''Gets all pull requests for the repo r, regardless of whether they are open or closed.'''
    pull_requests = {}

    for p in r.get_pulls(state='all'):
        created_at_ist = p.created_at.replace(tzinfo=tzutc()).astimezone(tzlocal()).ctime()
        if p.closed_at is not None:
            closed_at_ist = p.closed_at.replace(tzinfo=tzutc()).astimezone(tzlocal()).ctime()
        else:
            closed_at_ist = None

        pull_requests[p.id] = {
                'user': p.user.name,
                'username' : p.user.login,
                'created_at': created_at_ist,
                'closed_at': closed_at_ist,
                'additions':p.additions,
                'deletions':p.deletions,
                'changed_files': p.changed_files,
                'review_comments' : p.review_comments,
                'merged' : p.is_merged()
            }

        if p.user.name is None or p.user.name.strip() == '':
            pull_requests[p.id]['user'] = p.user.login

    return pull_requests
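A minimal usage sketch for the helper above, assuming PyGithub is installed; the token and "owner/repo" strings are placeholders:

from github import Github
from dateutil.tz import tzutc, tzlocal

gh = Github("YOUR_TOKEN")           # placeholder token
repo = gh.get_repo("owner/repo")    # placeholder repository
prs = get_all_pull_requests(repo)
print(len(prs), "pull requests collected")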
Example #2
 def process_bind_param(self, value, engine):
   if value is not None:
     if value.tzinfo is None:
       # TODO: do we want to assume that unqualified datetimes are UTC?
       return value.replace(tzinfo=tzutc())
     else:
       return value.astimezone(tzutc())
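For context, a minimal sketch of how a bind-param hook like this typically sits inside a SQLAlchemy TypeDecorator; note that current SQLAlchemy passes a dialect (not an engine) as the second argument:

from sqlalchemy.types import TypeDecorator, DateTime
from dateutil.tz import tzutc

class UTCDateTime(TypeDecorator):
    """Store datetimes as UTC; assume naive inputs are already UTC."""
    impl = DateTime

    def process_bind_param(self, value, dialect):
        if value is None:
            return None
        if value.tzinfo is None:
            # mirror the TODO above: treat unqualified datetimes as UTC
            return value.replace(tzinfo=tzutc())
        return value.astimezone(tzutc())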
Example #3
    def test_verify_certificate_with_invalid_date_range(self):
        start = datetime.utcnow() + relativedelta(months=2)
        start = start.replace(tzinfo=tzutc())
        environ = self.make_environ(
            '/C=MX',
            '/C=MX',
            start=start
        )

        assert not verify_certificate(
            environ,
            'SSL_CLIENT_VERIFY',
            'SSL_CLIENT_V_START',
            'SSL_CLIENT_V_END'
        )

        end = datetime.utcnow() + relativedelta(days=-5)
        end = end.replace(tzinfo=tzutc())
        environ = self.make_environ(
            '/C=MX',
            '/C=MX',
            end=end
        )

        assert not verify_certificate(
            environ,
            'SSL_CLIENT_VERIFY',
            'SSL_CLIENT_V_START',
            'SSL_CLIENT_V_END'
        )
Example #4
 def _make_offset_aware(self, mapping=None):
     # root
     if mapping is None:
         mapping = self
     naive_types = (datetime.datetime, datetime.time)
      for key, value in list(mapping.items()):  # snapshot; the mapping is mutated during iteration
         new_key, new_value = key, value
         # timezone naive key
         if isinstance(key, naive_types) and key.tzinfo is None:
             new_key = key.replace(tzinfo=tzutc())
             del mapping[key]
         # timezone naive value
         if isinstance(value, naive_types) and value.tzinfo is None:
             new_value = value.replace(tzinfo=tzutc())
         # list
         elif isinstance(value, list) and len(value) and \
              any([isinstance(v, naive_types) for v in value]):
             new_value = list()
             for v in value:
                 if isinstance(v, naive_types):
                     v = v.replace(tzinfo=tzutc())
                 new_value.append(v)
         # recursive value
         elif isinstance(value, dict):
             new_value = self._make_offset_aware(value)
         mapping[new_key] = new_value
     return mapping
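The function above is Python 2 (iteritems); a compact Python 3 sketch of the same idea, runnable as-is:

import datetime
from dateutil.tz import tzutc

NAIVE_TYPES = (datetime.datetime, datetime.time)

def make_offset_aware(mapping):
    """Recursively attach UTC to naive datetime/time keys and values."""
    for key, value in list(mapping.items()):      # snapshot: the dict is mutated
        new_key = key
        if isinstance(key, NAIVE_TYPES) and key.tzinfo is None:
            new_key = key.replace(tzinfo=tzutc())
            del mapping[key]
        if isinstance(value, NAIVE_TYPES) and value.tzinfo is None:
            value = value.replace(tzinfo=tzutc())
        elif isinstance(value, list):
            value = [v.replace(tzinfo=tzutc())
                     if isinstance(v, NAIVE_TYPES) and v.tzinfo is None else v
                     for v in value]
        elif isinstance(value, dict):
            value = make_offset_aware(value)
        mapping[new_key] = value
    return mapping

print(make_offset_aware({"t": datetime.datetime(2020, 1, 1)}))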
Example #5
    def test_is_rhic_lookup_task_expired(self):
        # Create a valid, in_progress task
        task_a = RHICLookupTask(uuid="11a1aa11-a11a-1a11-111a-a11111111111", completed=False)
        task_a.save()
        self.assertFalse(identity.is_rhic_lookup_task_expired(task_a))

        # Create a completed task
        task_b = RHICLookupTask(uuid="11a1aa11-a11a-1a11-111a-a22222222222", completed=True,
            initiated=datetime.now(tzutc()),
            modified=datetime.now(tzutc()))
        task_b.save()
        self.assertFalse(identity.is_rhic_lookup_task_expired(task_b))

        # Create a timedout incomplete task
        cfg = config.get_rhic_serve_config_info()
        timeout_in_minutes = cfg["single_rhic_lookup_timeout_in_minutes"]
        expired_time = datetime.now(tzutc()) - timedelta(minutes=timeout_in_minutes+1)
        task_c = RHICLookupTask(uuid="11a1aa11-a11a-1a11-111a-a333333333333", completed=False, initiated=expired_time)
        task_c.save()
        self.assertTrue(identity.is_rhic_lookup_task_expired(task_c))

        # Create a completed expired task
        expired_hours = cfg["single_rhic_lookup_cache_unknown_in_hours"]
        expired_time = datetime.now(tzutc()) - timedelta(hours=expired_hours+1)
        task_d = RHICLookupTask(uuid="11a1aa11-a11a-1a11-111a-a444444444444", completed=True, modified=expired_time)
        task_d.save()
        self.assertTrue(identity.is_rhic_lookup_task_expired(task_d))
Example #6
    def test_purge_expired_rhic_lookups(self):
        cfg = config.get_rhic_serve_config_info()
        # Create a valid, in_progress task
        task_a = RHICLookupTask(uuid="11a1aa11-a11a-1a11-111a-a11111111111", completed=False)
        task_a.save()
        # Create a completed task
        task_b = RHICLookupTask(uuid="11a1aa11-a11a-1a11-111a-a22222222222", completed=True,
            initiated=datetime.now(tzutc()),
            modified=datetime.now(tzutc()))
        task_b.save()
        # Create a timedout incomplete task
        timeout_in_minutes = cfg["single_rhic_lookup_timeout_in_minutes"]
        expired_time = datetime.now(tzutc()) - timedelta(minutes=timeout_in_minutes+1)
        task_c = RHICLookupTask(uuid="11a1aa11-a11a-1a11-111a-a333333333333", completed=False, initiated=expired_time)
        task_c.save()
        # Create a completed expired task
        expired_hours = cfg["single_rhic_lookup_cache_unknown_in_hours"]
        expired_time = datetime.now(tzutc()) - timedelta(hours=expired_hours+1)
        task_d = RHICLookupTask(uuid="11a1aa11-a11a-1a11-111a-a444444444444", completed=True, modified=expired_time)
        task_d.save()

        identity.purge_expired_rhic_lookups()
        found = RHICLookupTask.objects()
        self.assertEquals(len(found), 2)
        for f in found:
            self.assertTrue(f.uuid in [task_a.uuid, task_b.uuid])
            self.assertTrue(f.uuid not in [task_c.uuid, task_d.uuid])
Example #7
    def process_value_type(self, sentinel, value):
        if self.vtype == 'normalize':
            return sentinel, value.strip().lower()
        elif self.vtype == 'integer':
            try:
                v = int(value.strip())
            except ValueError:
                v = 0
            return sentinel, v
        elif self.vtype == 'size':
            try:
                return sentinel, len(value)
            except TypeError:
                return sentinel, 0
        elif self.vtype == 'age':
            if not isinstance(sentinel, datetime):
                sentinel = datetime.now(tz=tzutc()) - timedelta(sentinel)

            if not isinstance(value, datetime):
                value = parse(value)

            # Reverse the age comparison, we want to compare the value being
            # greater than the sentinel typically. Else the syntax for age
            # comparisons is intuitively wrong.
            return value, sentinel

        # Allows for expiration filtering, for events in the future as opposed
        # to events in the past which age filtering allows for.
        elif self.vtype == 'expiration':
            if not isinstance(sentinel, datetime):
                sentinel = datetime.now(tz=tzutc()) + timedelta(sentinel)

            if not isinstance(value, datetime):
                value = parse(value)

            return sentinel, value
Example #8
    def _parse_tzstr(self, tzstr, zero_as_utc=True):
        if tzstr == b'Z' or tzstr == b'z':
            return tz.tzutc()

        if len(tzstr) not in {3, 5, 6}:
            raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters')

        if tzstr[0:1] == b'-':
            mult = -1
        elif tzstr[0:1] == b'+':
            mult = 1
        else:
            raise ValueError('Time zone offset requires sign')

        hours = int(tzstr[1:3])
        if len(tzstr) == 3:
            minutes = 0
        else:
            minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):])

        if zero_as_utc and hours == 0 and minutes == 0:
            return tz.tzutc()
        else:
            if minutes > 59:
                raise ValueError('Invalid minutes in time zone offset')

            if hours > 23:
                raise ValueError('Invalid hours in time zone offset')

            return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60)
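This helper appears to mirror the offset handling in dateutil's own ISO-8601 parser; the same behaviour is visible through the public API (dateutil >= 2.7):

from dateutil.parser import isoparse

print(isoparse("2021-06-01T12:00:00Z").tzinfo)        # tzutc()
print(isoparse("2021-06-01T12:00:00+05:30").tzinfo)   # tzoffset(None, 19800)
print(isoparse("2021-06-01T12:00:00-0800").tzinfo)    # tzoffset(None, -28800)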
Example #9
def is_rhic_lookup_task_expired(current_task):
    cfg = config.get_rhic_serve_config_info()
    if not current_task.completed:
        # Task is in progress, ensure that it's initiated time is within timeout range
        timeout_in_minutes = cfg["single_rhic_lookup_timeout_in_minutes"]
        threshold = current_task.initiated + timedelta(minutes=timeout_in_minutes)
        if not threshold.tzinfo:
            threshold = pytz.UTC.localize(threshold)
        if threshold < datetime.now(tzutc()):
            _LOG.info("Task has timed out, threshold was: %s.  Task = <%s>" % (threshold, current_task))
            # Current time is greater than the threshold this task had to stay alive
            # It is expired
            return True
    else:
        # Task has completed, check if it's within cached time boundaries
        valid_hours = cfg["single_rhic_lookup_cache_unknown_in_hours"]
        modified = current_task.modified
        if not modified.tzinfo:
            modified = pytz.UTC.localize(modified)
        threshold = datetime.now(tzutc()) - timedelta(hours=valid_hours)
        if modified < threshold:
            _LOG.info("Cached task has expired, threshold was: %s. Task = <%s>" % (threshold, current_task))
            # Task was modified more than # hours ago
            # It is expired
            return True
    return False
Example #10
 def save(cls, data):
     """
     
     Save an entity in DB.
     
     :param dict data: Item from the 'data' section of the dictionary that was received from
                       the Facebook API request /posts/
     :return: Nothing
     """
     record = cls.get_or_insert(
         data["id"],
         from_ = data.get("from", {}),
         to = data.get("to", {}),
         message = data.get("message", ""),
         message_tags = data.get("message_tags", {}),
         picture = data.get("picture", ""),
         link = data.get("link", ""),
         actions = data.get("actions", {}),
         type = data.get("type", ""),
         status_type = data.get("status_type", ""),
         # gae date issue solving:
         # http://hype-free.blogspot.ru/2013/02/converting-datetime-to-utc-in-python.html
         created_time = parse(data["created_time"]).astimezone(tz.tzutc()).replace(tzinfo=None),
         updated_time = parse(data["updated_time"]).astimezone(tz.tzutc()).replace(tzinfo=None),
         shares = data.get("shares", {}),
         likes = data.get("likes", {}),
         )
     return None
Example #11
    def get_replication_info(self, instance):
        if self.check_instance_is_master(instance=instance):
            return 0

        with self.pymongo() as client:

            replSetGetStatus = client.admin.command('replSetGetStatus')
            primary_opttime = None
            for member in replSetGetStatus['members']:
                if member['stateStr'] == 'PRIMARY':
                    primary_opttime = member['optimeDate'].replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())

            if primary_opttime is None:
                raise Exception("There is not any Primary in the Replica Set")

            instance_opttime = None
            for member in replSetGetStatus['members']:
                if member["name"] == "{}:{}".format(instance.address, instance.port):
                    instance_opttime = member['optimeDate'].replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())
                    instance_member = member

            if instance_opttime is None:
                raise Exception("Could not find the instance in the Replica Set")

        delay = primary_opttime - instance_opttime
        seconds_delay = delay.days * 24 * 3600 + delay.seconds
        LOG.info("The instance {} is {} seconds behind Primary".format(instance, seconds_delay))

        if seconds_delay == 0 and instance_member["stateStr"] not in ["PRIMARY", "SECONDARY"]:
            LOG.info("The instance {} is 0 seconds behind Primary, but it is not Secondary. It is {}".format(instance, instance_member["stateStr"]))
            return 100000

        return seconds_delay
Example #12
    def _parse(self, text):
        lines = text.split("\n")
        issued = datetime.strptime(lines[3], "%I%M %p %Z %a %b %d %Y")
        if 'CST' in lines[3]:
            self.issued = issued.replace(tzinfo=SPCSWO._CST).astimezone(tzutc())
        elif 'CDT' in lines[3]:
            self.issued = issued.replace(tzinfo=SPCSWO._CDT).astimezone(tzutc())

        match = re.search(r"([\d]{6})Z \- ([\d]{6})Z", lines[5])
        valid_start_str, valid_end_str = match.groups()
        valid_start = datetime.strptime(valid_start_str, "%d%H%M").replace(tzinfo=tzutc())
        valid_end = datetime.strptime(valid_end_str, "%d%H%M").replace(tzinfo=tzutc())
        if valid_end < valid_start:
            valid_end += ((valid_start - valid_end) + timedelta(days=1))
        valid_len = valid_end - valid_start

        self.valid_start = valid_start.replace(year=issued.year, month=issued.month)
        self.valid_end = self.valid_start + valid_len

        products = re.findall(r"\.\.\. ([A-Z ]+) \.\.\.", text)
        prods = {}
        for prod in products:
            match = re.search("\.\.\. %s \.\.\.([\w\d\s\.]+)\&\&" % prod, text, re.S)
            cont_str = match.groups()[0].strip()

            prods[prod] = SPCSWOContours(prod, cont_str, self._conus) 
        return prods
Example #13
 def as_dict(self):
     time_utc = "%Y-%m-%d %H:%M:%S %Z%z"
     d = {
          'created': self.created.replace(tzinfo=tz.tzutc()).strftime(time_utc),
          'last_modified': self.last_modified.replace(tzinfo=tz.tzutc()).strftime(time_utc)
         }
     return d
Example #14
    def test_start_end(self):
        """ LogFile: test .start and .end property work correctly """

        logfile = LogFile(self.file_year_rollover)
        
        assert logfile.start == datetime(2013, 12, 30, 0, 13, 1, 661000, tzutc())
        assert logfile.end == datetime(2014, 1, 2, 23, 27, 11, 720000, tzutc())
Example #15
 def _assert_default_entity(self, entity):
     '''
     Asserts that the entity passed in matches the default entity.
     '''
     self.assertEqual(entity.age, 39)
     self.assertEqual(entity.sex, 'male')
     self.assertEqual(entity.name, 'John Doe')
     self.assertEqual(entity.married, True)
     self.assertEqual(entity.deceased, False)
     self.assertFalse(hasattr(entity, "optional"))
     self.assertFalse(hasattr(entity, "aquarius"))
     self.assertEqual(entity.ratio, 3.1)
     self.assertEqual(entity.evenratio, 3.0)
     self.assertEqual(entity.large, 933311100)
     self.assertEqual(entity.Birthday, datetime(1973, 10, 4, tzinfo=tzutc()))
     self.assertEqual(entity.birthday, datetime(1970, 10, 4, tzinfo=tzutc()))
     self.assertIsInstance(entity.binary, EntityProperty)
     self.assertEqual(entity.binary.type, EdmType.BINARY)
     self.assertEqual(entity.binary.value, b'binary')
     self.assertIsInstance(entity.other, EntityProperty)
     self.assertEqual(entity.other.type, EdmType.INT32)
     self.assertEqual(entity.other.value, 20)
     self.assertIsInstance(entity.clsid, EntityProperty)
     self.assertEqual(entity.clsid.type, EdmType.GUID)
     self.assertEqual(entity.clsid.value,
                      'c9da6455-213d-42c9-9a79-3e9149a57833')
     self.assertTrue(hasattr(entity, "Timestamp"))
     self.assertIsInstance(entity.Timestamp, datetime)
     self.assertIsNotNone(entity.etag)
Example #16
    def get_docs_from_xml(self, root):
        docs = []
        for channel in root:
            for item in channel.findall("item"):
                new_doc = Document()
                new_doc.title = item.find("title").text or ""

                new_doc.download_date = datetime.now(tz.tzutc())
                new_doc.publish_date = dateparser.parse(item.find("pubDate").text, "") or new_doc.download_date
                if new_doc.publish_date.tzinfo is None or self.force_timezone:
                    new_doc.publish_date=new_doc.publish_date.replace(tzinfo=self.timezone)
                new_doc.publish_date = new_doc.publish_date.astimezone(tz.tzutc())

                new_doc.source_url = item.find("link").text or ""

                new_doc.original_summary = strip_html(item.find("description").text or "")

                if item.find("guid"):
                    new_doc.guid = hashlib.md5(item.find("guid").encode('utf-8')).hexdigest()
                else:
                    new_doc.guid = hashlib.md5(new_doc.source_url.encode('utf-8')).hexdigest()
                new_doc.provider = self.name

                if new_doc.guid not in self.processed_guids:
                    self.processed_guids[new_doc.guid] = True
                    self.document_count += 1
                    docs.append(new_doc)

        return docs
Example #17
def datetime_to_serial_date(dt):
    """ Convert datetime.datetime object to a serial date. """
    if dt.tzname() is None:
        dt = dt.replace(tzinfo=tz.tzutc())
    origin = datetime.datetime(1899, 12, 30, tzinfo=tz.tzutc())
    delta = dt - origin
    return float(delta.days) + (float(delta.seconds) / 86400.0)
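A quick sanity check, using the fact that the 1899-12-30 origin matches the Excel/Lotus serial-date convention:

import datetime
from dateutil import tz

# Noon one day after the origin should be serial 1.5.
dt = datetime.datetime(1899, 12, 31, 12, 0, tzinfo=tz.tzutc())
assert datetime_to_serial_date(dt) == 1.5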
Example #18
    def setup(self):
        """ get start end end date of logfile before starting to parse. """

        if self.mlogfilter.is_stdin:
            # assume this year (we have no other info)
            now = datetime.now()
            self.startDateTime = datetime(now.year, 1, 1, tzinfo=tzutc())
            self.endDateTime = datetime(MAXYEAR, 12, 31, tzinfo=tzutc())

        else:
            logfiles = self.mlogfilter.args['logfile']
            self.startDateTime = min([lf.start+timedelta(hours=self.mlogfilter.args['timezone'][i]) for i, lf in enumerate(logfiles)])
            self.endDateTime = max([lf.end+timedelta(hours=self.mlogfilter.args['timezone'][i]) for i, lf in enumerate(logfiles)])

        # now parse for further changes to from and to datetimes
        dtbound = DateTimeBoundaries(self.startDateTime, self.endDateTime)
        self.fromDateTime, self.toDateTime = dtbound(self.mlogfilter.args['from'] or None,
                                                     self.mlogfilter.args['to'] or None)

        # define start_limit for mlogfilter's fast_forward method
        self.start_limit = self.fromDateTime

        # for single logfile, get file seek position of `to` datetime
        if len(self.mlogfilter.args['logfile']) == 1 and not self.mlogfilter.is_stdin:

            if self.mlogfilter.args['to'] != "end":
                # fast forward, get seek value, then reset file
                logfile = self.mlogfilter.args['logfile'][0]
                logfile.fast_forward(self.toDateTime)
                self.seek_to = logfile.filehandle.tell()
                logfile.filehandle.seek(0)
            else:
                self.seek_to = -1
        else:
            self.seek_to = False
Example #19
def iso8601_to_rostime(iso):
    """Converts ISO 8601 time to ROS Time.

    Args:
        iso: ISO 8601 encoded string.

    Returns:
        std_msgs/Time.
    """
    # Convert to datetime in UTC.
    t = dateutil.parser.parse(iso)
    if not t.utcoffset():
        t = t.replace(tzinfo=tzutc())

    # Convert to time from epoch in UTC.
    epoch = datetime.utcfromtimestamp(0)
    epoch = epoch.replace(tzinfo=tzutc())
    dt = t - epoch

    # Create ROS message.
    time = Time()
    time.data.secs = int(dt.total_seconds())
    time.data.nsecs = dt.microseconds * 1000

    return time
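Setting the ROS pieces aside, the epoch arithmetic can be exercised on its own; a self-contained sketch (the function name is illustrative):

from datetime import datetime
from dateutil.parser import parse
from dateutil.tz import tzutc

def iso8601_to_epoch_parts(iso):
    """Return (secs, nsecs) since the UNIX epoch for an ISO 8601 string."""
    t = parse(iso)
    if t.utcoffset() is None:          # naive input: assume UTC
        t = t.replace(tzinfo=tzutc())
    dt = t - datetime(1970, 1, 1, tzinfo=tzutc())
    return int(dt.total_seconds()), dt.microseconds * 1000

print(iso8601_to_epoch_parts("2001-09-09T01:46:40Z"))  # (1000000000, 0)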
Example #20
    def test_check_for_alert_with_no_matched_zones(self):
        """Alerts are only valid if the availability zone in the history matches an availability zone in the subscription"""

        history = [{u'Timestamp': datetime.datetime(2015, 12, 31, 22, 13, 43,
                                                    tzinfo=tzutc()),
                    u'ProductDescription': 'Windows',
                    u'InstanceType': 'g2.2xlarge',
                    u'SpotPrice': '0.105200',
                    u'AvailabilityZone': 'us-east-1d'},
                   {u'Timestamp': datetime.datetime(2015, 12, 31, 21, 56, 18,
                                                    tzinfo=tzutc()),
                    u'ProductDescription': 'Windows',
                    u'InstanceType': 'g2.2xlarge',
                    u'SpotPrice': '0.104400',
                    u'AvailabilityZone': 'us-east-1d'}]

        just_1a = {'name': 'Just 1a',
                          'threshold':'0.05',
                          'region':'us-east-1',
                          'zone': 'us-east-1a',
                          'instance_type':'g2.2xlarge',
                          'product':'Windows',
                          'user':'******',
                          'last_alert':'Under'}

        result = alert.check_for_alert(history, just_1a)
        assert result is None, 'There should not be an alert for Just 1a'
Example #21
 def test_log_new_events_prints_correct_event(self, mock_describe_events):
     mock_describe_events.return_value = {
         "StackEvents": [
             {
                 "Timestamp": datetime.datetime(
                     2016, 3, 15, 14, 2, 0, 0, tzinfo=tzutc()
                 ),
                 "LogicalResourceId": "id-2",
                 "ResourceType": "type-2",
                 "ResourceStatus": "resource-status"
             },
             {
                 "Timestamp": datetime.datetime(
                     2016, 3, 15, 14, 1, 0, 0, tzinfo=tzutc()
                 ),
                 "LogicalResourceId": "id-1",
                 "ResourceType": "type-1",
                 "ResourceStatus": "resource",
                 "ResourceStatusReason": "User Initiated"
             }
         ]
     }
     self.stack.most_recent_event_datetime = (
         datetime.datetime(2016, 3, 15, 14, 0, 0, 0, tzinfo=tzutc())
     )
     self.stack._log_new_events()
Example #22
def arrow(date=None, tz=None):
    def _tz_now(tzinfo):

        date = datetime.utcnow()
        date = date.replace(tzinfo=_tz.tzutc())

        return date.astimezone(tzinfo)

    date_expr = None
    tz_expr = None

    if date is None:
        if tz is None:
            date_expr = datetime.utcnow()
            tz_expr = TimeZone(_tz.tzutc())

        else:
            tz_expr = TimeZone(tz)
            date_expr = _tz_now(tz_expr.tzinfo)

    else:
        if tz is None:
            try:
                tz_expr = TimeZone(date)
                date_expr = _tz_now(tz_expr.tzinfo)
            except Exception:
                date_expr = date
                tz_expr = TimeZone(_tz.tzutc())

        else:
            date_expr = date
            tz_expr = tz

    return Arrow(date_expr, tz_expr)
Example #23
def create_call(number,time,*args):
	timezone = args[0] if len(args) > 0 and len(args[0]) > 2 else None
	message = args[1] if len(args) > 1 else None
	call = Call(None)
	number_obj = Number(number)
	now = datetime.datetime.utcnow().replace(tzinfo=tz.tzutc())
	if not number_obj.exists():
		number_obj.create()
	if timezone:
		number_obj.set("tz",timezone)
	dt = parse(time)
	if dt.tzinfo is None:
		try:
			stored_timezone = tz.gettz(number_obj.get("tz")) if tz.gettz(number_obj.get("tz")) else tz.gettz(timezone)
			dt = dt.replace(tzinfo=stored_timezone)
		except TypeError:
			dt = dt.replace(tzinfo=tz.gettz(timezone))
	else:
		dt = dt.replace(tzinfo=tz.gettz(timezone))
	if dt.tzinfo is None:
		dt = dt.replace(tzinfo=tz.tzutc())
	if dt < now:
		dt = dt + datetime.timedelta(days=1)

	resp = twilio.twiml.Response()
	resp.message("Alarm created for %s" % (dt.strftime("%c %Z")))
	dt = dt.astimezone(tz.tzutc())
	call.create(number,dt,message)
	return str(resp)
Example #24
    def get_records(self, lastrun="1970-01-01"):
        """Get records from given date

        :param lastrun: date to get records from (optional).

        If CONF.extract_from is present, it will be used instead of the
        lastrun parameter. If CONF.extract_to is present, it will be used
        instead of the extract_to parameter
        """
        extract_from = CONF.extract_from or lastrun
        extract_to = CONF.extract_to or datetime.datetime.utcnow()

        if isinstance(extract_from, six.string_types):
            extract_from = dateutil.parser.parse(extract_from)
        if isinstance(extract_to, six.string_types):
            extract_to = dateutil.parser.parse(extract_to)

        if extract_from.tzinfo is None:
            extract_from = extract_from.replace(tzinfo=tz.tzutc())
        if extract_to.tzinfo is None:
            extract_to = extract_to.replace(tzinfo=tz.tzutc())

        if self.records is None:
            self._extract(extract_from, extract_to)
        return self.records
Example #25
def QA_SU_save_binance(frequency):
    symbol_list = QA_fetch_symbol()
    col = QASETTING.client.binance[frequency]
    col.create_index(
        [("symbol", pymongo.ASCENDING), ("start_time", pymongo.ASCENDING)],unique=True)

    end = datetime.datetime.now(tzutc())
    end += FREQUANCY_DICT.get(frequency)

    for index, symbol_info in enumerate(symbol_list):
        QA_util_log_info('The {} of Total {}'.format
                         (symbol_info['symbol'], len(symbol_list)))
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(str(
            float(index / len(symbol_list) * 100))[0:4] + '%')
                         )
        ref = col.find({"symbol": symbol_info['symbol']}).sort("start_time", -1)

        if ref.count() > 0:
            start_stamp = ref.next()['close_time'] / 1000
            start_time = datetime.datetime.fromtimestamp(start_stamp, tz=tzutc())
            QA_util_log_info('UPDATE_SYMBOL {} Trying updating {} from {} to {}'.format(
                frequency, symbol_info['symbol'], start_time, end))
        else:
            start_time = BINANCE_MIN_DATE
            QA_util_log_info('NEW_SYMBOL {} Trying downloading {} from {} to {}'.format(
                frequency, symbol_info['symbol'], start_time, end))

        data = QA_fetch_kline(symbol_info['symbol'],
                              time.mktime(start_time.utctimetuple()), time.mktime(end.utctimetuple()), frequency)
        if data is None:
            QA_util_log_info('SYMBOL {} from {} to {} has no data'.format(
                symbol_info['symbol'], start_time, end))
            continue
        col.insert_many(data)
Example #26
def QA_SU_save_bitmex(frequency):
    symbol_list = QA_fetch_bitmex_symbols(active=True)
    symbol_list = symbol_list
    col = QASETTING.client.bitmex[frequency]
    col.create_index(
        [("symbol", pymongo.ASCENDING), ("timestamp", pymongo.ASCENDING)], unique=True)

    end = datetime.datetime.now(tzutc()) + relativedelta(days=-1, hour=0, minute=0, second=0, microsecond=0)

    for index, symbol_info in enumerate(symbol_list):
        QA_util_log_info('The {} of Total {}'.format
                         (symbol_info['symbol'], len(symbol_list)))
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(str(
            float(index / len(symbol_list) * 100))[0:4] + '%')
                         )
        ref = col.find({"symbol": symbol_info['symbol']}).sort("timestamp", -1)

        if ref.count() > 0:
            start_stamp = ref.next()['timestamp'] / 1000
            start_time = datetime.datetime.fromtimestamp(start_stamp+1,tz=tzutc())
            QA_util_log_info('UPDATE_SYMBOL {} Trying updating {} from {} to {}'.format(
                frequency, symbol_info['symbol'], start_time, end))
        else:
            start_time = symbol_info.get('listing', "2018-01-01T00:00:00Z")
            start_time = parse(start_time)
            QA_util_log_info('NEW_SYMBOL {} Trying downloading {} from {} to {}'.format(
                frequency, symbol_info['symbol'], start_time, end))

        data = QA_fetch_bitmex_kline(symbol_info['symbol'],
                                      start_time, end, frequency)
        if data is None:
            QA_util_log_info('SYMBOL {} from {} to {} has no data'.format(
                symbol_info['symbol'], start_time, end))
            continue
        col.insert_many(data)
Example #27
    def testOutlook3(self):
        self.portal.invokeFactory('Folder', id='test_calendar')
        cal = self.portal.test_calendar
        self.file = open(join(_prefix, 'data', 'outlook3.ics'), 'r')
        self.items = fromFile(cal, self.file)
        self.file.close()

        item = self.items[0][1]
        attendees = item.getAttendees()
        self.failUnless(attendees)
        values = (
            '"Sidnei da Silva" <*****@*****.**>',
            '"George Alan Runyan" <*****@*****.**>',
            '"Toby Roberts" <*****@*****.**>',
            '"Andy McKay" <*****@*****.**>')
        for value in values:
            self.failUnless(value in attendees, (value, attendees))

        self.assertEquals(item.Title(), 'Test Event: Weekly Meeting')
        self.assertEquals(item.getLocation(), 'Enfold Systems')
        self.assertEquals(item.Description().strip(), '')
        self.assertEquals(toTime(item.start_date), '15:30:00')
        self.assertEquals(toTime(item.end_date), '16:00:00')
        self.assertEquals(item.start_date,
                          datetime.datetime(2005, 10, 21, 15, 30, tzinfo=tzutc()))
        self.assertEquals(item.end_date,
                          datetime.datetime(2005, 10, 21, 16, 0, tzinfo=tzutc()))
        self.assertEquals(tuple(item.Subject()), ())
Example #28
    def create_candlepin_pool_json(pool_uuid=None, account_id=None, product_id=None, product_name=None):
        if not pool_uuid:
            pool_uuid = "unit_test_pool_id"
        if not account_id:
            account_id = "1"
        if not product_id:
            product_id = "unit_test_product_id"
        if not product_name:
            product_name = "unit_test_product_name"
        entry = {
            "objects": [
                {
                    "uuid": pool_uuid,
                    "account": account_id,
                    "active": True,
                    "product_id": product_id,
                    "product_name": product_name,
                    "created": datetime.now(tzutc()),
                    "start_date": datetime.now(tzutc()),
                    "end_date": datetime.now(tzutc()),
                    "updated": datetime.now(tzutc()),
                    "quantity": 1,
                }
            ]
        }

        return entry
Example #29
    def _parse_date(self, str_value):
        """
        Converts a string to a float representing milliseconds since the epoch.

        Since the elasticsearch date parser is not run on search_after,
        the date must be converted to ms since the epoch as that is how
        the dates are stored in the elasticsearch index.
        """
        # Dates like "2017" can also be cast as floats so if a number is less
        # than 9999 it is assumed to be a year and not ms since the epoch.
        try:
            date = float(str_value)
            if date < 9999:
                raise ValueError("This is not in the form ms since the epoch.")
            return date
        except ValueError:
            try:
                date = parse(str_value, default=DEFAULT_DATE)
                # If timezone isn't specified assume it's utc.
                if not date.tzinfo:
                    date = date.replace(tzinfo=tz.tzutc())
                epoch = dt.utcfromtimestamp(0).replace(tzinfo=tz.tzutc())
                return (date - epoch).total_seconds() * 1000.0
            except ValueError:
                pass
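The fallback branch's ms-since-epoch arithmetic can be checked in isolation:

from datetime import datetime as dt
from dateutil.parser import parse
from dateutil.tz import tzutc

# One day past the epoch is 86,400,000 ms.
date = parse("1970-01-02T00:00:00Z")
epoch = dt.utcfromtimestamp(0).replace(tzinfo=tzutc())
assert (date - epoch).total_seconds() * 1000.0 == 86400000.0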
Example #30
 def _assert_merged_entity(self, entity):
     '''
     Asserts that the entity passed in matches the default entity
     merged with the updated entity.
     '''
     self.assertEqual(entity.age, 'abc')
     self.assertEqual(entity.sex, 'female')
     self.assertEqual(entity.sign, 'aquarius')
     self.assertEqual(entity.married, True)
     self.assertEqual(entity.deceased, False)
     self.assertEqual(entity.sign, 'aquarius')
     self.assertEqual(entity.ratio, 3.1)
     self.assertEqual(entity.evenratio, 3.0)
     self.assertEqual(entity.large, 933311100)
     self.assertEqual(entity.Birthday, datetime(1973, 10, 4, tzinfo=tzutc()))
     self.assertEqual(entity.birthday, datetime(1991, 10, 4, tzinfo=tzutc()))
     self.assertIsInstance(entity.other, EntityProperty)
     self.assertEqual(entity.other.type, EdmType.INT32)
     self.assertEqual(entity.other.value, 20)
     self.assertIsInstance(entity.clsid, EntityProperty)
     self.assertEqual(entity.clsid.type, EdmType.GUID)
     self.assertEqual(entity.clsid.value,
                      'c9da6455-213d-42c9-9a79-3e9149a57833')
     self.assertTrue(hasattr(entity, "Timestamp"))
     self.assertIsNotNone(entity.etag)
Example #31
def get_date_age(date):
    return (datetime.now(tz=tzutc()) - parser.parse(date)).days
Example #32
def QA_SU_save_bitmex_day(frequency='1d', ui_log=None, ui_progress=None):
    """
    Fetch bitmex daily K-line data, normalize the fields, and save them as crypto_asset_day
    """
    market = 'bitmex'
    symbol_list = QA_fetch_crypto_asset_list(market=market)
    col = DATABASE.crypto_asset_day
    col.create_index([('market', pymongo.ASCENDING),
                      ("symbol", pymongo.ASCENDING),
                      ("date_stamp", pymongo.ASCENDING)],
                     unique=True)

    end = datetime.datetime.now(tzutc())

    QA_util_log_info(
        'Starting DOWNLOAD PROGRESS of day Klines from bitmex... ',
        ui_log=ui_log,
        ui_progress=ui_progress)
    for index in range(len(symbol_list)):
        symbol_info = symbol_list.iloc[index]
        QA_util_log_info('The "{}" #{} of total in {}'.format(
            symbol_info['symbol'], index, len(symbol_list)),
                         ui_log=ui_log,
                         ui_progress=ui_progress)
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
            str(float(index / len(symbol_list) * 100))[0:4] + '%'),
                         ui_log=ui_log,
                         ui_progress=ui_progress)
        query_id = {
            "symbol": symbol_info['symbol'],
            'market': symbol_info['market']
        }
        ref = col.find(query_id).sort('date_stamp', -1)

        if (col.count_documents(query_id) > 0):
            start_stamp = ref.next()['date_stamp']
            start_time = datetime.datetime.fromtimestamp(start_stamp + 1,
                                                         tz=tzutc())
            QA_util_log_info(
                'UPDATE_SYMBOL "{}" Trying updating "{}" from {} to {}'.format(
                    symbol_info['symbol'], Bitmex2QA_FREQUENCY_DICT[frequency],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)),
                ui_log=ui_log,
                ui_progress=ui_progress)
        else:
            start_time = symbol_info.get('listing', "2018-01-01T00:00:00Z")
            start_time = parse(start_time)
            QA_util_log_info(
                'NEW_SYMBOL "{}" Trying downloading "{}" from {} to {}'.format(
                    symbol_info['symbol'], Bitmex2QA_FREQUENCY_DICT[frequency],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)),
                ui_log=ui_log,
                ui_progress=ui_progress)

        data = QA_fetch_bitmex_kline(
            symbol_info['symbol'],
            QA_util_datetime_to_Unix_timestamp(start_time),
            QA_util_datetime_to_Unix_timestamp(end),
            frequency,
            callback_func=QA_SU_save_data_bitmex_callback)
        if data is None:
            QA_util_log_info('SYMBOL "{}" from {} to {} has no data'.format(
                symbol_info['symbol'], QA_util_timestamp_to_str(start_time),
                QA_util_timestamp_to_str(end)),
                             ui_log=ui_log,
                             ui_progress=ui_progress)
            continue
    QA_util_log_info(
        'DOWNLOAD PROGRESS of day Klines from bitmex accomplished ',
        ui_log=ui_log,
        ui_progress=ui_progress)
Example #33
def _isoformat(value):
    return value and value.astimezone(tzutc()).isoformat()
Example #34
from dateutil.tz import tzutc

from mock import patch, call
import pytest

from pynamodb.constants import UTC, DATETIME_FORMAT
from inpynamodb.models import Model

from pynamodb.attributes import (
    BinarySetAttribute, BinaryAttribute, NumberSetAttribute, NumberAttribute,
    UnicodeAttribute, UnicodeSetAttribute, UTCDateTimeAttribute,
    BooleanAttribute, LegacyBooleanAttribute, MapAttribute, MapAttributeMeta,
    ListAttribute, JSONAttribute, DEFAULT_ENCODING, NUMBER, STRING, STRING_SET,
    NUMBER_SET, BINARY_SET, BINARY, BOOLEAN, _get_value_for_deserialize)

UTC = tzutc()


class AttributeTestModel(Model):
    class Meta:
        host = 'http://localhost:8000'
        table_name = 'test'

    binary_attr = BinaryAttribute()
    binary_set_attr = BinarySetAttribute()
    number_attr = NumberAttribute()
    number_set_attr = NumberSetAttribute()
    unicode_attr = UnicodeAttribute()
    unicode_set_attr = UnicodeSetAttribute()
    datetime_attr = UTCDateTimeAttribute()
    bool_attr = BooleanAttribute()
Example #35
from sqlalchemy import (Table, Column, Integer, String, ForeignKey, Float,
                        DateTime, Boolean, )

from sqlalchemy.sql import join, select, func, label
from sqlalchemy.orm import mapper, relationship, column_property
from sqlalchemy.ext.declarative import declarative_base
from dateutil.parser import parse
from dateutil.tz import tzutc

import wof.models as wof_base

Base = declarative_base()

# Instantiate some useful time zones
utc = tzutc()

def init_model(db_session):
    Base.query = db_session.query_property()

#TODO: Andy, please check
param_to_medium_dict = {
    'water_ph': wof_base.SampleMediumTypes.SURFACE_WATER,
    'water_y_velocity': wof_base.SampleMediumTypes.SURFACE_WATER,
    'water_x_velocity': wof_base.SampleMediumTypes.SURFACE_WATER,
    'water_temperature': wof_base.SampleMediumTypes.SURFACE_WATER,
    'upward_water_velocity': wof_base.SampleMediumTypes.SURFACE_WATER,
    'water_turbidity': wof_base.SampleMediumTypes.SURFACE_WATER,
    'water_total_dissolved_salts': wof_base.SampleMediumTypes.SURFACE_WATER,
    'seawater_salinity': wof_base.SampleMediumTypes.SURFACE_WATER,
    'northward_water_velocity': wof_base.SampleMediumTypes.SURFACE_WATER,
Example #36
import json
import datetime
from dateutil.tz import tzutc

class DateEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            return obj.strftime('%Y-%m-%d %H:%M:%S')
        elif isinstance(obj, datetime.date):
            return obj.strftime("%Y-%m-%d")
        else:
            return json.JSONEncoder.default(self, obj)

if __name__ == '__main__':
    data = {"name": "Tom", "birthday": datetime.datetime(2019, 11, 20, 7, 34, 15, tzinfo = tzutc())}
    print(type(data))
    print(json.dumps(data, cls=DateEncoder))
Example #37
    def fillListViewTracks(self):
        """Fills list with tracks"""
        files = glob(self.DATAPATH + '/search/temp/*')
        for f in files:
            if os.path.isfile(f):
                os.remove(f)
        #If Windows
        if sys.platform.startswith('win'):
            #Get drive from user select
            drive = self.getDrive()
            #If not selected than C:, that should be always present
            if drive is None:
                #drive = "C:/" Very dangerous feature. Reads all GPX from the C: drive. It can take a lot of time.
                #removed
                QgsMessageLog.logMessage(
                    u"Nebyl vybrán žádný disk. Nebudu hledat data.", "Patrac")
                return
            #TODO - do it better to handle another devices than Garmin
            self.path = drive[:-1] + '/'
        # For Linux is path set just for testing purposes
        # TODO - change to have real connected devices
        else:
            drive = self.getDriveLinux()
            if drive is None:
                #drive = "C:/" Very dangerous feature. Reads all GPX from the C: drive. It can take a lot of time.
                #removed
                QgsMessageLog.logMessage(
                    u"Nebyl vybrán žádný disk. Nebudu hledat data.", "Patrac")
                return
            self.path = drive

        #for f in glob.iglob('E:/Garmin/GPX/*/*.gpx'):  # generator, search immediate subdirectories
        i = 0
        for root, dirnames, filenames in os.walk(self.path):
            for f in fnmatch.filter(filenames, '*.gpx'):
                #for f in iglob(self.path, recursive=True):
                #copyfile(f, self.DATAPATH + '/search/gpx/' + SECTOR + '/' + os.path.basename(f))
                #First copy original file to search/gpx/ directory
                #shutil.copyfile(os.path.join(root, f.decode('utf8')), self.DATAPATH + u'/search/gpx/' + os.path.basename(f.decode('utf8')))
                #Then copy the same file to search/tem/ directory and name it according to position in list
                shutil.copyfile(
                    os.path.join(root, f),
                    self.DATAPATH + '/search/temp/' + str(i) + '.gpx')
                #Notice size of list.csv
                listSize = 0
                if i > 0:
                    listSize = os.path.getsize(self.DATAPATH +
                                               '/search/temp/list.csv')
                #Run transformation to get time extent of the GPX
                #Extent is added to search/temp/list.csv
                if sys.platform.startswith('win'):
                    # QgsMessageLog.logMessage(str(f), "Patrac")
                    p = subprocess.Popen(
                        (self.pluginPath + '/xslt/run_xslt_extent.bat',
                         self.pluginPath,
                         self.DATAPATH + '/search/temp/' + str(i) + '.gpx',
                         self.DATAPATH + '/search/temp/list.csv'))
                    p.wait()
                else:
                    # QgsMessageLog.logMessage(str(f), "Patrac")
                    p = subprocess.Popen(
                        ('bash', self.pluginPath + '/xslt/run_xslt_extent.sh',
                         self.pluginPath,
                         self.DATAPATH + '/search/temp/' + str(i) + '.gpx',
                         self.DATAPATH + '/search/temp/list.csv'))
                    p.wait()
                i = i + 1
                listSizeAfterXSLT = os.path.getsize(self.DATAPATH +
                                                    '/search/temp/list.csv')
                if listSize == listSizeAfterXSLT:
                    #Something bad happened with XSLT
                    #Add empty row to /search/temp/list.csv'
                    listFile = open(self.DATAPATH + '/search/temp/list.csv',
                                    'w+')
                    listFile.write(";\n")
                    listFile.close()

        #if some GPX were found
        if os.path.isfile(self.DATAPATH + '/search/temp/list.csv'):
            self.listViewModel = QStandardItemModel()
            from_zone = tz.tzutc()
            to_zone = tz.tzlocal()
            #Loop via GPX tracks
            with open(self.DATAPATH + '/search/temp/list.csv') as fp:
                for cnt, line in enumerate(fp):
                    track = u'Track ' + str(cnt) + ' '
                    items = line.split(';')
                    start = ''
                    end = ''
                    #This is a workaround, because the list can contain more than one timestamp for each track
                    if len(items[0]) > 30:
                        items2 = items[0].split(' ')
                        start = items2[0]
                    else:
                        start = items[0]

                    if len(items[1]) > 30:
                        items2 = items[1].split(' ')
                        end = items2[len(items2) - 1]
                    else:
                        end = items[1]

                    if len(start) > 10 and len(end) > 10:
                        #Convert to local time zone from UTC
                        start_local = self.iso_time_to_local(start)
                        end_local = self.iso_time_to_local(end)
                        track += '(' + start_local + ' <-> ' + end_local + ')'
                        item = QStandardItem(track)
                        #check = Qt.Checked if randint(0, 1) == 1 else Qt.Unchecked
                        #item.setCheckState(check)
                        item.setCheckable(True)
                        self.listViewModel.appendRow(item)
                    else:
                        item = QStandardItem("Another Type of GPX")
                        item.setCheckable(False)
                        self.listViewModel.appendRow(item)
                        #print("Line {}: {}".format(cnt, line))
            self.listViewTracks.setModel(self.listViewModel)
        else:
            QgsMessageLog.logMessage(u"Nebyl nalezen žádný záznam:", "Patrac")
Example #38
from sipsimple.configuration.settings import SIPSimpleSettings
from sipsimple.payloads import caps, pidf, prescontent, rpid
from sipsimple.payloads import cipid
from sipsimple.threading.green import run_in_green_thread
from sipsimple.util import ISOTimestamp

from blink.configuration.datatypes import IconDescriptor, FileURL, PresenceState
from blink.configuration.settings import BlinkSettings
from blink.resources import IconManager, Resources
from blink.util import run_in_gui_thread

del cipid  # this only needs to be imported to register its namespace and extensions

__all__ = ['PresenceManager', 'PendingWatcherDialog']

epoch = datetime.fromtimestamp(0, tzutc())


class BlinkPresenceState(object):
    def __init__(self, account):
        self.account = account

    @property
    def online_state(self):
        blink_settings = BlinkSettings()

        state = blink_settings.presence.current_state.state
        note = blink_settings.presence.current_state.note

        state = 'offline' if state == 'Invisible' else state.lower()
Example #39
    def test_26_periodictask(self):
        current_utc_time = datetime(2018, 3, 4, 5, 6, 8)
        with mock.patch('privacyidea.models.datetime') as mock_dt:
            mock_dt.utcnow.return_value = current_utc_time

            task1 = PeriodicTask("task1", False, "0 5 * * *", ["localhost"], "some.module", 2, {
                "key1": "value2",
                "KEY2": True,
                "key3": u"öfføff",
            })
            task2 = PeriodicTask("some other task", True, "0 6 * * *", ["localhost"], "some.other.module", 1, {
                "foo": "bar"
            })

        self.assertEqual(PeriodicTask.query.filter_by(name="task1").one(), task1)
        self.assertEqual(PeriodicTask.query.filter_by(name="some other task").one(), task2)
        self.assertEqual(PeriodicTaskOption.query.filter_by(periodictask_id=task1.id, key="KEY2").one().value,
                         "True")
        # Values are converted to strings
        self.assertEqual(task1.get(), {
            "id": task1.id,
            "name": "task1",
            "active": False,
            "interval": "0 5 * * *",
            # we get a timezone-aware datetime here
            "last_update": current_utc_time.replace(tzinfo=tzutc()),
            "nodes": ["localhost"],
            "taskmodule": "some.module",
            "ordering": 2,
            "options": {
                "key1": "value2",
                "KEY2": "True",
                "key3": u"öfføff",
            },
            "last_runs": {}})

        # register a run
        task1.set_last_run("localhost", datetime(2018, 3, 4, 5, 6, 7))

        # assert we can update the task
        later_utc_time = current_utc_time + timedelta(seconds=1)
        with mock.patch('privacyidea.models.datetime') as mock_dt:
            mock_dt.utcnow.return_value = later_utc_time
            PeriodicTask("task one", True, "0 8 * * *", ["localhost", "otherhost"], "some.module", 3, {
                "KEY2": "value number 2",
                "key 4": 1234
            }, id=task1.id)
        # the first run for otherhost
        task1.set_last_run("otherhost", datetime(2018, 8, 9, 10, 11, 12))
        result = PeriodicTask.query.filter_by(name="task one").one().get()
        self.assertEqual(result,
                         {
                             "id": task1.id,
                             "active": True,
                             "name": "task one",
                             "interval": "0 8 * * *",
                             "last_update": later_utc_time.replace(tzinfo=tzutc()),
                             "nodes": ["localhost", "otherhost"],
                             "taskmodule": "some.module",
                             "ordering": 3,
                             "options": {"KEY2": "value number 2",
                                         "key 4": "1234"},
                             "last_runs": {
                                 "localhost": datetime(2018, 3, 4, 5, 6, 7, tzinfo=tzutc()),
                                 "otherhost": datetime(2018, 8, 9, 10, 11, 12, tzinfo=tzutc()),
                             }
                         })
        # assert all old options are removed
        self.assertEqual(PeriodicTaskOption.query.filter_by(periodictask_id=task1.id, key="key3").count(), 0)
        # the second run for localhost
        task1.set_last_run("localhost", datetime(2018, 3, 4, 5, 6, 8))
        result = PeriodicTask.query.filter_by(name="task one").one().get()
        self.assertEqual(result,
                         {
                             "id": task1.id,
                             "active": True,
                             "name": "task one",
                             "interval": "0 8 * * *",
                             "last_update": later_utc_time.replace(tzinfo=tzutc()),
                             "nodes": ["localhost", "otherhost"],
                             "taskmodule": "some.module",
                             "ordering": 3,
                             "options": {"KEY2": "value number 2",
                                         "key 4": "1234"},
                             "last_runs": {
                                 "localhost": datetime(2018, 3, 4, 5, 6, 8, tzinfo=tzutc()),
                                 "otherhost": datetime(2018, 8, 9, 10, 11, 12, tzinfo=tzutc()),
                             }
                         })

        # remove "localhost", assert the last run is removed
        PeriodicTask("task one", True, "0 8 * * *", ["otherhost"], "some.module", 4, {"foo": "bar"}, id=task1.id)
        self.assertEqual(PeriodicTaskOption.query.filter_by(periodictask_id=task1.id).count(), 1)
        self.assertEqual(PeriodicTaskLastRun.query.filter_by(periodictask_id=task1.id).one().node, "otherhost")
        # naive timestamp in the database
        self.assertEqual(PeriodicTaskLastRun.query.filter_by(periodictask_id=task1.id).one().timestamp,
                         datetime(2018, 8, 9, 10, 11, 12, tzinfo=None))
        self.assertEqual(PeriodicTaskLastRun.query.filter_by(periodictask_id=task1.id).one().aware_timestamp,
                         datetime(2018, 8, 9, 10, 11, 12, tzinfo=tzutc()))

        # remove the tasks, everything is removed
        task1.delete()
        self.assertEqual(PeriodicTaskOption.query.count(), 1) # from task2
        self.assertEqual(PeriodicTaskLastRun.query.count(), 0)
        task2.delete()
        self.assertEqual(PeriodicTaskOption.query.count(), 0)
Example #40
def QA_SU_save_bitmex_min(frequency='1m', ui_log=None, ui_progress=None):
    """
    Fetch bitmex minute K-line data, normalize the fields, and save them as crypto_asset_min
    """
    market = 'bitmex'
    symbol_list = QA_fetch_crypto_asset_list(market=market)
    col = DATABASE.crypto_asset_min
    col.create_index([('market', pymongo.ASCENDING),
                      ("symbol", pymongo.ASCENDING),
                      ('time_stamp', pymongo.ASCENDING),
                      ('date_stamp', pymongo.ASCENDING)])
    col.create_index([('market', pymongo.ASCENDING),
                      ("symbol", pymongo.ASCENDING),
                      ("type", pymongo.ASCENDING),
                      ('time_stamp', pymongo.ASCENDING)],
                     unique=True)

    end = datetime.datetime.now(tzutc())

    QA_util_log_info(
        'Starting DOWNLOAD PROGRESS of min Klines from bitmex... ',
        ui_log=ui_log,
        ui_progress=ui_progress)
    for index in range(len(symbol_list)):
        symbol_info = symbol_list.iloc[index]
        QA_util_log_info('The "{}" #{} of total in {}'.format(
            symbol_info['symbol'], index, len(symbol_list)),
                         ui_log=ui_log,
                         ui_progress=ui_progress)
        QA_util_log_info('DOWNLOAD PROGRESS {} '.format(
            str(float(index / len(symbol_list) * 100))[0:4] + '%'),
                         ui_log=ui_log,
                         ui_progress=ui_progress)
        query_id = {
            "symbol": symbol_info['symbol'],
            'market': symbol_info['market'],
            'type': Bitmex2QA_FREQUENCY_DICT[frequency]
        }
        ref = col.find(query_id).sort('time_stamp', -1)
        if (col.count_documents(query_id) > 0):
            start_stamp = ref.next()['time_stamp']
            start_time = datetime.datetime.fromtimestamp(start_stamp + 1,
                                                         tz=tzutc())
            QA_util_log_info(
                'UPDATE_SYMBOL "{}" Trying updating "{}" from {} to {}'.format(
                    symbol_info['symbol'], Bitmex2QA_FREQUENCY_DICT[frequency],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)),
                ui_log=ui_log,
                ui_progress=ui_progress)

            # Gaps found in the stored Klines: use spot-fetch mode and request exactly the missing time ranges
            missing_data_list = QA_util_find_missing_kline(
                symbol_info['symbol'],
                Bitmex2QA_FREQUENCY_DICT[frequency],
                market='bitmex')[::-1]
        else:
            start_time = symbol_info.get('listing', "2018-01-01T00:00:00Z")
            start_time = parse(start_time)
            QA_util_log_info(
                'NEW_SYMBOL "{}" Trying downloading "{}" from {} to {}'.format(
                    symbol_info['symbol'], Bitmex2QA_FREQUENCY_DICT[frequency],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)),
                ui_log=ui_log,
                ui_progress=ui_progress)
            miss_kline = pd.DataFrame(
                [[
                    QA_util_datetime_to_Unix_timestamp(start_time),
                    QA_util_datetime_to_Unix_timestamp(end), '{} 到 {}'.format(
                        start_time, end)
                ]],
                columns=['expected', 'between', 'missing'])
            missing_data_list = miss_kline.values

        data = None  # stays None when nothing is missing, avoiding a NameError below
        if len(missing_data_list) > 0:
            # Determine the start/end of each gap in the Kline data and backfill the missing minute bars
            expected = 0
            between = 1
            missing = 2
            reqParams = {}
            for i in range(len(missing_data_list)):
                reqParams['from'] = missing_data_list[i][expected]
                reqParams['to'] = missing_data_list[i][between]
                if (reqParams['from'] >
                    (QA_util_datetime_to_Unix_timestamp() + 3600)):
                    # A 'future' timestamp here usually means the default timezone is configured incorrectly
                    raise Exception(
                        'An unexpected \'future\' timestamp was received; please check the \'tzlocalize\' setting of self.missing_data_list_func. More info: {:s}@{:s} at {:s} but current time is {}'
                        .format(
                            symbol_info['symbol'], frequency,
                            QA_util_print_timestamp(reqParams['from']),
                            QA_util_print_timestamp(
                                QA_util_datetime_to_Unix_timestamp())))
                QA_util_log_info(
                    'Fetch "{:s}" slices "{:s}" kline:{:s} to {:s}'.format(
                        symbol_info['symbol'], frequency,
                        QA_util_timestamp_to_str(
                            missing_data_list[i][expected])[2:16],
                        QA_util_timestamp_to_str(
                            missing_data_list[i][between])[2:16]))
                data = QA_fetch_bitmex_kline_min(
                    symbol_info['symbol'],
                    start_time=reqParams['from'],
                    end_time=reqParams['to'],
                    frequency=frequency,
                    callback_func=QA_SU_save_data_bitmex_callback)

        if data is None:
            QA_util_log_info(
                'SYMBOL "{}" from {} to {} has no MORE data'.format(
                    symbol_info['symbol'],
                    QA_util_timestamp_to_str(start_time),
                    QA_util_timestamp_to_str(end)))
            continue

    QA_util_log_info(
        'DOWNLOAD PROGRESS of min Klines from bitmex completed',
        ui_log=ui_log,
        ui_progress=ui_progress)
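# The resume logic above hinges on one conversion: the newest 'time_stamp'
# stored in Mongo is a Unix timestamp, and adding one second before converting
# with tz=tzutc() gives the first instant still to be fetched. A minimal
# standalone sketch of that step (the timestamp value is made up):
import datetime
from dateutil.tz import tzutc

last_stored = 1577836800  # hypothetical 'time_stamp' of the newest stored kline
resume_from = datetime.datetime.fromtimestamp(last_stored + 1, tz=tzutc())
print(resume_from.isoformat())  # 2020-01-01T00:00:01+00:00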
Example #41
import csv
from datetime import datetime
from dateutil import tz
from matplotlib import pyplot as plt

# --- start config ---

DATA_PATH = '/mnt/storage/speedtest-log/'

TIME_ROW = 3
DOWNLOAD_ROW = 6
UPLOAD_ROW = 7

DELIMITER = ','
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
FROM_TIMEZONE = tz.tzutc()
TO_TIMEZONE = tz.tzlocal()

TMP_IMAGE_FOLDER = 'images'
OUTPUT = 'speedtest.avi'
FPS = 1
FIG_DPI = 300

# --- end config ---


def plot(download, upload, file_name, image_folder, title='', dpi=100):
    fig = plt.figure(dpi=dpi)
    fig.suptitle(title)

    hours = range(24)
Example #42
def datetime_to_fedoratime(dt):
    # format a datetime in a form Fedora can handle; make sure the time is in
    # UTC, since 'Z' is the only timezone notation Fedora seems able to handle
    utctime = dt.astimezone(tzutc())
    return utctime.strftime(
        '%Y-%m-%dT%H:%M:%S') + '.%03d' % (utctime.microsecond // 1000) + 'Z'
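# A quick check of the formatter above with a known UTC input (illustrative):
from datetime import datetime
from dateutil.tz import tzutc

dt = datetime(2018, 1, 1, 12, 30, 45, 123456, tzinfo=tzutc())
print(datetime_to_fedoratime(dt))  # 2018-01-01T12:30:45.123Z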
Example #43
def main():
    argument_spec = aci_argument_spec()
    argument_spec.update(
        aaa_password=dict(type='str', no_log=True),
        aaa_password_lifetime=dict(type='int'),
        aaa_password_update_required=dict(type='bool'),
        aaa_user=dict(type='str', required=True,
                      aliases=['name'
                               ]),  # Not required for querying all objects
        clear_password_history=dict(type='bool'),
        description=dict(type='str', aliases=['descr']),
        email=dict(type='str'),
        enabled=dict(type='bool'),
        expiration=dict(type='str'),
        expires=dict(type='bool'),
        first_name=dict(type='str'),
        last_name=dict(type='str'),
        phone=dict(type='str'),
        state=dict(type='str',
                   default='present',
                   choices=['absent', 'present', 'query']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['aaa_user']],
            ['state', 'present', ['aaa_user']],
            ['expires', True, ['expiration']],
        ],
    )

    aci = ACIModule(module)

    if not HAS_DATEUTIL:
        module.fail_json(msg='dateutil required for this module')

    aaa_password = module.params['aaa_password']
    aaa_password_lifetime = module.params['aaa_password_lifetime']
    aaa_password_update_required = aci.boolean(
        module.params['aaa_password_update_required'])
    aaa_user = module.params['aaa_user']
    clear_password_history = module.params['clear_password_history']
    description = module.params['description']
    email = module.params['email']
    enabled = aci.boolean(module.params['enabled'], 'active', 'inactive')
    expires = aci.boolean(module.params['expires'])
    first_name = module.params['first_name']
    last_name = module.params['last_name']
    phone = module.params['phone']
    state = module.params['state']

    expiration = module.params['expiration']
    if expiration is not None and expiration != 'never':
        try:
            expiration = aci.iso8601_format(
                dateutil.parser.parse(expiration).replace(tzinfo=tzutc()))
        except Exception as e:
            module.fail_json(msg="Failed to parse date format '%s', %s" %
                             (module.params['expiration'], e))

    aci.construct_url(root_class=dict(
        aci_class='aaaUser',
        aci_rn='userext/user-{0}'.format(aaa_user),
        filter_target='eq(aaaUser.name, "{0}")'.format(aaa_user),
        module_object=aaa_user,
    ), )
    aci.get_existing()

    if state == 'present':
        aci.payload(
            aci_class='aaaUser',
            class_config=dict(
                accountStatus=enabled,
                clearPwdHistory=clear_password_history,
                email=email,
                expiration=expiration,
                expires=expires,
                firstName=first_name,
                lastName=last_name,
                name=aaa_user,
                phone=phone,
                pwd=aaa_password,
                pwdLifeTime=aaa_password_lifetime,
                pwdUpdateRequired=aaa_password_update_required,
            ),
        )

        aci.get_diff(aci_class='aaaUser')

        aci.post_config()

    elif state == 'absent':
        aci.delete_config()

    aci.exit_json()
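# The expiration handling above normalizes any parseable date string to UTC
# before serializing. A minimal sketch of that step, with isoformat() standing
# in for aci.iso8601_format (whose exact output format is not shown here):
import dateutil.parser
from dateutil.tz import tzutc

def parse_expiration(raw):
    # Parse, force UTC (overriding any offset, as the module does), serialize.
    return dateutil.parser.parse(raw).replace(tzinfo=tzutc()).isoformat()

print(parse_expiration('2024-12-31'))  # 2024-12-31T00:00:00+00:00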
Example #44
# ----------------------------------------------------------------
# Time zones
# ----------------------------------------------------------------
TIMEZONES = [
    None,
    "UTC",
    "US/Eastern",
    "Asia/Tokyo",
    "dateutil/US/Pacific",
    "dateutil/Asia/Singapore",
    "+01:15",
    "-02:15",
    "UTC+01:15",
    "UTC-02:15",
    tzutc(),
    tzlocal(),
    FixedOffset(300),
    FixedOffset(0),
    FixedOffset(-300),
    timezone.utc,
    timezone(timedelta(hours=1)),
    timezone(timedelta(hours=-1), name="foo"),
]
TIMEZONE_IDS = [repr(i) for i in TIMEZONES]


@td.parametrize_fixture_doc(str(TIMEZONE_IDS))
@pytest.fixture(params=TIMEZONES, ids=TIMEZONE_IDS)
def tz_naive_fixture(request):
    """
Example #45
def get_utc_time():
    return datetime.datetime.now(tzlocal()).astimezone(tzutc())
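# The round-trip through tzlocal() above is equivalent to asking for UTC
# directly; a shorter form that yields the same aware value:
import datetime
from dateutil.tz import tzutc

def get_utc_time_direct():
    return datetime.datetime.now(tzutc())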
Example #46
from dateutil import tz
from matplotlib import pyplot as plt  # needed below for plt.figure

from keras.callbacks import EarlyStopping
from keras.models import Sequential
from keras.layers import Dense, Dropout

import multiprocessing as mp
from threading import Thread

ALG_LEN = 3

names = ["Logistic Regression", "Linear SVM", "Neural Net"]

topic_name = 'raw_nmea_numeric'

from_zone = tz.tzutc()
to_zone = tz.tzlocal()


def main(file):

    # Create figure for plotting
    fig = plt.figure(figsize=(7, 3))
    axes = []
    for i in range(1, ALG_LEN + 1):
        axes.append(fig.add_subplot(1, 3, i))

    plt.subplots_adjust(top=0.8,
                        bottom=0.25,
                        left=0.17,
                        hspace=1.1,
Example #47
def test_state_machine_get_execution_history_contains_expected_success_events_when_started(
):
    expected_events = [
        {
            "timestamp": datetime(2020, 1, 1, 0, 0, 0, tzinfo=tzutc()),
            "type": "ExecutionStarted",
            "id": 1,
            "previousEventId": 0,
            "executionStartedEventDetails": {
                "input": "{}",
                "inputDetails": {
                    "truncated": False
                },
                "roleArn": _get_default_role(),
            },
        },
        {
            "timestamp": datetime(2020, 1, 1, 0, 0, 10, tzinfo=tzutc()),
            "type": "PassStateEntered",
            "id": 2,
            "previousEventId": 0,
            "stateEnteredEventDetails": {
                "name": "A State",
                "input": "{}",
                "inputDetails": {
                    "truncated": False
                },
            },
        },
        {
            "timestamp": datetime(2020, 1, 1, 0, 0, 10, tzinfo=tzutc()),
            "type": "PassStateExited",
            "id": 3,
            "previousEventId": 2,
            "stateExitedEventDetails": {
                "name": "A State",
                "output": "An output",
                "outputDetails": {
                    "truncated": False
                },
            },
        },
        {
            "timestamp": datetime(2020, 1, 1, 0, 0, 20, tzinfo=tzutc()),
            "type": "ExecutionSucceeded",
            "id": 4,
            "previousEventId": 3,
            "executionSucceededEventDetails": {
                "output": "An output",
                "outputDetails": {
                    "truncated": False
                },
            },
        },
    ]

    client = boto3.client("stepfunctions", region_name=region)
    sm = client.create_state_machine(
        name="test-state-machine",
        definition=simple_definition,
        roleArn=_get_default_role(),
    )
    execution = client.start_execution(stateMachineArn=sm["stateMachineArn"])
    execution_history = client.get_execution_history(
        executionArn=execution["executionArn"])
    execution_history["events"].should.have.length_of(4)
    execution_history["events"].should.equal(expected_events)
Example #48
 def _create_list_objs(self, entity_factory, list_scopes):
     """Create and return list of objects used entity factory and UI data
 (list of scopes UI text elements {"header": "item", ...} remapped to
 list of dicts {"attr": "value", ...}).
 Return list of created objects.
 """
     list_factory_objs = [
         entity_factory().obj_inst() for _ in xrange(len(list_scopes))
     ]
     list_scopes_with_upper_keys = [
         StringMethods.dict_keys_to_upper_case(scope)
         for scope in list_scopes
     ]
     list_scopes_to_convert = StringMethods.exchange_dicts_items(
         transform_dict=Representation.remap_collection(),
         dicts=list_scopes_with_upper_keys,
         is_keys_not_values=True)
     # convert and represent values in scopes
     for scope in list_scopes_to_convert:
         # convert u'None', u'No person' to None type
         StringMethods.update_dicts_values(scope, ["None", "No person"],
                                           None)
         for key, val in scope.iteritems():
             if val:
                 if key in ["mandatory", "verified"]:
                     # convert u'false', u'true' like to Boolean
                     scope[key] = StringMethods.get_bool_value_from_arg(val)
                 if key in ["updated_at", "created_at"]:
                     # UI like u'08/20/2017' to date=2017-08-20, timetz=00:00:00
                     datetime_val = parser.parse(val)
                     if str(datetime_val.time()) != "00:00:00":
                         # UI like u'08/20/2017 07:30:45 AM +03:00' to date=2017-08-20,
                         # timetz=04:30:45+00:00 if 'tzinfo', else:
                         # CSV like u'08-20-2017 04:30:45' to date=2017-08-20,
                         # timetz=04:30:45+00:00
                         datetime_val = (datetime_val.astimezone(
                             tz=tz.tzutc()) if datetime_val.tzinfo else
                                         datetime_val.replace(
                                             tzinfo=tz.tzutc()))
                     scope[key] = datetime_val
                 if (key == "comments" and isinstance(val, list) and all(
                         isinstance(comment, dict) for comment in val)):
                     # extract datetime from u'(Creator) 08/20/2017 07:30:45 AM +03:00'
                     scope[key] = [{
                         k: (parser.parse(
                             re.sub(regex.TEXT_W_PARENTHESES, Symbols.BLANK,
                                    v)).astimezone(tz=tz.tzutc())
                             if k == "created_at" else v)
                         for k, v in comment.iteritems()
                     } for comment in val]
                 # convert multiple values to list of strings and split if need it
                 if (key in Representation.people_attrs_names
                         and not isinstance(val, list)):
                     # split Tree View values if need 'Ex1, Ex2 F' to ['Ex1', 'Ex2 F']
                     # Info Widget values will be represent by internal methods
                     scope[key] = val.split(", ")
                 # convert 'slug' from CSV for snapshoted objects u'*23eb72ac-4d9d'
                 if (key == "slug" and
                     (self.obj_name in objects.ALL_SNAPSHOTABLE_OBJS)
                         and Symbols.STAR in val):
                     scope[key] = val.replace(Symbols.STAR, Symbols.BLANK)
     return [
         factory_obj.update_attrs(is_allow_none=True, **scope) for scope,
         factory_obj in zip(list_scopes_to_convert, list_factory_objs)
     ]
Example #49
def additional_facility_details(request):
    data = {}
    from_zone = tz.tzutc()
    to_zone = tz.gettz('Asia/Kolkata')
    sid = transaction.savepoint()
    booking_detail_no = int(datetime.now().strftime('%Y%m%d%H%M%S'))
    try:
        data_list = []
        equipment_name_list = []
        facility_list = []
        hall_equipment_list = []
        hall_equipment_rate = []
        hall_equipment_dict = {}
        print "request in | hallbookingapp | view | hallbooking_landing.py | additional_facility_details"
        user_type = request.GET.get('user_type_val')
        from_hour = request.GET.get('from_hour')
        to_hour = request.GET.get('to_hour')
        from_minute = request.GET.get('from_minute')
        to_minute = request.GET.get('to_minute')
        hall_id = request.GET.get('hall_id')
        from_date = request.GET.get('fromdate')
        to_date = request.GET.get('todate')
        from_period = request.GET.get('from_period')
        to_period = request.GET.get('to_period')
        company_name = request.GET.get('companyindividualname')
        date_value = request.GET.get('date_value')
        event_date = datetime.now()
        hall_detail_id = HallDetail.objects.get(id=hall_id)
        hall_facility_list = hall_detail_id.hall_equipment.all()
        hall_equipment_obj = HallEquipment.objects.filter(
            hall_detail=hall_detail_id.id)
        for hall_equipment in hall_equipment_obj:
            if user_type == 'nm':
                hall_equipment_val = float(hall_equipment.non_member_charges)
            else:
                hall_equipment_val = float(hall_equipment.member_charges)
            hall_equipment_dict = {
                'facility_list':
                str(hall_equipment.hall_functioning_equipment.equipment_name),
                'hall_rate':
                float(hall_equipment_val),
                'from_hour':
                str(from_hour),
                'from_minute':
                str(from_minute),
                'to_minute':
                str(to_minute),
                'to_hour':
                str(to_hour),
                'from_period':
                str(from_period),
                'to_period':
                str(to_period),
            }
            hall_equipment_list.append(hall_equipment_dict)

        length = len(from_date)
        if length:
            local_date = datetime.strptime(str(from_date), '%d/%m/%Y')
            local_from_time = ''
            local_to_time = ''

            if from_period == "PM":
                if from_hour == '12':
                    local_from_time = time(int(from_hour), int(from_minute))
                else:
                    local_from_time = time(
                        int(from_hour) + 12, int(from_minute))
            else:
                if int(from_hour) == 12:
                    local_from_time = time(int(00), int(from_minute))
                else:
                    local_from_time = time(int(from_hour), int(from_minute))
            if to_period == "PM":
                if to_hour == '12':
                    local_to_time = time(int(to_hour), int(to_minute))
                else:
                    local_to_time = time(int(to_hour) + 12, int(to_minute))
            else:
                if int(to_hour) == 12:
                    local_to_time = time(int(00), int(to_minute))
                else:
                    local_to_time = time(int(to_hour), int(to_minute))

            from_time = datetime.strptime(str(local_from_time), '%H:%M:%S')
            to_time = datetime.strptime(str(local_to_time), '%H:%M:%S')
            f_time = datetime.strftime(from_time, '%H,%M')
            t_time = datetime.strftime(to_time, '%H,%M')
            f_time = datetime.strptime(f_time, '%H,%M').time()
            t_time = datetime.strptime(t_time, '%H,%M').time()

            from_date_time = datetime.combine(local_date.date(), f_time)
            to_date_time = datetime.combine(local_date.date(), t_time)

            date_time_dict = {}

            date_time_dict.setdefault((from_date_time.date()),
                                      []).append(from_date_time.time())
            date_time_dict.setdefault((to_date_time.date()),
                                      []).append(to_date_time.time())

            hall_obj = HallDetail.objects.get(id=request.GET.get('hall_id'))

            for key, value in date_time_dict.iteritems():
                utc_to_date = datetime.combine(key, value[1])
                utc_from_date = datetime.combine(key, value[0])
                utc_to_date = utc_to_date.replace(tzinfo=from_zone)
                final_to_date = utc_to_date.astimezone(to_zone)

                utc_from_date = utc_from_date.replace(tzinfo=from_zone)
                final_from_date = utc_from_date.astimezone(to_zone)

                # Payment Calculation
                hall_rent_dict = {}
                if utc_from_date.date() in [
                        item.holiday_date for item in Holiday.objects.filter(
                            holiday_date__gte=utc_from_date.date(),
                            status=True,
                            is_deleted=False,
                            is_booking_available=True)
                ]:
                    week_day = '6'
                else:
                    week_day = utc_from_date.strftime('%w')
                minutes = int(
                    ceil((final_to_date - final_from_date).seconds / 60.0))

                if request.GET.get('user_type_val') == 'm':
                    member_obj = UserDetail.objects.get(
                        id=request.POST.get('company_list'))
                    if member_obj.valid_invalid_member:
                        hall_rent_dict = get_total_hall_rent(
                            week_day, minutes, hall_obj, True)
                    else:
                        hall_rent_dict = get_total_hall_rent(
                            week_day, minutes, hall_obj, False)
                else:
                    hall_rent_dict = get_total_hall_rent(
                        week_day, minutes, hall_obj, False)
        data = {
            'success': 'true',
            'hall_name': hall_detail_id.hall_name,
            'hall_equipment': hall_equipment_list,
            'event_nature': str(request.GET.get('NatureoftheEvent')),
            'event_date': event_date.strftime('%I:%M %p'),
            'booking_from_time': from_date,
            'hall_detail_id': hall_detail_id.id,
            'booking_to_time': to_date,
            'company_name_val': company_name,
            'hall_rent': hall_rent_dict['total_rent'],
            'date_value': date_value
        }
        return HttpResponse(json.dumps(data), content_type='application/json')
    except Exception, e:
        print "Responce Out | hallbookingapp | view | hallbooking_landing.py | additional_facility_details", str(
            traceback.print_exc())
        return HttpResponse(json.dumps(data), content_type='application/json')
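# The AM/PM branching above hand-codes the 12 AM -> 00 and 12 PM -> 12 edge
# cases; parsing with a 12-hour format does the same in one call. A compact
# alternative sketch (pure illustration, not part of the view):
from datetime import datetime

def to_24h(hour, minute, period):
    # '%I' is the 12-hour clock, '%p' the AM/PM marker; strptime handles
    # the midnight and noon edge cases automatically.
    return datetime.strptime('%s:%s %s' % (hour, minute, period), '%I:%M %p').time()

assert str(to_24h('12', '30', 'AM')) == '00:30:00'
assert str(to_24h('12', '30', 'PM')) == '12:30:00'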
Example #50
import datetime
import re

from dateutil import parser
from dateutil import tz

from googlecloudsdk.core.util import times_data

try:
    from dateutil import tzwin  # pylint: disable=g-import-not-at-top, Windows
except ImportError:
    tzwin = None

LOCAL = tz.tzlocal()  # The local timezone.
UTC = tz.tzutc()  # The UTC timezone.


def GetTimeZone(name):
    """Returns a datetime.tzinfo object for name.

  Args:
    name: A timezone name string, None for the local timezone.

  Returns:
    A datetime.tzinfo object for name, local timezone if name is unknown.
  """
    if name in ('UTC', 'Z'):
        return UTC
    if name in ('LOCAL', 'L'):
        return LOCAL
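# For names other than UTC/LOCAL, helpers like this typically fall back to an
# IANA lookup; a hypothetical sketch of such a fallback using dateutil (not
# the actual googlecloudsdk continuation, which imports times_data above,
# presumably for abbreviation mappings):
def GetTimeZoneSketch(name):
    # Unknown names resolve to None via tz.gettz, so default to LOCAL.
    return tz.gettz(name) or LOCAL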
Example #51
 def python_value(self, value):
     return arrow.get(value, tzinfo=tzutc()) if value is not None else None
Example #52
    def load_series(
        self,
        train_start_date: datetime,
        train_end_date: datetime,
        tag_list: List[SensorTag],
        dry_run: Optional[bool] = False,
        **kwargs,
    ):
        """
        See GordoBaseDataProvider for documentation
        """
        if dry_run:
            raise NotImplementedError(
                "Dry run for IrocReader is not implemented")
        if not tag_list:
            logger.warning(
                "Iroc reader called with empty tag_list, returning none")
            return
        if train_end_date < train_start_date:
            raise ValueError(
                f"Iroc reader called with train_end_date: {train_end_date} before train_start_date: {train_start_date}"
            )

        base_paths_from_assets = list(
            map(lambda tag: self.base_path_from_asset(tag.asset), tag_list))
        if len(set(base_paths_from_assets)) != 1:
            raise ValueError(
                "Iroc reader found either more than one asset or no asset from the tag list provided"
            )
        elif None in base_paths_from_assets:
            raise ValueError(
                "Iroc reader could not associate some tags to an asset.")

        base_path = base_paths_from_assets[0]

        # We query with an extra day on both sides since the way the files are
        # organized in the datalake does not account for timezones, so some timestamps
        # are in the wrong files

        all_base_paths = (
            f"{base_path}/{t.year:0>4d}/{t.month:0>2d}/{t.day:0>2d}/"
            for t in pd.date_range(
                start=train_start_date.astimezone(tz.tzutc()) -
                pd.Timedelta("1D"),
                end=train_end_date.astimezone(tz.tzutc()) + pd.Timedelta("1D"),
                freq="D",
            ))

        fetched_tags = self._fetch_all_iroc_files_from_paths(
            all_base_paths, train_start_date, train_end_date, tag_list)
        if len(fetched_tags) == 0:
            raise ValueError(
                f"Found no data for tags {tag_list} in the daterange {train_start_date} to "
                f"{train_end_date}")

        concatted = pd.concat(fetched_tags, copy=False)

        if len(concatted.columns) != len(tag_list):
            raise ValueError(
                f"Did not find data for all tags, the missing tags are "
                f"{set(to_list_of_strings(tag_list))-set(concatted.columns)}")

        for col in concatted.columns:
            without_na = concatted[col].dropna()
            without_na.sort_index(inplace=True)
            yield without_na
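# A small sketch of what the padded path generator above yields (base path and
# dates are made up; note the extra day added on each side of the range):
import pandas as pd
from dateutil import tz

start = pd.Timestamp('2020-01-02', tz=tz.tzutc())
end = pd.Timestamp('2020-01-03', tz=tz.tzutc())
paths = [
    f"raw/asset/{t.year:0>4d}/{t.month:0>2d}/{t.day:0>2d}/"
    for t in pd.date_range(start - pd.Timedelta("1D"), end + pd.Timedelta("1D"), freq="D")
]
# ['raw/asset/2020/01/01/', 'raw/asset/2020/01/02/',
#  'raw/asset/2020/01/03/', 'raw/asset/2020/01/04/']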
Example #53
def utc_now():
    # utcnow() doesn't include a tzinfo.
    return datetime.datetime.utcnow().replace(tzinfo=tz.tzutc())
Example #54
def calendar_events(request, calendar_slug):
    """
    JSON events feed class conforming to the JQuery FullCalendar and
    jquery-week-calendar CalEvent standard.

    [1]: http://code.google.com/p/jquery-week-calendar/
    [2]: http://arshaw.com/fullcalendar
    Corresponds to: http://arshaw.com/fullcalendar/docs/#calevent-objects
    """
    calendar = get_object_or_404(Calendar, slug=calendar_slug)

    start = request.GET.get('start', None)
    end = request.GET.get('end', None)
    if settings.USE_TZ:
        start = start and datetime.datetime.fromtimestamp(int(start), tzutc())
        end = end and datetime.datetime.fromtimestamp(int(end), tzutc())
    else:
        start = start and datetime.datetime.fromtimestamp(int(start))
        end = end and datetime.datetime.fromtimestamp(int(end))

    events = GET_EVENTS_FUNC(request, calendar)
    period = Period(events, start, end)
    cal_events = []
    for o in period.get_occurrences():
        audience_bits = [
            x for x in o.event.appropriate_for.get_set_bits()
            if x in VALID_AUDIENCES
        ]
        audiences = [AUDIENCE_TYPES[x]['name'][0] for x in audience_bits]
        if o.event.all_day:
            start = o.start.date().isoformat()
            diff = o.end - o.start
            end = o.start.date() + datetime.timedelta(days=diff.days)
            end = end.isoformat()
        else:
            start = o.start.isoformat()
            end = o.end.isoformat()
        occurrence_id = encode_occurrence(o)
        cal_event = {
            'id':
            occurrence_id,
            'allDay':
            o.event.all_day,
            'event_id':
            o.event.pk,
            'start':
            start,
            'end':
            end,
            'title':
            "%s %s" % ("".join(audiences), o.title),
            'description':
            o.description,
            'delete_url':
            "%s?id=%s&amp;action=cancel" %
            (reverse('ajax_edit_event',
                     kwargs={'calendar_slug': calendar_slug}), o.event.pk),
            'delete_occurrence_url':
            "%s?id=%s&amp;action=cancel" %
            (reverse('ajax_edit_event',
                     kwargs={'calendar_slug': calendar_slug}), occurrence_id),
            'edit_url':
            reverse('admin:events_event_change', args=(o.event.pk, )),
            'update_url':
            reverse('ajax_edit_event', kwargs={'calendar_slug':
                                               calendar_slug}),
            'update_occurrence_url':
            "%s?id=%s" %
            (reverse('ajax_edit_event',
                     kwargs={'calendar_slug': calendar_slug}), occurrence_id),
            'repeating_id':
            o.event.rule_id,
            'repeating_name':
            getattr(o.event.rule, "name", ""),
            'repeats':
            o.event.rule is not None,
            'audiences':
            audiences,
        }
        cal_events.append(cal_event)

    return JSONResponse(cal_events)
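# The start/end handling above turns FullCalendar's Unix timestamps into
# datetimes that are timezone-aware only when USE_TZ is on. A quick
# illustration with a fixed timestamp (the value is arbitrary):
import datetime
from dateutil.tz import tzutc

ts = 1577836800  # 2020-01-01T00:00:00Z
aware = datetime.datetime.fromtimestamp(ts, tzutc())  # 2020-01-01T00:00:00+00:00
naive = datetime.datetime.fromtimestamp(ts)           # interpreted in the server's local zone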
Example #55
 def when(self, value: str) -> None:
     timestamp = datetime.fromisoformat(value)
     if timestamp.tzinfo is None:
         # Naive timestamps are assumed to already be UTC.
         self._when = timestamp.replace(tzinfo=tzutc())
     else:
         self._when = timestamp.astimezone(tzutc())
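# The setter's rule as a standalone function: naive input is taken to already
# be UTC, aware input is converted to UTC (the function name is illustrative):
from datetime import datetime
from dateutil.tz import tzutc

def normalize_to_utc(value):
    ts = datetime.fromisoformat(value)
    return ts.replace(tzinfo=tzutc()) if ts.tzinfo is None else ts.astimezone(tzutc())

assert normalize_to_utc('2021-06-01T12:00:00').isoformat() == '2021-06-01T12:00:00+00:00'
assert normalize_to_utc('2021-06-01T14:00:00+02:00').isoformat() == '2021-06-01T12:00:00+00:00'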
Example #56
 def test_rsvp_last_viewed_updated(self):
     guest = save_guest(EXAMPLE_GUEST_1)
     self.client.get(f"/rsvp/{guest.id}", follow_redirects=True)
     edited_guest = get_guest(guest.id)
     self.assertAlmostEqual(edited_guest.last_viewed, datetime.datetime.now(tzutc()),
                            delta=datetime.timedelta(seconds=5))
Example #57
        def convert_repr_rest_to_ui(obj):
            """Convert object's attributes from REST to UI like representation."""
            def convert_attr_val_repr_dict_to_unicode(attr_name, attr_value):
                """Convert attribute value from dictionary to unicode representation
        (get value by key from dictionary 'attr_value' where key determine
        according to 'attr_name').
        """
                if isinstance(attr_value, dict):
                    converted_attr_value = attr_value
                    if attr_name in Representation.people_attrs_names + [
                            "created_by", "modified_by"
                    ]:
                        converted_attr_value = unicode(attr_value.get("email"))
                    if attr_name in [
                            "custom_attribute_definitions", "program", "audit",
                            "mapped_objects"
                    ]:
                        converted_attr_value = (
                            unicode(attr_value.get("title")) if
                            attr_name != "custom_attribute_definitions" else {
                                attr_value.get("id"):
                                attr_value.get("title").upper()
                            })
                    if attr_name in ["custom_attribute_values"]:
                        converted_attr_value = {
                            attr_value.get("custom_attribute_id"):
                            attr_value.get("attribute_value")
                        }
                    if obj_attr_name == "comments":
                        converted_attr_value = {
                            k: (parser.parse(v).replace(
                                tzinfo=tz.tzutc()) if k == "created_at"
                                and isinstance(v, unicode) else v)
                            for k, v in attr_value.iteritems() if k in
                            ["modified_by", "created_at", "description"]
                        }
                    if attr_name == "assertions":
                        for name, assertion_id in ControlEntity.ASSERTIONS.iteritems(
                        ):
                            if assertion_id == attr_value["id"]:
                                converted_attr_value = name
                    return converted_attr_value

            origin_obj = copy.deepcopy(obj)
            for obj_attr_name in obj.__dict__.keys():
                # 'Ex', u'Ex', 1, None to 'Ex', u'Ex', 1, None
                obj_attr_value = getattr(obj, obj_attr_name)
                # REST like u'08-20-2017T04:30:45' to date=2017-08-20,
                # timetz=04:30:45+00:00
                if (obj_attr_name in ["updated_at", "created_at"]
                        and isinstance(obj_attr_value, unicode)):
                    obj_attr_value = (parser.parse(obj_attr_value).replace(
                        tzinfo=tz.tzutc()))
                if isinstance(obj_attr_value, dict) and obj_attr_value:
                    # "modified_by" {"type": "Person", "id": x} to u'*****@*****.**'
                    # todo: deprecated?
                    if obj_attr_name == "modified_by":
                        from lib.service import rest_service
                        obj_attr_value = getattr(
                            rest_service.ObjectsInfoService().get_obj(
                                obj=Representation.repr_dict_to_obj(
                                    obj_attr_value)), "email")
                    # {'name': u'Ex1', 'type': u'Ex2', ...} to u'Ex1'
                    else:
                        obj_attr_value = convert_attr_val_repr_dict_to_unicode(
                            obj_attr_name, obj_attr_value)
                # [el1, el2, ...] or [{item1}, {item2}, ...] to [u'Ex1, u'Ex2', ...]
                if (isinstance(obj_attr_value, list) and all(
                        isinstance(item, dict) for item in obj_attr_value)):
                    obj_attr_value = [
                        convert_attr_val_repr_dict_to_unicode(
                            obj_attr_name, item) for item in obj_attr_value
                    ]
                setattr(obj, obj_attr_name, obj_attr_value)
            # merge "custom_attribute_definitions" and "custom_attribute_values"
            obj_cas_attrs_names = [
                "custom_attributes", "custom_attribute_definitions",
                "custom_attribute_values"
            ]
            if set(obj_cas_attrs_names).issubset(obj.__dict__.keys()):
                cas_def = obj.custom_attribute_definitions
                cas_val = obj.custom_attribute_values
                # form CAs values if CAs definitions exist but CAs values do
                # not, or CAs definitions and CAs values have different lengths
                if (cas_def and
                    (not cas_val or (isinstance(cas_def and cas_val, list))
                     and len(cas_def) != len(cas_val))):
                    from lib.entities.entities_factory import (
                        CustomAttributeDefinitionsFactory)
                    cas_val_dicts_keys = ([_.keys()[0]
                                           for _ in cas_val] if isinstance(
                                               cas_val, list) else [None])
                    _cas_val = [
                        {
                            k: v
                        } for k, v in CustomAttributeDefinitionsFactory.
                        generate_ca_title_id([
                            Representation.repr_dict_to_obj(cad)
                            for cad in origin_obj.custom_attribute_definitions
                        ]).iteritems() if k not in cas_val_dicts_keys
                    ]
                    cas_val = _cas_val if not cas_val else cas_val + _cas_val
                cas_def_dict = (dict(
                    [_def.iteritems().next() for _def in cas_def]) if
                                (isinstance(cas_def, list) and all(
                                    isinstance(_def, dict)
                                    for _def in cas_def)) else None)
                cas_val_dict = (dict(
                    [_val.iteritems().next() for _val in cas_val]) if
                                (isinstance(cas_def, list) and all(
                                    isinstance(_def, dict)
                                    for _def in cas_def)) else None)
                cas = StringMethods.merge_dicts_by_same_key(
                    cas_def_dict, cas_val_dict)
                if obj.custom_attributes:
                    cas.update(obj.custom_attributes)
                if cas in [{None: None}, {}]:
                    cas = None
                setattr(obj, "custom_attributes", cas)
            return obj
Example #58
        def convert_obj_repr_from_rest_to_ui(obj):
            """Convert object's attributes from REST to UI like representation."""
            def convert_attr_value_from_dict_to_unicode(attr_name, attr_value):
                """Convert attribute value from dictionary to unicode representation
        (get value by key from dictionary 'attr_value' where key determine
        according to 'attr_name').
        """
                if isinstance(attr_value, dict):
                    converted_attr_value = attr_value
                    if attr_name in [
                            "contact", "manager", "owners", "assessor",
                            "creator", "verifier", "created_by", "modified_by",
                            "Assessor", "Creator", "Verifier"
                    ]:
                        converted_attr_value = unicode(attr_value.get("email"))
                    if attr_name in [
                            "custom_attribute_definitions", "program", "audit",
                            "objects_under_assessment"
                    ]:
                        converted_attr_value = (
                            unicode(attr_value.get("title")) if
                            attr_name != "custom_attribute_definitions" else {
                                attr_value.get("id"):
                                attr_value.get("title").upper()
                            })
                    if attr_name in ["custom_attribute_values"]:
                        converted_attr_value = {
                            attr_value.get("custom_attribute_id"):
                            attr_value.get("attribute_value")
                        }
                    if obj_attr_name == "comments":
                        converted_attr_value = {
                            k: (parser.parse(v).replace(
                                tzinfo=tz.tzutc()) if k == "created_at"
                                and isinstance(v, unicode) else v)
                            for k, v in attr_value.iteritems() if k in
                            ["modified_by", "created_at", "description"]
                        }
                    return converted_attr_value

            origin_obj = copy.deepcopy(obj)
            for obj_attr_name in obj.__dict__.keys():
                # 'Ex', u'Ex', 1, None to 'Ex', u'Ex', 1, None
                obj_attr_value = (obj.assignees.get(obj_attr_name.title()) if (
                    obj_attr_name in ["assessor", "creator", "verifier"]
                    and "assignees" in obj.__dict__.keys()) else getattr(
                        obj, obj_attr_name))
                # REST like u'08-20-2017T04:30:45' to date=2017-08-20,
                # timetz=04:30:45+00:00
                if (obj_attr_name in ["updated_at", "created_at"]
                        and isinstance(obj_attr_value, unicode)):
                    obj_attr_value = (parser.parse(obj_attr_value).replace(
                        tzinfo=tz.tzutc()))
                if isinstance(obj_attr_value, dict) and obj_attr_value:
                    # to "assignees" = {"Assessor": [], "Creator": [], "Verifier": []}
                    if obj_attr_name == "assignees":
                        obj_attr_value = {
                            k: ([
                                convert_attr_value_from_dict_to_unicode(k, _v)
                                for _v in v
                            ] if isinstance(v, list) else
                                convert_attr_value_from_dict_to_unicode(k, v))
                            for k, v in obj_attr_value.iteritems()
                            if k in ["Assessor", "Creator", "Verifier"]
                        }
                    # "modified_by" {"type": "Person", "id": x} to u'*****@*****.**'
                    if obj_attr_name == "modified_by":
                        from lib.service import rest_service
                        obj_attr_value = getattr(
                            rest_service.ObjectsInfoService().get_obj(
                                obj=Entity.convert_dict_to_obj_repr(
                                    obj_attr_value)), "email")
                    # {'name': u'Ex1', 'type': u'Ex2', ...} to u'Ex1'
                    else:
                        obj_attr_value = convert_attr_value_from_dict_to_unicode(
                            obj_attr_name, obj_attr_value)
                # [el1, el2, ...] or [{item1}, {item2}, ...] to [u'Ex1, u'Ex2', ...]
                if (isinstance(obj_attr_value, list) and all(
                        isinstance(item, dict) for item in obj_attr_value)):
                    obj_attr_value = [
                        convert_attr_value_from_dict_to_unicode(
                            obj_attr_name, item) for item in obj_attr_value
                    ]
                setattr(obj, obj_attr_name, obj_attr_value)
            # merge "custom_attribute_definitions" and "custom_attribute_values"
            obj_cas_attrs_names = [
                "custom_attributes", "custom_attribute_definitions",
                "custom_attribute_values"
            ]
            if set(obj_cas_attrs_names).issubset(obj.__dict__.keys()):
                cas_def = obj.custom_attribute_definitions
                cas_val = obj.custom_attribute_values
                # form CAs values if CAs definitions exist but CAs values do
                # not, or CAs definitions and CAs values have different lengths
                if (cas_def and
                    (not cas_val or (isinstance(cas_def and cas_val, list))
                     and len(cas_def) != len(cas_val))):
                    from lib.entities.entities_factory import (
                        CustomAttributeDefinitionsFactory)
                    cas_val_dicts_keys = ([_.keys()[0]
                                           for _ in cas_val] if isinstance(
                                               cas_val, list) else [None])
                    _cas_val = [
                        {
                            k: v
                        } for k, v in CustomAttributeDefinitionsFactory.
                        generate_ca_values(list_ca_def_objs=origin_obj.
                                           custom_attribute_definitions,
                                           is_none_values=True).iteritems()
                        if k not in cas_val_dicts_keys
                    ]
                    cas_val = _cas_val if not cas_val else cas_val + _cas_val
                cas_def_dict = (dict(
                    [_def.iteritems().next() for _def in cas_def]) if
                                (isinstance(cas_def, list) and all(
                                    isinstance(_def, dict)
                                    for _def in cas_def)) else {
                                        None: None
                                    })
                cas_val_dict = (dict(
                    [_val.iteritems().next() for _val in cas_val]) if
                                (isinstance(cas_def, list) and all(
                                    isinstance(_def, dict)
                                    for _def in cas_def)) else {
                                        None: None
                                    })
                cas = string_utils.merge_dicts_by_same_key(
                    cas_def_dict, cas_val_dict)
                setattr(obj, "custom_attributes", cas)
            return obj
Example #59
def test_metadata2asset_simple1(schema_dir):
    data = metadata2asset({
        "contentSize":
        69105,
        "digest":
        "783ad2afe455839e5ab2fa659861f58a423fd17f",
        "digest_type":
        "sha1",
        "encodingFormat":
        "application/x-nwb",
        "nwb_version":
        "2.2.5",
        "experiment_description":
        "experiment_description1",
        "experimenter": ("experimenter1", ),
        "identifier":
        "identifier1",
        "institution":
        "institution1",
        "keywords": ["keyword1", "keyword 2"],
        "lab":
        "lab1",
        "related_publications": ("related_publications1", ),
        "session_description":
        "session_description1",
        "session_id":
        "session_id1",
        "session_start_time":
        datetime(2017, 4, 15, 12, 0, tzinfo=tzutc()),
        "age":
        None,
        "date_of_birth":
        None,
        "genotype":
        None,
        "sex":
        None,
        "species":
        None,
        "subject_id":
        None,
        "number_of_electrodes":
        0,
        "number_of_units":
        0,
        "nd_types": [],
        "tissue_sample_id":
        "tissue42",
    })
    assert data == AssetMeta.unvalidated(
        schemaVersion="1.0.0-rc1",
        identifier="identifier1",
        name=None,
        description=None,
        contributor=None,
        about=None,
        studyTarget=None,
        license=None,
        protocol=None,
        ethicsApproval=None,
        keywords=["keyword1", "keyword 2"],
        acknowledgement=None,
        access=[
            AccessRequirements(
                status=AccessType.Open,
                email=None,
                contactPoint=None,
                description=None,
                embargoedUntil=None,
            )
        ],
        url=None,
        repository="https://dandiarchive.org/",
        relatedResource=None,
        wasGeneratedBy=None,
        contentSize=69105,
        encodingFormat="application/x-nwb",
        digest=Digest(value="783ad2afe455839e5ab2fa659861f58a423fd17f",
                      cryptoType=DigestType.sha1),
        path=None,
        dataType=None,
        sameAs=None,
        modality=None,
        measurementTechnique=None,
        variableMeasured=None,
        wasDerivedFrom=[
            BioSample(identifier="tissue42",
                      assayType=None,
                      anatomy=None,
                      wasDerivedFrom=None)
        ],
        wasAttributedTo=[],
        contentUrl=None,
    )
    # We need to convert `data` to a `dict` this way instead of with `.dict()`
    # so that enums will be converted to strings.
    data_as_dict = json.loads(data.json(exclude_unset=True, exclude_none=True))
    validate_asset_json(data_as_dict, schema_dir)
Example #60
 def when(self) -> str:
     if self._when.tzinfo is None:
         return self._when.replace(tzinfo=tzutc()).isoformat(timespec='seconds')
     return self._when.astimezone(tzutc()).isoformat(timespec='seconds')