Example #1
    def decode_message(self, message, bodies):
        """
        """
        try:
            body = message.get_body()
            body = body.replace(', ,', ',')
            msgDict = self.JSON_format(body)
            try:
                if msgDict['fallback']:
                    msgDict['fallback'] = '1'
                else:
                    msgDict['fallback'] = '0'
            except KeyError:
                msgDict['fallback'] = '-'

            # convert time since Epoch to datetime
            msgDict['start_date'] = datetime.utcfromtimestamp(int(msgDict['start_time']))
            msgDict['end_date'] = datetime.utcfromtimestamp(int(msgDict['end_time']))

            #self._logger.info(msgDict)
        
            bodies.append(msgDict)

        except ValueError as msg:
            self._logger.warning("Impossible to decode the message: %s by JSON" % message)
            self._logger.error(msg)
            #raise msg
        except Exception as msg:
            self._logger.warning("Exception: %s" % msg)
Example #2
    def generate_scenario_report(self, scenario, stats):
        """Format a report based on calculated statistics for an executed
        scenario.

        :stats: A python data structure with calculated statistics
        :returns: A report (string) suitable for printing, emailing, etc.
        """

        template = Template(self.scenario_template())
        tmpl_vars = {
            'size_data': [],
            'stat_list': [
                ('TOTAL', stats['agg_stats'], stats['size_stats']),
                ('CREATE', stats['op_stats'][ssbench.CREATE_OBJECT],
                 stats['op_stats'][ssbench.CREATE_OBJECT]['size_stats']),
                ('READ', stats['op_stats'][ssbench.READ_OBJECT],
                 stats['op_stats'][ssbench.READ_OBJECT]['size_stats']),
                ('UPDATE', stats['op_stats'][ssbench.UPDATE_OBJECT],
                 stats['op_stats'][ssbench.UPDATE_OBJECT]['size_stats']),
                ('DELETE', stats['op_stats'][ssbench.DELETE_OBJECT],
                 stats['op_stats'][ssbench.DELETE_OBJECT]['size_stats']),
            ],
            'agg_stats': stats['agg_stats'],
            'nth_pctile': stats['nth_pctile'],
            'start_time': datetime.utcfromtimestamp(
                stats['time_series']['start_time']
            ).strftime(REPORT_TIME_FORMAT),
            'stop_time': datetime.utcfromtimestamp(
                stats['time_series']['stop']).strftime(REPORT_TIME_FORMAT),
            'duration': stats['time_series']['stop']
            - stats['time_series']['start_time'],
            'weighted_c': 0.0,
            'weighted_r': 0.0,
            'weighted_u': 0.0,
            'weighted_d': 0.0,
        }
        for size_data in scenario.sizes_by_name.values():
            if size_data['size_min'] == size_data['size_max']:
                size_range = '%-15s' % (
                    self._format_bytes(size_data['size_min']),)
            else:
                size_range = '%s - %s' % (
                    self._format_bytes(size_data['size_min']),
                    self._format_bytes(size_data['size_max']))
            initial_files = scenario._scenario_data['initial_files']
            initial_total = sum(initial_files.values())
            pct_total = (initial_files.get(size_data['name'], 0)
                         / float(initial_total) * 100.0)
            tmpl_vars['size_data'].append({
                'crud_pcts': '  '.join(map(lambda p: '%2.0f' % p,
                                           size_data['crud_pcts'])),
                'size_range': size_range,
                'size_name': size_data['name'],
                'pct_total_ops': '%3.0f%%' % pct_total,
            })
            tmpl_vars['weighted_c'] += pct_total * size_data['crud_pcts'][0] / 100.0
            tmpl_vars['weighted_r'] += pct_total * size_data['crud_pcts'][1] / 100.0
            tmpl_vars['weighted_u'] += pct_total * size_data['crud_pcts'][2] / 100.0
            tmpl_vars['weighted_d'] += pct_total * size_data['crud_pcts'][3] / 100.0
        return template.render(scenario=scenario, stats=stats, **tmpl_vars)
Example #3
    def scan_list(self, start_time=None, end_time=None, **kwargs):
        """List scans stored in Security Center in a given time range.

        Time is given in UNIX timestamps, assumed to be UTC. If a `datetime`
        is passed it is converted. If `end_time` is not specified it defaults
        to now; if `start_time` is not specified it defaults to 30 days
        before `end_time`.

        :param start_time: start of range to filter
        :type start_time: date, datetime, int
        :param end_time: end of range to filter
        :type end_time: date, datetime, int

        :return: list of dictionaries representing scans

        """

        try:
            end_time = datetime.utcfromtimestamp(int(end_time))
        except TypeError:
            if end_time is None:
                end_time = datetime.utcnow()

        try:
            start_time = datetime.utcfromtimestamp(int(start_time))
        except TypeError:
            if start_time is None:
                start_time = end_time - timedelta(days=30)

        data = {"startTime": calendar.timegm(start_time.utctimetuple()),
                "endTime": calendar.timegm(end_time.utctimetuple())}
        data.update(kwargs)

        result = self.raw_query("scanResult", "getRange", data=data)
        return result["scanResults"]
Example #4
 def _map_log(self, ci, url, path=None):
     revno = ci.revision.number
     rev = pysvn.Revision(pysvn.opt_revision_kind.number, revno)
     try:
         size = int(self._svn.list(url, revision=rev, peg_revision=rev)[0][0].size)
     except pysvn.ClientError:
         size = None
     rename_details = {}
     changed_paths = ci.get("changed_paths", [])
     for changed_path in changed_paths:
         changed_path = self._check_changed_path(changed_path, path)
         if changed_path["copyfrom_path"] and changed_path["path"] == path and changed_path["action"] == "A":
             rename_details["path"] = changed_path["copyfrom_path"]
             rename_details["commit_url"] = self._repo.url_for_commit(changed_path["copyfrom_revision"].number)
             break
     return {
         "id": revno,
         "message": h.really_unicode(ci.get("message", "--none--")),
         "authored": {
             "name": h.really_unicode(ci.get("author", "--none--")),
             "email": "",
             "date": datetime.utcfromtimestamp(ci.date),
         },
         "committed": {
             "name": h.really_unicode(ci.get("author", "--none--")),
             "email": "",
             "date": datetime.utcfromtimestamp(ci.date),
         },
         "refs": ["HEAD"] if revno == self.head else [],
         "parents": [revno - 1] if revno > 1 else [],
         "size": size,
         "rename_details": rename_details,
     }
Example #5
 def refresh_commit_info(self, oid, seen, lazy=True):
     from allura.model.repository import CommitDoc
     ci_doc = CommitDoc.m.get(_id=oid)
     if ci_doc and lazy:
         return False
     ci = self._git.rev_parse(oid)
     args = dict(
         tree_id=ci.tree.hexsha,
         committed=Object(
             name=h.really_unicode(ci.committer.name),
             email=h.really_unicode(ci.committer.email),
             date=datetime.utcfromtimestamp(ci.committed_date)),
         authored=Object(
             name=h.really_unicode(ci.author.name),
             email=h.really_unicode(ci.author.email),
             date=datetime.utcfromtimestamp(ci.authored_date)),
         message=h.really_unicode(ci.message or ''),
         child_ids=[],
         parent_ids=[p.hexsha for p in ci.parents])
     if ci_doc:
         ci_doc.update(**args)
         ci_doc.m.save()
     else:
         ci_doc = CommitDoc(dict(args, _id=ci.hexsha))
         try:
             ci_doc.m.insert(safe=True)
         except DuplicateKeyError:
             if lazy:
                 return False
     self.refresh_tree_info(ci.tree, seen, lazy)
     return True
Example #6
def date_cleaner(value, *args):
    """Try to clean value, coerce it into a python datetime.
    param value: (str) value to be parsed
    returns: datetime
    """
    if not value:
        return None
    if isinstance(value, datetime) or isinstance(value, date):
        return value
    try:
        value = dateutil.parser.parse(value)
    except TypeError:
        return None
    except ValueError as e:
        if e.message == 'year is out of range':
            value = datetime.utcfromtimestamp(int(value))
        else:
            return None
    except OverflowError:
        try:
            value = datetime.utcfromtimestamp(int(value))
        except ValueError:
            value = int(value) / 1000
            value = datetime.utcfromtimestamp(value)

    return value
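A few illustrative calls, assuming dateutil is installed (which branch a large numeric string takes depends on the platform and dateutil version):

    date_cleaner('2014-05-01T12:00:00')  # -> datetime(2014, 5, 1, 12, 0)
    date_cleaner('')                     # -> None (falsy input)
    date_cleaner(datetime(2014, 5, 1))   # -> returned unchanged
    date_cleaner('1398945600000')        # millisecond epoch; may land in the
                                         # OverflowError branch and be divided
                                         # by 1000 before conversion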
Example #7
    def run(self):
        keen_client = KeenClient(
            project_id=self.project_id,
            write_key=self.write_key,
        )

        timezone = pytz.timezone(self.data['user']['timezone'])

        self.events = []
        with click.progressbar(self.data['days'],
                               label='Preparing keen.io events',
                               fill_char=click.style('#', fg='blue')) as days:

            for day in days:
                dt = self._parse_date(day['date'], timezone)

                self.append_event(dt, 'total', {
                    'seconds': day['grand_total']['total_seconds'],
                })

                categories = [
                    'editor',
                    'language',
                    'operating_system',
                    'project',
                ]
                for category in categories:
                    for item in day.get(category + 's', []):
                        self.append_event(dt, category, {
                            'seconds': item['total_seconds'],
                            'name': item['name'],
                        })

                files = {}
                for project in day.get('projects', []):
                    for f in project.get('dump', {}).get('files', []):
                        if f['name'] not in files:
                            files[f['name']] = 0
                        files[f['name']] += f['total_seconds']

                for name, seconds in files.items():
                    self.append_event(dt, 'file', {
                        'seconds': seconds,
                        'name': name,
                    })

        if len(self.events) == 0:
            click.echo('No events available for exporting to keen.io')
            return

        click.echo('Uploading events to keen.io...')
        start = datetime.utcfromtimestamp(self.data['range']['start'])
        end = datetime.utcfromtimestamp(self.data['range']['end'])
        collection = 'WakaTime Data Dump from {start} until {end}'.format(
            start=start.strftime('%Y-%m-%d'),
            end=end.strftime('%Y-%m-%d'),
        )
        keen_client.add_events({
            collection: self.events,
        })
Example #8
 def bs_to_json(filename):
     """Convert (task) buildstats file into json format"""
     bs_json = OrderedDict()
     iostat = OrderedDict()
     rusage = OrderedDict()
     with open(filename) as fobj:
         for line in fobj.readlines():
             key, val = line.split(':', 1)
             val = val.strip()
             if key == 'Started':
                 start_time = datetime.utcfromtimestamp(float(val))
                 bs_json['start_time'] = start_time
             elif key == 'Ended':
                 end_time = datetime.utcfromtimestamp(float(val))
             elif key.startswith('IO '):
                 split = key.split()
                 iostat[split[1]] = int(val)
             elif key.find('rusage') >= 0:
                 split = key.split()
                 ru_key = split[-1]
                 if ru_key in ('ru_stime', 'ru_utime'):
                     val = float(val)
                 else:
                     val = int(val)
                 rusage[ru_key] = rusage.get(ru_key, 0) + val
             elif key == 'Status':
                 bs_json['status'] = val
     bs_json['elapsed_time'] = end_time - start_time
     bs_json['rusage'] = rusage
     bs_json['iostat'] = iostat
     return bs_json
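The file layout this parser expects can be inferred from the branches above; a minimal, invented sample that exercises all of them:

    sample_lines = [
        'Started: 1500000000.0',
        'Ended: 1500000042.5',
        'IO rchar: 4096',
        'rusage ru_utime: 0.25',
        'Status: PASSED',
    ]
    # Written to a file and passed to bs_to_json, this yields
    # elapsed_time == timedelta(seconds=42.5), iostat == {'rchar': 4096}
    # and rusage == {'ru_utime': 0.25}.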
Example #9
 def process_tick(self, tick):
     timestamp, value = tick.timestamp, tick.value
     self.current = self.current or [tick_boundaries(timestamp, self.period),
                                     value, value, value, value]
     if timestamp <= self.current[0]:
         self.current_ticks.append((timestamp, value))
         self.current[4] = value # reset close
         if value > self.current[2]: # compare with high
             self.current[2] = value # update high
         elif value < self.current[3]: # compare with low
             self.current[3] = value # update low
     else:
         self.append(tuple(self.current))
         self.current_ticks = [] # reset the current_ticks list
         # TODO: think about this roll-over
         if (datetime.utcfromtimestamp(timestamp).date() > 
             datetime.utcfromtimestamp(self.current[0]).date()):
             # next UTC day, do not fill gap
             self.current = [tick_boundaries(timestamp, self.period), value,
                             value, value, value]
         else:
             # same day, fill gaps if needed
             candle_time = self.current[0] + (self.period * 60)
             while timestamp > candle_time: # fill gaps
                 close = self.current[4]
                 self.append((candle_time, close, close, close, close))
                 candle_time += self.period * 60
             self.current = [candle_time, value, value, value, value]
Example #10
def authorized(response):
    if response is None:
        #flash(u'Facebook-Access denied: reason=%s, error=%s' \
        #      %(request.args['error_reason'],
        #        request.args['error_description']),
        #      'error')
        flash(messages.fb_access_denied, 'error')

    else:
        oauth_token = response['access_token']
        expires = int(response['expires']) + time()

        if get_current_user():
            user_id = get_current_user().id
            db_obj = DBFacebook.query.filter_by(user_id=user_id).first()
            if db_obj:
                db_obj.access_token = oauth_token
                db_obj.expire_time = datetime.utcfromtimestamp(expires)
            else:
                db_obj = DBFacebook(
                            user_id=get_current_user().id,
                            access_token=oauth_token,
                            expire_time=datetime.utcfromtimestamp(expires)
                        )

            db.session.add(db_obj)
            db.session.commit()

    return redirect(request.args.get('next') or url_for('index'))
Example #11
 def innerfunc(timestamp_string):
     timestamp = int(timestamp_string)
     try:
         t0 = datetime.utcfromtimestamp(timestamp)
     except ValueError:
         t0 = datetime.utcfromtimestamp(timestamp/1000)
     goto_timestamp_in_files(self.window, files, t0, "this type of files")
Example #12
def event_update_api(public_id):
    data = request.get_json(force=True)

    try:
        valid_event_update(data)
    except InputError as e:
        return err(404, e.message)

    if 'start' in data:
        data['start'] = datetime.utcfromtimestamp(int(data.get('start')))

    if 'end' in data:
        data['end'] = datetime.utcfromtimestamp(int(data.get('end')))

    if 'busy' in data:
        data['busy'] = int(data.get('busy'))

    if 'all_day' in data:
        data['all_day'] = int(data.get('all_day'))

    if 'participants' in data:
        data['participant_list'] = data['participants']
        del data['participants']
        for p in data['participant_list']:
            if 'status' not in p:
                p['status'] = 'awaiting'

    result = events.crud.update(g.namespace, g.db_session,
                                public_id, data)
    if result is None:
        return err(404, "Couldn't find event with id {0}".
                   format(public_id))
    return g.encoder.jsonify(result)
Example #13
def tstmp2dtime(Tstmp):
  if iterable(Tstmp):
    Shape = shape(Tstmp)
    return array([datetime.utcfromtimestamp(tstmp)
                  for tstmp in Tstmp]).reshape(Shape)
  else:
    return datetime.utcfromtimestamp(Tstmp)
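The unqualified iterable, shape and array calls suggest a numpy star-import; a usage sketch under that assumption:

    import numpy as np

    stamps = np.array([0.0, 86400.0, 172800.0])
    tstmp2dtime(stamps)  # -> array of datetime objects, shape preserved
    tstmp2dtime(86400)   # -> datetime(1970, 1, 2, 0, 0)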
Example #14
def getTimeSteps(start, stop, interval, roundtime=True, timezone=time.timezone):
	logger.debug('getTimeSteps:')
	timeSteps = []
	
	logger.debug('   + Interval: %s' % interval)

	start_datetime 	= datetime.utcfromtimestamp(start)
	stop_datetime 	= datetime.utcfromtimestamp(stop)

	if roundtime:
		stop_datetime = roundTime(stop_datetime, interval, timezone)

	relativeinterval = intervalToRelativeDelta.get(interval, None)

	if relativeinterval:
		date = stop_datetime
		start_datetime_minus_relativeinterval = start_datetime - relativeinterval

		while date > start_datetime_minus_relativeinterval:			
			ts = calendar.timegm(date.timetuple())			
			timeSteps.append(ts)
			date -= relativeinterval
	else:
		logger.debug('   + Use interval')
		timeSteps = range(stop, start-interval, -interval)
	
	timeSteps.reverse()
	
	logger.debug('   + timeSteps: %s', timeSteps)

	return timeSteps
Example #15
    def _sync_step_from_active(self, step):
        item = self._get_jenkins_job(step)

        if not step.data.get('uri'):
            step.data['uri'] = item['url']

        # TODO(dcramer): we're doing a lot of work here when we might
        # not need to due to it being sync'd previously
        node = self._get_node(step.data['master'], item['builtOn'])

        step.node = node
        step.date_started = datetime.utcfromtimestamp(
            item['timestamp'] / 1000)

        if item['building']:
            step.status = Status.in_progress
        else:
            step.status = Status.finished
            step.result = RESULT_MAP[item['result']]
            step.date_finished = datetime.utcfromtimestamp(
                (item['timestamp'] + item['duration']) / 1000)

        if step.status == Status.finished:
            self._sync_results(step, item)

        if db.session.is_modified(step):
            db.session.add(step)
            db.session.commit()
Example #16
def get_git_info(filename):

    environ['TZ'] = 'UTC'
    git_info = getoutput('git log --pretty=raw -- "%s"' % filename)

    info = {'__git__': False}

    if (not git_info) or git_info.startswith('fatal:'):
        info['__updated__'] = datetime.utcfromtimestamp(
            stat(filename).st_mtime
            )
        return info

    info['__git__'] = True

    for line in git_info.splitlines():
        if line.startswith('author'):
            email, timestamp, tz = line.split()[-3:]
            email = email.lstrip('<').rstrip('>')
            if '(' in email:
                email = email.split('(')[0].strip()
            info['__by__'] = email
            info['__updated__'] = datetime.utcfromtimestamp(float(timestamp))
            break

    return info
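For reference, the author line in git log --pretty=raw output has the shape below (values invented), which is why the last three whitespace-separated tokens are taken:

    line = 'author Jane Doe <jane@example.com> 1418530010 +0000'
    email, timestamp, tz = line.split()[-3:]
    # email.lstrip('<').rstrip('>') -> 'jane@example.com'
    # datetime.utcfromtimestamp(float(timestamp))
    #     -> datetime(2014, 12, 14, 4, 6, 50)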
Example #17
def _getTimeSteps(start, stop, periodtime, periodtype, roundtime, timezone=time.timezone):
	logger.debug('getTimeSteps:')
	timeSteps = []
	
	logger.debug('   + Period: %s %s' % (periodtime, periodtype))

	start_datetime 	= datetime.utcfromtimestamp(start)
	stop_datetime 	= datetime.utcfromtimestamp(stop)

	if roundtime:
		stop_datetime = roundTime(stop_datetime, periodtime, timezone)

	if periodtype is not None:

		relativeinterval = relativeDeltas[periodtype] * periodtime

		if relativeinterval is not None:
			date = stop_datetime
			start_datetime_minus_relativeinterval = start_datetime - relativeinterval
			while date > start_datetime_minus_relativeinterval:
				timeSteps.append(datetimeToTimestamp(date))
				date -= relativeinterval

	else:
		logger.debug('   + Use interval')
		timeSteps = range(stop, start-periodtime, -periodtime)
	
	timeSteps.reverse()
	
	logger.debug('   + timeSteps: %s', timeSteps)

	return timeSteps
Example #18
def range(field, from_val, to_val, ret_fields=None, epoch=True, pagesCount=200, es_index='memex', es_doc_type='page', es=None):
    if ret_fields is None:
        ret_fields = []
    if es is None:
        es = default_es_elastic

    if epoch:
        from_val = datetime.utcfromtimestamp(long(from_val)).strftime('%Y-%m-%dT%H:%M:%S')
        to_val = datetime.utcfromtimestamp(long(to_val)).strftime('%Y-%m-%dT%H:%M:%S')
            
    query = { 
        "query" : { 
            "range" : { 
                field : {
                    "gt": from_val,
                    "lte": to_val
                }
            },
        },
        "fields": ret_fields
    }

    res = es.search(body=query, index=es_index, doc_type=es_doc_type, size=pagesCount, request_timeout=600)
    hits = res['hits']['hits']
    
    results = []
    for hit in hits:
        fields = hit['fields']
        fields['id'] = hit['_id']
        results.append(fields)

    return results
Example #19
    def __str__(self):
        s = "Certificate name:\n"
        s += "  "+self.getName().toUri()+"\n"
        s += "Validity:\n"

        dateFormat = "%Y%m%dT%H%M%S"
        notBeforeStr = datetime.utcfromtimestamp(self._notBefore/1000).strftime(dateFormat)
        notAfterStr = datetime.utcfromtimestamp(self._notAfter/1000).strftime(dateFormat)

        s += "  NotBefore: " + notBeforeStr+"\n"
        s += "  NotAfter: " + notAfterStr + "\n"
        for sd in self._subjectDescriptionList:
            s += "Subject Description:\n"
            s += "  " + str(sd.getOid()) + ": " + sd.getValue().toRawStr() + "\n"

        s += "Public key bits:\n"
        keyDer = self._publicKey.getKeyDer()
        encodedKey = base64.b64encode(keyDer.toBytes())
        for idx in range(0, len(encodedKey), 64):
            # Use Blob to convert to a str.
            s += Blob(encodedKey[idx:idx+64], False).toRawStr() + "\n"


        if len(self._extensionList) > 0:
            s += "Extensions:\n"
            for ext in self._extensionList:
                s += "  OID: "+ext.getOid()+"\n"
                s += "  Is critical: " + ('Y' if ext.isCritical() else 'N') + "\n"

                s += "  Value: " + str(ext.getValue()).encode('hex') + "\n"

        return s
Example #20
 def get_time_range(self, start, end):
     """returns
     :type start: datetime.datetime
     :type end: datetime.datetime
     """
     # XXX rename get_localized_range()
     if start.tzinfo is None:
         start = self.locale['local_timezone'].localize(start)
     if end.tzinfo is None:
         end = self.locale['local_timezone'].localize(end)
     start = aux.to_unix_time(start)
     end = aux.to_unix_time(end)
     sql_s = ('SELECT recs_loc.href, dtstart, dtend, ref, dtype FROM '
              'recs_loc JOIN events ON '
              'recs_loc.href = events.href AND '
              'recs_loc.calendar = events.calendar WHERE '
              '(dtstart >= ? AND dtstart <= ? OR '
              'dtend >= ? AND dtend <= ? OR '
              'dtstart <= ? AND dtend >= ?) AND events.calendar = ?;')
     stuple = (start, end, start, end, start, end, self.calendar)
     result = self.sql_ex(sql_s, stuple)
     for href, start, end, ref, dtype in result:
         start = pytz.UTC.localize(datetime.utcfromtimestamp(start))
         end = pytz.UTC.localize(datetime.utcfromtimestamp(end))
         event = self.get(href, start=start, end=end, ref=ref, dtype=dtype)
         yield event
Example #21
 def bs_to_json(filename):
     """Convert (task) buildstats file into json format"""
     bs_json = {'iostat': {},
                'rusage': {},
                'child_rusage': {}}
     with open(filename) as fobj:
         for line in fobj.readlines():
             key, val = line.split(':', 1)
             val = val.strip()
             if key == 'Started':
                 start_time = datetime.utcfromtimestamp(float(val))
                 bs_json['start_time'] = start_time
             elif key == 'Ended':
                 end_time = datetime.utcfromtimestamp(float(val))
             elif key.startswith('IO '):
                 split = key.split()
                 bs_json['iostat'][split[1]] = int(val)
             elif key.find('rusage') >= 0:
                 split = key.split()
                 ru_key = split[-1]
                 if ru_key in ('ru_stime', 'ru_utime'):
                     val = float(val)
                 else:
                     val = int(val)
                 ru_type = 'rusage' if split[0] == 'rusage' else \
                                                   'child_rusage'
                 bs_json[ru_type][ru_key] = val
             elif key == 'Status':
                 bs_json['status'] = val
     bs_json['elapsed_time'] = end_time - start_time
     return bs_json
Example #22
    def parse(auth_data, params, query_result, **kwargs):
        for data in query_result:
            #print data
            # protocol switch
            if data[2] == 1:
                proto = 'icmp'
                sport = 0
                dport = 0
            elif data[2] == 6:
                proto = 'tcp'
                sport = data[3]
                dport = data[5]
            elif data[2] == 17:
                proto = 'udp'
                sport = data[4]
                dport = data[6]
            else:
                continue

            yield {
                'category': 'flow',
                'address': [{'ip': data[0], 'dir': 'src'}, {'ip': data[1], 'dir': 'dst'}],
                'sport': sport,
                'dport': dport,
                'proto': proto,
                'source': params['source'][0],
                'time': datetime.utcfromtimestamp(data[7]),
                'until': datetime.utcfromtimestamp(data[7]),
                }
Example #23
 def report(self):
     start = datetime.utcfromtimestamp(float(self.request.POST.get('from')))
     end = datetime.utcfromtimestamp(float(self.request.POST.get('to')))
     checks = self.request.POST.getall('check_ids[]')
     outages = DBSession.query(Outage).join(Check).filter(
         Outage.between(start, end)
     )
     if checks:
         outages = outages.filter(Check.id.in_(checks))
     outages = outages.all()
     grouped_outages = {x.check_id: [] for x in outages}
     for outage in outages:
         grouped_outages[outage.check_id].append({
             'id': outage.id,
             'status': outage.status,
             'start': max(
                 timegm(outage.start.utctimetuple()),
                 timegm(start.utctimetuple())
             ),
             'end': min(
                 timegm(outage.end.utctimetuple()),
                 timegm(end.utctimetuple())
             ),
         })
     return grouped_outages
Example #24
  def send(self, mlid, msg, delay = 0):
    """Send a message to a messaging list.

    mlid -- the ML's id
    msg -- text message
    delay -- number of seconds from now to schedule the message

    """

    try:
      ts = datetime.utcfromtimestamp(
        time.time() + 65 + delay).isoformat(b'T') + 'Z'
    except TypeError:
      ts = datetime.utcfromtimestamp(
        time.time() + 65 + delay).isoformat('T') + 'Z'

    self._call_api(
      _url('events') % _urlencval(mlid),
      method='POST',
      body={
        'start': {'dateTime': ts},
        'end': {'dateTime': ts},
        'summary': msg,
        'transparency': 'transparent'
      }
    )
Example #25
    def process_exception(self, request, exception):
        if (not getattr(self, 'appenlight_client') or
                not self.appenlight_client.config.get('enabled')):
            return None

        environ = getattr(request, 'environ', request.META)
        if not self.appenlight_client.config['report_errors'] \
                or environ.get('appenlight.ignore_error'):
            return None
        user = getattr(request, 'user', None)
        end_time = default_timer()
        if user and user_is_authenticated(user):
            if 'appenlight.username' not in environ:
                environ['appenlight.username'] = six.text_type(user.pk)
        if not isinstance(exception, Http404):
            http_status = 500
            traceback = self.appenlight_client.get_current_traceback()
            appenlight_storage = get_local_storage()
            appenlight_storage.thread_stats[
                'main'] = end_time - request.__start_time__
            stats, slow_calls = appenlight_storage.get_thread_stats()
            self.appenlight_client.save_request_stats(
                stats, view_name=environ.get('appenlight.view_name', ''))
            self.appenlight_client.py_report(
                environ, traceback, message=None, http_status=http_status,
                start_time=datetime.utcfromtimestamp(
                    request.__start_time__),
                end_time=datetime.utcfromtimestamp(
                    end_time),
                request_stats=stats, slow_calls=slow_calls)
            del traceback
            request._errormator_create_report = True
Example #26
    def log(self, parent=None, offset=0, limit=100):
        # TODO(dcramer): we should make this streaming
        cmd = ['log', '--all', '--pretty=format:%s' % (LOG_FORMAT,)]
        if parent:
            cmd.append(parent)
        if offset:
            cmd.append('--skip=%d' % (offset,))
        if limit:
            cmd.append('--max-count=%d' % (limit,))
        result = self.run(cmd)

        for chunk in BufferParser(result, '\x02'):
            (sha, author, author_date, committer, committer_date,
             parents, message) = chunk.split('\x01')

            # sha may have a trailing newline due to git log adding it
            sha = sha.lstrip('\n')

            parents = filter(bool, parents.split(' '))

            author_date = datetime.utcfromtimestamp(float(author_date))
            committer_date = datetime.utcfromtimestamp(float(committer_date))

            yield LazyGitRevisionResult(
                vcs=self,
                id=sha,
                author=author,
                committer=committer,
                author_date=author_date,
                committer_date=committer_date,
                parents=parents,
                message=message,
            )
Example #27
def get_certificate():
    name = request.args.get('name')
    isView = request.args.get('view')

    ndn_name = ndn.Name(str(name))

    cert = current_app.mongo.db.certs.find_one({'name': str(name)})
    if cert is None:
        abort(404)

    if not isView:
        response = make_response(cert['cert'])
        response.headers['Content-Type'] = 'application/octet-stream'
        response.headers['Content-Disposition'] = 'attachment; filename=%s.ndncert' % str(ndn_name[-3])
        return response
    else:
        d = ndn.security.certificate.IdentityCertificate()
        d.wireDecode(bytearray(base64.b64decode(cert['cert'])))

        notBefore = datetime.utcfromtimestamp(d.getNotBefore() / 1000)
        notAfter = datetime.utcfromtimestamp(d.getNotAfter() / 1000)
        cert['from'] = notBefore
        cert['to'] = notAfter
        now = datetime.utcnow()
        cert['isValid'] = (notBefore <= now <= notAfter)
        cert['info'] = d

        return render_template('cert-show.html',
                               cert=cert, title=cert['name'])
Example #28
    def __init__(self, event):
        fields = event['message'].split()
        self.version = int(fields[0])
        self.account_id = fields[1]
        self.interface_id = fields[2]
        self.start = datetime.utcfromtimestamp(int(fields[10]))
        self.end = datetime.utcfromtimestamp(int(fields[11]))

        self.log_status = fields[13]
        if self.log_status in (NODATA, SKIPDATA):
            self.srcaddr = None
            self.dstaddr = None
            self.srcport = None
            self.dstport = None
            self.protocol = None
            self.packets = None
            self.bytes = None
            self.action = None
        else:
            self.srcaddr = fields[3]
            self.dstaddr = fields[4]
            self.srcport = int(fields[5])
            self.dstport = int(fields[6])
            self.protocol = int(fields[7])
            self.packets = int(fields[8])
            self.bytes = int(fields[9])
            self.action = fields[12]
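The field indices above line up with the 14-column version-2 VPC Flow Log record format; a usage sketch with invented values (FlowLogRecord is a hypothetical name for the class this __init__ belongs to):

    event = {'message': '2 123456789010 eni-abc123de 10.0.0.1 10.0.0.2 '
                        '443 49152 6 10 840 1418530010 1418530070 ACCEPT OK'}
    record = FlowLogRecord(event)
    # record.start   == datetime.utcfromtimestamp(1418530010)
    # record.srcport == 443, record.protocol == 6, record.action == 'ACCEPT'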
Example #29
def fcdragmodify(request):
  """
  When the user drags and drops or resizes an event in FullCalendar, we can do some simple modify
  operations here without using the full edit form
  """
  if not request.user.has_perms(event_manager_perms): raise error_insufficient_perms

  # Lookup the event
  e = Event.objects.get(id=int(request.GET["id"]))
  newallday = (request.GET["allday"] == 'true')
  newstart = datetime.utcfromtimestamp(float(request.GET["newstart"]))
  newend = request.GET["newend"]

  # Modify the event
  if newend != 'null':
    # If end time is provided by fullcalendar, we will use it
    e.duration = datetime.utcfromtimestamp(float(newend)) - newstart
  else:
    if e.all_day and not newallday:
      # If user just dragged the event from the all-day section to the hourly section,
      # fullcalendar will show the event as [defaultTimedEventDuration] long, regardless of what
      # it was before, so force the database to match
      e.duration = timedelta(hours=get_default_event_hours())
    elif not e.all_day and newallday:
      # If user just dragged the event from the hourly section to the all-day section,
      # fullcalendar will show the event on only one day, even if it was a multi-day event before.
      # So force the database to match this behavior.
      e.duration = timedelta(days=get_default_event_days())
  e.start = newstart
  e.all_day = newallday
  e.save()
  return HttpResponse('ok')
Example #30
def _parse_status(bug_el):
    """Return a bugreport object from a given status xml element"""
    bug = Bugreport()

    # plain fields
    for field in ('originator', 'subject', 'msgid', 'package', 'severity',
                  'owner', 'summary', 'location', 'source', 'pending',
                  'forwarded'):
        setattr(bug, field, _parse_string_el(bug_el(field)))

    bug.date = datetime.utcfromtimestamp(float(bug_el('date')))
    bug.log_modified = datetime.utcfromtimestamp(float(bug_el('log_modified')))
    bug.tags = [_uc(tag) for tag in str(bug_el('tags')).split()]
    bug.done = _parse_bool(bug_el('done'))
    bug.archived = _parse_bool(bug_el('archived'))
    bug.unarchived = _parse_bool(bug_el('unarchived'))
    bug.bug_num = int(bug_el('bug_num'))
    bug.mergedwith = [int(i) for i in str(bug_el('mergedwith')).split()]
    bug.blockedby = [int(i) for i in str(bug_el('blockedby')).split()]
    bug.blocks = [int(i) for i in str(bug_el('blocks')).split()]

    bug.found_versions = [_uc(str(el)) for el in
                          bug_el('found_versions').children() or []]
    bug.fixed_versions = [_uc(str(el)) for el in
                          bug_el('fixed_versions').children() or []]
    affects = [_f for _f in str(bug_el('affects')).split(',') if _f]
    bug.affects = [_uc(a).strip() for a in affects]
    # Also available, but unused or broken
    # bug.keywords = [_uc(keyword) for keyword in
    #                 str(bug_el('keywords')).split()]
    # bug.fixed = _parse_crappy_soap(tmp, "fixed")
    # bug.found = _parse_crappy_soap(tmp, "found")
    # bug.found_date = [datetime.utcfromtimestamp(i) for i in tmp["found_date"]]
    # bug.fixed_date = [datetime.utcfromtimestamp(i) for i in tmp["fixed_date"]]
    return bug
Example #31
    def raids(self, origin: str, map_proto: dict, mitm_mapper):
        """
        Update/Insert raids from a map_proto dict
        """
        origin_logger = get_origin_logger(logger, origin=origin)
        origin_logger.debug3(
            "DbPogoProtoSubmit::raids called with data received")
        cells = map_proto.get("cells", None)
        if cells is None:
            return False
        raid_args = []
        now = datetime.utcfromtimestamp(
            time.time()).strftime("%Y-%m-%d %H:%M:%S")

        query_raid = (
            "INSERT INTO raid (gym_id, level, spawn, start, end, pokemon_id, cp, move_1, move_2, last_scanned, form, "
            "is_exclusive, gender, costume) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE level=VALUES(level), spawn=VALUES(spawn), start=VALUES(start), "
            "end=VALUES(end), pokemon_id=VALUES(pokemon_id), cp=VALUES(cp), move_1=VALUES(move_1), "
            "move_2=VALUES(move_2), last_scanned=VALUES(last_scanned), is_exclusive=VALUES(is_exclusive), "
            "form=VALUES(form), gender=VALUES(gender), costume=VALUES(costume)"
        )

        for cell in cells:
            for gym in cell["forts"]:
                if gym["type"] == 0 and gym["gym_details"]["has_raid"]:
                    gym_has_raid = gym["gym_details"]["raid_info"][
                        "has_pokemon"]
                    if gym_has_raid:
                        pokemon_id = gym["gym_details"]["raid_info"][
                            "raid_pokemon"]["id"]
                        cp = gym["gym_details"]["raid_info"]["raid_pokemon"][
                            "cp"]
                        move_1 = gym["gym_details"]["raid_info"][
                            "raid_pokemon"]["move_1"]
                        move_2 = gym["gym_details"]["raid_info"][
                            "raid_pokemon"]["move_2"]
                        form = gym["gym_details"]["raid_info"]["raid_pokemon"][
                            "display"]["form_value"]
                        gender = gym["gym_details"]["raid_info"][
                            "raid_pokemon"]["display"]["gender_value"]
                        costume = gym["gym_details"]["raid_info"][
                            "raid_pokemon"]["display"]["costume_value"]
                    else:
                        pokemon_id = None
                        cp = 0
                        move_1 = 1
                        move_2 = 2
                        form = None
                        gender = None
                        costume = None

                    raid_end_sec = int(
                        gym["gym_details"]["raid_info"]["raid_end"] / 1000)
                    raid_spawn_sec = int(
                        gym["gym_details"]["raid_info"]["raid_spawn"] / 1000)
                    raid_battle_sec = int(
                        gym["gym_details"]["raid_info"]["raid_battle"] / 1000)

                    raidend_date = datetime.utcfromtimestamp(
                        float(raid_end_sec)).strftime("%Y-%m-%d %H:%M:%S")
                    raidspawn_date = datetime.utcfromtimestamp(
                        float(raid_spawn_sec)).strftime("%Y-%m-%d %H:%M:%S")
                    raidstart_date = datetime.utcfromtimestamp(
                        float(raid_battle_sec)).strftime("%Y-%m-%d %H:%M:%S")

                    is_exclusive = gym["gym_details"]["raid_info"][
                        "is_exclusive"]
                    level = gym["gym_details"]["raid_info"]["level"]
                    gymid = gym["id"]

                    mitm_mapper.collect_raid_stats(origin, gymid)

                    origin_logger.debug3(
                        "Adding/Updating gym {} with level {} ending at {}",
                        gymid, level, raidend_date)

                    raid_args.append(
                        (gymid, level, raidspawn_date, raidstart_date,
                         raidend_date, pokemon_id, cp, move_1, move_2, now,
                         form, is_exclusive, gender, costume))
        self._db_exec.executemany(query_raid, raid_args, commit=True)
        origin_logger.debug3(
            "DbPogoProtoSubmit::raids: Done submitting raids with data received"
        )
        return True
Example #32
class TestString(TestCase):
    from datetime import datetime

    __metaclass__ = TypeCaseMeta

    type_class = libyate.type.String
    values = (('', None, ''), ('a', 'a', 'a'), (0, '0', '0'), (None, None, ''),
              (True, 'true', 'true'), (u'a', u'a', u'a'),
              (datetime.utcfromtimestamp(1095112796), '1095112796',
               '1095112796'))

    def test_raises(self):
        # noinspection PyDocstring
        class C(object):
            __metaclass__ = libyate.type.DescriptorMeta

            attr = self.type_class()

        o = C()

        self.assertRaises(ValueError, setattr, o, 'attr', '')
        self.assertRaises(TypeError, setattr, o, 'attr', ('a', ))

    def test_length(self):
        # noinspection PyDocstring
        class C(object):
            __metaclass__ = libyate.type.DescriptorMeta

            attr = libyate.type.String(length=3)

        o = C()

        setattr(o, 'attr', 'abc')
        self.assertRaises(ValueError, setattr, o, 'attr', 'ab')
        self.assertRaises(ValueError, setattr, o, 'attr', 'abcd')

    def test_max_length(self):
        # noinspection PyDocstring
        class C(object):
            __metaclass__ = libyate.type.DescriptorMeta

            attr = libyate.type.String(max_length=3)

        o = C()

        setattr(o, 'attr', 'ab')
        setattr(o, 'attr', 'abc')
        self.assertRaises(ValueError, setattr, o, 'attr', 'abcd')

    def test_min_length(self):
        # noinspection PyDocstring
        class C(object):
            __metaclass__ = libyate.type.DescriptorMeta

            attr = libyate.type.String(min_length=3)

        o = C()

        setattr(o, 'attr', 'abc')
        setattr(o, 'attr', 'abcd')
        self.assertRaises(ValueError, setattr, o, 'attr', 'ab')

    def test_max_min_length(self):
        # noinspection PyDocstring
        class C(object):
            __metaclass__ = libyate.type.DescriptorMeta

            attr = libyate.type.String(min_length=2, max_length=4)

        o = C()

        setattr(o, 'attr', 'ab')
        setattr(o, 'attr', 'abc')
        setattr(o, 'attr', 'abcd')
        self.assertRaises(ValueError, setattr, o, 'attr', 'a')
        self.assertRaises(ValueError, setattr, o, 'attr', 'abcde')
Example #33
def timestamp(bytestring):
    '''
    Display a 32-bit number as an ISO-formatted UTC time.
    '''
    return datetime.utcfromtimestamp(to_long(bytestring)).isoformat()
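to_long is not shown; assuming it decodes a 4-byte big-endian unsigned integer, a self-contained equivalent would be:

    import struct
    from datetime import datetime

    def timestamp_standalone(bytestring):
        # '>I' is big-endian unsigned 32-bit; this stands in for to_long.
        seconds, = struct.unpack('>I', bytestring)
        return datetime.utcfromtimestamp(seconds).isoformat()

    timestamp_standalone(b'\x00\x00\x00\x00')  # -> '1970-01-01T00:00:00'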
Example #34
def update_checking(file, prefix_dict, as_dict):
    changed_flag = 0
    moas_change_count = 0
    new_moas_count = 0

    with open(file) as f:
        for line in f:
            fields = line.strip().split('|')
            timestamp, flag, vp, prefix = fields[1], fields[2], fields[4], fields[5]
            ori_as = ''
            if flag == 'W':
                if prefix in prefix_dict.keys():
                    if vp in prefix_dict[prefix].keys():
                        del prefix_dict[prefix][vp]
                        changed_flag = 1
            else:
                as_path = fields[6]
                if prefix in prefix_dict.keys():
                    if vp in prefix_dict[prefix].keys():
                        opath = prefix_dict[prefix][vp]
                        ori_as = opath.split(' ')[-1]
                        if opath != as_path:
                            prefix_dict[prefix][vp] = as_path
                            changed_flag = 1
                    else:
                        prefix_dict[prefix].setdefault(vp, as_path)
                        changed_flag = 1
                else:
                    prefix_dict.setdefault(prefix, dict())
                    prefix_dict[prefix].setdefault(vp, as_path)
                    prefix_dict[prefix].setdefault('ismoasnow', False)
                    prefix_dict[prefix].setdefault('moaseventid', 0)
                    prefix_dict[prefix].setdefault('firsttime', timestamp)
                    prefix_dict[prefix].setdefault('starttime', '0')
                    prefix_dict[prefix].setdefault('endtime', '0')
                    changed_flag = 1

            if changed_flag == 1:

                moasset = set()
                for ovp in prefix_dict[prefix].keys():
                    if ovp not in ['ismoasnow', 'moaseventid', 'firsttime', 'starttime', 'endtime']:
                        vppath = prefix_dict[prefix][ovp]
                        vppath_fields = vppath.split(' ')
                        oasn = vppath_fields[-1]
                        moasset.add(oasn)
                moasnum = len(moasset)

                if moasnum > 1:
                    if prefix_dict[prefix]['ismoasnow']:
                        eventid = prefix_dict[prefix]['moaseventid']
                        if prefix in Hijack_events_dict.keys():
                            if eventid in Hijack_events_dict[prefix].keys():
                                if Hijack_events_dict[prefix][eventid]['moas_set'].isdisjoint(moasset):
                                    Hijack_events_dict[prefix][eventid]['moas_set'] = \
                                    Hijack_events_dict[prefix][eventid]['moas_set'] | moasset

                    else:
                        prefix_dict[prefix]['moaseventid'] = prefix_dict[prefix]['moaseventid'] + 1
                        prefix_dict[prefix]['ismoasnow'] = True
                        prefix_dict[prefix]['starttime'] = timestamp

                        if '{' not in ori_as:
                            if is_event(ori_as, moasset, as_dict):
                                Hijack_events_dict.setdefault(prefix, dict())

                                event_dict = dict()
                                event_dict['prefix'] = prefix
                                event_dict['starttime'] = datetime.utcfromtimestamp(int(timestamp)).strftime(
                                    "%Y-%m-%d %H:%M:%S")
                                event_dict['ori_as'] = ori_as

                                event_dict['moas_set'] = moasset
                                Hijack_events_dict[prefix].setdefault(prefix_dict[prefix]['moaseventid'], event_dict)
                                write_begin_file(event_dict, as_dict)

                else:
                    if prefix_dict[prefix]['ismoasnow']:
                        prefix_dict[prefix]['endtime'] = timestamp
                        duration = int(prefix_dict[prefix]['endtime']) - int(prefix_dict[prefix]['starttime'])
                        eventid = prefix_dict[prefix]['moaseventid']
                        if prefix in Hijack_events_dict.keys():
                            if eventid in Hijack_events_dict[prefix].keys():
                                Hijack_events_dict[prefix][eventid]['endtime'] = datetime.utcfromtimestamp(
                                    int(timestamp)).strftime("%Y-%m-%d %H:%M:%S")
                                Hijack_events_dict[prefix][eventid]['duration'] = duration
                                end_as = list(moasset)[0]
                                Hijack_events_dict[prefix][eventid]['end_as'] = end_as

                                write_event_file(Hijack_events_dict[prefix][eventid], as_dict)

                    prefix_dict[prefix]['ismoasnow'] = False
                    prefix_dict[prefix]['starttime'] = '0'
                    prefix_dict[prefix]['endtime'] = '0'
                changed_flag = 0
Example #35
def datetime_zero_to_local(zero_datetime):
    now = time.time()
    offset = datetime.fromtimestamp(now) - datetime.utcfromtimestamp(now)
    return zero_datetime + offset
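Note that the offset is the machine's current UTC offset, so converting datetimes that fall under a different DST rule can be off by an hour. Typical use:

    utc_dt = datetime(2020, 6, 1, 12, 0)       # naive UTC datetime
    local_dt = datetime_zero_to_local(utc_dt)  # shifted by today's local offset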
Example #36
from collections import defaultdict
from datetime import datetime, timedelta

import dns.message
import dns.query
import dns.rcode

# ISO datetime format without msec
fmt = '%Y-%m-%dT%H:%M:%SZ'

# The constants were taken from BIND 9 source code (lib/dns/zone.c)
max_refresh = timedelta(seconds=2419200)  # 4 weeks
max_expires = timedelta(seconds=14515200)  # 24 weeks
now = datetime.utcnow().replace(microsecond=0)
dayzero = datetime.utcfromtimestamp(0).replace(microsecond=0)


TIMEOUT = 10


# Generic helper functions
def check_expires(expires, min_time, max_time):
    assert expires >= min_time
    assert expires <= max_time


def check_refresh(refresh, min_time, max_time):
    assert refresh >= min_time
    assert refresh <= max_time
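Typical use of the helpers together with the constants above, assuming an expires datetime obtained elsewhere (e.g. parsed from an rndc or dig response):

    expires = now + timedelta(seconds=3600)
    check_expires(expires, now, now + max_expires)
    check_refresh(now + timedelta(seconds=60), now, now + max_refresh)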
Example #37
    def _extract_args_single_stop(self, stop_data):
        if stop_data["type"] != 1:
            logger.warning("{} is not a pokestop", stop_data)
            return None

        now = datetime.utcfromtimestamp(
            time.time()).strftime("%Y-%m-%d %H:%M:%S")
        last_modified = datetime.utcfromtimestamp(
            stop_data["last_modified_timestamp_ms"] /
            1000).strftime("%Y-%m-%d %H:%M:%S")
        lure = "1970-01-01 00:00:00"
        active_fort_modifier = None
        incident_start = None
        incident_expiration = None
        incident_grunt_type = None

        if len(stop_data["active_fort_modifier"]) > 0:
            # get current lure duration
            sql = "select `event_lure_duration` " \
                  "from trs_event " \
                  "where now() between `event_start` and `event_end` and `event_name`<>'DEFAULT'"
            found = self._db_exec.execute(sql)
            if found and len(found) > 0 and found[0][0]:
                lure_duration = int(found[0][0])
            else:
                lure_duration = int(30)

            active_fort_modifier = stop_data["active_fort_modifier"][0]
            lure = datetime.utcfromtimestamp(
                lure_duration * 60 + (stop_data["last_modified_timestamp_ms"] /
                                      1000)).strftime("%Y-%m-%d %H:%M:%S")

        if "pokestop_displays" in stop_data \
                and len(stop_data["pokestop_displays"]) > 0 \
                and stop_data["pokestop_displays"][0]["character_display"] is not None \
                and stop_data["pokestop_displays"][0]["character_display"]["character"] > 1:
            start_ms = stop_data["pokestop_displays"][0]["incident_start_ms"]
            expiration_ms = stop_data["pokestop_displays"][0][
                "incident_expiration_ms"]
            incident_grunt_type = stop_data["pokestop_displays"][0][
                "character_display"]["character"]

            if start_ms > 0:
                incident_start = datetime.utcfromtimestamp(
                    start_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")

            if expiration_ms > 0:
                incident_expiration = datetime.utcfromtimestamp(
                    expiration_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")
        elif "pokestop_display" in stop_data:
            start_ms = stop_data["pokestop_display"]["incident_start_ms"]
            expiration_ms = stop_data["pokestop_display"][
                "incident_expiration_ms"]
            incident_grunt_type = stop_data["pokestop_display"][
                "character_display"]["character"]

            if start_ms > 0:
                incident_start = datetime.utcfromtimestamp(
                    start_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")

            if expiration_ms > 0:
                incident_expiration = datetime.utcfromtimestamp(
                    expiration_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")

        return (stop_data["id"], 1, stop_data["latitude"],
                stop_data["longitude"], last_modified, lure, now,
                active_fort_modifier, incident_start, incident_expiration,
                incident_grunt_type)
Example #38
    def mons(self, origin: str, map_proto: dict, mitm_mapper):
        """
        Update/Insert mons from a map_proto dict
        """
        origin_logger = get_origin_logger(logger, origin=origin)
        origin_logger.debug3(
            "DbPogoProtoSubmit::mons called with data received")
        cells = map_proto.get("cells", None)
        if cells is None:
            return False

        query_mons = (
            "INSERT INTO pokemon (encounter_id, spawnpoint_id, pokemon_id, latitude, longitude, disappear_time, "
            "individual_attack, individual_defense, individual_stamina, move_1, move_2, cp, cp_multiplier, "
            "weight, height, gender, catch_prob_1, catch_prob_2, catch_prob_3, rating_attack, rating_defense, "
            "weather_boosted_condition, last_modified, costume, form) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, "
            "%s, %s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE last_modified=VALUES(last_modified), disappear_time=VALUES(disappear_time)"
        )

        mon_args = []
        for cell in cells:
            for wild_mon in cell["wild_pokemon"]:
                spawnid = int(str(wild_mon["spawnpoint_id"]), 16)
                lat = wild_mon["latitude"]
                lon = wild_mon["longitude"]
                mon_id = wild_mon["pokemon_data"]["id"]
                encounter_id = wild_mon["encounter_id"]

                if encounter_id < 0:
                    encounter_id = encounter_id + 2**64

                mitm_mapper.collect_mon_stats(origin, str(encounter_id))

                now = datetime.utcfromtimestamp(
                    time.time()).strftime("%Y-%m-%d %H:%M:%S")

                # get known spawn end time and feed into despawn time calculation
                getdetspawntime = self._get_detected_endtime(str(spawnid))
                despawn_time_unix = gen_despawn_timestamp(getdetspawntime)
                despawn_time = datetime.utcfromtimestamp(
                    despawn_time_unix).strftime("%Y-%m-%d %H:%M:%S")

                if getdetspawntime is None:
                    origin_logger.debug3(
                        "adding mon (#{}) at {}, {}. Despawns at {} (init) ({})",
                        mon_id, lat, lon, despawn_time, spawnid)
                else:
                    origin_logger.debug3(
                        "adding mon (#{}) at {}, {}. Despawns at {} (non-init) ({})",
                        mon_id, lat, lon, despawn_time, spawnid)

                mon_args.append((
                    encounter_id,
                    spawnid,
                    mon_id,
                    lat,
                    lon,
                    despawn_time,
                    # TODO: consider .get("XXX", None)  # noqa: E800
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    wild_mon["pokemon_data"]["display"]["gender_value"],
                    None,
                    None,
                    None,
                    None,
                    None,
                    wild_mon["pokemon_data"]["display"]
                    ["weather_boosted_value"],
                    now,
                    wild_mon["pokemon_data"]["display"]["costume_value"],
                    wild_mon["pokemon_data"]["display"]["form_value"]))

        self._db_exec.executemany(query_mons, mon_args, commit=True)
        return True
Example #39
def get_time_offset():
    timestamp = time.time()
    date_utc = datetime.utcfromtimestamp(timestamp)
    date_local = datetime.fromtimestamp(timestamp)
    date_delta = date_local - date_utc
    return date_delta
Example #40
    def gyms(self, origin: str, map_proto: dict):
        """
        Update/Insert gyms from a map_proto dict
        """
        origin_logger = get_origin_logger(logger, origin=origin)
        origin_logger.debug3(
            "DbPogoProtoSubmit::gyms called with data received")
        cells = map_proto.get("cells", None)
        if cells is None:
            return False
        gym_args = []
        gym_details_args = []
        now = datetime.utcfromtimestamp(
            time.time()).strftime("%Y-%m-%d %H:%M:%S")

        query_gym = (
            "INSERT INTO gym (gym_id, team_id, guard_pokemon_id, slots_available, enabled, latitude, longitude, "
            "total_cp, is_in_battle, last_modified, last_scanned, is_ex_raid_eligible) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE "
            "guard_pokemon_id=VALUES(guard_pokemon_id), team_id=VALUES(team_id), "
            "slots_available=VALUES(slots_available), last_scanned=VALUES(last_scanned), "
            "last_modified=VALUES(last_modified), latitude=VALUES(latitude), longitude=VALUES(longitude), "
            "is_ex_raid_eligible=VALUES(is_ex_raid_eligible)")
        query_gym_details = (
            "INSERT INTO gymdetails (gym_id, name, url, last_scanned) "
            "VALUES (%s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE last_scanned=VALUES(last_scanned), "
            "url=IF(VALUES(url) IS NOT NULL AND VALUES(url) <> '', VALUES(url), url)"
        )

        for cell in cells:
            for gym in cell["forts"]:
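                # fort type 0 is a gym in these protos (other types, e.g.
                # pokestops, are skipped) -- an assumption from context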
                if gym["type"] == 0:
                    guard_pokemon_id = gym["gym_details"]["guard_pokemon"]
                    gymid = gym["id"]
                    team_id = gym["gym_details"]["owned_by_team"]
                    latitude = gym["latitude"]
                    longitude = gym["longitude"]
                    slots_available = gym["gym_details"]["slots_available"]
                    last_modified_ts = gym["last_modified_timestamp_ms"] / 1000
                    last_modified = datetime.utcfromtimestamp(
                        last_modified_ts).strftime("%Y-%m-%d %H:%M:%S")
                    is_ex_raid_eligible = gym["gym_details"][
                        "is_ex_raid_eligible"]

                    gym_args.append((
                        gymid,
                        team_id,
                        guard_pokemon_id,
                        slots_available,
                        1,  # enabled
                        latitude,
                        longitude,
                        0,  # total CP
                        0,  # is_in_battle
                        last_modified,  # last_modified
                        now,  # last_scanned
                        is_ex_raid_eligible))

                    gym_details_args.append(
                        (gymid, "unknown", gym["image_url"], now))
        self._db_exec.executemany(query_gym, gym_args, commit=True)
        self._db_exec.executemany(query_gym_details,
                                  gym_details_args,
                                  commit=True)
        origin_logger.debug3("submit_gyms done")
        return True
Example #41
0
                                             body=query_data).execute()

        sogs = []
        timestamps = []
        lats = []
        lons = []

        print('Query Results:')
        if 'rows' in query_response:
            for row in query_response['rows']:
                #print(row['f'][0]['v'])
                lat = round(float(row['f'][0]['v']), 5)
                lon = round(float(row['f'][1]['v']), 5)
                sog = round(float(row['f'][3]['v']), 1)
                t = int(float(row['f'][2]['v']))
                timestamp = datetime.utcfromtimestamp(t)
                sogs.append(sog)
                lats.append(lat)
                lons.append(lon)
                timestamps.append(timestamp)
                #print('\t'.join(field['v'] for field in row['f']))

    except HttpError as err:
        print('Error: {}'.format(err.content))
        raise err

    for y in year_range:
        for m in quarter_range:
            m_lats = []
            m_lons = []
            m_sogs = []
Example #42
0
    def mon_iv(self, origin: str, timestamp: float, encounter_proto: dict,
               mitm_mapper):
        """
        Update/Insert a mon with IVs
        """
        origin_logger = get_origin_logger(logger, origin=origin)
        wild_pokemon = encounter_proto.get("wild_pokemon", None)
        if wild_pokemon is None or wild_pokemon.get(
                "encounter_id",
                0) == 0 or not str(wild_pokemon["spawnpoint_id"]):
            return

        origin_logger.debug3("Updating IV sent for encounter at {}", timestamp)

        now = datetime.utcfromtimestamp(
            time.time()).strftime("%Y-%m-%d %H:%M:%S")

        spawnid = int(str(wild_pokemon["spawnpoint_id"]), 16)

        getdetspawntime = self._get_detected_endtime(str(spawnid))
        despawn_time_unix = gen_despawn_timestamp(getdetspawntime)
        despawn_time = datetime.utcfromtimestamp(despawn_time_unix).strftime(
            "%Y-%m-%d %H:%M:%S")

        latitude = wild_pokemon.get("latitude")
        longitude = wild_pokemon.get("longitude")
        pokemon_data = wild_pokemon.get("pokemon_data")
        encounter_id = wild_pokemon["encounter_id"]
        shiny = wild_pokemon["pokemon_data"]["display"].get("is_shiny", 0)

        if encounter_id < 0:
            encounter_id = encounter_id + 2**64

        mitm_mapper.collect_mon_iv_stats(origin, encounter_id, int(shiny))

        if getdetspawntime is None:
            origin_logger.debug3(
                "updating IV mon #{} at {}, {}. Despawning at {} (init)",
                pokemon_data["id"], latitude, longitude, despawn_time)
        else:
            origin_logger.debug3(
                "updating IV mon #{} at {}, {}. Despawning at {} (non-init)",
                pokemon_data["id"], latitude, longitude, despawn_time)

        capture_probability = encounter_proto.get("capture_probability")
        capture_probability_list = capture_probability.get(
            "capture_probability_list")
        if capture_probability_list is not None:
            capture_probability_list = capture_probability_list.replace(
                "[", "").replace("]", "").split(",")

        pokemon_display = pokemon_data.get("display", {})

        # ditto detector
        if is_mon_ditto(origin_logger, pokemon_data):
            # mon must be a ditto :D
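            # hardcoded Ditto values (assumed from context): dex id 132,
            # genderless (3), Transform (242) / Struggle (133), default form 0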
            mon_id = 132
            gender = 3
            move_1 = 242
            move_2 = 133
            form = 0
        else:
            mon_id = pokemon_data.get("id")
            gender = pokemon_display.get("gender_value", None)
            move_1 = pokemon_data.get("move_1")
            move_2 = pokemon_data.get("move_2")
            form = pokemon_display.get("form_value", None)

        query = (
            "INSERT INTO pokemon (encounter_id, spawnpoint_id, pokemon_id, latitude, longitude, disappear_time, "
            "individual_attack, individual_defense, individual_stamina, move_1, move_2, cp, cp_multiplier, "
            "weight, height, gender, catch_prob_1, catch_prob_2, catch_prob_3, rating_attack, rating_defense, "
            "weather_boosted_condition, last_modified, costume, form) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, "
            "%s, %s, %s, %s, %s) "
            "ON DUPLICATE KEY UPDATE last_modified=VALUES(last_modified), disappear_time=VALUES(disappear_time), "
            "individual_attack=VALUES(individual_attack), individual_defense=VALUES(individual_defense), "
            "individual_stamina=VALUES(individual_stamina), move_1=VALUES(move_1), move_2=VALUES(move_2), "
            "cp=VALUES(cp), cp_multiplier=VALUES(cp_multiplier), weight=VALUES(weight), height=VALUES(height), "
            "gender=VALUES(gender), catch_prob_1=VALUES(catch_prob_1), catch_prob_2=VALUES(catch_prob_2), "
            "catch_prob_3=VALUES(catch_prob_3), rating_attack=VALUES(rating_attack), "
            "rating_defense=VALUES(rating_defense), weather_boosted_condition=VALUES(weather_boosted_condition), "
            "costume=VALUES(costume), form=VALUES(form), pokemon_id=VALUES(pokemon_id)"
        )
        insert_values = (encounter_id, spawnid, mon_id, latitude, longitude,
                         despawn_time, pokemon_data.get("individual_attack"),
                         pokemon_data.get("individual_defense"),
                         pokemon_data.get("individual_stamina"), move_1,
                         move_2, pokemon_data.get("cp"),
                         pokemon_data.get("cp_multiplier"),
                         pokemon_data.get("weight"),
                         pokemon_data.get("height"), gender,
                         float(capture_probability_list[0]),
                         float(capture_probability_list[1]),
                         float(capture_probability_list[2]), None, None,
                         pokemon_display.get('weather_boosted_value',
                                             None), now,
                         pokemon_display.get("costume_value", None), form)

        self._db_exec.execute(query, insert_values, commit=True)
        origin_logger.debug3("Done updating mon in DB")
        return True
Example #43
0
import argparse
import csv
import json
import math
from datetime import datetime

# The snippet begins mid-call; the parser setup below is reconstructed from
# the parallel '--output' argument.
parser = argparse.ArgumentParser()
parser.add_argument('-i',
                    '--input',
                    action="store",
                    help='Filepath to location history .json file.',
                    required=True)
parser.add_argument('-o',
                    '--output',
                    action="store",
                    help='Filepath to write output .csv file.',
                    required=True)

args = parser.parse_args()
with open(args.input) as infile:
    data = json.load(infile)
locations = data['locations']
with open(args.output, 'w') as csvfile:
    filewriter = csv.writer(csvfile,
                            delimiter=',',
                            quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)
    filewriter.writerow(['timestamp', 'latitude', 'longitude'])
    for l, location in enumerate(locations):
        timestamp = datetime.utcfromtimestamp(
            int(location['timestampMs']) / 1000)
        filewriter.writerow([
            timestamp,
            float(location['latitudeE7']) / math.pow(10, 7),
            float(location['longitudeE7']) / math.pow(10, 7)
        ])

print('Done! Wrote ' + str(len(locations)) + ' points.')
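
# Example output row (illustrative values only): each CSV line looks like
# 2015-04-19 13:59:23,52.37403,4.88969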
Example #44
0
# Imports assumed from context; the project-local config module `cf` is
# referenced below but its import path is not shown in the source.
import time
from datetime import datetime

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from joblib import load
from sklearn.metrics import (accuracy_score, average_precision_score,
                             balanced_accuracy_score, confusion_matrix,
                             precision_recall_curve)
from sklearn.preprocessing import StandardScaler, label_binarize


def a():
    model = load('../models/SVM_EEG.joblib')
    data = pd.read_csv(cf.base_dir+cf.prepared_data_imagery_V)
    X = data.drop(['0'], axis=1)
    y = data[['0']]  # .values.ravel()
    X = np.c_[X]
    print(y.shape)

    # Feature Scaling
    StdScaler = StandardScaler()
    X_scaled = StdScaler.fit_transform(X)

    Y_bin = label_binarize(y, classes=[0, 1, 2])
    # was y.shape[1] (always 1 here); the PR loop below needs one entry per class
    n_classes = Y_bin.shape[1]
    # X_Train, x_test, Y_Train, y_test = train_test_split(X_scaled, Y, test_size=0.99, random_state=0)

    print('Prediction started')
    start_time = time.time()  # Time counter
    print(" Started at ", datetime.utcfromtimestamp(int(time.time())).strftime('%Y-%m-%d %H:%M:%S'))
    pred = model.predict_proba(X_scaled)
    p = model.predict(X_scaled)
    precision = dict()
    recall = dict()
    average_precision = dict()
    for i in range(n_classes):
        precision[i], recall[i], _ = precision_recall_curve(Y_bin[:, i],
                                                            pred[:, i])
        average_precision[i] = average_precision_score(Y_bin[:, i], pred[:, i])

    # A "micro-average": quantifying score on all classes jointly
    precision["micro"], recall["micro"], _ = precision_recall_curve(Y_bin.ravel(),
                                                                    pred.ravel())
    average_precision["micro"] = average_precision_score(Y_bin, pred,
                                                         average="micro")
    print('Average precision score, micro-averaged over all classes: {0:0.2f}'
          .format(average_precision["micro"]))

    plt.figure()
    plt.step(recall['micro'], precision['micro'], color='b', alpha=0.2,
             where='post')
    plt.fill_between(recall["micro"], precision["micro"], alpha=0.2, color='b')

    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.ylim([0.0, 1.05])
    plt.xlim([0.0, 1.0])
    plt.title(
        'Average precision score, micro-averaged over all classes: AP={0:0.2f}'.format(average_precision["micro"]))
    plt.savefig('../Plots/Avg_Prec_scoreSVM.png')

    # Confusion matrix
    # labels must match the numeric class values in y/p; the display names
    # are applied via the tick labels instead
    cm = confusion_matrix(y, p, labels=[0, 1, 2])
    names = ['rest(0)', 'left', 'right']
    sns.heatmap(cm, square=True, annot=True, fmt='d', cbar=False,
                xticklabels=names, yticklabels=names)
    # confusion_matrix puts truth on rows and predictions on columns
    plt.xlabel('Predicted')
    plt.ylabel('Truth')
    plt.savefig('../Plots/Conf_Matrix_SVM.png')
    plt.show()
    print("Conf_matrix: ", cm)

    # Accuracy
    accu_percent = accuracy_score(y, p) * 100
    print("Accuracy obtained over the whole test set is %0.6f %% ." % (accu_percent))

    # Balanced Accuracy Score
    blnc = balanced_accuracy_score(y, p) * 100
    print("balanced_accuracy_score: %0.6f %% ." % (blnc))
Example #45
0
    def template(self, event, submission=None):
        """Make a new submission"""
        if self.status != "Started":
            return #no event going on, pretend nothing happened #noleeks
        if event.guild is None or event.channel.id != self.config.event_channel: #ignore users running this in the wrong channel, also prevents non hunters from submitting
            return

        help_message = "<@{}> It seems you're missing parts, the syntax for this command is `+submit <trello link> | <where this ticket should be moved to> | <why it should be moved there and/or new steps>`".format(event.author.id)
        if submission is None:
            #no params given, print help info
            event.msg.reply(help_message)
            return

        parts = submission.split("|")
        if len(parts) < 3:
            #missing things we need
            event.msg.reply(help_message)
            return
        if len(parts) > 3:
            #for some reason they used a | in their report, re-assemble it so we don't void things
            parts[2] = "|".join(parts[2:])

        link = parts[0]
        destination = parts[1]
        info = parts[2]

        #fetch the trello info and validate
        trello_info = TrelloUtils.getCardInfo(event, link)
        error = None
        if trello_info is False:
            #wrong type of link, user already informed, we're done here
            return
        if trello_info is None:
            #no info, non-existent card or from a private board
            error = "<@{}> Unable to fetch info about that card, are you sure it exists? Cause I don't feel like playing hide and seek.".format(event.author.id)
        elif trello_info["idBoard"] not in self.config.boards.keys():
            #not a discord board
            error = "This card is not from one of the discord bug boards, what do you expect me to do with this?"
        elif trello_info['id'] in self.reported_cards.keys():
            #already reported
            report = self.reported_cards[trello_info['id']]
            #hit by sniper?
            timediv = datetime.utcnow() - datetime.utcfromtimestamp(report["report_time"])
            hours, remainder = divmod(int(timediv.total_seconds()), 3600)
            minutes, seconds = divmod(remainder, 60)
            error = "<@{}> Looks like {} beat you to the punch. Better luck next time {}".format(event.author.id, self.participants[str(report["author_id"])], "SNIPED!" if minutes < 2 else "<:dupebutton:341981924010491904>")
        if error is None:
            #all good so far
            board = self.config.boards[trello_info["idBoard"]]
            listname = TrelloUtils.getListInfo(trello_info["idList"])["name"]
            if trello_info["idList"] not in board["lists"]:
                #this list is not valid for this event
                error = "<@{}> This card is in the {} list instead of an event list, thanks for the submission but no thanks.".format(event.author.id, listname)
            elif trello_info["closed"] is True:
                #archived card
                error = "<@{}> _cough cough_ that card has been archived and collected way to much dust for me to do anything with it".format(event.author.id)

        if error is not None:
            #card failed one of the checks, inform user and terminate processing
            event.msg.reply(error)
            return
        else:
            # valid submission, processing...

            message = """
**Board**: {} {}
**Source list**:  {}
**Destination**: {}
**Submitted by**: {}
**Detailed info**: {}
**Trello link**: {}""".format(board["name"], board["emoji"], listname, destination, str(event.author), info,
                                          trello_info["shortUrl"])
            #sanitize the entire thing, no pinging or breaking codeblocks
            message = sanitize.S(message, escape_codeblocks=True)
            if len(message) > 2000:
                #discord only accepts essays up to 2000 characters
                event.msg.reply("<@{}> Sorry, but that report is too long for me to process, would mind removing {} characters? Then everything should be fine again.".format(event.author.id, len(message) - 2000))
                return
            #send the submission and clean input
            dmessage = event.msg.reply(message)
            event.msg.delete()
            # add to tracking
            self.reported_cards[trello_info['id']] = dict(
                author_id=str(event.author.id),
                board=trello_info["idBoard"],
                list=trello_info["idList"],
                message_id=dmessage.id,
                status="Submitted",
                report_time=datetime.utcnow().timestamp()
            )

        if str(event.author.id) not in self.participants:
            #this person has not submitted anything yet, special message
            self.participants[str(event.author.id)] = str(event.author)
            event.msg.reply("<@{}> Achievement get! Successfully submitted your first event entry :tada:".format(event.author.id))
        else:
            event.msg.reply("<@{}> Thanks for your submission!".format(event.author.id))

        self.botlog(event, ":inbox_tray: {} has submitted <https://trello.com/c/{}>".format(str(event.author), trello_info['shortLink']))
        self.save_event_stats()
Example #46
0
    def format_timestamp(cls, time):
        tstamp = datetime.utcfromtimestamp(time)
        return tstamp.strftime(
            "%Y-%m-%dT%H:%M:%S") + ".%03d" % (tstamp.microsecond // 1000) + "Z"
Example #47
0
from dataclasses import dataclass
from datetime import datetime

from .api import call


class StateNameError(BaseException):
    pass


class StateInitializationError(BaseException):
    pass


#############################
# GET from Facebook API
#############################

epoch = datetime.utcfromtimestamp(0)


def unix_time_millis(dt):
    return (dt - epoch).total_seconds() * 1000.0
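
# Usage sketch (an addition, not in the original): one second past the epoch
# is 1000 ms.
assert unix_time_millis(datetime(1970, 1, 1, 0, 0, 1)) == 1000.0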


@dataclass
class BudgetWindow:
    start_date: datetime
    until_date: datetime

    @property
    def start(self):
        return self.start_date.strftime("%Y-%m-%d")
Example #48
0
import re

print('ok')

def name_of_email(addr):
    m = re.match(r'^\<?([a-zA-Z\s]+)>?.*@.+$', addr)
    return m.group(1)

assert name_of_email('<Tom Paris> [email protected]') == 'Tom Paris'
assert name_of_email('*****@*****.**') == 'tom'
print('ok')

from datetime import datetime
now = datetime.now()
print(now)

dt = datetime(2015,4,19,13,59,23)
dtt = dt.timestamp()
ddt = datetime.utcfromtimestamp(dtt)
print(ddt)

cday = datetime.strptime('2015-6-1 18:19:59', '%Y-%m-%d %H:%M:%S')
print(cday)

from datetime import datetime, timezone, timedelta

def to_timestamp(dt_str, tz_str):
    # convert the string to a datetime
    date = datetime.strptime(dt_str, '%Y-%m-%d %H:%M:%S')
    # pull the hour offset out of a tz string like 'UTC+7:00'
    tz = tz_str.split('UTC')[1]
    tz = tz.split(':')[0]
    # force the timezone onto the datetime
Example #49
0
def test_looker_ingest_allow_pattern(pytestconfig, tmp_path, mock_time):
    mocked_client = mock.MagicMock()
    with mock.patch(
        "datahub.ingestion.source.looker.LookerDashboardSource._get_looker_client",
        mocked_client,
    ):
        mocked_client.return_value.all_dashboards.return_value = [Dashboard(id="1")]
        mocked_client.return_value.dashboard.return_value = Dashboard(
            id="1",
            title="foo",
            created_at=datetime.utcfromtimestamp(time.time()),
            description="lorem ipsum",
            dashboard_elements=[
                DashboardElement(
                    id="2",
                    type="",
                    subtitle_text="Some text",
                    query=Query(
                        model="data",
                        view="my_view",
                        dynamic_fields='[{"table_calculation":"calc","label":"foobar","expression":"offset(${my_table.value},1)","value_format":null,"value_format_name":"eur","_kind_hint":"measure","_type_hint":"number"}]',
                    ),
                ),
                DashboardElement(
                    id="10",
                    type="",
                    subtitle_text="Some other text",
                    query=Query(
                        model="bogus data",
                        view="my_view",
                        dynamic_fields='[{"table_calculation":"calc","label":"foobar","expression":"offset(${my_table.value},1)","value_format":null,"value_format_name":"eur","_kind_hint":"measure","_type_hint":"number"}]',
                    ),
                ),
            ],
        )

        test_resources_dir = pytestconfig.rootpath / "tests/integration/looker"

        pipeline = Pipeline.create(
            {
                "run_id": "looker-test",
                "source": {
                    "type": "looker",
                    "config": {
                        "base_url": "https://looker.company.com",
                        "client_id": "foo",
                        "client_secret": "bar",
                        "chart_pattern": {"allow": ["2"]},
                    },
                },
                "sink": {
                    "type": "file",
                    "config": {
                        "filename": f"{tmp_path}/looker_mces.json",
                    },
                },
            }
        )
        pipeline.run()
        pipeline.raise_from_status()

        mce_helpers.check_golden_file(
            pytestconfig,
            output_path=tmp_path / "looker_mces.json",
            golden_path=test_resources_dir / "expected_output.json",
        )
Example #50
0
    #     "humidity": int(data["main"]["humidity"]),
    #     "wind_speed": int(data["wind"]["speed"]),
    #     "wind_direction": int(data["wind"]["deg"]),
    #     "pressure": int(data["main"]["pressure"]),
    #     "weather_severity": weather_severity,
    #     "condition_main": data["weather"][0]["main"].lower(),
    #     "condition_detail": data["weather"][0]["description"],
    #     "weather_code": int(data["weather"][0]["id"]),
    #     "city_id": int(data["id"]),
    #     "city": data["name"],
    #     "region": "new york",
    #     "country": "usa"
    # }

    weather = {
        "timestamp": datetime.utcfromtimestamp(1429488000),
        "temp": 52,
        "high_temp": 56,
        "low_temp": 47,
        "humidity": 90,
        "wind_speed": 6,
        "wind_direction": 0,
        "pressure": 30,
        "weather_severity": 0,
        "condition_main": "fog",
        "condition_detail": "fog and drizzle",
        "weather_code": 0,
        "city_id": 5128581,
        "city": "new york",
        "region": "new york",
        "country": "usa"
Example #51
0
def int_to_iso8601(when):
    return datetime.utcfromtimestamp(when).isoformat()
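
# Usage sketch (an addition, not in the original):
# int_to_iso8601(0) == '1970-01-01T00:00:00'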
Example #52
0
    def _get_timestamp(timestamp):
        if timestamp > 0:
            dt = datetime.utcfromtimestamp(timestamp)
            return dt.strftime("%Y-%m-%d %H:%M")
        else:
            return ''
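
# Usage sketch (an addition; assumes the method is static on its class):
# _get_timestamp(86400) -> '1970-01-02 00:00'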
Example #53
0
def tosecondstimestamp(dt):
    if isinstance(dt, datetime):
        return (dt - datetime.utcfromtimestamp(0)).total_seconds()
    elif isinstance(dt, np.datetime64):
        return dt.astype(int) / 1000000000.
    return dt
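
# Usage sketch (an addition, not in the original): both input forms recover
# the same epoch seconds.
# tosecondstimestamp(datetime(1970, 1, 2)) == 86400.0
# tosecondstimestamp(np.datetime64('1970-01-02', 'ns')) == 86400.0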
Example #54
0
while has_more_comments:
    _ = API.getMediaComments(media_id, max_id=max_id)
    #comments' page come from older to newer, lets preserve desc order in full list
    for c in reversed(API.LastJson['comments']):
        comments.append(c)
    has_more_comments = API.LastJson.get('has_more_comments', False)
    #evaluate stop conditions
    if count and len(comments) >= count:
        comments = comments[:count]
        #stop loop
        has_more_comments = False
        print "stopped by count"
    if until_date:
        older_comment = comments[-1]
        dt = datetime.utcfromtimestamp(older_comment.get('created_at_utc', 0))
        #only check all records if the last is older than stop condition
        if dt <= until_date:  # compare datetimes consistently (was mixing isoformat strings and datetimes)
            #keep comments after until_date
            comments = [
                c for c in comments if datetime.utcfromtimestamp(
                    c.get('created_at_utc', 0)) > until_date
            ]
            #stop loop
            has_more_comments = False
            print "stopped by until_date"
    #next page
    if has_more_comments:
        max_id = API.LastJson.get('next_max_id', '')
        time.sleep(2)
Example #55
0
    def test_eventlet_schedule_now(self):
        scheduler = EventletScheduler(eventlet)
        hub = eventlet.hubs.get_hub()
        diff = scheduler.now - datetime.utcfromtimestamp(hub.clock())
        assert abs(diff) < timedelta(milliseconds=1)
Example #56
0
def split_timestamp_to_dt(sample):
    return np.datetime64(datetime.utcfromtimestamp(
        sample['secs'])) + np.timedelta64(sample['nano'], 'ns')
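
# Usage sketch (an addition, not in the original):
# split_timestamp_to_dt({'secs': 1, 'nano': 500})
#     == np.datetime64('1970-01-01T00:00:01.000000500')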
Example #57
0
import requests
kota = 'surabaya'
apikey = '&appid=3ac69e0b0e0a02c5c49186345f0ba92a'
url = f'http://api.openweathermap.org/data/2.5/weather?q={kota}{apikey}'
data = requests.get(url)
print(data)
print(data.json())
data = data.json()
sunrise = data['sys']['sunrise']

from datetime import datetime
utc = datetime.utcfromtimestamp(int(sunrise))
print(utc)

from dateutil import tz
myzone = tz.gettz('Asia/Jakarta')
print(utc.astimezone(myzone))
Example #58
0
# Get Extents
extents = api.get_full_dataset_extent(platform = platform, product = product)


# <hr>  
# 
# ## <a id="extents">Determine the Extents of the Data</a>  [&#9652;](#top)

# In[3]:

dt = datetime.utcnow()
dt64 = np.datetime64(dt)

ts1 = (dt64 - np.datetime64(min(extents['time'].values))) / np.timedelta64(1, 's')
datetime.utcfromtimestamp(ts1)
ts2 = (dt64 - np.datetime64(max(extents['time'].values))) / np.timedelta64(1, 's')
datetime.utcfromtimestamp(ts2)
latitude_extents = (min(extents['latitude'].values),max(extents['latitude'].values))
longitude_extents = (min(extents['longitude'].values),max(extents['longitude'].values))
time_extents = (ts1, ts2)


# <hr>  
# 
# ## <a id="define_extents">Define the Region to Be Examined</a>  [&#9652;](#top)

# In[4]:


from utils.data_cube_utilities.dc_display_map import display_map
Example #59
0
    async def mirror_msg(self, message):
        author = message.author

        if author.bot:
            return

        if (await self.bot.get_context(message)).prefix is not None:
            return

        channel = message.channel
        mirrored_channels = self.settings.get_mirrored_channels(channel.id)

        if not mirrored_channels:
            return

        last_spoke, last_spoke_timestamp = self.settings.get_last_spoke(channel.id)
        now_time = datetime.utcnow()
        last_spoke_time = datetime.utcfromtimestamp(
            last_spoke_timestamp) if last_spoke_timestamp else now_time
        attribution_required = last_spoke != author.id
        attribution_required |= (now_time - last_spoke_time).total_seconds() > ATTRIBUTION_TIME_SECONDS
        self.settings.set_last_spoke(channel.id, author.id)

        attachment_bytes = None
        filename = None

        if message.attachments:
            # If we know we're copying a message and that message has an attachment,
            # pre download it and reuse it for every upload.
            attachment_bytes = [(BytesIO(await attachment.read()), attachment.filename)
                                for attachment in message.attachments
                                if hasattr(attachment, 'url') and hasattr(attachment, 'filename')]

        if await self.config.channel(message.channel).multiedit():
            await message.delete()
            idmess = await channel.send("Pending...")
            attachments = message.attachments
            try:
                message = await channel.send(message.content,
                                             files=[await a.to_file() for a in attachments])
            except discord.HTTPException:
                try:
                    message = await channel.send(content=message.content)
                    for a in attachments:
                        await channel.send(file=await a.to_file())
                except discord.HTTPException:
                    if message.content:
                        message = await channel.send(message.content)
                    await channel.send(
                        f"<{author.mention} File too large for this channel. Other attachments not shown>")

            await idmess.edit(content=str(message.id))

        for dest_channel_id in mirrored_channels:
            dest_channel = self.bot.get_channel(dest_channel_id)
            if not dest_channel:
                continue
            try:
                if attribution_required:
                    msg = self.makeheader(message, author)
                    await dest_channel.send(msg)

                fmessage = await self.mformat(message.content, message.channel, dest_channel)

                if attachment_bytes:
                    try:
                        [b.seek(0) for b, fn in attachment_bytes]
                        dest_message = await dest_channel.send(
                            files=[discord.File(b, fn) for b, fn in attachment_bytes],
                            content=fmessage)
                    except discord.HTTPException:
                        try:
                            [b.seek(0) for b, fn in attachment_bytes]
                            dest_message = await dest_channel.send(file=discord.File(*attachment_bytes[0]),
                                                                   content=fmessage)
                            for b, fn in attachment_bytes[1:]:
                                await dest_channel.send(file=discord.File(b, fn))
                        except discord.HTTPException:
                            dest_message = await dest_channel.send(fmessage)
                            await dest_channel.send("<File too large to attach>")
                elif message.content:
                    dest_message = await dest_channel.send(fmessage)
                else:
                    logger.warning('Failed to mirror message from {}: no action to take'.format(channel.id))
                    continue

                self.settings.add_mirrored_message(
                    channel.id, message.id, dest_channel.id, dest_message.id)
            except discord.Forbidden:
                if dest_channel.guild.owner:
                    try:
                        notify = ("Hi, {1.guild.owner}!  This is an automated message from the Tsubaki team to let"
                                  " you know that your server, {1.guild.name}, has been configured to mirror"
                                  " messages from {0.name} (from {0.guild.name}) to {1.name}, but your channel"
                                  " doesn't give me manage message permissions!  Please do make sure to allow"
                                  " me permissions to send messages, embed links, and attach files!  It's also"
                                  " okay to turn off message mirroring from your channel.  If you need help, contact"
                                  " us via `{2}feedback`!"
                                  "").format(channel, dest_channel, (await self.bot.get_valid_prefixes())[0])

                        fctx = await self.bot.get_context(message)
                        fctx.send = dest_channel.guild.owner.send
                        fctx.history = dest_channel.guild.owner.history
                        await tsutils.send_repeated_consecutive_messages(fctx, notify)
                    except Exception:
                        logger.exception("Owner message failed.")
            except Exception as ex:
                logger.exception(
                    'Failed to mirror message from {} to {}: {}'.format(channel.id, dest_channel_id, str(ex)))

        if attachment_bytes:
            [b.close() for b, fn in attachment_bytes]
Example #60
0
    def create():
        return xs.pipe(
            ops.skip_until_with_time(datetime.utcfromtimestamp(215)),
            ops.skip_until_with_time(datetime.utcfromtimestamp(230)),
        )