Example #1
    def init_skill_in_training(self):
        from django.utils.timezone import utc
        from eve.views import char_skill_training
        from datetime import datetime

        skill_train = char_skill_training(self.account.key_id, self.account.v_code, self.character_id)
        if skill_train.skillInTraining:
            CharacterSkillTraining.objects.create(
                char_name=self,
                skill_training=True,
                skill_type=Skill.objects.get(skill_id=skill_train.trainingTypeID),
                training_to_level=skill_train.trainingToLevel,
                training_start=datetime.utcfromtimestamp(skill_train.trainingStartTime).replace(tzinfo=utc),
                training_finish=datetime.utcfromtimestamp(skill_train.trainingEndTime).replace(tzinfo=utc),
                current_tq_time=datetime.utcfromtimestamp(skill_train.currentTQTime.data).replace(tzinfo=utc),
                start_sp=skill_train.trainingStartSP,
                destination_sp=skill_train.trainingDestinationSP,
            )
        else:
            CharacterSkillTraining.objects.create(
                char_name=self,
                skill_training=False,
                skill_type=None,
                training_to_level=None,
                training_start=None,
                training_finish=None,
                current_tq_time=None,
                start_sp=None,
                destination_sp=None,
            )
Example #2
    def update(self):
        from eve.views import char_skill_training
        from django.utils.timezone import utc
        from datetime import datetime

        training = char_skill_training(
            self.char_name.account.key_id, self.char_name.account.v_code, self.char_name.character_id
        )
        self.skill_training = training.skillInTraining
        if training.skillInTraining:
            self.skill_type = Skill.objects.get(skill_id=training.trainingTypeID)
            self.skill_training = True
            self.training_to_level = training.trainingToLevel
            self.training_start = datetime.utcfromtimestamp(training.trainingStartTime).replace(tzinfo=utc)
            self.training_finish = datetime.utcfromtimestamp(training.trainingEndTime).replace(tzinfo=utc)
            self.current_tq_time = datetime.utcfromtimestamp(training.currentTQTime.data).replace(tzinfo=utc)
            self.start_sp = training.trainingStartSP
            self.destination_sp = training.trainingDestinationSP
        else:
            self.skill_type = None
            self.skill_training = False
            self.training_to_level = None
            self.training_start = None
            self.training_finish = None
            self.current_tq_time = None
            self.start_sp = None
            self.destination_sp = None
        self.save()
Example #3
def rawtime_lite(stamp):
    rtime = []
    hours = datetime.utcfromtimestamp(int(stamp + 3 * 60 * 60)).strftime('%H')
    minutes = datetime.utcfromtimestamp(int(stamp)).strftime('%M')
    seconds = datetime.utcfromtimestamp(int(stamp)).strftime('%S')
    rtime.append(hours)
    rtime.append(minutes)
    rtime.append(seconds)
    return rtime
Example #4
def DatetimeFromValue(ts):
    ''' Utility for dealing with time
    '''
    if isinstance(ts, (int, long)):
        return datetime.utcfromtimestamp(ts)
    elif isinstance(ts, float):
        return datetime.utcfromtimestamp(ts)
    elif not isinstance(ts, datetime):
        raise ValueError('Unknown timestamp value')
    return ts
Example #5
 def device_firmware_str(self, indent):
     host_build_ns = self.host_firmware_build_timestamp
     host_build_s = datetime.utcfromtimestamp(host_build_ns/1000000000) if host_build_ns != None else None
     wifi_build_ns = self.wifi_firmware_build_timestamp
     wifi_build_s = datetime.utcfromtimestamp(wifi_build_ns/1000000000) if wifi_build_ns != None else None
     s = "Host Firmware Build Timestamp: {} ({} UTC)\n".format(host_build_ns, host_build_s)
     s += indent + "Host Firmware Build Version: {}\n".format(self.host_firmware_version)
     s += indent + "Wifi Firmware Build Timestamp: {} ({} UTC)\n".format(wifi_build_ns, wifi_build_s)
     s += indent + "Wifi Firmware Build Version: {}\n".format(self.wifi_firmware_version)
     return s
Example #6
    def createAddRemoveLists(self, dirs):
        ix = {}
        db_set = set()
        current_set = set()
        self.dbfiles = len(db_set)
        filelist = self.getRecursiveFilelist(dirs)
        if self.quit:
            return [],[]
        for path in list(filelist):  # iterate over a copy, since failed paths are removed from filelist below
            try:
                current_set.add((path, datetime.utcfromtimestamp(os.path.getmtime(path))))
            except:
                logging.debug(u"Monitor: Failed To Access '{0}'".format(path))
                filelist.remove(path)
            
        logging.debug(u"Monitor: %d Files Found " % len(current_set))
        try:
            for comic_id, path, md_ts in self.library.getComicPaths():
                db_set.add((path, md_ts))
                ix[path] = comic_id
                if self.quit:
                    return [],[]
        except:
            logging.debug(u"Monitor: Failed To Access '{0}'".format(path))
        to_add = current_set - db_set
        to_remove = db_set - current_set
        logging.debug(u"Monitor: %d Files In Library " % len(db_set))
        logging.debug(u"Monitor: %d Files To Remove" % len(to_remove))
        logging.info(u"Monitor: %d Files To Scan" % len(to_add))


        return [r[0] for r in to_add], [ix[r[0]] for r in to_remove]
Example #7
def XmmFromLogs(t):
    ydate = (datetime.today() - datetime.utcfromtimestamp(0)).days - 1
    filenames = next(os.walk(logsPath))[2]
    for x in filenames:
        ldate = x
        if ldate == str(ydate):
            fpath = x
        #
    #
    yET = json.load(open(ETPath + '/' + fpath))
    tET = [0] * len(yET)
    logs = json.load(open(logsPath + '/' + fpath))
    if debug != 0: print >>sys.stderr, "E: json load %s/%s (%s)" % (logsPath, fpath, logs)
    l = len(t['mmTime'])
    ydur = [-1] * l
    ymm = [-1] * l
    for x in logs:
        if int(x[0]) == pid:
            ydur[safe_int(x[1])] += safe_int(x[2])
        #
    #
    for x in range(l):
        if t['mmTime'][x]:
            ymm[x] = round(safe_float(yET[safe_int(t['crop'][x])]) - ydur[x] / safe_float(t['mmTime'][x]), 4) * -1
            tET[int(t['crop'][x])] = ymm[x]
        else:
            ymm[x] = 0
        #
    #
    return (ymm, tET)
Example #8
 def checkIfRemovedOrModified(self, comic, pathlist):
     remove = False
     
     def inFolderlist(filepath, pathlist):
         for p in pathlist:
             if p in filepath:
                 return True
         return False
     
     if not (os.path.exists(comic.path)):
         # file is missing, remove it from the comic table, add it to deleted table
         logging.debug(u"Removing missing {0}".format(comic.path))
         remove = True
     elif not inFolderlist(comic.path, pathlist):
         logging.debug(u"Removing unwanted {0}".format(comic.path))
         remove = True
     else:
         # file exists.  check the mod date.
         # if it's been modified, remove it, and it'll be re-added
         #curr = datetime.datetime.fromtimestamp(os.path.getmtime(comic.path))
         curr = datetime.utcfromtimestamp(os.path.getmtime(comic.path))
         prev = comic.mod_ts
         if curr != prev:
             logging.debug(u"Removed modifed {0}".format(comic.path))
             remove = True
        
     return remove
Example #9
def main(argv):

    # Overhead to manage command line opts and config file
    p = getCmdLineParser()
    args = p.parse_args()
    cfg.read(args.config_file)

    # Get the logger going
    glogname = cfg.get('logging', 'logName')
    rightNow = time.strftime("%Y%m%d%H%M%S")
    logger = initLog(rightNow)
    logger.info('Starting Run: '+time.strftime("%Y%m%d%H%M%S")+'  =========================')

    startDate = datetime.strptime(cfg.get('coverage', 'startTime'), '%Y-%m-%d')
    t1 = unix_time_millis(startDate)
    qid = cfg.get('coverage', 'qid')
    table = cfg.get('coverage', 'table')

    client = RiakClient(host=cfg.get('riak', 'ip'), pb_port=int(cfg.get('riak', 'port')))
    epoch = datetime.utcfromtimestamp(0)

    counter = 0
    for pid in range(int(cfg.get('coverage', 'startID')), int(cfg.get('coverage', 'numIDs'))):
        for dayOffset in range(1, int(cfg.get('coverage', 'numDays'))+1):
            counter = counter +1
            t2 = unix_time_millis(startDate + timedelta(days=dayOffset))
            startTime = time.time()
            coverage = getCoverage(client, table, pid, qid, t1, t2)
            duration = round((time.time() - startTime),3)
            if (dayOffset-1)*2 == len(coverage["coverage"]):
                pass1 = "PASS!"
            else:
                pass1 = "FAIL!"
            results = "Test #{counter}: ID: {x}: total: {dur1}s, riak: {dur2}s, numdays: {numDays}, result: {result}".format(counter=counter, x=pid, dur1 = duration, dur2 = coverage["duration"], numDays = dayOffset, result = pass1)
            logger.info(results)
Example #10
def parse_unixtime(u):
    '''
    recognize unix time stamp e.g. 1294989360
    to a datetime object.
    '''
    from datetime import datetime
    return datetime.utcfromtimestamp(u)
Example #11
    def process_response(self, request, response):
        if not self.internal and request.facebook.session_key and request.facebook.uid:
            request.session["facebook_session_key"] = request.facebook.session_key
            request.session["facebook_user_id"] = request.facebook.uid

            if request.facebook.session_key_expires:
                expiry = datetime.datetime.fromtimestamp(request.facebook.session_key_expires)
                request.session.set_expiry(expiry)

        try:
            fb = request.facebook
        except:
            return response

        if not fb.is_session_from_cookie:
            # Make sure the browser accepts our session cookies inside an Iframe
            response["P3P"] = 'CP="NOI DSP COR NID ADMa OPTa OUR NOR"'
            fb_cookies = {"expires": fb.session_key_expires, "session_key": fb.session_key, "user": fb.uid}

            expire_time = None
            if fb.session_key_expires:
                expire_time = datetime.utcfromtimestamp(fb.session_key_expires)

            for k in fb_cookies:
                response.set_cookie(self.api_key + "_" + k, fb_cookies[k], expires=expire_time)
            response.set_cookie(self.api_key, fb._hash_args(fb_cookies), expires=expire_time)

        return response
Example #12
    def _createQueryFrame(uri, primary_access_chain, expiry, expiry_delta,
                         elaborate_pac, unpack, auto_chain, routing_objects):
        seq_num = Frame.generateSequenceNumber()
        frame = Frame("quer", seq_num)
        frame.addKVPair("uri", uri)

        if primary_access_chain is not None:
            frame.addKVPair("primary_access_chain", primary_access_chain)

        if expiry is not None:
            expiry_time = datetime.utcfromtimestamp(expiry)
            frame.addKVPair("expiry", _utcToRfc3339(expiry_time))
        if expiry_delta is not None:
            frame.addKVPair("expirydelta", "{0}ms".format(expiry_delta))

        if elaborate_pac is not None:
            if elaborate_pac.lower() == "full":
                frame.addKVPair("elaborate_pac", "full")
            else:
                frame.addKVPair("elaborate_pac", "partial")

        if unpack:
            frame.addKVPair("unpack", "true")
        else:
            frame.addKVPair("unpack", "false")

        if auto_chain:
            frame.addKVPair("autochain", "true")

        if routing_objects is not None:
            frame.addRoutingObjects(routing_objects)

        return frame
Example #13
def set_attributes(items, type_dict):
    print "Clearing old tags."
    tags = get_full_taglist()
    for tag in tags:
        print "Killing %s" % tag
        tag.delete()
    NAME = 0
    DESC = 1
    CAT = 2
    for tag in tags_dict.values():
        category = 'object_%s' % tag[CAT].strip().lower()
        new_tag = Tag(db_key=tag[NAME], db_category=category, db_data=tag[DESC])
        new_tag.save()
        
    for item in items:
        obj = get_from_ref(item["db"])
        if obj:
            print "Setting attributes for " + item["name"] + ' (#'+ str(item['db']) +')'
            assign_attributes(obj, item["props"])
            if 'owner' in item:
                obj.db.owner = get_from_ref(item["owner"])
            else:
                # Must be a player. They own themselves.
                obj.db.owner = [get_from_ref(item["db"]),int(time.time())]
            obj.get_attribute_obj('owner').locks.add('attrread: perm(Immortals);attredit: perm(Immortals)')
            local_tz = pytz.timezone("America/Chicago") 
            utc_dt = datetime.utcfromtimestamp(item["createdtime"]).replace(tzinfo=pytz.utc)
            obj.dbobj.db_date_created = utc_dt

            if "home" in item:
                obj.home = get_from_ref(item["home"])
Example #14
def make_player(item):
    """Create a character/user/player"""
    permissions = ev.settings.PERMISSION_PLAYER_DEFAULT
    typeclass = ev.settings.BASE_PLAYER_TYPECLASS
    # Create the character.
    if item["db"] == 1:
        superuser = True
        email = ['*****@*****.**']
    else:
        superuser = False
        # Email is special.
        email = [ prop['propval'] for prop in item['props'] if prop['propname'] == '@/email']
    try:
        email = email[0]
        print email
        is_active = True
    except:
        email=None
        is_active = False
    player = ev.create_player(item["name"], email, "testpass",
                                typeclass=typeclass,
                                permissions=permissions,
				is_superuser = superuser)
    character = ev.create_object(typeclass=settings.BASE_CHARACTER_TYPECLASS, key=item["name"],
                                 permissions=permissions)
    local_tz = pytz.timezone("America/Chicago")
    utc_dt = datetime.utcfromtimestamp(item["createdtime"]).replace(tzinfo=pytz.utc)
    player.dbobj.db_date_created = utc_dt
    character.dbobj.db_date_created = utc_dt
    player.dbobj.is_active = is_active
    character.db.spirit = player
    player.db.avatar = character
    print character.name + " was born!"
    permissions_list = { "MAGE" : "PlayerHelpers", "WIZARD" :  "Wizards",
                        "ARCHWIZARD" : "Immortals", "BOY" : "Immortals" }
    for flag in item["flags"]:
        if flag in permissions_list:
	    # These properties are special. When we assign things to them, they
	    # trigger effects down the line. They're meant to be set and pulled
	    # from, but not modified directly, which is why we don't append to 
	    # the property. For more info, see
	    # http://docs.python.org/reference/datamodel.html#object.__setattr__
	    permission = character.permissions
	    permission.append(permissions_list[flag])
            character.permissions = permission
	    # The Player objects should have these permissions, too.
	    permission = player.permissions
	    permission.append(permissions_list[flag])
	    player.permissions = permission
            print character.name + " was added to group '" + permissions_list[flag] + "'."
    if "WIZARD" in item["flags"] or "ARCHWIZARD" in item["flags"] or "BOY" in item["flags"]:
        character.locks.add('delete:superuser()')
        player.is_staff = True
        player.save()
    if "STAFF" in item["flags"]:
        player.is_staff = True
        player.save()
    repassword_player(player, item["password"])
    character.save()
    player.save()
Example #15
    def process_response(self, request, response):
        if not self.internal and request.facebook.session_key and request.facebook.uid:
            request.session['facebook_session_key'] = request.facebook.session_key
            request.session['facebook_user_id'] = request.facebook.uid

            if request.facebook.session_key_expires:
                expiry = datetime.datetime.fromtimestamp(request.facebook.session_key_expires)
                request.session.set_expiry(expiry)

        try:
            fb = request.facebook

            if not fb.is_session_from_cookie:
                # Make sure the browser accepts our session cookies inside an Iframe
                response['P3P'] = 'CP="NOI DSP COR NID ADMa OPTa OUR NOR"'
                fb_cookies = {
                    'expires': fb.session_key_expires,
                    'session_key': fb.session_key,
                    'user': fb.uid,
                }
    
                expire_time = datetime.utcfromtimestamp(fb.session_key_expires)
    
                for k in fb_cookies:
                    response.set_cookie(self.api_key + '_' + k, fb_cookies[k], expires=expire_time )
                response.set_cookie(self.api_key , fb._hash_args(fb_cookies), expires=expire_time )

        finally:
            return response
            

Example #16
    def _createMakeEntityFrame(contact, comment, expiry, expiry_delta, revokers,
                               omit_creation_date):
        seq_num = Frame.generateSequenceNumber()
        frame = Frame("make", seq_num)

        if contact is not None:
            frame.addKVPair("contact", contact)
        if comment is not None:
            frame.addKVPair("comment", comment)

        if expiry is not None:
            expiry_time = datetime.utcfromtimestamp(expiry)
            frame.addKVPair("expiry", _utfToRfc3339(expiry_time))
        if expiry_delta is not None:
            frame.addKVPair("expirydelta", "{0}ms".format(expiry_delta))

        if revokers is not None:
            for revoker in revokers:
                frame.addKVPair("revoker", revoker)
        if omit_creation_date:
            frame.addKVPair("omitcreationdate", "true")
        else:
            frame.addKVPair("omitcreationdate", "false")

        return frame
Example #17
def getEvents(magnitude=1.0,significance=0,product='dyfi',lastUpdate=2678000):
    """
    Return a list of earthquake event urls that meet the conditions set above.
    Inputs:

     * magnitude: Event magnitude (OR condition with significance)
     * significance: Event significance (integer score assembled from weighting magnitude, PAGER alert level, Max MMI, etc.)
     * lastUpdate: Only retrieve events that have been updated in the past lastUpdate minutes.
     * product: Only retrieve events that have this product type associated with them.
    """
    fh = urllib2.urlopen(FEEDURL)
    data = fh.read()
    fh.close()
    jdict = json.loads(data)
    eventurls = []
    tnow = datetime.utcnow()
    for event in jdict['features']:
        eurl = event['properties']['detail']
        emag = event['properties']['mag']
        esig = event['properties']['sig']
        etypes = event['properties']['types'].split(',')[1:-1]
        eupdate = datetime.utcfromtimestamp(event['properties']['updated']/1000)
        hasproduct = product in etypes
        if not hasproduct:
            continue
        if eupdate < tnow - timedelta(seconds=60*lastUpdate):
            continue
        eventurls.append(eurl)
    return eventurls
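A minimal call sketch for getEvents above (the keyword values are arbitrary, and FEEDURL plus the imports are assumed from the snippet). Note that as written the function only filters on product type and update time:

# Hypothetical usage of getEvents(); values chosen only for illustration.
event_urls = getEvents(magnitude=5.0, significance=600, product='dyfi', lastUpdate=120)
for url in event_urls:
    print(url)  # detail-feed URL of an event updated in the last 120 minutes that has a 'dyfi' product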
Example #18
    def unserialize(cls, string):
        """Unserializes from a string.

        :param string: A string created by :meth:`serialize`.
        """
        id_s, created_s = string.split('_')
        return cls(int(id_s, 16),
                   datetime.utcfromtimestamp(int(created_s, 16)))
Example #19
 def _epoch(time):
   """milliseconds since 12AM Jan 1, 1970"""
   if isinstance(time, float) or isinstance(time, int):
     return time
   elif isinstance(time, datetime.datetime):
     start = datetime.datetime.utcfromtimestamp(0)  # qualify with the module name, matching the isinstance check above
     delta = time - start
     return int(delta.total_seconds() * 1000)
Example #20
def get_time_since_modification(filename):
    '''
    Returns the timedelta object giving the time since the last modification
    of the file given by filename.
    '''
    m_time_stamp = int(os.stat(filename).st_mtime)
    m_utc_date_time = datetime.utcfromtimestamp(m_time_stamp)
    return datetime.utcnow() - m_utc_date_time
Example #21
def update_payment_attempts(sender, customer, attempt, payment, **kwargs):
    try:
        user_profile = UserProfile.objects.get(customer_id=customer)
        user_profile.payment_attempts = int(attempt)
        user_profile.last_payment_attempt = datetime.utcfromtimestamp(payment['time'])
        user_profile.save()
    except UserProfile.DoesNotExist:
        pass
Example #22
def getutfoffset():
    import time
    from datetime import datetime

    ts = time.time()
    utc_offset = total_seconds(datetime.fromtimestamp(ts) - datetime.utcfromtimestamp(ts)) / 60

    return int(utc_offset)
Example #23
def rawtime(stamp):
    rtime = []
    weekday = datetime.utcfromtimestamp(int(stamp + 3 * 60 * 60)).strftime('%a')
    if weekday == 'Mon':
        weekday = 'Пн'
    elif weekday == 'Tue':
        weekday = 'Вт'
    elif weekday == 'Wed':
        weekday = 'Ср'
    elif weekday == 'Thu':
        weekday = 'Чт'
    elif weekday == 'Fri':
        weekday = 'Пт'
    elif weekday == 'Sat':
        weekday = 'Сб'
    elif weekday == 'Sun':
        weekday = 'Вс'
    day = datetime.utcfromtimestamp(int(stamp + 3 * 60 * 60)).strftime('%d')
    month = datetime.utcfromtimestamp(int(stamp + 3 * 60 * 60)).strftime('%m')
    year = datetime.utcfromtimestamp(int(stamp + 3 * 60 * 60)).strftime('%Y')
    hours = datetime.utcfromtimestamp(int(stamp + 3 * 60 * 60)).strftime('%H')
    minutes = datetime.utcfromtimestamp(int(stamp)).strftime('%M')
    seconds = datetime.utcfromtimestamp(int(stamp)).strftime('%S')
    rtime.append(weekday)
    rtime.append(day)
    rtime.append(month)
    rtime.append(year)
    rtime.append(hours)
    rtime.append(minutes)
    rtime.append(seconds)
    return rtime
Example #24
    def run(self):
        """ Gets tracking information from the APRS receiver """

        aprsSer = self.APRS.getDevice()

        while(not self.aprsInterrupt):
            ### Read the APRS serial port, and parse the string appropriately                               ###
            # Format:
            # "Callsign">CQ,WIDE1-1,WIDE2-2:!"Lat"N/"Lon"EO000/000/A="Alt"RadBug,23C,982mb,001
            # ###
            try:
                line = str(aprsSer.readline())
                print(line)
                idx = line.find(self.callsign)
                if(idx != -1):
                    line = line[idx:]
                    line = line[line.find("!") + 1:line.find("RadBug")]
                    line = line.split("/")

                    # Get the individual values from the newly created list ###
                    time = datetime.utcfromtimestamp(
                        time.time()).strftime('%H:%M:%S')
                    lat = line[0][0:-1]
                    latDeg = float(lat[0:2])
                    latMin = float(lat[2:])
                    lon = line[1][0:line[1].find("W")]
                    lonDeg = float(lon[0:3])
                    lonMin = float(lon[3:])
                    lat = latDeg + (latMin / 60)
                    lon = -lonDeg - (lonMin / 60)
                    alt = float(line[3][2:])
                    aprsSeconds = float(time.split(
                        ':')[0]) * 3600 + float(time.split(':')[1]) * 60 + float(time.split(':')[2])

                    ### Create a new location object ###
                    try:
                        newLocation = BalloonUpdate(
                            time, aprsSeconds, lat, lon, alt, "APRS", self.mainWindow.groundLat, self.mainWindow.groundLon, self.mainWindow.groundAlt)
                    except:
                        print(
                            "Error creating a new balloon location object from APRS Data")

                    try:
                        # Notify the main GUI of the new location
                        self.aprsNewLocation.emit(newLocation)
                    except Exception, e:
                        print(str(e))
            except:
                print("Error retrieving APRS Data")

        ### Clean Up ###
        try:
            aprsSer.close()         # Close the APRS Serial Port
        except:
            print("Error closing APRS serial port")

        self.aprsInterrupt = False
Example #25
  def testInitFromDatetimeObject(self):
    # Test initializing from a datetime object
    date = datetime(2015, 6, 17, 5, 22, 3)
    self.assertEqual(rdfvalue.RDFDatetime(date).AsDatetime(), date)
    date = datetime.utcfromtimestamp(99999)
    self.assertEqual(rdfvalue.RDFDatetime(date).AsSecondsFromEpoch(), 99999)

    # Test microsecond support
    date = datetime(1970, 1, 1, 0, 0, 0, 1000)
    self.assertEqual(rdfvalue.RDFDatetime(date).AsMicroSecondsFromEpoch(), 1000)
Example #26
def from_timestamp(timestamp, tzone = None):
    '''Returns a datetime.datetime object given a timestamp
    Timezone is optional and supplied as a string (not a timezone object).
    Naive local time is assumed if timezone is not supplied. '''
    if tzone:
        tz = timezone(tzone)
        utc = pytz.utc
        ts = datetime.utcfromtimestamp(timestamp).replace(tzinfo=utc)
        return tz.normalize(ts.astimezone(tz))
    else:
        return datetime.fromtimestamp(timestamp)
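A short usage sketch for from_timestamp above, assuming the snippet's implied imports (import pytz and from pytz import timezone); the timestamp value is arbitrary:

# Hypothetical usage of from_timestamp(); 1294989360 is 2011-01-14 07:16:00 UTC.
print(from_timestamp(1294989360, 'US/Eastern'))  # timezone-aware local time in US/Eastern
print(from_timestamp(1294989360))                # naive local time of the machine running it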
Example #27
def get_photos(mid_lat, mid_lng):
    mongo = pymongo.Connection("grande",27017)
    mongo_db = mongo['tmp_citybeat']
    mongo_collection = mongo_db.photos
    res = []
    repeat_filter = set()
    dates = []
    counts = []
    for p in mongo_collection.find({"mid_lat":mid_lat, "mid_lng":mid_lng}):
        if p['id'] in repeat_filter:
            continue
        repeat_filter.add(p['id'])
        res.append(p)
        dates.append( datetime.utcfromtimestamp(float(p['created_time'])))
        print datetime.utcfromtimestamp(float(p['created_time']))
        counts.append(1)
    ts = Series(counts, index = dates)
    print ts.index
    ts = ts.resample('10Min',how='sum', label='right')
    return ts, res 
Example #28
 def device_time_str(self, resp, indent="  "):
     time = resp.time
     uptime = resp.uptime
     downtime = resp.downtime
     time_s = datetime.utcfromtimestamp(time/1000000000) if time != None else None
     uptime_s = round(nanosec_to_hours(uptime), 2) if uptime != None else None
     downtime_s = round(nanosec_to_hours(downtime), 2) if downtime != None else None
     s = "Current Time: {} ({} UTC)\n".format(time, time_s)
     s += indent + "Uptime (ns): {} ({} hours)\n".format(uptime, uptime_s)
     s += indent + "Last Downtime Duration +/-5s (ns): {} ({} hours)\n".format(downtime, downtime_s)
     return s
Example #29
def parse_time(s):
    try:
        ret = datetime.strptime(s, "%Y-%m-%d %H:%M:%S %Z")
    except ValueError:
        try:
            ret = datetime.strptime(s, "%Y-%m-%d %H:%M:%S")
        except ValueError:
            try:
                ret = datetime.strptime(s, "%Y-%m-%d")
            except ValueError:
                ret = datetime.utcfromtimestamp(float(s))
    return ret
Example #30
 def _parse_entry(field_value, field_type):
     """Stolen directly from pandas/io/gbq.py."""
     if field_value is None or field_value == 'null':
         return None
     if field_type == 'INTEGER' or field_type == 'FLOAT':
         return float(field_value)
     elif field_type == 'TIMESTAMP':
         timestamp = datetime.utcfromtimestamp(float(field_value))
         return np.datetime64(timestamp)
     elif field_type == 'BOOLEAN':
         return field_value == 'true'
     return field_value
Example #31
def unix_time_millis(dt):
    epoch = datetime.utcfromtimestamp(0)
    return int((dt - epoch).total_seconds() * 1000)
Example #32
KERNEL_LOG_TITLE = "KERNEL LOG"
SYSYEM_LOG_TITLE = "SYSTEM LOG"
LAST_KMSG_TITLE = "LAST KMSG"
LAST_LOGCAT_TITLE = "LAST LOGCAT"

SYSTEM_PROPS_TITLE = "SYSTEM PROPERTIES"

TIME_DMESG = "\[\s*(\d+\.\d+)\]"
TIME_LOGCAT = "(\d+)\-(\d+)\s(\d+):(\d+):(\d+\.\d+)"

NATIVE_CRASH_START_PATTERN = "I\sDEBUG\s+:\s\*\*\*\s\*\*\*"
NATIVE_CRASH_PATTERN = "I\sDEBUG\s+:"
JAVA_CRASH_START_PATTERN = "E\sAndroidRuntime:\sFATAL\sEXCEPTION"
JAVA_CRASH_PATTERN = "E\sAndroidRuntime:\s"

EPOCH = datetime.utcfromtimestamp(0)

def init_arguments():
  parser = argparse.ArgumentParser(description='Measures boot time from bugreport.')
  parser.add_argument('-c', '--config', dest='config',
                      default='config.yaml', type=argparse.FileType('r'),
                      help='config file for the tool')
  parser.add_argument('bugreport_file', nargs=1, help='bugreport txt file',
                       type=argparse.FileType('r'))
  parser.add_argument('-n', '--iterate', dest='iterate', type=int, default=1,
                      help='number of time to repeat the measurement', )
  return parser.parse_args()

# Event per reboot, for distinguishing the current boot from the last boot
class Events:
  def __init__(self):
Example #33
        depo = open(pro + prjname + '.txt', 'w', encoding='utf-8')
        depo.write(prjname + '\n')
        depo.write(str(tmpV0) + '\n')
        depo.write(str(tmpS) + '\n')
        depo.write(str(oV0) + '\n')
        depo.write(str(oS) + '\n')
        depo.write(str(uV) + '\n')
        depo.write(v)
        depo.close()

        totalnow = calendar.timegm(time.gmtime())
        localnow = calendar.timegm(datetime.now().timetuple())
        doct = os.path.getmtime(pro + prjname + '.txt')
        tx = totalnow - localnow
        tz = doct - tx
        doc = datetime.utcfromtimestamp(tz).strftime('%Y-%m-%d %H:%M:%S')

        df.close()

        bulk()

    elif event == 'Load previous projects':
        de.close()
        stb = 'prj'
        while stb == 'prj':
            olinda = []
            files = [f for f in os.listdir(pro) if os.path.isfile(pro + f)]
            for a in range(len(files)):
                olinda.append((files[a]).replace('.txt', ''))

            llpp = [[sg.Text('Which project would you like to load?')],
Example #34
def unpackTime(_time):
    return datetime.utcfromtimestamp(int(binascii.hexlify(_time)))
Example #35
def collect_events(helper, ew):

    api_key = helper.get_arg('api_key')
    backfill_days = int(helper.get_arg('backfill_days'))

    run_time = time.time()

    last_ran = helper.get_check_point('last_ran')

    if last_ran is not None:
        since = datetime.utcfromtimestamp(last_ran)
    else:
        since = datetime.now() - timedelta(days=backfill_days)

    helper.log_info("Retrieving subscribed pulses since: %s" % str(since))

    response = helper.send_http_request(
        'https://otx.alienvault.com/api/v1/pulses/subscribed',
        'GET',
        parameters={'modified_since': since},
        headers={'X-OTX-API-KEY': api_key},
        verify=True,
        use_proxy=True)

    response.raise_for_status()

    pulses = response.json()['results']

    pulse_count = 0
    indicator_count = 0
    for pulse in pulses:

        indicators = pulse.pop('indicators', None)

        timeparts = pulse['modified'].split('.')
        time_parsed = utc_to_local(
            datetime.strptime(timeparts[0], "%Y-%m-%dT%H:%M:%S"))
        xtime = time.mktime(time_parsed.timetuple())

        e = helper.new_event(data=json.dumps(pulse),
                             time=xtime,
                             sourcetype="otx:pulse",
                             index=helper.get_output_index(),
                             done=True)
        ew.write_event(e)

        pulse_count = pulse_count + 1

        for indicator in indicators:
            indicator['pulse_id'] = pulse['id']

            timeparts = indicator['created'].split('.')
            time_parsed = utc_to_local(
                datetime.strptime(timeparts[0], "%Y-%m-%dT%H:%M:%S"))
            xtime = time.mktime(time_parsed.timetuple())

            e = helper.new_event(data=json.dumps(indicator),
                                 time=xtime,
                                 sourcetype="otx:indicator",
                                 index=helper.get_output_index(),
                                 done=True)
            ew.write_event(e)

            indicator_count = indicator_count + 1

    helper.log_info("Completed polling. Logged %d pulses and %d indicators." %
                    (pulse_count, indicator_count))

    helper.save_check_point('last_ran', run_time)
Example #36
 def gmt(self, datetime):
   millis = 1288483950000
   ts = millis * 1e-3
   utc_offset = datetime.fromtimestamp(ts) - datetime.utcfromtimestamp(ts)
   gmt = str(utc_offset)
   return gmt[0]
Example #37
def unix_time(dt):
    epoch = datetime.utcfromtimestamp(0)
    td = dt - epoch

    return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / 10 ** 6
Example #38
def dateFormatFunction(dateUTC):
    if dateUTC is not None:
        date = (int(float(dateUTC) + float(TimeZone)))
        return datetime.utcfromtimestamp(date).strftime('%d-%m-%Y %H:%M:%S')
    return ""
Example #39
def getconn(connid):
    # Connect to Elastic and get information about the connection.
    esserver = app.config["esserver"]
    es = Elasticsearch(esserver)
    search = es.search(
        index="*:logstash-*",
        doc_type="doc",
        body={"query": {
            "bool": {
                "must": {
                    "match": {
                        '_id': connid
                    }
                }
            }
        }})
    hits = search['hits']['total']
    if hits > 0:
        for result in search['hits']['hits']:
            # Check for src/dst IP/ports
            if 'source_ip' in result['_source']:
                src = result['_source']['source_ip']
            if 'destination_ip' in result['_source']:
                dst = result['_source']['destination_ip']
            if 'source_port' in result['_source']:
                srcport = result['_source']['source_port']
            if 'destination_port' in result['_source']:
                dstport = result['_source']['destination_port']

            # Check if bro_conn log
            if 'uid' in result['_source']:
                uid = result['_source']['uid']
                if isinstance(uid, list):
                    uid = result['_source']['uid'][0]
                if uid[0] == "C":
                    es_type = "bro_conn"
                    bro_query = uid

            # Check if X509 log
            elif 'id' in result['_source']:
                x509id = result['_source']['id']
                if x509id[0] == "F":
                    es_type = "bro_files"
                    bro_query = x509id

            # Check if Bro files log
            elif 'fuid' in result['_source']:
                fuid = result['_source']['fuid']
                if fuid[0] == "F":
                    es_type = "bro_files"
                    bro_query = fuid

            # If we didn't match any of the above, we will build a query
            # from the info that we have (src/dst ip/port)
            else:
                es_type = "bro_conn"
                bro_query = str(src) + " AND " + str(srcport) + " AND " + str(
                    dst) + " AND " + str(dstport)

            # Get timestamp from original result and format it for search
            estimestamp = datetime.strptime(result['_source']['@timestamp'],
                                            "%Y-%m-%dT%H:%M:%S.%fZ")
            epochtimestamp = mktime(estimestamp.timetuple())
            st = epochtimestamp - 1800
            et = epochtimestamp + 1800
            st_es = st * 1000
            et_es = et * 1000

            # If we have a Bro Files log, lets get set to look for a connection log
            if result['_source']['event_type'] == "bro_files":
                es_type = "bro_conn"
                bro_query = ujson.dumps(
                    result['_source']['uid']).strip('[').strip(']').strip('\"')
            else:
                es_type = None
                bro_query = None
            # Set query string for second search,
            query_string = 'event_type:' + str(es_type) + ' AND ' + str(
                bro_query)
            query = '{"query": {"bool": {"must": [{"query_string": {"query": "' + str(
                query_string
            ) + '","analyze_wildcard": true}},{"range": {"@timestamp":{ "gte": "' + str(
                st_es) + '", "lte": "' + str(
                    et_es) + '", "format": "epoch_millis"}}}]}}}'
            # Search for a Bro connection log
            search = es.search(index="*:logstash-*",
                               doc_type="doc",
                               body=query)
            hits = search['hits']['total']
            if hits > 0:
                for result in search['hits']['hits']:
                    # Build the rest of the query for Steno
                    src = result['_source']['source_ip']
                    dst = result['_source']['destination_ip']
                    srcport = result['_source']['source_port']
                    dstport = result['_source']['destination_port']
                    duration = result['_source']['duration']
                    estimestamp = datetime.strptime(
                        result['_source']['@timestamp'],
                        "%Y-%m-%dT%H:%M:%S.%fZ")
                    epochtimestamp = mktime(estimestamp.timetuple())
                    epochminus = int(epochtimestamp - int(duration) - 120)
                    epochplus = int(epochtimestamp + int(duration) + 120)
                    pcapbefore = datetime.utcfromtimestamp(
                        epochminus).strftime('%Y-%m-%dT%H:%M:%S:%fZ')
                    pcapafter = datetime.utcfromtimestamp(epochplus).strftime(
                        '%Y-%m-%dT%H:%M:%S:%fZ')
                    sensor = result['_source']['sensor_name']
                    stenoquery = "before %s and after %s and host %s and host %s and port %s and port %s" % (
                        pcapbefore, pcapafter, src, dst, srcport, dstport)
                    return [sensor, stenoquery]
                    #print sensor,stenoquery
            else:
                print("No hits for second query!")
    else:
        print('No hits for first query')
Example #40
    def run(self, _length, data_df):

        #used to determine the state of the bot
        state = 0
        #0 ready to buy, 1 ready to sell

        #money at start
        Capital = 500.0

        FirstPrice = 0

        ETH = 0.0

        firstRound = True
        secondRound = True

        Candle0 = None
        Candle1 = None
        Candle2 = None

        #used for performance analysis
        highestGain = 0
        highestLost = 0

        tradesGood = []
        tradesBad = []

        balances = pd.DataFrame(columns=['Capital'])
        price = pd.DataFrame(columns=['Price'])

        for i, Candle0 in data_df.iterrows():

            CurrPrice = float(Candle0["Open"])

            balances = balances.append(
                {'Capital': max(Capital, ETH * CurrPrice)}, ignore_index=True)
            price = price.append({'Price': CurrPrice}, ignore_index=True)

            #takes first price
            if FirstPrice == 0:
                FirstPrice = CurrPrice
                #Capital = CurrPrice

            if firstRound == False and secondRound == False:

                Candle0['Open'] = (float(Candle0['Open']) +
                                   float(Candle0['Close'])) / 2
                Candle0['Close'] = (
                    float(Candle0['Open']) + float(Candle0['Close']) +
                    float(Candle0['Low']) + float(Candle0['High'])) / 4

                if Candle0["Open"] < Candle0["Close"]:
                    Candle0["trend"] = "up"
                else:
                    Candle0["trend"] = "down"

                buyTrigger = (Candle1["trend"] == "up") & (Candle2["trend"]
                                                           == "down")
                sellTrigger = (Candle1["trend"] == "down") & (Candle2["trend"]
                                                              == "up")

                #state 0 -> wants to buy
                if state == 0:
                    #check for MACD crossover
                    if buyTrigger:

                        capitalBefore = Capital
                        stopLoss = Candle1["Low"]

                        state = 1
                        #execute buy
                        ETH = Capital / float(CurrPrice)
                        ETH = ETH * 0.999

                        Capital = 0
                        print "::::::::::::::::::::::::::::::::::::BUY  " + str(
                            CurrPrice) + " " + str(
                                datetime.utcfromtimestamp(Candle0["Open time"])
                                .strftime('%Y-%m-%d %H:%M:%S'))

                #state 1 -> wants to sell
                if state == 1:
                    #check for MACD crossover
                    if sellTrigger:
                        state = 0
                        #execute sell
                        Capital = float(CurrPrice) * ETH
                        Capital = Capital * 0.999  #trading fee of 0.1 % = 1/1000

                        print "::::::::::::::::::::::::::::::::::::SELL " + str(
                            CurrPrice) + " " + str(
                                datetime.utcfromtimestamp(Candle0["Open time"])
                                .strftime('%Y-%m-%d %H:%M:%S'))
                        #check if new best/worst trade
                        if (float(((Capital / capitalBefore) * 100) -
                                  100)) > highestGain:
                            highestGain = float((
                                (Capital / capitalBefore) * 100) - 100)
                        if (float(((Capital / capitalBefore) * 100) -
                                  100)) < highestLost:
                            highestLost = float((
                                (Capital / capitalBefore) * 100) - 100)

                        print "Profit on trade: " + str((
                            (Capital / capitalBefore) * 100) - 100) + "%"
                        if (Capital - capitalBefore >= 0):
                            tradesGood.append(
                                float(((Capital / capitalBefore) * 100) - 100))
                        else:
                            tradesBad.append(
                                float(((Capital / capitalBefore) * 100) - 100))

            Candle2 = Candle1
            Candle1 = Candle0

            if firstRound == True:
                firstRound = False
                Candle2 = Candle0
                Candle2['Open'] = (float(Candle0['Open']) +
                                   float(Candle0['Close'])) / 2
                Candle2['Close'] = (
                    float(Candle0['Open']) + float(Candle0['Close']) +
                    float(Candle0['Low']) + float(Candle0['High'])) / 4

                print "started... -2"

            if firstRound == False and secondRound == True:
                secondRound = False
                Candle1 = Candle0
                Candle1['Open'] = (float(Candle0['Open']) +
                                   float(Candle0['Close'])) / 2
                Candle1['Close'] = (
                    float(Candle0['Open']) + float(Candle0['Close']) +
                    float(Candle0['Low']) + float(Candle0['High'])) / 4

                if Candle1["Open"] < Candle1["Close"]:
                    Candle1["trend"] = "up"
                else:
                    Candle1["trend"] = "down"

        print "Starts with 500, ends with: " + str(CurrPrice * ETH)
        print "Buy-and-Hold strategy ends with: " + str(CurrPrice *
                                                        (500.0 / FirstPrice))
        print "Profit :" + str((((CurrPrice * ETH) / 500.0) * 100) - 100) + "%"
        print "Compared against buy-and-hold :" + str(
            ((CurrPrice * ETH) / (CurrPrice *
                                  (500.0 / FirstPrice))) * 100) + "%"
        print "best trade :" + str(highestGain) + "%"
        print "worst trade :" + str(highestLost) + "%"

        summ = 0
        for l in tradesGood:
            summ += l
        print "Avarage good trade :" + str(summ / len(tradesGood)) + "%"
        print "Number of good trades :" + str(len(tradesGood))
        summ = 0
        for k in tradesBad:
            summ += k
        print "Avarage bad trade :" + str(summ / len(tradesBad)) + "%"
        print "Number of bad trades :" + str(len(tradesBad))

        return balances, price
Example #41
Cincinnati_coordinates = '39.0943,-84.2724'

for x in range(0, loop_duration + 1, interval):
    formatted_date = datetime(Year, Month, Day, Hour, Minute, Seconds)
    timestamp = int(calendar.timegm(formatted_date.timetuple()))
    response = requests.get(
        'https://api.darksky.net/forecast/enter personal key here/enter your coordinates here,'
        + str(timestamp) + '?exclude=daily,flags')
    data = response.json()

    Weather_description = data['currently']['summary']
    Time = data['currently']['time']
    Weather_temperature = data['currently']['temperature']
    Weather_humidity = data['currently']['humidity']

    utc_time = str(datetime.utcfromtimestamp((Time)))
    utc_hour = int(utc_time[11:13])
    local_day = int(utc_time[8:10])
    local_hour = utc_hour - 4
    if (local_hour < 0):
        local_hour += 24
    if local_day < 10:
        local_time = utc_time[0:8] + '0' + str(local_day) + ' ' + str(
            local_hour) + utc_time[13:]
    else:
        local_time = utc_time[0:8] + str(local_day) + ' ' + str(
            local_hour) + utc_time[13:]
    timestamp += (60 * interval)
    Minute += interval
    if (Minute >= 60):
        Minute = (Minute % 60)
Example #42
                  ' ' + save_season_page)

#%% =============================================================================
# LIST ALREADY DONE
# =============================================================================
list_sport = df_all.sport.unique()
for sport in list_sport:
    list_pays = df_all[df_all.sport == sport].pays.unique()
    for pays in list_pays:
        list_ligue = df_all[(df_all.sport == sport)
                            & (df_all.pays == pays)].ligue.unique()
        for ligue in list_ligue:
            df = df_all[(df_all.sport == sport) & (df_all.pays == pays) &
                        (df_all.ligue == ligue)]
            df['date_match'] = df.match_date.apply(lambda x: (
                datetime.utcfromtimestamp(x).strftime('%Y-%m-%d %H:%M:%S')))
            df.reset_index(drop=True, inplace=True)
            df.date_match = pd.to_datetime(df.date_match)
            df['season'] = df.date_match.apply(lambda x: x.year
                                               if x.month >= 7 else x.year - 1)
            df['season_week'] = df.date_match.apply(
                lambda x: x.week + 52 if x.week <= 25 else x.week)
            list_season = df.season.unique()
            for season in list_season:
                print sport, pays, ligue, season

# =============================================================================
#
# =============================================================================
for item in list_ligue_diff:
    sport = item[0]
Example #43



The datetime module is a wrapper built on top of the time module; it is friendlier to use, but slightly slower to execute.
datetime contains four important classes: datetime, date, time, and timedelta.

#1. The datetime class:

  #Creating a datetime object:

    datetime.datetime(year, month, day[, hour[, minute[, second[, microsecond[, tzinfo]]]]])

    datetime.fromtimestamp(cls, timestamp, tz=None)

    datetime.utcfromtimestamp(cls, timestamp)

    datetime.now(cls, tz=None)

    datetime.utcnow(cls)

    datetime.combine(cls, datetime.date, datetime.time)

  #Instance attributes and methods of datetime:

    datetime.year, month, day, hour, minute, second, microsecond, tzinfo
    datetime.date(): returns the date part as a date object;
    datetime.time(): returns the time part as a time object;
    datetime.replace([year[, month[, day[, hour[, minute[, second[, microsecond[, tzinfo]]]]]]]])
    datetime.timetuple()
    datetime.utctimetuple()
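A minimal sketch exercising the constructors and instance methods listed above (the timestamp 1294989360 is arbitrary):

from datetime import datetime, date, time

dt = datetime(2011, 1, 14, 7, 16, 0)                     # year, month, day, hour, minute, second
print(datetime.fromtimestamp(1294989360))                # local time from a Unix timestamp
print(datetime.utcfromtimestamp(1294989360))             # naive UTC time from the same timestamp
print(datetime.now(), datetime.utcnow())                 # current local and UTC time
print(datetime.combine(date(2011, 1, 14), time(7, 16)))  # build a datetime from date + time parts
print(dt.date(), dt.time(), dt.replace(year=2012))       # date/time parts and a modified copy
print(dt.timetuple(), dt.utctimetuple())                 # struct_time views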
Example #44
import datetime
import time
import multiprocessing as mp
import coinapi_brain as brain
from datetime import datetime

if __name__ == '__main__':
    request_ohlcv = brain.request_ohlcv
    datapath = brain.datapath
    coins = brain.coins
    quotes = brain.quotes
    symbols = brain.symbols
    #get beginning of current day
    T = time.time()
    unix_time = int(T - T % 86400)
    utc = datetime.utcfromtimestamp(unix_time).strftime('%Y%m%d')
    #download ohlcv: for each symbol get ohlcv data.
    pool = mp.Pool()
    res = [[sym, pool.apply_async(request_ohlcv, args=(
        unix_time,
        sym,
    ))] for sym in symbols]
    out = [[r[0], r[1].get()] for r in res]
    #out contains all rates. Create file.
    with open(
            datapath + utc[:4] + '/' + utc[4:6] + '/' + utc[6:] + '/' + utc +
            '.ohlcv', 'w') as outfile:
        name = [
            'time_close', 'time_open', 'price_close', 'price_open',
            'asset_id_base', 'asset_id_quote', 'exchange', 'trades_count',
            'volume_traded', 'price_high', 'price_low', 'time_period_start',
Example #45
    if get_market_name(stock_code) == "sh":
        return "SH." + stock_code
    else:
        return "SZ." + stock_code


def get_available_tdx_server(api):
    for k, v in ct.TDX_SERVERS.items():
        ip, port = ct.TDX_SERVERS[k][1].split(":")
        if api.connect(ip, int(port)): return ip, int(port)
    raise Exception("no server can be connected")


def get_market(code):
    if (code.startswith("6") or code.startswith("500")
            or code.startswith("550")
            or code.startswith("510")) or code.startswith("7"):
        return ct.MARKET_SH
    elif (code.startswith("00") or code.startswith("30")
          or code.startswith("150") or code.startswith("159")):
        return ct.MARKET_SZ
    else:
        return ct.MARKET_OTHER


epoch = datetime.utcfromtimestamp(0)


def unix_time_millis(dt):
    return int((dt - epoch).total_seconds() * 1000)
Example #46
import datetime
import sys
import time
from datetime import datetime
from PySide2.QtCore import QDate, SIGNAL, QObject
from PySide2.QtWidgets import (QTableView, QApplication, QDateEdit,
                               QHeaderView, QCheckBox, QSystemTrayIcon, QStyle)
from PySide2.QtWidgets import (QWidget, QGridLayout, QPushButton, QLineEdit,
                               QVBoxLayout, QHBoxLayout, QLabel)
import ClassesPlanner as cPl

buttonData = ["Start", "Pause", "Finish", "Delete"]
buttonDataFinish = ["Delete"]
TimeZone = (datetime.fromtimestamp(time.time()) -
            datetime.utcfromtimestamp(time.time())).total_seconds()  #


# Unix Time stamp in hh:mm:ss
def dateFormatFunction(dateUTC):
    if dateUTC is not None:
        date = (int(float(dateUTC) + float(TimeZone)))
        return datetime.utcfromtimestamp(date).strftime('%d-%m-%Y %H:%M:%S')
    return ""


# Seconds to hh:mm:ss format
# f.e. In: workTime = 5400 -> Out: 01:30:00
def workTimeFormat(workTime):
    hours = int((workTime) / 3600)
    minutes = int((workTime - (hours * 3600)) / 60)
    seconds = int(workTime - (hours * 3600 + minutes * 60))
Example #47
def datetime_from_utc_to_local(utc_datetime):
    now_timestamp = time.mktime(utc_datetime.timetuple())
    offset = datetime.fromtimestamp(now_timestamp) - datetime.utcfromtimestamp(
        now_timestamp)
    return utc_datetime + offset
Example #48
i = 0
for item in ['BVP', 'EDA', 'TEMP']:
    df = rcf(item, fname1)
    df1 = rcf(item, fname2)
    fnum2 = np.array(df.iloc[:, 1]).astype(float)
    fnum1 = np.array(df["UTCTimeStamp"]).astype(float)
    fnum_nxt2 = np.array(df1.iloc[:, 1]).astype(float)
    fnum_nxt1 = np.array(df1["UTCTimeStamp"]).astype(float)
    if fnum1[-1] - fnum1[0] > 3600:
        x_day = []
        x_night = []
        fnum2_day = []
        fnum2_night = []
        #converting UTC time into HH/MM/SS
        for idx in range(len(fnum1)):
            d = datetime.utcfromtimestamp(fnum1[idx])
            if d.hour >= 20:
                x_night.append(d)
                fnum2_night.append(fnum2[idx])
            elif d.hour < 20 and d.hour >= 8:
                x_day.append(d)
                fnum2_day.append(fnum2[idx])
                x_night.append(d)
                fnum2_night.append(np.NaN)
        fnum2_day.append(np.NaN)
        fnum2_night.append(np.NaN)
        x_day.append(x_day[-1])
        x_night.append(x_night[-1])
        for idx in range(len(fnum_nxt1)):
            d = datetime.utcfromtimestamp(fnum_nxt1[idx])
            if d.day == modified_date.day:
Example #49
    def timestampUTC(self, datestring):

        dt = datetime.strptime(datestring, '%a %b %d %H:%M:%S +0000 %Y')
        epoch = datetime.utcfromtimestamp(0)
        return int((dt - epoch).total_seconds() * 1000.0)
Example #50
File: tiempo.py Project: meshrg/TIS
import ntplib as ntp
import datetime
from time import ctime
from datetime import datetime
import ephem as ep

x = ntp.NTPClient()
c = x.request("mx.pool.ntp.org", version=4, timeout=5)

#ti=datetime.utcfromtimestamp(c.recv_time)
t1 = ctime(c.recv_time)

t = datetime.utcfromtimestamp(c.recv_time)
da = datetime.date(t)

print t

print da

print type(da)
Example #51
def format_datetime(timestamp):
    """Format a timestamp for display."""
    return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d @ %H:%M')
Example #52
    def run(self):
        """ Gets tracking information from the APRS receiver """

        aprsSer = self.APRS.getDevice()

        while (not self.aprsInterrupt):
            ### Read the APRS serial port, and parse the string appropriately 								###
            # Format:
            # "Callsign">CQ,WIDE1-1,WIDE2-2:!"Lat"N/"Lon"EO000/000/A="Alt"RadBug,23C,982mb,001
            # ###
            try:
                line = str(aprsSer.readline())
                print(line)
                idx = line.find(self.callsign)
                if (idx != -1):
                    line = line[idx:]
                    line = line[line.find("!") + 1:line.find("RadBug")]
                    line = line.split("/")

                    # Get the individual values from the newly created list ###
                    time = datetime.utcfromtimestamp(
                        time.time()).strftime('%H:%M:%S')
                    lat = line[0][0:-1]
                    latDeg = float(lat[0:2])
                    latMin = float(lat[2:])
                    lon = line[1][0:line[1].find("W")]
                    lonDeg = float(lon[0:3])
                    lonMin = float(lon[3:])
                    lat = latDeg + (latMin / 60)
                    lon = -lonDeg - (lonMin / 60)
                    alt = float(line[3][2:])
                    aprsSeconds = float(time.split(':')[0]) * 3600 + float(
                        time.split(':')[1]) * 60 + float(time.split(':')[2])

                    ### Create a new location object ###
                    try:
                        newLocation = BalloonUpdate(time, aprsSeconds, lat,
                                                    lon, alt, "APRS",
                                                    self.mainWindow.groundLat,
                                                    self.mainWindow.groundLon,
                                                    self.mainWindow.groundAlt)
                    except:
                        print(
                            "Error creating a new balloon location object from APRS Data"
                        )

                    try:
                        # Notify the main GUI of the new location
                        self.aprsNewLocation.emit(newLocation)
                    except Exception, e:
                        print(str(e))
            except:
                print("Error retrieving APRS Data")

        ### Clean Up ###
        try:
            aprsSer.close()  # Close the APRS Serial Port
        except:
            print("Error closing APRS serial port")

        self.aprsInterrupt = False
Example #53
    hfile.create_dataset("time_3day", data=ymdh_arr.astype(numpy.int32))
    ret = create_obc_hdf('3day', hfile, data_3day,
                         ins_conf.obc_3dim_to_db.values())

    if not ret:
        return False

    hfile.close()

    return True

    # like: FY3C_MWTS_20140303_0259_TO_20140428_1159_12H_CH01_[PRT|INS_TEMP|...]
    png_title = sat.upper() + '_' + ins.upper() + '_' \
                + begin_data[0]['ymdhms'].strftime("%Y%m%d") + '_' \
                + begin_data[0]['ymdhms'].strftime("%H%M") + '_TO_' \
                + datetime.utcfromtimestamp(timespan['end_t']).strftime('%Y%m%d') \
                + '_' \
                + datetime.utcfromtimestamp(timespan['end_t']).strftime('%H%M') \
                + '_' \
                + format(int(hour_span), '02d') + 'H_CH' \
                + format(channel, '02d')
    tmphdf = tmpfile + '.' + png_title + '.HDF'

    ret = draw_channel(tmphdf, format(channel, '02d'), png_title,
                       begin_data[0]['ymdhms'].strftime("%Y%m%d"),
                       datetime.utcfromtimestamp(timespan['end_t']).strftime('%Y%m%d'))

    return True
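A small sketch of the file-name pattern described in the comment above; the satellite/instrument names and Unix timestamps are hypothetical, chosen to reproduce the example title.
# Sketch of the PNG title pattern above (names and times are hypothetical).
from datetime import datetime

sat, ins, channel, hour_span = 'fy3c', 'mwts', 1, 12
begin_t, end_t = 1393815540, 1398686340   # hypothetical Unix timestamps

begin = datetime.utcfromtimestamp(begin_t)
end = datetime.utcfromtimestamp(end_t)

png_title = '_'.join([
    sat.upper(), ins.upper(),
    begin.strftime('%Y%m%d'), begin.strftime('%H%M'), 'TO',
    end.strftime('%Y%m%d'), end.strftime('%H%M'),
    format(int(hour_span), '02d') + 'H',
    'CH' + format(channel, '02d'),
])
print(png_title)   # -> FY3C_MWTS_20140303_0259_TO_20140428_1159_12H_CH01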

Example #54
0
def get_unix_time(timestamp):
    epoch = datetime.utcfromtimestamp(0)
    modified_ts = unix_time_millenium_epoch(timestamp)
    return (modified_ts - epoch).total_seconds()
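The epoch-subtraction idiom above shown in isolation (unix_time_millenium_epoch is a helper not included in the snippet; to_unix_seconds below is a hypothetical name). Subtracting utcfromtimestamp(0) is the standard way to turn a naive UTC datetime back into Unix seconds.
# The epoch-subtraction idiom used above, in isolation.
from datetime import datetime

def to_unix_seconds(d):
    """Convert a naive UTC datetime to Unix seconds (float)."""
    epoch = datetime.utcfromtimestamp(0)   # 1970-01-01 00:00:00
    return (d - epoch).total_seconds()

print(to_unix_seconds(datetime(2001, 9, 9, 1, 46, 40)))  # 1000000000.0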
Example #55
0
def is_day(unix_timestamp, weekday=None):
    # Compare against None explicitly: weekday 0 (Monday) is falsy, so a bare
    # 'if weekday:' would silently fall through to the "today" branch.
    if weekday is not None:
        return weekday == datetime.utcfromtimestamp(unix_timestamp).weekday()
    else:
        now = datetime.now()
        return now.weekday() == datetime.utcfromtimestamp(unix_timestamp).weekday()
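A brief usage sketch for is_day, assuming the definition above; weekday numbering follows datetime.weekday(), where Monday is 0 and Sunday is 6, and the timestamp is illustrative.
# Usage sketch: Monday is 0 and Sunday is 6, as in datetime.weekday().
from datetime import datetime

ts = 1000000000                                 # 2001-09-09 01:46:40 UTC, a Sunday
print(datetime.utcfromtimestamp(ts).weekday())  # 6
print(is_day(ts, weekday=6))                    # True
print(is_day(ts, weekday=0))                    # False (Monday)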
Example #56
0
def soundscapeCronoPlot(sc, varnames=[], orids=[], splits=None, group=None,
                        plot_type="concat", viewTz=None, plotTimeFormat="%H:%M",
                        nlabels=21, xlabel="T", ylabel="F (kHz)", xlabels=[],
                        ylabels=[], cmap="terrain", interpolation="sinc",
                        figsize=(20, 10), path=None, baseName="",
                        timeLimits=None, freqLimits=None):

    if len(varnames) == 0:
        raise ValueError("Must specify at least one variable to plot.")
    elif len(varnames) > 3:
        print("Too many variables to plot. Using first three")
    
    if splits is not None:
        df = splits
    else:
        df = sc.getSplits(group=group,baseName=baseName)
    
    if len(orids):
        df = df[df["orid"].isin(orids)]

    groupCat="timeCell"
    sampleCat = "timeCell_incr"
    orderCat="standardStart"

    config = dict(sc.config["cronoParams"])
    config["modulo"] = config["modulo"]*365*10
    
    df = df.apply(calendarize,axis=1,config=config,fieldName=sampleCat,transformTz=None)

    df[orderCat] = pd.to_datetime(df[orderCat])
    #df = df[df["origmd.conglomerado_muestra.id"]==1096]
    
    if timeLimits is not None:
        start = standardizeTime(timeLimits[0],sc.config["cronoParams"]["timeZone"],sc.config["cronoParams"]["timeFormat"])
        stop = standardizeTime(timeLimits[1],sc.config["cronoParams"]["timeZone"],sc.config["cronoParams"]["timeFormat"])
        df = df[(df[orderCat]>=start) & (df[orderCat]<=stop)]
    
    sorted_df = df.sort_values(by=[orderCat])
    
    min_df = sorted_df.iloc[0][orderCat]
    max_df = sorted_df.iloc[-1][orderCat]
    
    min_sample_cat = sorted_df[sampleCat].min()
    max_sample_cat = sorted_df[sampleCat].max()
    
    delta = max_df - min_df
    
    proj_stats = []
    aggr = None
        
    cats = list(df[groupCat].unique())
    cats = [int(x) for x in cats]
    cats.sort()
    
    all_cats = list(np.arange(0, sc.config["cronoParams"]["modulo"]))
    n_missing_cats = len(list(set(all_cats)-set(cats)))
    
    concat_indx = None
    
    if len(xlabels) == 0:
        if plot_type != "concat":
            xlabels = [catToStrTime(int(x),unit=sc.config["cronoParams"]["unit"],startDate=sc.config["cronoParams"]["startDate"],timeZone=sc.config["cronoParams"]["timeZone"],timeFormat=sc.config["cronoParams"]["timeFormat"],transformTz=viewTz,outFormat=plotTimeFormat) for x in all_cats]
        else:
            periods = nlabels
            xlabels = list(pd.date_range(min_df.astimezone(timezone("UTC")),max_df.astimezone(timezone("UTC")),periods=periods).values)
            xlabels = [datetime.utcfromtimestamp((x - np.datetime64('1970-01-01T00:00:00Z')) / np.timedelta64(1, 's')) for x in xlabels]
            if viewTz:
                xlabels = [transformTime(x,viewTz) for x in xlabels]
                
            xlabels = [x.strftime(plotTimeFormat) for x in xlabels]

    nlevels = 0
    for cname in list(sorted_df.columns.values):
        if varnames[0] in cname:
            nlevels += 1

    global_freqLimits = sc.config["energyParams"]["fLimits"]
    actual_freqLimits = global_freqLimits
    # Default to the full range so level_freqLimits is defined even when
    # freqLimits is not passed (it is read later when building ylabels).
    level_freqLimits = global_freqLimits
    freqLevels = range(0, nlevels)
    
    if freqLimits is not None:
        actual_freqLimits = [max(freqLimits[0],global_freqLimits[0]),min(freqLimits[1],global_freqLimits[1])]
        unit_conv = (global_freqLimits[1]-global_freqLimits[0])/nlevels
        
        min_level = max(min(int(round((actual_freqLimits[0]-global_freqLimits[0])/unit_conv)),nlevels-1),0)
        max_level = max(min(int(round((actual_freqLimits[1]-global_freqLimits[0])/unit_conv)),nlevels-1),0)
        
        level_freqLimits = [min_level*unit_conv,max_level*unit_conv]
        
        if min_level >= max_level:
            raise ValueError("Wrong frequency range: freqLimits = "+str(freqLimits))
        
        freqLevels = range(min_level, max_level + 1)

    for varname in varnames:
        cols = []
                
        for i in freqLevels:
            cols.append(varname+"_"+str(i))
        
        
        if plot_type == "concat":
            proj = sorted_df[[sampleCat]+cols]
            proj = proj.fillna(0)
            proj_stat = proj.groupby(sampleCat).mean()
            max_stat = np.amax(proj_stat.values)
            min_stat = np.amin(proj_stat.values)
            proj_stat = proj_stat.reindex(np.arange(min_sample_cat,max_sample_cat+1)).fillna(-1000)
            proj_stat = np.flipud(proj_stat.values.T)
        else:
            proj = sorted_df[[groupCat]+cols]
            proj = proj.fillna(0)
            group = proj.groupby(groupCat)
            if plot_type == "std":
                aggr="std"
                #proj_stat = group.apply(lambda x: np.sqrt(np.cov(x, aweights=x.chunkWeight)))
                proj_stat = group.std()
            else:
                aggr="mean"
                #proj_stat = group.apply(lambda x: np.average(x, weights=x.chunkWeight))
                proj_stat = group.mean()
            
            max_stat = np.amax(proj_stat.values)
            min_stat = np.amin(proj_stat.values)
            proj_stat = proj_stat.reindex(np.arange(0, sc.config["cronoParams"]["modulo"])).fillna(-1000)
            proj_stat = np.flipud(proj_stat.values.T)
        
        proj_stat = (proj_stat-min_stat)/(max_stat-min_stat)
        proj_stats.append(proj_stat)
        
        if len(proj_stats) == 3:
            break

    nstats = len(proj_stats)
    compose = False
    values = None
    
    if isinstance(cmap,str):
        cmap =  matplotlib.cm.get_cmap(cmap)
        
    if nstats == 1:
        values = proj_stats[0]
        values = np.ma.masked_where(values < 0, values)
    elif nstats == 2:
        proj_stats.append(np.zeros_like(proj_stats[0]))
        values = np.transpose(np.array(proj_stats),[1,2,0])

        values = np.ma.masked_where(values > 0, values)
        cmap.set_bad(color='black')
        compose = True
    elif nstats == 3:
        values = np.transpose(np.array(proj_stats),[1,2,0])
        values = np.ma.masked_where(values > 0, values)
        cmap.set_bad(color='black')
        compose = True
        
    if len(ylabels) == 0:
        freq_limits = level_freqLimits
        step = (freq_limits[1]-freq_limits[0])/20
        ylabels = list(np.arange(freq_limits[1], (freq_limits[0]) - step, -step))
        #ylabels = [x/1000 for x in ylabels]

    if plot_type == "concat":
        return plot_concat(values,figsize,cmap,interpolation,xlabel,ylabel,xlabels,ylabels,compose=compose,path=path)
    else:
        return plot_aggr(values,figsize,cmap,interpolation,xlabel,ylabel,xlabels,ylabels,aggr=aggr,compose=compose,path=path)
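The numpy-to-datetime conversion used for the x-axis labels above, shown on its own with illustrative values.
# The np.datetime64 -> datetime conversion used for the x-axis labels above.
import numpy as np
from datetime import datetime

stamps = np.array(['2001-09-09T01:46:40', '2020-01-01T00:00:00'],
                  dtype='datetime64[s]')
epoch = np.datetime64('1970-01-01T00:00:00')
labels = [datetime.utcfromtimestamp((x - epoch) / np.timedelta64(1, 's'))
          for x in stamps]
print(labels)
# [datetime.datetime(2001, 9, 9, 1, 46, 40), datetime.datetime(2020, 1, 1, 0, 0)]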
Example #57
0
def get_time_from_epoch(timestamp):
    t = datetime.utcfromtimestamp(timestamp)
    return t.strftime("%B %d, %Y %H:%M:%S UTC")
Example #58
0
def format_datetime(timestamp):
    """ 정수값에 해당하는 날짜 시간 포맷 형식 변경해서 문자열로 반환하는 함수 """
    return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d  %H:%M')
Example #59
0
def f(number, subject, days):
    totaltime1 = time.time()
    timedata = []
    data = []
    fulldata = []
    names = []
    nums = []

    n = 0
    secs = days * 86400

    with open(f"{name}.csv", 'r', encoding='utf-8') as file:

        csv_reader = csv.reader(file)
        next(csv_reader)
        t = int(time.time())
        t1 = time.time()
        print(f"Scanning {subject}...")

        if subject == 'Artists':
            column = 0
        elif subject == 'Albums':
            column = 1
        elif subject == 'Tracks':
            column = 2

        for line in csv_reader:
            unixtime = int(
                time.mktime(
                    datetime.strptime(line[3], "%d %b %Y %H:%M").timetuple()))
            if (t - unixtime) > secs and secs != 0:
                break

            n += 1
            if line[column] == '':
                # Just report albumless tracks; the for-loop already advances
                # to the next row, so calling next(csv_reader) here would
                # silently skip a row.
                print(f"No album attributed to: {line[2]} - {line[0]}")
            elif line[column] not in names:
                names.append(line[column])
                nums.append(1)

            else:
                nums[names.index(line[column])] += 1

        t2 = time.time()
        print(f"{subject} scanned in {t2-t1} seconds")

        for i in range(len(nums)):
            data.append((nums[i], names[i]))
        data.sort(reverse=True)

        plottime = time.time()

        for j in range(number):
            x = []
            y = []
            p = 0
            file.seek(0)
            next(csv_reader)  # skip the header row again after rewinding
            for line2 in csv_reader:
                unixtime = int(
                    time.mktime(
                        datetime.strptime(line2[3],
                                          "%d %b %Y %H:%M").timetuple()))

                if data[j][1] == line2[column]:

                    if (t - unixtime) > secs and secs != 0:
                        break
                    p += 1
                    x.insert(0, unixtime)

                    y.append(p)

            x.append(int(time.time()))
            y.append(p)
            x = [datetime.fromtimestamp(val) for val in x]
            plt.plot(x, y, label=data[j], linewidth=1.0)  #line thickness
            #plt.scatter(x,y,s=1.8)       #size of scatter points (works best with top tracks)
            print(data[j])

    if days > 90 or days == 0: timetype = '%Y-%m'
    else: timetype = '%Y-%m-%d'

    #matplotlib.rc('font', family='TakaoPGothic')
    plt.gcf().autofmt_xdate()
    plt.gca().xaxis.set_major_formatter(DateFormatter(timetype))
    plt.legend(fontsize=5, loc=2)  #size of legend font
    plt.grid(0, linestyle='--', alpha=0.5)
    plt.xlabel('Date')
    plt.ylabel('Plays')
    startdate = datetime.utcfromtimestamp(unixtime).strftime('%Y-%m-%d')
    plt.title(f"{name}'s Top {number} {subject} Since {startdate}",
              fontsize=10)
    plt.savefig(f"{name}'s Top {number} {subject} Since {startdate}.png",
                dpi=800,
                bbox_inches='tight')
    totaltime2 = time.time()
    print(f"{totaltime2-plottime} seconds to plot data")
    print(f"{totaltime2-totaltime1} seconds overall")
    plt.clf()
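The scrobble-date parsing in f() above interprets the parsed time via time.mktime, i.e. in the machine's local zone; below is a short sketch contrasting that with a UTC interpretation via calendar.timegm (the date string is made up).
# Sketch: converting a "%d %b %Y %H:%M" string to Unix seconds, interpreting
# it either as local time (time.mktime, as in f() above) or as UTC
# (calendar.timegm). The date string is made up.
import calendar
import time
from datetime import datetime

stamp = "03 Mar 2014 02:59"
parsed = datetime.strptime(stamp, "%d %b %Y %H:%M").timetuple()

local_secs = int(time.mktime(parsed))     # depends on the local timezone
utc_secs = int(calendar.timegm(parsed))   # always 1393815540
print(local_secs, utc_secs)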
Example #60
0
def datetime_to_float(d):
    epoch = datetime.utcfromtimestamp(0)
    total_seconds =  (d - epoch).total_seconds()
    # total_seconds is a float with sub-second (microsecond) precision
    return total_seconds
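A round-trip usage sketch for datetime_to_float, assuming the definition above; the timestamp value is illustrative.
# Round-trip usage sketch: utcfromtimestamp and datetime_to_float invert
# each other for naive UTC datetimes (illustrative value).
from datetime import datetime

d = datetime.utcfromtimestamp(1234567890.5)
print(datetime_to_float(d))   # 1234567890.5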