Code Example #1
File: server.py Project: PercyARS/OTPP_Project
def fetchServer2Data_Live():
    #called every minute
    data = urllib2.urlopen("https://api.bitcoinaverage.com/history/USD/per_minute_24h_sliding_window.csv")
    PriceWindow = []
    firstLine = True
    for line in data:
        if firstLine:
            firstLine = False
            continue
        time = line.split(",")[0]
        time1 = time.split(" ")[0]
        time2 = time.split(" ")[1]
        time = time1+"-"+time2
        # get rid of second
        time1 = time.split(":")[0]
        time2 = time.split(":")[1]
        time = time1+":"+time2
        time = UTCtoLinux(time)
        price = float(line.split(",")[1])
        Price[time] = price
        PriceWindow.append(price)

    # updating the internal data structure
    # now time points to the latest time
    print "Adding Server2 data at:" + linuxToUTC(time) + " Price " + str(price)
    calculateTradingStrategies(PriceWindow,time)
Code Example #2
File: sync.py Project: Astalaseven/syncthing-gui
    def handleRecentsMessage(self, recents):
        if not recents and syncthing_repositories:
            return

        recents = [recent for recent in recents if recent['type'] == 'LocalIndexUpdated']
        recents = recents[-10:]
        folders = {repo['ID']: repo['Directory'] for repo in syncthing_repositories}

        if recents:
            # remove all actions in recents menu
            self.recents.clear()
            max_length = max([len(recent['data']['name']) for recent in recents])

        for recent in recents:
            filename = recent['data']['name']
            directory = recent['data']['repo']
            time = recent['time']
            time = '+'.join([time.split('+')[0][:-1], time.split('+')[1]]) # hack to let arrow parse it
            time = arrow.get(time).humanize()

            action = QAction('%-*s (%s)' % (-max_length, filename, time), self.recents)

            if os.path.exists(os.path.join(folders[directory], filename)):
                action.setIcon(QIcon('icons/newfile.png'))
                # bind the current folder now so each action opens its own directory
                action.triggered.connect(lambda checked=False, d=folders[directory]: self.open_dir(d))
            else:
                action.setIcon(QIcon('icons/delfile.png'))
                folder = os.path.join(folders[directory], '.stversions')

                if os.path.exists(os.path.join(folder, filename)):
                    action.triggered.connect(lambda checked=False, d=folder: self.open_dir(d))

            self.recents.addAction(action)
Code Example #3
File: scraper_2001_2100.py Project: yzjing/ao3
def get_bookmarks_time(url, opener=opener):
    # go to the bookmarks page of the work and find the timestamps for the bookmarks
    # returns a dict of {month:# of bookmarks in the month}

    req = urllib2.Request(url)
    page = bs(opener.open(req))
    page_list = [i for i in re.findall('<a href="(.*?)>', str(page)) if "bookmarks?" in i]
    page_list = sorted(list(set([i.split()[0].replace('"', "") for i in page_list])))

    dt = re.findall('<p class="datetime">(.*?)</p>', str(page))
    times = []
    month_dict = {
        "Jan": "01",
        "Feb": "02",
        "Mar": "03",
        "Apr": "04",
        "May": "05",
        "Jun": "06",
        "Jul": "07",
        "Aug": "08",
        "Sep": "09",
        "Oct": "10",
        "Nov": "11",
        "Dec": "12",
    }
    for time in dt:
        times.append(time.split()[2] + "-" + month_dict.get(time.split()[1]))
    times = times[1:]
    if page_list != []:
        for page in page_list:
            times += get_bookmarks_time_subpages("http://archiveofourown.org" + page, opener=opener)
    c = Counter(times)
    return {time: c[time] for time in times}
Code Example #4
File: server.py Project: ZachGoldberg/Pandora-UPnP
def time_to_int(time):
    if time.count(":") == 2:
        (hour, min, sec) = time.split(":")
        return (int(hour) * 3600) + (int(min) * 60) + int(sec) 
    else:
        (min, sec) = time.split(":")
        return (int(min) * 60) + int(sec)
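
A quick usage sketch (hypothetical inputs; assumes time_to_int is defined as above), showing both branches:

total = time_to_int("1:02:03")   # "H:M:S" branch -> 1*3600 + 2*60 + 3 = 3723
short = time_to_int("4:05")      # "M:S" branch   -> 4*60 + 5 = 245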
Code Example #5
def get_song_length(duration, songnumber):
    duration = str(duration)
    songnumber = int(songnumber)
    if songnumber <= 1:
        time = duration.split(" ")[0]
        minutes_ev = time.split(":")[0]
        seconds_ev = time.split(":")[1]
        minutes = "".join([letter for letter in minutes_ev if letter.isdigit()])
        seconds = "".join([letter for letter in seconds_ev if letter.isdigit()])
        songtime = (int(minutes) * 60) + int(seconds)
        return songtime

    else:
        songfetch = int(songnumber) - 1
        if len(duration.split(" ")) > songfetch:
            time = duration.split(" ")[songfetch]
            minutes_ev = time.split(":")[0]
            seconds_ev = time.split(":")[1]
            minutes = "".join([letter for letter in minutes_ev if letter.isdigit()])
            seconds = "".join([letter for letter in seconds_ev if letter.isdigit()])
            songtime = (int(minutes) * 60) + int(seconds)
            return songtime

        else:
            global __settings__
            return int(getsl(__settings__.getSetting("sl_custom")))
Code Example #6
def toDateTime(sVal, iDefault=None):
    """ Suponer formato Iso OrderingDate 
    """
    if sVal is None:
        return iDefault
    try:
        if sVal.count("T") > 0:
            # IsoFormat DateTime
            (date, time) = sVal.split("T")
            (an, mois, jour) = date.split('-')
            (h, m, s) = time.split(':')
            return datetime.datetime(int(an), int(mois), int(jour), int(h), int(m), int(s))

        elif sVal.count("-") == 2:
            # IsoFormat Date
            (an, mois, jour) = sVal.split('-')
            return datetime.date(int(an), int(mois), int(jour))

        elif sVal.count("/") == 2:
            if sVal.count(' ') > 0:
                (date, time) = sVal.split(" ")
                (jour, mois, an) = date.split('/')
                (h, m, s) = time.split(':')
                return datetime.datetime(int(an), int(mois), int(jour), int(h), int(m), int(s))
            else:
                (jour, mois, an) = sVal.split('/')
                return datetime.date(int(an), int(mois), int(jour))
    except:
        return iDefault
Code Example #7
 def _srtTc2ms(self, time):
     if ',' in time:
         split_time = time.split(',')
     else:
         split_time = time.split('.')
     minor = split_time[1]
     major = split_time[0].split(':')
     return (int(major[0])*3600 + int(major[1])*60 + int(major[2])) * 1000 + int(minor)
Code Example #8
    def run(self):
        """ Gets tracking information from the APRS receiver """

        aprsSer = self.APRS.getDevice()

        while(not self.aprsInterrupt):
            ### Read the APRS serial port, and parse the string appropriately                               ###
            # Format:
            # "Callsign">CQ,WIDE1-1,WIDE2-2:!"Lat"N/"Lon"EO000/000/A="Alt"RadBug,23C,982mb,001
            # ###
            try:
                line = str(aprsSer.readline())
                print(line)
                idx = line.find(self.callsign)
                if(idx != -1):
                    line = line[idx:]
                    line = line[line.find("!") + 1:line.find("RadBug")]
                    line = line.split("/")

                    # Get the individual values from the newly created list ###
                    time = datetime.utcfromtimestamp(
                        time.time()).strftime('%H:%M:%S')
                    lat = line[0][0:-1]
                    latDeg = float(lat[0:2])
                    latMin = float(lat[2:])
                    lon = line[1][0:line[1].find("W")]
                    lonDeg = float(lon[0:3])
                    lonMin = float(lon[3:])
                    lat = latDeg + (latMin / 60)
                    lon = -lonDeg - (lonMin / 60)
                    alt = float(line[3][2:])
                    aprsSeconds = float(time.split(
                        ':')[0]) * 3600 + float(time.split(':')[1]) * 60 + float(time.split(':')[2])

                    ### Create a new location object ###
                    try:
                        newLocation = BalloonUpdate(
                            time, aprsSeconds, lat, lon, alt, "APRS", self.mainWindow.groundLat, self.mainWindow.groundLon, self.mainWindow.groundAlt)
                    except:
                        print(
                            "Error creating a new balloon location object from APRS Data")

                    try:
                        # Notify the main GUI of the new location
                        self.aprsNewLocation.emit(newLocation)
                    except Exception, e:
                        print(str(e))
            except:
                print("Error retrieving APRS Data")

        ### Clean Up ###
        try:
            aprsSer.close()         # Close the APRS Serial Port
        except:
            print("Error closing APRS serial port")

        self.aprsInterrupt = False
Code Example #9
File: scraper_3701_3800.py Project: yzjing/ao3
def get_bookmarks_time_subpages(url, opener=opener):
    # A work's bookmarks can take up multiple pages. In this case, all timestamp information is added to the first page.
    req = urllib2.Request(url)
    page = bs(opener.open(req))
    dt = re.findall('<p class="datetime">(.*?)</p>', str(page))
    times = []
    month_dict = {'Jan':'01', 'Feb':'02','Mar':'03', 'Apr':'04', 'May':'05', 'Jun':'06', 'Jul':'07', 'Aug':'08', 'Sep':'09', 'Oct':'10', 'Nov':'11', 'Dec':'12'}
    for time in dt:
        times.append(time.split()[2] + '-' + month_dict.get(time.split()[1]))
    return times[1:]
Code Example #10
File: 20160313_crawler.py Project: yzjing/ao3
def get_bookmarks_time_subpages(url, opener=opener):
    req = urllib2.Request(url)
    page = bs(opener.open(req))
    dt = re.findall('<p class="datetime">(.*?)</p>', str(page))
    times = []
    month_dict = {'Jan':'01', 'Feb':'02','Mar':'03', 'Apr':'04', 'May':'05', 'Jun':'06', 'Jul':'07', 'Aug':'08', 'Sep':'09', 'Oct':'10', 'Nov':'11', 'Dec':'12'}
    for time in dt:
        times.append(time.split()[2] + '-' + month_dict.get(time.split()[1]))
    times = times[1:]
    return times
Code Example #11
File: plugin.py Project: TomTelos/YWeather
	def time_convert(self, time):
		print "[YWeather] Time convert"
		tmp_time = ''
		if time.endswith('pm'):
			tmp_time = '%s:%s' % (int(time.split()[0].split(':')[0]) + 12, time.split()[0].split(':')[-1])
		else:
			tmp_time = time.replace('am', '').strip()
		if len(tmp_time) == 4:
			return '0%s' % tmp_time
		else:
			return tmp_time
Code Example #12
File: direct_athletics.py Project: malcolmjmr/track
def get_timedelta(time):
    minutes = 0
    if len(time.split(':')) > 1:
        minutes, seconds = [float(t) for t in time.split(':')]
    else:
        try:
            seconds = float(time)
        except ValueError:
            seconds = 1000 * 1000
        
    return datetime.timedelta(minutes=minutes, seconds=seconds)
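
A minimal usage sketch (hypothetical inputs; assumes the datetime module is imported as in the original file): a mark with minutes takes the first branch, a bare seconds string the second, and anything unparseable falls back to the 1,000,000-second sentinel.

mile = get_timedelta("4:30.5")    # -> datetime.timedelta(minutes=4, seconds=30.5)
sprint = get_timedelta("59.8")    # -> datetime.timedelta(seconds=59.8)
dnf = get_timedelta("DNF")        # ValueError path -> datetime.timedelta(seconds=1000000)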
Code Example #13
def convertTime(time):
    timeInSeconds = 0
    if time.find(":")>0:
        min,sec = time.split(":")
    elif time.find("m")>0:
         min,sec = time.split("m")
         sec = sec.replace("s","")
    else:
        min = 0
        sec = 0
    min = int(min)
    sec = int(sec)       
    return (min*60)+sec
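
Both accepted notations end up in the same arithmetic (hypothetical inputs; assumes convertTime as defined above):

colon_form = convertTime("3:45")    # -> 3*60 + 45 = 225
letter_form = convertTime("3m45s")  # -> 225 as well
missing = convertTime("--")         # neither ":" nor "m" -> 0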
Code Example #14
def timeshifter(time):

    #06/02/2014 02:30 -> 2014-06-02T02:30

    try:
        origdate, origtime = time.split(' ')[0], time.split(' ')[1]

        date = origdate.split('/')[2] + '-' + origdate.split('/')[0] + '-' + origdate.split('/')[1]
        time = origtime + ':00'

        return date + 'T' + time
    except Exception, e:
        return None
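
A hedged illustration of the round trip the comment describes (hypothetical input; the function assumes MM/DD/YYYY order):

stamp = timeshifter("06/02/2014 02:30")   # -> "2014-06-02T02:30:00"
bad = timeshifter("02:30")                # no date part -> IndexError caught -> None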
Code Example #15
File: DDishEPG.py Project: dafoyiming/DDishEPG
 def generatebeginTimecode(begin_time_list):
     beginTimecode_list=list()
     for i in range(0, len(begin_time_list)):
         datetime = begin_time_list[i].split(' ')[0]
         time = begin_time_list[i].split(' ')[1]
         year = int(datetime.split('-')[0])
         month = int(datetime.split('-')[1])
         day = int(datetime.split('-')[2])
         hour = int(time.split(':')[0])
         minute = int(time.split(':')[1]) 
         second ='00'
         beginTimecode = str(year)+str('%02d'%month)+str('%02d'%day)+str('%02d'%hour)+str('%02d'%minute)+second
         beginTimecode_list.append(beginTimecode)
     return beginTimecode_list
Code Example #16
def save_channels_rambler(date_invert, ADDRESS):
    source = 1  #rambler
    source_obj = TvForecastSources.objects.get(id = 1)
    g.go(ADDRESS)
    res = g.response.body
    soup = BeautifulSoup(str(res))
    for channel in soup.findAll("table", {"class":"grid-element"}):
        
        must_del = True  # for each new channel
        
        channel_my_id =  rambler_to_mine_channel[channel.find('td', {"class" : "plain-event-logo"}).text]
        channel_obj = TvChannels.objects.get(id = channel_my_id)
        
        for param in channel.findAll("div", {"class":"plain-event"}):
            #if channel_obj.id ==7 or channel_obj.id ==8 or channel_obj.id ==9:  # temporarily only for the first channel
                #print parmam.text
                try:
                    name = param.find('a').text  # program name
                    time =  param.find('span').text  # air time
                    time_splitted = time.split(":")
                except AttributeError:
                    name= param.contents[1].nextSibling  # if it is a message rather than a program, still use it as the program name
                    time= param.find('span').text  # air time when it is a message rather than a program
                    time_splitted = time.split(":")
                #here we can save the object to the database
    
                tv_forecast_obj = TvForecast(source=source_obj,
                                             channel = channel_obj,
                                             #date = '2015-01-12',  # from the address
                                             date = date_invert,  # from the address
                                             time_hour = time_splitted[0],
                                             time_minute= time_splitted[1],
                                             name = name,
                                             date_get = datetime.datetime.now().strftime("%Y-%m-%d") ,
                                             
                                             )
                #delete records for this channel on this date
                if tv_forecast_obj:
                   
                    if must_del ==True:
                        tv_forecast_obj_must_del = TvForecast.objects.filter(source = source_obj, date=date_invert, channel = channel_obj)
                        #print channel_obj.id
                        if tv_forecast_obj_must_del.count() != 0:
                            for tmd in  tv_forecast_obj_must_del:
                                #print tmd.id
                                tmd.delete()
                        must_del = False
                        #tv_forecast_obj_must_del.delete()
                tv_forecast_obj.save()
Code Example #17
File: race_predictor.py Project: 41734785/thinkstats
def ConvertTimeToMinutes(time):
    """Converts time in HH:MM:SS format to minutes."""
    t = time.split('.')
    if len(t) == 2:
        time, fraction = time.split('.')
    
    t = [int(x) for x in time.split(':')]
    if len(t) == 2:
        h = 0
        m, s = t
    else:
        h, m, s = t

    mins = h * 60 + m + s / 60.0
    return mins
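
Illustrative calls (hypothetical values, assuming the function above): the fractional part after a '.' is split off and discarded, and a missing hour field is treated as zero.

marathon = ConvertTimeToMinutes("3:15:30")   # -> 3*60 + 15 + 30/60.0 = 195.5
five_k = ConvertTimeToMinutes("18:45")       # -> 18 + 45/60.0 = 18.75
chip = ConvertTimeToMinutes("18:45.7")       # ".7" dropped -> 18.75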
Code Example #18
File: mpdwrapper.py Project: rompolompo/mpdlcd
 def total(self):
     logger.debug(u'Fetching MPD total time')
     time = self.status.get('time')
     if time:
         return self._parse_time(time.split(':')[-1])
     else:
         return None
Code Example #19
def get_quote_info(ctx, pline, userdata):
    args = pline.trailing.split()
    args.pop(0)
    if args:
        try:
            quote_id = int(args.pop(0))
        except ValueError:
            ctx.command('/say Invalid argument.')
            return
    else:
        ctx.command('/say You must provide a quote id.')
        return

    init_quotes_db()
    with closing(sqlite3.connect(quote_db_filename)) as quote_db:
        with closing(quote_db.cursor()) as cursor:
            cursor.execute('''SELECT "id", "server", "channel", "date" FROM "quotes" WHERE "id"=?''', (quote_id,))
            row = cursor.fetchone()

    if row is None:
        ctx.command('/say That quote does not exist.')
        return

    quote_id, quote_server, quote_channel, quote_date = row
    date, time = quote_date.split()
    year, month, day = map(int, date.split('-'))
    hour, minute, second = map(int, time.split(':'))
    dt_utc_naive = datetime(year, month, day, hour, minute, second)
    dt = pytz.timezone('Europe/Berlin').fromutc(dt_utc_naive)
    ctx.command(u'/say \x02#{} infos:\x02 {} \x02@\x02 {} \x02at\x02 {}'.format(
        quote_id, quote_channel, quote_server, dt.strftime(u'%Y-%m-%d %H:%M')
    ).encode('utf-8'))
Code Example #20
File: clock.py Project: Konzertheld/quodlibet
 def is_valid_time(time):
     try:
         hour, minute = map(int, time.split(":"))
     except:
         return False
     else:
         return (hour < 24 and minute < 60)
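
Shown here as if it were a plain function (in the original file it sits inside a class), a few hypothetical checks:

ok = is_valid_time("23:59")    # -> True
late = is_valid_time("24:00")  # hour out of range -> False
junk = is_valid_time("7pm")    # split/int fails -> False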
Code Example #21
def time_to_seconds(time):
    """
    Takes a string in the form of "%M:%S" and returns the total seconds.
    Can also handle cases where only ":%S" is provided.
    """
    m,s = [int(x) if x != '' else 0 for x in time.split(':')]
    return 60*m + s
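
Two hypothetical calls covering both cases the docstring mentions:

full = time_to_seconds("3:45")   # -> 3*60 + 45 = 225
bare = time_to_seconds(":45")    # empty minutes treated as 0 -> 45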
Code Example #22
File: mpdwrapper.py Project: rompolompo/mpdlcd
 def elapsed(self):
     logger.debug(u'Fetching MPD elapsed time')
     time = self.status.get('time')
     if time:
         return self._parse_time(time.split(':')[0])
     else:
         return None
Code Example #23
File: Arduino2DB.py Project: DrinkingGypsie/GrowCase
def parseString(message):
   s = message.split("Date: ")
   s = s[1].split(" Time: ")
   date = s[0]
   s = s[1].split(" L")
   time = s[0]
   s = s[1].split(" ")
   light = s[0]
   hum = s[2].split("%")[0]
   temp = (s[1].split("C"))[0]
   moist = (s[3].split("% M"))[0].split("M")[1].split("\r")[0]
   p = date.split(".")
   date = p[2]+"-"
   if len(p[1]) < 2:
      date+="0"
   date+= p[1]+"-"
   if len(p[0]) < 2:
      date+="0"
   date+= p[0]+" "
   p = time.split(":")
   if len(p[0]) < 2:
      date+="0"
   date+= p[0]+":"
   if len(p[1]) < 2:
      date+="0"
   date+= p[1]+":"
   if len(p[2]) < 2:
      date+="0"
   date+= p[2]
   return [date, temp, hum, moist, light]
Code Example #24
File: condition.py Project: whouweling/yelena
    def __init__(self, time, when):

        self.when = when

        (hours, minutes) = time.split(":")

        self.minute = int(hours) * 60 + int(minutes)
Code Example #25
def parse_datetime(date):
    """Parse ISO format date/time string into Datetime object.
    
        >>> from datetime import datetime
        >>> print parse_datetime('2007-10-05')
        None
        >>> parse_datetime('2007-10-05 08:12:43') == datetime(2007, 10, 5, 8, 12, 43)
        True
        >>> parse_datetime('2007-10-06') == datetime(2007, 10, 5)
        False
        >>> print parse_datetime('invalid')
        None
    """
    try:
        date, time = date.split(' ')
        year, month, day = date.split('-')[:3]
        year = int(year)
        month = int(month)
        day = int(day)
        hours, minutes, seconds = time.split(':')[:3]
        hours = int(hours)
        minutes = int(minutes)
        seconds = int(seconds)
        return datetime(year, month, day, hours, minutes, seconds)
    except:
        return None
Code Example #26
File: validators.py Project: josephw/feedvalidator
  def validate(self):
    if not self.iso8601_re.match(self.value):
      self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
      return

    work=self.value.split('T')

    date=work[0].split('-')
    year=int(date[0])
    if len(date)>1:
      month=int(date[1])
      try:
        if len(date)>2: datetime.date(year,month,int(date[2]))
      except ValueError as e:
        return self.log(self.message({"parent":self.parent.name, "element":self.name, "value":str(e)}))

    if len(work) > 1:
      time=work[1].split('Z')[0].split('+')[0].split('-')[0]
      time=time.split(':')
      if int(time[0])>23:
        self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
        return
      if len(time)>1 and int(time[1])>60:
        self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
        return
      if len(time)>2 and float(time[2])>60.0:
        self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
        return

    self.log(ValidW3CDTFDate({"parent":self.parent.name, "element":self.name, "value":self.value}))
    return 1
Code Example #27
File: server.py Project: robertoyubero/ptavi-p4
 def get_expires(self, mensaje):
     """
     Extract the expiration time from the message
     """
     time = mensaje.split("Expires: ")[1]
     time = time.split("\r")[0]
     return(int(time))
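
The same two splits, applied to a hypothetical SIP REGISTER fragment (standalone here; in the class they run on the received message):

mensaje = "REGISTER sip:server SIP/2.0\r\nExpires: 3600\r\n\r\n"
tail = mensaje.split("Expires: ")[1]   # "3600\r\n\r\n"
expires = int(tail.split("\r")[0])     # -> 3600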
Code Example #28
File: fxs.py Project: cctbx/cctbx-playground
  def store_index_h5(self, time, index,flag = 1) :
      """Store information about:
         * Time-stamp
         * Total intensity
         * Beam center
         * Estimated Particle Size
         * Estimated particle nr

      """

      self.tot_t[index]         = time.split('_')[1]

      self.tot_int[index]       = float(self.img.sum())
      self.tot_peak1_int[index] = self.peak1
      self.tot_peak2_int[index] = self.peak2
      self.tot_streak_m[index]  = self.streak_m
      self.tot_streak_s[index]  = self.streak_s

      self.tot_cx[index]        = self.cent[0]
      self.tot_cy[index]        = self.cent[1]
      self.tot_size[index]      = self.radius
      self.tot_score[index]     = self.score

      if flag :
         self.ave                  += self.img
Code Example #29
    def convert_time(self, time):
        """
        Helper function to convert the displaytime strings to
        an actual integer minute represenation.
        Examples: 'Nu' => 0, '8 min.'' => 8, 
                  '12:22' (at the time of 12:18) => 4
                  '9' => 9
        """
        if 'min' in time:
            time = time.replace('min', '').replace('.', '').strip()
        elif 'Nu' in time:
            time = 0
        elif ':' in time:
            now = self.get_now()
            # floor below minute
            now = datetime.datetime(year=now.year, month=now.month, day=now.day,
                                    hour=now.hour, minute=now.minute, second=0,
                                    microsecond=0)

            hour, minute = time.split(':')
            dtime = datetime.datetime(year=now.year, month=now.month, day=now.day,
                                      hour=int(hour), minute=int(minute), second=0,
                                      microsecond=0)

            # 00.00 wraparound?
            if dtime < now:
                dtime = dtime + datetime.timedelta(days=1)
            time = round((dtime - now).total_seconds() / 60.0)
        return int(time)
Code Example #30
    def as_dict(self):
        self.photo = None
        if self.picture:
            self.photo = "/img?img_id=%s" % self.key()
        self.created = self.created + datetime.timedelta(hours = 8)
        self.lastModified = self.lastModified + datetime.timedelta(hours = 8)
        date, time = self.lastSeen.split(" ")
        year, month, day = map(int, date.split("-"))
        hour, minute, second = map(int, time.split(":"))
        self.lastSeenTime = datetime.datetime(year, month, day, hour, minute, second)

        d = {'name': self.name,
             'age': self.age,
             'last_seen': self.lastSeenAt,
             'last_seen_date': self.lastSeenTime.strftime("%d %b %Y"),
             'last_seen_time': self.lastSeenTime.strftime("%I:%M %p"),
             'contact_details': self.contactDetails,
             'user': self.user,
             'found': self.found,
             'posted_date': self.created.strftime("%b %d, %Y"),
             'posted_time': self.created.strftime("%I:%M %p")
             }
        if self.photo:
            d['picture'] = self.photo
        if self.additionalDetails:
            d['additional_details'] = self.additionalDetails
        if not (self.created.strftime("%I:%M %p") ==
                self.lastModified.strftime("%I:%M %p") and
                (self.created.strftime("%b %d, %Y") ==
                 self.lastModified.strftime("%b %d, %Y"))):
             d['last_modified_date'] = self.lastModified.strftime("%b %d, %Y")
             d['last_modified_time'] = self.lastModified.strftime("%I:%M %p")
        
        return d
Code Example #31
def time_in_seconds(time):
    t1 = time.split()
    t2 = t1[3].split(':')
    return int(t2[0])* 3600 + int(t2[1])*60 + int(t2[2])
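
The function indexes field 3 of a whitespace-split string, which matches a ctime()-style timestamp (an assumption about the caller); a hypothetical example:

secs = time_in_seconds("Mon Jun 20 23:21:05 1994")   # -> 23*3600 + 21*60 + 5 = 84065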
Code Example #32
File: abres.py Project: dlotnyk/abtranstion
 def _gettime(self, date, time):
     '''get time in datetime from strings date and time'''
     mm, dd, yy = date.split('/')
     hh, mins, ss = time.split(':')
     return mm, dd, yy, hh, mins, ss
Code Example #33
File: util.py Project: jzfengziyan/zju-memory
    def _get_jwbinfosys_course(self, sess, semester_num=4):
        base = 2018
        teacher2num = {}
        semester2num = {}
        teacher2course = {}
        res = None

        res = self._get(
            sess=sess,
            url='http://jwbinfosys.zju.edu.cn/xskbcx.aspx?xh={}'.format(
                self._stuid))
        self._get_jwbinfosys_util(res, teacher2num, semester2num,
                                  teacher2course, '2018-2019 春夏')
        viewstate = re.search('name="__VIEWSTATE" value=".*?"',
                              res.text).group(0)[26:-1]
        data = {
            '__VIEWSTATE': viewstate,
            'xnd': '2018-2019',
            'xqd': '1|秋、冬'.encode('GBK')
        }
        res = self._post(
            sess=sess,
            url='http://jwbinfosys.zju.edu.cn/xskbcx.aspx?xh={}'.format(
                self._stuid),
            data=data)
        self._get_jwbinfosys_util(res, teacher2num, semester2num,
                                  teacher2course, '2018-2019 秋冬')

        for i in range(1, self._semester_num):
            viewstate = re.search('name="__VIEWSTATE" value=".*?"',
                                  res.text).group(0)[26:-1]
            data = {
                '__VIEWSTATE': viewstate,
                'xnd': '{}-{}'.format(base - i, base - i + 1),
            }
            res = self._post(
                sess=sess,
                url='http://jwbinfosys.zju.edu.cn/xskbcx.aspx?xh={}'.format(
                    self._stuid),
                data=data)
            self._get_jwbinfosys_util(
                res, teacher2num, semester2num, teacher2course,
                '{}-{} 春夏'.format(base - i, base - i + 1))

            viewstate = re.search('name="__VIEWSTATE" value=".*?"',
                                  res.text).group(0)[26:-1]
            data = {
                '__VIEWSTATE': viewstate,
                'xnd': '{}-{}'.format(base - i, base - i + 1),
                'xqd': '1|秋、冬'.encode('GBK')
            }
            res = self._post(
                sess=sess,
                url='http://jwbinfosys.zju.edu.cn/xskbcx.aspx?xh={}'.format(
                    self._stuid),
                data=data)
            self._get_jwbinfosys_util(
                res, teacher2num, semester2num, teacher2course,
                '{}-{} 秋冬'.format(base - i, base - i + 1))

        soup = bs4.BeautifulSoup(res.text, 'html.parser')
        year = soup.find(id='xnd').find(attrs={'selected': "selected"}).text
        table = soup.find(id='xsgrid')
        trs = table.findAll('tr')[1:]
        first_semester_course = []
        for tr in trs:
            tds = tr.findAll('td')
            course_name, teacher, semester, time, place = tds[1].text, tds[
                2].text, tds[3].text, tds[4].text, tds[5].text
            if semester.find('秋') != -1:
                places = place.split('<br/>')
                times = time.split('<br/>')
                time, place = None, None
                if len(times) >= 2:
                    time = sorted(times)[0]
                    place = places[times.index(time)]
                else:
                    place = places[0]
                    time = times[0]
                first_semester_course.append(
                    (course_name, teacher, place, time))

        first_semester_course = sorted(first_semester_course,
                                       key=lambda d: d[-1])
        first_course = None
        if first_semester_course[0][0] == '军训':
            first_course = {
                'name': first_semester_course[1][0],
                'teacher': first_semester_course[1][1],
                'place': first_semester_course[1][2]
            }
        else:
            first_course = {
                'name': first_semester_course[0][0],
                'teacher': first_semester_course[0][1],
                'place': first_semester_course[0][2]
            }
        return teacher2num, teacher2course, semester2num, first_course
Code Example #34
def call_api(base_url, cookies):
    # ---- make REST API Call section  -------

    # Generate URL used for REST API call to get 5minute temperature data
    # curly braces {} in string indicates a "replacement field"
    sensor_url = base_url + 'mo/{}/sys/ch/supslot-1/sup/sensor-3/CDeqptTemp5min.json'

    # Get all leaf information
    leaf_url = base_url + '/class/fabricNode.json?query-target-filter=and(eq(fabricNode.role,"leaf"))'
    leafs = requests.get(leaf_url, cookies=cookies,
                         verify=False).json()['imdata']
    leaf_dns = []

    # for each of the objects returned by the API we will extract the dn
    for leaf in leafs:
        # this will return something like -
        # "topology/pod-1/node-101" - replaces curly braces {} in sensor_url
        dn = leaf['fabricNode']['attributes']['dn']
        leaf_dns.append(dn)

    # now we will run a query for the sensor of each node by it's dn
    for dn in leaf_dns:
        resp = requests.get(sensor_url.format(dn),
                            cookies=cookies,
                            verify=False).json()['imdata'][0]
        temp = resp['eqptTemp5min']['attributes']['currentMax']
        ts = resp['eqptTemp5min']['attributes']['repIntvEnd']

        # clean up dn - only save node name
        dn = dn.replace('topology/pod-1/', '')

        # temperature returned as string -
        # make it an int and assign condition
        int_temp = int(temp)

        if int_temp >= 60:  # temp is greater than 60 degrees C
            condition = "high"
        elif int_temp >= 48:  # temp between 48 and 60 degrees C
            condition = "elevated"
        else:  # temp less than 48 degrees C
            condition = "normal"

        # take day/time returned and place into two separate variables
        date = ts.split('T')[0]  # date occurs before 'T' in  ts string
        time = ts.split('T')[1]  # time occurs after 'T' in  ts string
        time = time.split('.')[
            0]  # remove microseconds from time occurring after period

        obj = {
            "dn": dn,
            "attributes": {
                "temp": temp,
                "timestamp": time,
                "date": date,
                "condition": condition,
                "type": 'leaf'
            }
        }

        # send object to RESTAPI function
        upload = send2_RESTAPI(obj)

        if upload:
            print "device successfully uploaded to api (L)"
        else:
            print "error uploading device "

    # Get all spine information
    spine_url = base_url + '/class/fabricNode.json?query-target-filter=and(eq(fabricNode.role,"spine"))'
    spines = requests.get(spine_url, cookies=cookies,
                          verify=False).json()['imdata']
    print spines
    spine_dns = []
    print spine_dns

    # for each of the objects returned by the API we will extract the dn
    for spine in spines:
        # this will return something like -
        # "topology/pod-1/node-101" - replaces curly braces {} in sensor_url
        dn = spine['fabricNode']['attributes']['dn']
        spine_dns.append(dn)

        # now we will run a query for the sensor of each node by it's dn
    for dn in spine_dns:
        resp = requests.get(sensor_url.format(dn),
                            cookies=cookies,
                            verify=False).json()['imdata'][0]
        temp = resp['eqptTemp5min']['attributes']['currentMax']
        ts = resp['eqptTemp5min']['attributes']['repIntvEnd']

        # clean up dn - only save node name
        dn = dn.replace('topology/pod-1/', '')

        # temperature returned as string -
        # make it an int and assign condition
        int_temp = int(temp)

        if int_temp >= 60:  # temp is greater than 60 degrees C
            condition = "high"
        elif int_temp >= 48:  # temp between 48 and 60 degrees C
            condition = "elevated"
        else:  # temp less than 48 degrees C
            condition = "normal"

        # take day/time returned and place into two separate variables
        date = ts.split('T')[0]  # date occurs before 'T' in  ts string
        time = ts.split('T')[1]  # time occurs after 'T' in  ts string
        time = time.split('.')[
            0]  # remove microseconds from time occurring after period

        obj = {
            "dn": dn,
            "attributes": {
                "temp": temp,
                "timestamp": time,
                "date": date,
                "condition": condition,
                "type": 'spine'
            }
        }

        # send object to RESTAPI function
        upload = send2_RESTAPI(obj)

        if upload:
            print "device successfully uploaded to api (S)"
        else:
            print "error uploading device "
Code Example #35
File: downsample.py Project: eschanet/QMonit
def run():

    config = ConfigParser.ConfigParser()
    config.read("config.cfg")

    password = config.get("credentials", "password")
    username = config.get("credentials", "username")
    database = config.get("credentials", "database")

    logger.info("Constructing InfluxDB queries.")

    if args.average == "1h":
        retention = "10m"
        delta = "2h"
        time_units = 6
    elif args.average == "1d":
        retention = "1h"
        delta = "2d"
        time_units = 24
    else:
        return 0

    client = InfluxDBClient(
        "dbod-eschanet.cern.ch", 8080, username, password, "monit_jobs", True, False
    )
    rs_distinct_sets = client.query(
        """select * from "{}"."jobs" where "prod_source" != '' group by panda_queue, prod_source, resource, job_status limit 1""".format(
            retention
        )
    )

    rs_result = client.query(
        """select * from "{}"."jobs" where time > now() - {} and "prod_source" != '' group by panda_queue, prod_source, resource, job_status """.format(
            retention, delta
        )
    )
    raw_dict = rs_result.raw
    series = raw_dict["series"]

    logger.info("Got data from InfluxDB.")
    logger.info("Averaging now.")

    # uploader = InfluxDBClient('dbod-eschanet.cern.ch', 8080, username, password, "test", True, False)

    points_list = []
    for rs in rs_distinct_sets.keys():
        rs = rs[1]  # rs is a tuple
        logger.debug(rs)

        filtered_points = [
            p
            for p in series
            if p["tags"]["panda_queue"] == rs["panda_queue"]
            and p["tags"]["resource"] == rs["resource"]
            and p["tags"]["prod_source"] == rs["prod_source"]
            and p["tags"]["job_status"] == rs["job_status"]
        ]

        if len(filtered_points) == 0:
            logger.debug("Got no points for this set of keys.")
            continue

        filtered_points = filtered_points[0]

        values = filtered_points["values"]
        tags = filtered_points["tags"]
        columns = filtered_points["columns"]

        # reverse in place, want to have latest points first
        values.reverse()

        # get me the last (most recent) point, because this is the one I want to overwrite.
        latest_value = values[0]

        # get averaged values
        if tags["job_status"] in ["failed", "finished", "cancelled", "closed"]:
            averaged_jobs = get_sum(time_units, values, columns.index("jobs"))
        else:
            averaged_jobs = get_average(time_units, values, columns.index("jobs"))
        # averaged_jobs = get_average(time_units, values, columns.index('jobs'))
        averaged_cpu = get_average(time_units, values, columns.index("resource_factor"))
        averaged_corepower = get_average(time_units, values, columns.index("corepower"))
        averaged_HS06_benchmark = get_average(
            time_units, values, columns.index("HS06_benchmark")
        )
        averaged_HS06_pledge = get_average(
            time_units, values, columns.index("federation_HS06_pledge")
        )

        # construct rest of the data dict
        data = dict(zip(columns, latest_value))

        time = data["time"].replace("T", " ").replace("Z", "")

        if args.average == "1h":
            hash = time.split(".")[-1].ljust(9, "0")
        else:
            # got no hashes in 1h aggregate data yet
            m = hashlib.md5()
            m.update(
                str(tags["panda_queue"])
                + str(tags["prod_source"])
                + str(tags["resource"])
                + str(tags["job_status"])
            )
            hash = str(int(m.hexdigest(), 16))[0:9]

        time = unix + int(hash)

        data.update(tags)
        data.pop("time", None)
        data.pop("jobs", None)
        data.pop("resource_factor", None)
        data.pop("corepower", None)
        data.pop("HS06_benchmark", None)
        data.pop("federation_HS06_pledge", None)

        json_body = {
            "measurement": "jobs",
            "tags": data,
            "time": time,
            "fields": {
                "jobs": averaged_jobs,
                "resource_factor": averaged_cpu,
                "corepower": averaged_corepower,
                "HS06_benchmark": averaged_HS06_benchmark,
                "federation_HS06_pledge": averaged_HS06_pledge,
            },
        }

        # sometimes I f**k up and then I want to kill the last measurement...
        if args.kill_last:
            for key, value in json_body["fields"].iteritems():
                json_body["fields"][key] = 0.0

        logger.debug(json_body)
        points_list.append(json_body)

    client.write_points(
        points=points_list, time_precision="n", retention_policy=args.average
    )
Code Example #36
WebDriverWait(driver, 5)

name = 'Vitm'
count = 1
input('Enter anything after scanning QR code')

user = driver.find_element_by_xpath('//span[@title = "{}"]'.format(name))

with open(r'PATH\Train.txt', 'r') as h:
    sub = h.readlines()

re_pattern = r'[0-9]{2}:[0-9]{2}:[0-9]{2},[0-9]{3} -->'
regex = re.compile(re_pattern)
# Get start times
start_times = list(filter(regex.search, sub))
start_times = [time.split(' ')[0] for time in start_times]
# Get lines
lines = [[]]
for sentence in sub:
    if re.match(re_pattern, sentence):
        lines[-1].pop()
        lines.append([])
    else:
        lines[-1].append(sentence)
lines = lines[1:]

# print(lines)
# df = pd.DataFrame(lines)
df = pd.DataFrame()
for i in lines:
    print(i)
Code Example #37
 def get_time(self, match):
     time = match.select_one(".table__time-text")
     time = time.text
     return int(time.split(':')[0])  # mm:ss -> mm
Code Example #38
    def onMessage(self, Connection, Data):
        Status = int(Data["Status"])
        if (Status == 200):
            strData = Data["Data"].decode("utf-8", "ignore")
            response = json.loads(strData)
            Domoticz.Debug("JSON REPLY: " + str(response))
            if (self.melcloud_state == "LOGIN"):
                if (response["ErrorId"] == None):
                    Domoticz.Log("MELCloud login successful")
                    self.melcloud_key = response["LoginData"]["ContextKey"]
                    self.melcloud_units_init()
                elif (response["ErrorId"] == 1):
                    Domoticz.Log(
                        "MELCloud login fail: check login and password")
                    self.melcloud_state = "LOGIN_FAILED"
                else:
                    Domoticz.Log("MELCloud failed with unknown error " +
                                 str(response["ErrorId"]))
                    self.melcloud_state = "LOGIN_FAILED"

            elif (self.melcloud_state == "UNITS_INIT"):
                idoffset = 0
                Domoticz.Log("Find " + str(len(response)) + " buildings")
                for building in response:
                    Domoticz.Log("Find " +
                                 str(len(building["Structure"]["Areas"])) +
                                 " areas in building " + building["Name"])
                    Domoticz.Log("Find " +
                                 str(len(building["Structure"]["Floors"])) +
                                 " floors in building " + building["Name"])
                    Domoticz.Log("Find " +
                                 str(len(building["Structure"]["Devices"])) +
                                 " devices  in building " + building["Name"])
                    #Search in devices
                    for device in building["Structure"]["Devices"]:
                        self.melcloud_add_unit(device, idoffset)
                        idoffset += len(self.list_switchs)
                    #Search in areas
                    for area in building["Structure"]["Areas"]:
                        for device in area["Devices"]:
                            self.melcloud_add_unit(device, idoffset)
                            idoffset += len(self.list_switchs)
                    #Search in floors
                    for floor in building["Structure"]["Floors"]:
                        for device in floor["Devices"]:
                            self.melcloud_add_unit(device, idoffset)
                            idoffset += len(self.list_switchs)
                        for area in floor["Areas"]:
                            for device in area["Devices"]:
                                self.melcloud_add_unit(device, idoffset)
                                idoffset += len(self.list_switchs)
                self.melcloud_create_units()
            elif (self.melcloud_state == "UNIT_INFO"):
                for unit in self.list_units:
                    if (unit['id'] == response['DeviceID']):
                        Domoticz.Log("Update unit {0} information.".format(
                            unit['name']))
                        unit['power'] = response['Power']
                        unit['op_mode'] = response['OperationMode']
                        unit['room_temp'] = response['RoomTemperature']
                        unit['set_temp'] = response['SetTemperature']
                        unit['set_fan'] = response['SetFanSpeed']
                        unit['vaneH'] = response['VaneHorizontal']
                        unit['vaneV'] = response['VaneVertical']
                        unit['next_comm'] = False
                        Domoticz.Debug("Heartbeat unit info: " + str(unit))
                        self.domoticz_sync_switchs(unit)
            elif (self.melcloud_state == "SET"):
                for unit in self.list_units:
                    if (unit['id'] == response['DeviceID']):
                        date, time = response['NextCommunication'].split("T")
                        hours, minutes, sec = time.split(":")
                        sign = Parameters["Mode1"][0]
                        value = Parameters["Mode1"][1:]
                        Domoticz.Debug("TIME OFFSSET :" + sign + value)
                        if (sign == "-"):
                            hours = int(hours) - int(value)
                            if (hours < 0):
                                hours = hours + 24
                        else:
                            hours = int(hours) + int(value)
                            if (hours >= 24):
                                hours = hours - 24
                        next_comm = date + " " + str(
                            hours) + ":" + minutes + ":" + sec
                        unit[
                            'next_comm'] = "Update for last command at " + next_comm
                        Domoticz.Log("Next update for command: " + next_comm)
                        self.domoticz_sync_switchs(unit)
            else:
                Domoticz.Log("State not implemented:" + self.melcloud_state)
        else:
            Domoticz.Log("MELCloud received unknown message with error code " +
                         Data["Status"])
Code Example #39
def Hourly_stats_trends(dataArray, inputField):
    """
    Create hourly stats and trends for device data across days
    """

    #get length of retrieved data
    dataLength = len(dataArray)

    #initialize hourlyData
    hourlyData = []

    #loop through data for the device
    for x in range(0, dataLength):
        #get deviceID, timeStamp and field name
        timeStamp = str(dataArray[x]["timeStamp"])
        deviceId = str(dataArray[x]["deviceID"])
        field = dataArray[x][inputField]

        #split time to get hour
        timeStampSplit = timeStamp.split("T")
        day = timeStampSplit[0]
        time = timeStampSplit[1]
        timeSplit = time.split(":")
        hour = timeSplit[0]

        #get length of hourlyData
        hourlyDataLength = len(hourlyData)
        hourlyDataExists = False

        #loop through hourlyData and check if entry exists
        for j in range(0, hourlyDataLength):

            if (deviceId == hourlyData[j]["deviceID"]) and (day == hourlyData[j]["date"]) and (hour == hourlyData[j]["hour"]):
                #if exists, then update hourly fields accordingly
                hourlyData[j]["sumField"] += field
                hourlyData[j]["countEntries"] += 1

                if field > hourlyData[j]["maxField"]:
                    hourlyData[j]["maxField"] = field
                if field < hourlyData[j]["minField"]:
                    hourlyData[j]["minField"] = field

                hourlyData[j]["avgField"] = hourlyData[j]["sumField"] / hourlyData[j]["countEntries"]
                hourlyDataExists = True

        #if entry for hourly data does not exist, then create new one
        if hourlyDataExists == False:
            plotTimeStamp = day + "T" + hour + ":30:00.000Z"
            jsonData = {"deviceID": deviceId, "date": day, "hour": hour, "plotTimeStamp": plotTimeStamp, "maxField": field, "minField": field, "avgField": field, "sumField": field, "countEntries": 1, "maxSlopeLastHour": None, "minSlopeLastHour": None, "avgSlopeLastHour": None, "field": field}
            hourlyData.append(jsonData)

    #sort hourlyData per timeStamp
    sortedHourlyData = sorted(hourlyData, key=lambda k: k['plotTimeStamp'])
    sortedHourlyDataLength = len(sortedHourlyData)

    for k in range(1, sortedHourlyDataLength):
        sortedHourlyData[k]["maxSlopeLastHour"] = sortedHourlyData[k]["maxField"] - sortedHourlyData[k-1]["maxField"]
        sortedHourlyData[k]["minSlopeLastHour"] = sortedHourlyData[k]["minField"] - sortedHourlyData[k-1]["minField"]
        sortedHourlyData[k]["avgSlopeLastHour"] = sortedHourlyData[k]["avgField"] - sortedHourlyData[k-1]["avgField"]

    #return sortedHourlyData
    return sortedHourlyData
Code Example #40
File: HABLoader.py Project: justkeating/stoqs
    def process_csv_file(self, fh):
        '''
        Iterate through lines of iterator to csv file and pull out data for loading into STOQS
        '''
        ds = {}
        DA = BaseType()
        DA.attributes = {'units': 'ng ml-1 ' , 
                         'long_name': 'Domoic Acid', 
                         'standard_name': 'domoic_acid',
                         'type': 'float', 
                         'description': 'Domoic acid' ,
                         'origin': 'www.sccoos.org' }
        PD = BaseType()
        PD.attributes = {'units': 'cells l-1', 
                         'long_name': 'Pseudo-nitzschia delicatissima group', 
                         'standard_name': 'pseudo_nitzschia_delicatissima', 
                         'name':  'pseudo_nitzschia_delicatissima' ,
                         'type':  'float' ,
                         'description': 'Pseudo-nitzschia delicatissima group (cells/L)' ,
                         'origin': 'www.sccoos.org' 
                         } 
        PA = BaseType()
        PA.attributes = {'units': 'cells l-1', 
                         'long_name': 'Pseudo-nitzschia seriata group', 
                         'standard_name': 'pseudo_nitzschia_seriata', 
                         'name':  'pseudo_nitzschia_seriata' ,
                         'type':  'float' ,
                         'description': 'Pseudo-nitzschia seriata group (cells/L)' ,
                         'origin': 'www.sccoos.org' 
                         }
        alexandrium = BaseType()
        alexandrium.attributes = {'units': 'cells l-1', 
                         'long_name': 'Alexandrium', 
                         'standard_name': 'alexandrium', 
                         'name':  'alexandrium' ,
                         'type':  'float' ,
                         'description': 'Alexandrium spp. (cells/L)' ,
                         'origin': 'www.sccoos.org' 
                         }
        phosphate = BaseType()
        phosphate.attributes = {'units': 'm-3 mol l-1', 
                         'long_name': 'Phosphate', 
                         'standard_name': 'phosphate_dissolved_in_seawater', 
                         'name':  'Phosphate' ,
                         'type':  'float' ,
                         'description': 'Phosphate (uM)' ,
                         'origin': 'www.sccoos.org' 
                         }
        ammonia = BaseType()
        ammonia.attributes = {'units': 'm-3 mol l-1', 
                         'long_name': 'Ammonia', 
                         'standard_name': 'ammonia_dissolved_in_seawater', 
                         'name':  'ammonia_dissolved_in_sewater' ,
                         'type':  'float' ,
                         'description': 'Ammonia (uM)' ,
                         'origin': 'www.sccoos.org' 
                         }
        silicate = BaseType()
        silicate.attributes = {'units': 'm-3 mol l-1', 
                         'long_name': 'Silicate', 
                         'standard_name': 'silicate_dissolved_in_seawater', 
                         'name':  'silicate_dissolved_in_seawater' ,
                         'type':  'float' ,
                         'description': 'Silicate (uM)' ,
                         'origin': 'www.sccoos.org' 
                         }
        chlorophyll = BaseType()
        chlorophyll.attributes = {'units': 'kg m-3', 
                         'long_name': 'Chlorophyll', 
                         'standard_name': 'mass_concentration_of_chlorophyll_in_sea_water', 
                         'name':  'mass_concentration_of_chlorophyll_in_sea_water' ,
                         'type':  'float' ,
                         'description': 'Chlorophyll (kg/m3)' ,
                         'origin': 'www.sccoos.org' 
                         }

        prorocentrum = BaseType()
        prorocentrum.attributes = {'units': 'cells l-1', 
                         'long_name': 'Prorocentrum', 
                         'standard_name': 'mass_concentration_of_prorocentrum_in_sea_water', 
                         'name':  'mass_concentration_of_prorocentrum_in_sea_water' ,
                         'type':  'float' ,
                         'description': 'Prorocentrum spp. (cells/L)' ,
                         'origin': 'www.sccoos.org' 
                         }

        self.ds = { 'Domoic Acid (ng/mL)': DA, 'Pseudo-nitzschia seriata group (cells/L)': PA,
                    'Pseudo-nitzschia delicatissima group (cells/L)': PD,
                    'Phosphate (uM)': phosphate,
                    'Silicate (uM)': silicate, 'Ammonia (uM)': ammonia,
                    'Chlorophyll (mg/m3)': chlorophyll, 'Chlorophyll 1 (mg/m3)': chlorophyll,
                    'Chlorophyll 2 (mg/m3)': chlorophyll ,
                    'Alexandrium spp. (cells/L)': alexandrium 
                    }
                    
   
        self.include_names = ['Pseudo-nitzschia seriata group (cells/L)',
                              'Pseudo-nitzschia delicatissima group (cells/L)',
                              'Domoic Acid (ng/mL)',
                              'Chlorophyll (mg/m3)', 'Chlorophyll 1 (mg/m3)', 'Chlorophyll 2 (mg/m3)',
                              'Prorocentrum spp. (cells/L)', 'Silicate (uM)', 'Ammonia (uM)',
                              'Nitrate (uM)', 'Phosphate (uM)', 
                              'Alexandrium spp. (cells/L)']

        self.initDB()

        for pn in self.include_names:
            self.parmCount[pn] = 0

        reader = csv.reader(fh)
        for line in fh:
            # Skip all lines that don't begin with '"' nor ' ' then open that with csv.DictReader
            if not line.startswith('"') and not line.startswith(' '):
                titles = reader.next()
                reader = csv.DictReader(fh, titles)
                for r in reader:
                    year = int(r['year'])
                    month = int(r['month'])
                    day = int(r['day'])
                    time = r['time']
                    lat = float(r['latitude'])
                    lon = float(r['longitude'])
                    depth = float(r['depth (m)'])
                    location = r['location']
                    hours = int(time.split(':')[0])
                    mins = int(time.split(':')[1])
                    secs = int(time.split(':')[2])

                    parmNameValues = []
                    for name in self.ds.keys():                  
                        if name.startswith('Chlorophyll'):
                            parmNameValues.append((name, 1e-5*float(r[name])))
                        else:
                            parmNameValues.append((name, float(r[name])))

                    # Check to make sure all data from this file are from the same location.
                    # The program could be modified to read data in one file from multiple locations by reading data into a hash keyed by location name 
                    # and then stepping through each key of the hash saving the data for each location into its own activity.  For now just require
                    # each data file to have data from just one location.
                    try: 
                        if lat != lastlat or lon != lastlon:
                            logger.error("lat and lon are not the same for location = %s and lastlocation = %s.  The input data should have just one location." % (location, lastlocation))
                            sys.exit(-1)
                    except NameError, e:
                        # Expected first time through when lastlon & lastlat don't yet exist
                        pass

                    # Load data 
                    dt = datetime(year, month, day, hours, mins, secs)    
                    self.load_measurement(lon, lat, depth, dt, parmNameValues)

                    # Load sample
                    bName = dt.isoformat()
                    self.load_sample(lon, lat, depth, dt, bName)

                    lastlat = lat
                    lastlon = lon
                    lastlocation = location
Code Example #41
def srt_time_to_seconds(time):
    split_time = time.split(',')
    major, minor = (split_time[0].split(':'), split_time[1])
    return int(major[0]) * 3600 + int(major[1]) * 60 + int(major[2]) + float(minor) / 1000
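
A hypothetical SRT timestamp run through the conversion above:

secs = srt_time_to_seconds("01:02:03,456")   # -> 1*3600 + 2*60 + 3 + 456/1000.0 = 3723.456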
Code Example #42
def convTimezone(time):
    hour = int(time.split('T')[1].split(':')[0])
    new_hr = (hour + 5)%24
    return (f'{new_hr:02}')
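
A hypothetical ISO timestamp showing the +5 hour shift and the wrap past midnight:

shifted = convTimezone("2021-05-01T22:15:00Z")   # hour 22 -> (22 + 5) % 24 = 3 -> "03"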
Code Example #43
def open_table(url):
    xml = ""
    z1 = m1
    table = url.split("/")[-3]
    key = url.split("/")[-2]
    tag = url.split("/")[-1]
    at = Airtable(key, table, api_key='keyikW1exArRfNAWj')
    match = at.search('category', tag ,view='Grid view') 
    for field in match:
        try:
            res = field['fields']   
            name = res['name']
            name = remove_non_ascii(name)
            thumbnail = res['thumbnail']
            fanart = res['fanart']
            link1 = res['link1']
            link2 = res['link2']
            link3 = res['link3']
            link4 = res['link4']
            link5 = res['link5']
            link6 = res['link6']
            time = res['Time']
            if time == "-":
                time = ""
                dsp = name    
            else:
                if "Final Score" in time:
                    time2 = time
                    dec = ""
                else:    
                    time2 = time.split("@")[-1]
                    dec = time.split("@")[0]    
                (display_time) = convDateUtil(time2, 'default', 'US/Eastern')
                dsp = ("[B][COLORdodgerblue]%s  %s[/COLOR][/B]" % (dec,display_time)) + "    " + name
            if link2 == "-":
                xml +=  "<item>"\
                        "<title>%s</title>"\
                        "<thumbnail>%s</thumbnail>"\
                        "<fanart>%s</fanart>"\
                        "<link>"\
                        "<sublink>%s</sublink>"\
                        "</link>"\
                        "</item>" % (dsp,thumbnail,fanart,link1)                                          
            elif link3 == "-":
                xml +=  "<item>"\
                        "<title>%s</title>"\
                        "<thumbnail>%s</thumbnail>"\
                        "<fanart>%s</fanart>"\
                        "<link>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "</link>"\
                        "</item>" % (dsp,thumbnail,fanart,link1,link2)
            elif link4 == "-":
                xml +=  "<item>"\
                        "<title>%s</title>"\
                        "<thumbnail>%s</thumbnail>"\
                        "<fanart>%s</fanart>"\
                        "<link>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "</link>"\
                        "</item>" % (dsp,thumbnail,fanart,link1,link2,link3)
            elif link5 == "-":
                xml +=  "<item>"\
                        "<title>%s</title>"\
                        "<thumbnail>%s</thumbnail>"\
                        "<fanart>%s</fanart>"\
                        "<link>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "</link>"\
                        "</item>" % (dsp,thumbnail,fanart,link1,link2,link3,link4)
            elif link6 == "-":
                xml +=  "<item>"\
                        "<title>%s</title>"\
                        "<thumbnail>%s</thumbnail>"\
                        "<fanart>%s</fanart>"\
                        "<link>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "</link>"\
                        "</item>" % (dsp,thumbnail,fanart,link1,link2,link3,link4,link5)
            else:                
                xml +=  "<item>"\
                        "<title>%s</title>"\
                        "<thumbnail>%s</thumbnail>"\
                        "<fanart>%s</fanart>"\
                        "<link>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "<sublink>%s</sublink>"\
                        "</link>"\
                        "</item>" % (dsp,thumbnail,fanart,link1,link2,link3,link4,link5,link6) 
        except:
            pass                                                                     
    jenlist = JenList(xml)
    display_list(jenlist.get_list(), jenlist.get_content_type())
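The link1-link6 branches above differ only in how many <sublink> entries they emit. A hedged sketch of the same <item> built by filtering out the "-" placeholders, reusing the res, dsp, thumbnail and fanart values from the loop:

links = [res.get('link%d' % i, '-') for i in range(1, 7)]
sublinks = ''.join('<sublink>%s</sublink>' % link for link in links if link != '-')
xml += ('<item>'
        '<title>%s</title>'
        '<thumbnail>%s</thumbnail>'
        '<fanart>%s</fanart>'
        '<link>%s</link>'
        '</item>' % (dsp, thumbnail, fanart, sublinks))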
Code example #44
0
req = urllib.request.Request(url)
req.add_header("Authorization", "Basic %s" % base64string)
req.add_header("Referer", "{}://{}/userRpm/SystemLogRpm.htm".format(protocol,ip))
f = urllib.request.urlopen(req)
content = f.read()
decoded_content = content.decode('utf-8', errors="ignore")

time_diff = 0
now_time = datetime.now()
current_year = str(now_time.year)

print('"Timestamp","Event Created","Module","Severity","Message"')
for line in decoded_content.splitlines():
    line = line.strip()
    if len(line) == 0:
        continue
    if line.startswith("#"):
        if "Time = " in line:
            time = line.split("=")
            time = time[1].strip()
            time = time.split(" ")
            dt = datetime.strptime(time[0] + " " + time[2],  "%Y-%m-%d %H:%M:%S")
            current_year = time[0].split("-")[0]
            time_diff = (dt - now_time) // timedelta(seconds=1)
        continue
    columns = list(map(lambda x: x.strip(), line.split("\t")))
    dt = datetime.strptime(columns[0] + " " + current_year,  "%b %d %H:%M:%S %Y")
    dt = dt + timedelta(seconds=time_diff)
    integer_timestamp = (dt - epoch) // timedelta(seconds=1)
    print('"' + str(integer_timestamp) + '","' + columns[0] + '","' + columns[1] + '","' + columns[2] +  '","' + columns[3] + '"')
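The time_diff adjustment above shifts every log entry by the offset between the router's "# Time = ..." header and the collector's clock. The same arithmetic with made-up values:

from datetime import datetime, timedelta

now_time = datetime(2023, 5, 1, 12, 0, 0)                    # collector clock
dt_header = datetime(2023, 5, 1, 11, 58, 30)                 # parsed from the "# Time = ..." header
time_diff = (dt_header - now_time) // timedelta(seconds=1)   # -90 seconds

dt_line = datetime(2023, 5, 1, 11, 59, 0)                    # a parsed log line
dt_line += timedelta(seconds=time_diff)                      # 2023-05-01 11:57:30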
Code example #45
0
def generate_list_dismax_part_and_filter_time_from_info(info):
    # info is a dict with keys of obj, loc, period, time, timeofday
    # Time will be in filter --> No score
    field_clip = 'description_clip'
    field_des = 'description'
    delta_hour = 0.5

    # For obj and loc --> query as normal
    obj_sentence = ', '.join(info['obj'])
    loc_sentence = ', '.join(info['loc'])

    queries_part = []
    basic_obj_part = create_json_query_string_part(query=obj_sentence,
                                                   field=field_clip,
                                                   boost=3)
    loc_as_obj_part = create_json_query_string_part(query=loc_sentence,
                                                    field=field_clip,
                                                    boost=3)
    loc_as_obj_part_des = create_json_query_string_part(query=loc_sentence,
                                                        field=field_des,
                                                        boost=5)

    queries_part = queries_part + [
        basic_obj_part
    ] if basic_obj_part is not None else queries_part
    queries_part = queries_part + [
        loc_as_obj_part
    ] if loc_as_obj_part is not None else queries_part
    queries_part = queries_part + [
        loc_as_obj_part_des
    ] if loc_as_obj_part_des is not None else queries_part

    having_location, location_query = create_location_query(info['loc'],
                                                            field="address",
                                                            boost=10)
    queries_part = queries_part + [location_query
                                   ] if having_location else queries_part

    # Expand object, location here

    # For time and timeofday --> filter is better
    filters_part = []
    if len(info['time']) > 0:
        for time in info['time']:
            if time in [
                    'monday', 'tuesday', 'wednesday', 'thursday', 'friday',
                    'saturday', 'sunday'
            ]:
                time_json = [{"match": {"weekday": time}}]
            else:
                time_convert = convert_text_to_date(time)
                if len(time_convert
                       ) == 10:  # full day, and month, year (be added)
                    day = int(time_convert[0:2])
                    month = int(time_convert[3:5])
                else:  # only month
                    day = -1
                    month = int(time_convert)
                time_json = [{"term": {"month": month}}]
                if day >= 0:
                    time_json += [{"term": {"day": day}}]
            filters_part += time_json

    if len(info['timeofday']) > 0:
        time_json = []
        for time in info['timeofday']:
            if ';' in time:
                temp = time.split('; ')
                oclock = parse(temp[1])
                oclock = oclock.hour
                if temp[0] in ['after']:
                    time_json += [{
                        "range": {
                            "hour": {
                                "gte": oclock - delta_hour
                            }
                        }
                    }]
                elif temp[0] in ['to', 'til', 'before']:
                    time_json += [{
                        "range": {
                            "hour": {
                                "lte": oclock + delta_hour
                            }
                        }
                    }]
                elif temp[0] in ['at', 'around']:
                    time_json += [{
                        "range": {
                            "hour": {
                                "gte": oclock - delta_hour,
                                "lte": oclock + delta_hour
                            }
                        }
                    }]
            else:
                oclock = parse(time)
                oclock = oclock.hour
                time_json += [{"range": {"hour": {"gte": oclock - 1}}}]
        filters_part += time_json

    return queries_part, filters_part
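The function returns the scoring parts and the filter parts separately. A hedged sketch of how they might be combined into an Elasticsearch-style bool query body; the surrounding structure is an assumption, not taken from this project:

def build_query_body(queries_part, filters_part, size=100):
    # "should" scores the text parts, "filter" constrains by time without affecting the score
    return {
        "size": size,
        "query": {
            "bool": {
                "should": queries_part,
                "filter": filters_part,
            }
        },
    }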
Code example #46
0
File: match_scraper.py Project: jliaz/msi-2021
 def __init__(self, type, time, team):
     self.type = type
     self.time = time
     self.time_in_sec = int(time.split(":")[0]) * 60 + int(
         time.split(":")[1])
     self.team = team
Code example #47
0
def main():

    import matplotlib.pyplot as plt
    global nodes
    global cores

    # Parse command line arguments
    args = parse_args()
    global nrun
    nrun = args.nrun
    global attribute
    attribute = 'Time'
    TUNER_NAME = args.optimization

    (machine, processor, nodes, cores) = GetMachineConfiguration()
    print("machine: " + machine + " processor: " + processor + " num_nodes: " +
          str(nodes) + " num_cores: " + str(cores))
    os.environ['MACHINE_NAME'] = machine
    os.environ['TUNER_NAME'] = TUNER_NAME

    t = Categoricalnorm(["Time"], transform="onehot", name="t")
    #t = Real(0., 10., transform="normalize", name="t")
    k = Integer(5, 105, transform="normalize", name="k")
    l = Real(0., 1., transform="normalize", name="l")
    o = Real(float('-Inf'), float('Inf'), name="o")

    IS = Space([t])
    PS = Space([k, l])
    OS = Space([o])

    constraints = {}

    problem = TuningProblem(IS, PS, OS, objectives, constraints, None)

    computer = Computer(nodes=nodes, cores=cores, hosts=None)

    options = Options()
    options['model_restarts'] = 1
    options['distributed_memory_parallelism'] = False
    options['shared_memory_parallelism'] = False
    options['objective_evaluation_parallelism'] = False
    options['objective_multisample_threads'] = 1
    options['objective_multisample_processes'] = 1
    options['objective_nprocmax'] = 1
    options['model_processes'] = 1
    options['model_class'] = 'Model_LCM'
    #options['model_class'] = 'Model_GPy_LCM'
    options['verbose'] = False
    options.validate(computer=computer)

    giventask = [[attribute]]

    global X_train
    global Y_train
    global X_test
    global Y_test

    X_train = []
    Y_train = []
    X_test = []
    Y_test = []

    if not os.path.exists("gptune-search-lcm.db"):
        os.system("mkdir -p gptune-search-lcm.db")
    with open(
            "gptune-search-lcm.db/household." + str(nrun) + "." + attribute +
            ".log", "w") as f_out:
        f_out.write(
            "NKnots,Lambda,RegressionTime,InTestTime,InMSE,InR2,InAR2,OutTestTime,OutMSE,OutR2,OutAR2\n"
        )

    with open("household/household_power_consumption.txt", "r") as f_in:
        f_in.readline()
        datalines = f_in.readlines()

        traindata_arr = datalines[0:1000000]
        testdata_arr = datalines[1000000:1100000]

        wrong_data_cnt = 0

        import time
        from datetime import datetime

        start_dt = datetime(2006, 12, 16, 17, 24, 00)
        start_time_val = int(round(start_dt.timestamp()))

        for traindata in traindata_arr:
            data = traindata.split(";")
            date = data[0]
            time = data[1]
            date_split = date.split("/")
            time_split = time.split(":")

            try:
                dt = datetime(int(date_split[2]), int(date_split[1]),
                              int(date_split[0]), int(time_split[0]),
                              int(time_split[1]), int(time_split[2]))
                time_val = (int(round(dt.timestamp())) - start_time_val) / 60
                sub_metering_3 = float(data[-1])
                X_train.append(time_val)
                Y_train.append(sub_metering_3 + 0.00001)
            except:
                wrong_data_cnt += 1
        print("wrong_data_cnt (train): ", wrong_data_cnt)

        wrong_data_cnt = 0

        for testdata in testdata_arr:
            data = testdata.split(";")
            date = data[0]
            time = data[1]
            date_split = date.split("/")
            time_split = time.split(":")

            try:
                dt = datetime(int(date_split[2]), int(date_split[1]),
                              int(date_split[0]), int(time_split[0]),
                              int(time_split[1]), int(time_split[2]))
                time_val = (int(round(dt.timestamp())) - start_time_val) / 60
                sub_metering_3 = float(data[-1])
                X_test.append(time_val)
                Y_test.append(sub_metering_3 + 0.00001)
            except:
                wrong_data_cnt += 1
        print("wrong_data_cnt (test): ", wrong_data_cnt)

    NI = len(giventask)
    NS = nrun

    TUNER_NAME = os.environ['TUNER_NAME']

    if (TUNER_NAME == 'GPTune'):
        data = Data(problem)
        gt = GPTune(problem,
                    computer=computer,
                    data=data,
                    options=options,
                    driverabspath=os.path.abspath(__file__))
        (data, modeler, stats) = gt.MLA(NS=NS,
                                        Igiven=giventask,
                                        NI=NI,
                                        NS1=int(NS / 2))

        print("stats: ", stats)
        """ Print all input and parameter samples """
        for tid in range(NI):
            print("tid: %d" % (tid))
            print("    t:%s " % (data.I[tid][0]))
            print("    Ps ", data.P[tid])
            print("    Os ", data.O[tid].tolist())
            print('    Popt ', data.P[tid][np.argmin(data.O[tid])], 'Oopt ',
                  min(data.O[tid])[0], 'nth ', np.argmin(data.O[tid]))

    if (TUNER_NAME == 'opentuner'):
        (data, stats) = OpenTuner(T=giventask,
                                  NS=NS,
                                  tp=problem,
                                  computer=computer,
                                  run_id="OpenTuner",
                                  niter=1,
                                  technique=None)
        print("stats: ", stats)
        """ Print all input and parameter samples """
        for tid in range(NI):
            print("tid: %d" % (tid))
            print("    t:%s " % (data.I[tid][0]))
            print("    Ps ", data.P[tid])
            print("    Os ", data.O[tid].tolist())
            print('    Popt ', data.P[tid][np.argmin(data.O[tid])], 'Oopt ',
                  min(data.O[tid])[0], 'nth ', np.argmin(data.O[tid]))

    if (TUNER_NAME == 'hpbandster'):
        (data, stats) = HpBandSter(T=giventask,
                                   NS=NS,
                                   tp=problem,
                                   computer=computer,
                                   run_id="HpBandSter",
                                   niter=1)
        print("stats: ", stats)
        """ Print all input and parameter samples """
        for tid in range(NI):
            print("tid: %d" % (tid))
            print("    t:%s " % (data.I[tid][0]))
            print("    Ps ", data.P[tid])
            print("    Os ", data.O[tid].tolist())
            print('    Popt ', data.P[tid][np.argmin(data.O[tid])], 'Oopt ',
                  min(data.O[tid])[0], 'nth ', np.argmin(data.O[tid]))
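A single-row illustration of the household_power_consumption.txt timestamp conversion used earlier in this example; the sample line is illustrative and only assumes the semicolon-separated date;time;...;sub_metering_3 layout the loop expects:

from datetime import datetime

start_time_val = int(round(datetime(2006, 12, 16, 17, 24, 0).timestamp()))

sample = "16/12/2006;17:25:00;4.216;0.418;234.840;18.400;0.000;1.000;17.000"
data = sample.split(";")
d, t = data[0].split("/"), data[1].split(":")
dt = datetime(int(d[2]), int(d[1]), int(d[0]), int(t[0]), int(t[1]), int(t[2]))
time_val = (int(round(dt.timestamp())) - start_time_val) / 60   # 1.0 minute after the first reading
sub_metering_3 = float(data[-1])                                # 17.0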
Code example #48
0
history = lstm.fit(x=train_gen,
                   epochs=1000,
                   validation_data=val_gen,
                   verbose=1,
                   shuffle=False,
                   callbacks=[es])

plt.plot(history.history['auc'], label='train')
plt.plot(history.history['val_auc'], label='val')
plt.legend()
plt.show()

time_end = time.time()
time = time_end - time_start
time = str(datetime.timedelta(seconds=time))
time = time.split('.')[0]
print('Time: ', time)

# Testing
subjects = range(1, 13)

t = pd.DataFrame(index=subjects, columns=['AUROC'])
t.index.rename('Subject', inplace=True)

# testing
for subject in subjects:
    print('Testing subject %d' % subject)
    # load testing data
    x_test = load_subject_data(subject, 'data', 7, 8)
    y_test = load_subject_data(subject, 'events', 7, 8)
Code example #49
0
File: uoftical.py Project: Zylphrex/UofTiCal
def hour_minute(time):
    hour, minute = time.split(':')
    return int(hour.strip()), int(minute.strip())
Code example #50
0
def getduration(time):
    (hms, ms) = time.split('.')
    (h, m, s) = hms.split(':')
    totalms = int(ms) + (int(s) * 100) + (int(m) * 100 * 60) + (int(h) * 100 *
                                                                60 * 60)
    return totalms
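Note that getduration scales everything by 100, so, assuming a two-digit fractional field, the return value is in hundredths of a second despite the totalms name. A worked example:

# "0:01:02.34" -> 34 + 2*100 + 1*100*60 = 6234 hundredths of a second (62.34 s)
print(getduration("0:01:02.34"))  # 6234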
Code example #51
0
def get_comment_week(self, _str):

    time = _str[0:10]
    times = time.split('-')
    return (times[0] + '-' + str(datetime.date(int(times[0]), int(times[1]), int(times[2])).isocalendar()[1]).zfill(2))
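For example, an input beginning "2019-03-04" falls in ISO week 10 of 2019, so the method returns "2019-10":

import datetime
print(datetime.date(2019, 3, 4).isocalendar()[1])  # 10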
Code example #52
0
File: WorkTime.py Project: zyu911/autoyu
 def set_volume(self, time, present):
     second = time.split(':')[-1]
     if not self.data or (self.data[-1][0] != second
                          and self.data[-1][1] == present):
         self.data.append([second, present])
Code example #53
0
File: download.py Project: yyy1993/persepolis
def sigmaTime(time):
    hour, minute = time.split(":")
    return (int(hour)*60 + int(minute))
Code example #54
0
File: parallel_session.py Project: alcinos/dps
    def _run(self):
        if self.dry_run:
            print("Dry run, so not running.")
            return

        if "slurm" in self.kind:
            # Have to jump through a hoop to get the proper node-local storage on cedar/graham.
            self.local_scratch_prefix = self.get_slurm_var("SLURM_TMPDIR")
            self.local_scratch = os.path.join(
                self.local_scratch_prefix,
                os.path.basename(self.job_path))

            # Compute new time limits based on the actual time remaining (protect against e.g. job starting late)

            print("Time limits before adjustment:")
            self.print_time_limits()

            job_id = os.getenv("SLURM_JOBID")
            command = 'squeue -h -j {} -o "%L"'.format(job_id)
            returncode, stdout, stderr = self.execute_command(command, frmt=False, robust=False)
            days = 0
            if "-" in stdout:
                days, time = stdout.split("-")
                days = int(days)
            else:
                time = stdout

            time = time.split(":")

            hours = int(time[-3]) if len(time) > 2 else 0
            minutes = int(time[-2]) if len(time) > 1 else 0
            seconds = int(time[-1])

            wall_time_delta = datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
            wall_time_seconds = int(wall_time_delta.total_seconds())

            print("Actual remaining walltime: {}".format(wall_time_delta))
            print("Time limits after adjustment:")

            (self.wall_time_seconds, self.total_seconds_per_step,
             self.parallel_seconds_per_step, self.python_seconds_per_step) = \
                self.compute_time_limits(
                    wall_time_seconds, self.cleanup_time, self.slack_time, self.step_time_limit, self.n_steps)

        self.print_time_limits()

        with cd(self.job_path):
            print("\n" + ("=" * 80))
            job_start = datetime.datetime.now()
            print("Starting job at {}".format(job_start))

            job = ReadOnlyJob(self.input_zip)
            subjobs_remaining = sorted([op.idx for op in job.ready_incomplete_ops(sort=False)])

            n_failures = defaultdict(int)
            dead_jobs = set()

            i = 0
            while subjobs_remaining:
                step_start = datetime.datetime.now()

                print("\nStarting step {} at: ".format(i) + "=" * 90)
                print("{} ({} since start of job)".format(step_start, step_start - job_start))

                if not self.host_pool:
                    if self.kind == "pbs":
                        with open(os.path.expandvars("$PBS_NODEFILE"), 'r') as f:
                            self.host_pool = list(set([s.strip() for s in iter(f.readline, '')]))
                            print(self.host_pool)
                    elif "slurm" in self.kind:
                        p = subprocess.run(
                            'scontrol show hostnames $SLURM_JOB_NODELIST', stdout=subprocess.PIPE, shell=True)
                        self.host_pool = list(set([host.strip() for host in p.stdout.decode().split('\n') if host]))
                    else:
                        raise Exception("NotImplemented")

                self.hosts, self.n_procs = self.recruit_hosts(
                    self.hpc, self.min_hosts, self.max_hosts, self.host_pool,
                    self.ppn, max_procs=len(subjobs_remaining))

                indices_for_step = subjobs_remaining[:self.n_procs]
                self._step(i, indices_for_step)
                self._checkpoint(i)

                job = ReadOnlyJob(self.archive_root)

                subjobs_remaining = set([op.idx for op in job.ready_incomplete_ops(sort=False)])

                for j in indices_for_step:
                    if j in subjobs_remaining:
                        n_failures[j] += 1
                        if n_failures[j] > self.n_retries:
                            print("All {} attempts at completing job with index {} have failed, "
                                  "permanently removing it from set of eligible jobs.".format(n_failures[j], j))
                            dead_jobs.add(j)

                subjobs_remaining = [idx for idx in subjobs_remaining if idx not in dead_jobs]
                subjobs_remaining = sorted(subjobs_remaining)

                i += 1

                print("Step duration: {}.".format(datetime.datetime.now() - step_start))

            self.execute_command("rm -rf {archive_root}", robust=True)

        print("Cleaning up dirty hosts...")
        command = "rm -rf {local_scratch}"
        for host in self.dirty_hosts:
            print("Cleaning host {}...".format(host))
            self.ssh_execute(command, host, robust=True)
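The remaining-walltime parsing above handles the variable-width output of squeue -o "%L" (days-hours:minutes:seconds, with leading fields omitted when zero). A hedged standalone sketch of the same parsing:

import datetime

def parse_remaining(stdout):
    # e.g. "1-02:03:04", "02:03:04" and "03:04" are all possible forms
    days = 0
    if "-" in stdout:
        days, stdout = stdout.split("-")
        days = int(days)
    parts = stdout.split(":")
    hours = int(parts[-3]) if len(parts) > 2 else 0
    minutes = int(parts[-2]) if len(parts) > 1 else 0
    seconds = int(parts[-1])
    return datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)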
Code example #55
0
def process_video(video, lineStart, lineEnd, v_or_h, contourLimit, merchantid,
                  db, date, time):

    ret, frame1 = video.read()
    ret, frame2 = video.read()

    peopleID = 0
    custID = 0
    people = []

    st = time.split(":")

    hour = int(st[0])
    minutes = int(st[1])
    s = int(st[2])

    entry = 0
    exited = 0

    while video.isOpened():

        check, _ = video.read()

        if check:

            milli = video.get(cv2.CAP_PROP_POS_MSEC)
            sec = milli / 1000
            # print(round(sec))
            seconds = s + sec
            if seconds == 60:
                seconds = 0
                minutes += 1
            if minutes == 60:
                minutes = 0
                hour += 1
            if hour == 24:
                hour = 0

            currentVideoTime = str(hour) + ":" + str(minutes) + ":" + str(
                round(seconds))

            diff = cv2.absdiff(frame1, frame2)
            gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
            blur = cv2.GaussianBlur(gray, (5, 5), 0)
            _, thresh = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
            dilated = cv2.dilate(thresh, None, iterations=3)
            contours, _ = cv2.findContours(dilated, cv2.RETR_TREE,
                                           cv2.CHAIN_APPROX_SIMPLE)

            cv2.line(frame1, (lineStart[0], lineStart[1]),
                     (lineEnd[0], lineEnd[1]), (255, 0, 255), 2)

            for contour in contours:

                (x, y, w, h) = cv2.boundingRect(contour)

                if cv2.contourArea(contour) < contourLimit:
                    continue

                new = True

                for i in people:

                    if abs(x - i.getX()) <= w and abs(y - i.getY()) <= h:

                        new = False
                        i.updateCoords(x, y)

                        if v_or_h == 'v':

                            if i.enteringV(lineEnd[1]) == True:

                                entry += 1
                                custID += 1

                                insertPeopleData('enter', custID, merchantid,
                                                 0.0, date, currentVideoTime,
                                                 db)

                            if i.exitingV(lineStart[1]) == True:

                                exited += 1
                                custID += 1

                                insertPeopleData('exit', custID, merchantid,
                                                 0.0, date, currentVideoTime,
                                                 db)

                        if v_or_h == 'h':

                            if i.enteringH(lineEnd[0]) == True:

                                entry += 1
                                custID += 1

                                insertPeopleData('enter', custID, merchantid,
                                                 0.0, date, currentVideoTime,
                                                 db)

                            if i.exitingH(lineStart[0]) == True:

                                exited += 1
                                custID += 1

                                insertPeopleData('exit', custID, merchantid,
                                                 0.0, date, currentVideoTime,
                                                 db)

                        break

                if new == True:
                    p = Customer(peopleID, x, y)
                    people.append(p)
                    peopleID += 1
            ###############################################
            # used for finding camera-specific parameters #
            ###############################################
            # cv2.rectangle(frame1, (x,y), (x + w, y + h), (0, 255, 0), 2)
            #
            # cv2.putText(frame1, "Entered: {}".format(entry), (10, 40), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
            # cv2.putText(frame1, "Exited: {}".format(exited), (10, 60), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)

            #cv2.imshow('vid', frame1)
            ##################################################
            frame1 = frame2
            ret, frame2 = video.read()

        else:
            break

        if cv2.waitKey(5) == ord('x'):
            break

    cv2.destroyAllWindows()
    video.release()
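The rollover bookkeeping above (seconds == 60, minutes == 60, hour == 24) only triggers when the float sum lands exactly on the boundary. A hedged alternative sketch that derives the label from the start time plus the video position instead:

from datetime import datetime, timedelta

def current_video_time(start_time, milli):
    # start_time is the "HH:MM:SS" string passed in; milli is cv2.CAP_PROP_POS_MSEC for the current frame
    start = datetime.strptime(start_time, "%H:%M:%S")
    return (start + timedelta(milliseconds=milli)).strftime("%H:%M:%S")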
Code example #56
0
 def get_date(self):
     time = self.bsobj.find_all('span', class_='time')[0].text
     time = time.split('日')[0]
     time = time.replace('年', '-')
     time = time.replace('月', '-')
     return time
Code example #57
0
File: wilber.py Project: carinaj/pyrocko
def to_secs(date, time):
    toks = date.split('/')
    toks.extend(time.split(':'))
    toks[-1] = round(float(toks[-1]))
    return calendar.timegm([int(x) for x in toks])
Code example #58
0
def get_millisecond(time):
    item = time.split(":")
    sum = (int(item[0]) * 3600 + int(item[1]) * 60 +
           int(item[2])) * 1000 + int(item[3])
    return sum
Code example #59
0
def real_time_get_timeline(user_id):
    allTweets = {}
    single_col = []
    timeline = api.user_timeline(screen_name=user_id, count=100)
    realTweets = []
    for status in timeline:
        json_form = status._json
        source_type = "not known"
        source = json_form["source"].encode('utf8')
        tweet = json_form["text"].encode('utf8')
        tweet_raw = json_form["text"].encode('utf8')
        tweet_arr = tweet.split(" ")
        interacted = "NULL"
        used_tag = "NULL"
        # user_id = json_form["id"].encode('utf8')
        time = json_form["created_at"].encode('utf8')
        time_arr = time.split(" ")
        month = ""
        if time_arr[1] == "Jan":
            month = "01"
        elif time_arr[1] == "Feb":
            month = "02"
        elif time_arr[1] == "Mar":
            month = "03"
        elif time_arr[1] == "Apr":
            month = "04"
        elif time_arr[1] == "May":
            month = "05"
        elif time_arr[1] == "Jun":
            month = "06"
        elif time_arr[1] == "Jul":
            month = "07"
        elif time_arr[1] == "Aug":
            month = "08"
        elif time_arr[1] == "Sep":
            month = "09"
        elif time_arr[1] == "Oct":
            month = "10"
        elif time_arr[1] == "Nov":
            month = "11"
        elif time_arr[1] == "Dec":
            month = "12"
        time = "%s-%s-%s %s" % (time_arr[5], month, time_arr[2], time_arr[3])
        interacted = []
        used_tag = []
        pure_name = ""
        for text in tweet_arr:
            if "://" in text:
                tweet = tweet.replace(text, '')
            if "@" in text:
                pure_name = re.sub("[^A-Za-z]", '', text.strip())
                if pure_name.lower() != user_id.lower():
                    interacted.append(pure_name)
            if "#" in text:
                used_tag.append(re.sub("[^A-Za-z]", '', text.strip()))
        tweet = re.sub("[^A-Za-z]", " ", tweet.strip())
        tweet = ' '.join(tweet.split())
        retweeted = "false"
        if tweet[:3] == "RT ":
            retweeted = "true"
        if "iphone" in source:
            source_type = "Iphone"
        elif "web" in source:
            source_type = "Web"
        elif "ipad" in source:
            source_type = "Ipad"
        elif "media" in source:
            source_type = "media studio"
        else:
            source_type = "Possibly android"
        if len(tweet) < 3:
            continue
        if len(interacted) == 0:
            interacted = "NULL"
        if len(used_tag) == 0:
            used_tag = "NULL"
        strCon = {
            "time": time,
            "text_raw": tweet_raw,
            "text": tweet,
            "fav_count": str(status.favorite_count),
            "source_type": source_type,
            "is_retweeted": retweeted,
            "interacted": interacted,
            "hashtag": used_tag
        }
        realTweets.append(strCon)
    return realTweets
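The month-name chain above can also be written as a dict lookup or left to strptime's %b directive. A hedged sketch that produces the same "YYYY-MM-DD HH:MM:SS" string from Twitter's created_at layout:

from datetime import datetime

def created_at_to_mysql(created_at):
    # "Wed Oct 10 20:19:24 +0000 2018" -> "2018-10-10 20:19:24"
    dt = datetime.strptime(created_at, "%a %b %d %H:%M:%S %z %Y")
    return dt.strftime("%Y-%m-%d %H:%M:%S")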
Code example #60
0
def grasp_weather(year, month, day, city):
    url = WEATHER_CITY[city].format(year, month, day)
    html_content = requests.get(url).content
    b = BeautifulSoup(html_content, "html.parser")
    table = b.find("table", {
        "id": "obsTable"
    }).find_all("tr", {"class": "no-metars"})
    item_count = 0
    for i in table:
        a = []
        for j in i.find_all("td"):
            a.append(j.text.replace("\r", "").replace("\n", ""))
        try:
            time = a[0]
            if "AM" in time:
                hour = int(time.split()[0].split(":")[0])
                minute = int(time.split()[0].split(":")[1])
                if hour == 12:
                    time = "%02d:%02d:00" % (0, minute)
                else:
                    time = "%02d:%02d:00" % (hour, minute)
            elif "PM" in time:
                hour = int(time.split()[0].split(":")[0])
                minute = int(time.split()[0].split(":")[1])
                if hour == 12:
                    time = "%02d:%02d:00" % (hour, minute)
                else:
                    time = "%02d:%02d:00" % (hour + 12, minute)
        except:
            time = "-"
        try:
            temp = float(a[1].split()[0])
        except:
            temp = float(-999)
        try:
            dew_point = float(a[2].split()[0])
        except:
            dew_point = float(-999)
        try:
            humidity = float(a[3][:-1]) / 100.0
        except:
            humidity = float(-999)
        try:
            pressure = int(a[4].split()[0])
        except:
            pressure = float(-999)
        try:
            visibility = float(a[5].split()[0])
        except:
            visibility = float(-999)
        try:
            wind_dir = a[6]
        except:
            wind_dir = "-"
        try:
            wind_speed = float(a[7].split()[0])
        except:
            wind_speed = float(-999)
        try:
            conditions = a[-1]
        except:
            conditions = "-"
        weather = Weather(city=city,
                          date=str("%s-%s-%s" % (year, month, day)),
                          time=time,
                          temp=temp,
                          dew_point=dew_point,
                          humidity=humidity,
                          pressure=pressure,
                          visibility=visibility,
                          wind_dir=wind_dir,
                          wind_speed=wind_speed,
                          conditions=conditions)
        weather.save()
        item_count += 1

    return item_count
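The AM/PM handling above (12 AM -> 00, 12 PM stays 12, other PM hours +12) is equivalent to strptime with the %I and %p directives; a hedged one-liner version:

from datetime import datetime

def to_24h(t):
    # "12:51 AM" -> "00:51:00", "1:06 PM" -> "13:06:00"
    return datetime.strptime(t, "%I:%M %p").strftime("%H:%M:%S")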