Example #1
    def settimes(self, path, accessed_time=None, modified_time=None):
        """Set the accessed time and modified time of a file

        :param path: path to a file
        :type path: string
        :param accessed_time: the datetime the file was accessed (defaults to current time)
        :type accessed_time: datetime
        :param modified_time: the datetime the file was modified (defaults to current time)
        :type modified_time: datetime

        """

        sys_path = self.getsyspath(path, allow_none=True)
        if sys_path is not None:
            now = datetime.datetime.now()
            if accessed_time is None:
                accessed_time = now
            if modified_time is None:
                modified_time = now
            accessed_time = int(time.mktime(accessed_time.timetuple()))
            modified_time = int(time.mktime(modified_time.timetuple()))
            os.utime(sys_path, (accessed_time, modified_time))
            return True
        else:
            raise UnsupportedError("settimes")
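The conversion pattern above (datetime → timetuple() → mktime() → os.utime()) can be exercised directly with the standard library. A minimal standalone sketch, assuming a scratch file path of our own choosing:

import datetime
import os
import time

path = "example.txt"  # hypothetical scratch file
open(path, "w").close()
accessed = datetime.datetime(2020, 1, 1, 12, 0, 0)
modified = datetime.datetime(2020, 1, 2, 12, 0, 0)
# mktime() interprets the naive datetimes in local time
os.utime(path, (int(time.mktime(accessed.timetuple())),
                int(time.mktime(modified.timetuple()))))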
Example #2
    def createTestWorkspace(self):
        """ Create a workspace for testing against with ideal log values
        """
        from mantid.simpleapi import CreateWorkspace
        from mantid.simpleapi import AddSampleLog
        from mantid import kernel
        from time import gmtime, strftime, mktime
        import numpy as np

        # Create a matrix workspace
        x = np.array([1.,2.,3.,4.])
        y = np.array([1.,2.,3.])
        e = np.sqrt(np.array([1.,2.,3.]))
        wksp = CreateWorkspace(DataX=x, DataY=y,DataE=e,NSpec=1,UnitX='TOF')

        # Add run_start
        tmptime = strftime("%Y-%m-%d %H:%M:%S", gmtime(mktime(gmtime())))
        AddSampleLog(Workspace=wksp,LogName='run_start',LogText=str(tmptime))

        tsp_a=kernel.FloatTimeSeriesProperty("SensorA")
        tsp_b=kernel.FloatTimeSeriesProperty("SensorB")
        tsp_c=kernel.FloatTimeSeriesProperty("SensorC")
        for i in np.arange(25):
            tmptime = strftime("%Y-%m-%d %H:%M:%S", gmtime(mktime(gmtime())+i))
            tsp_a.addValue(tmptime, 1.0*i*i)
            tsp_b.addValue(tmptime, 2.0*i*i)
            tsp_c.addValue(tmptime, 3.0*i*i)

        wksp.mutableRun()['SensorA']=tsp_a
        wksp.mutableRun()['SensorB']=tsp_b
        wksp.mutableRun()['SensorC']=tsp_c

        return wksp
Example #3
	def processRepeated(self, findRunningEvent = True):
		if self.repeated != 0:
			now = int(time()) + 1

			#to avoid problems with daylight saving, we need to calculate with localtime, in struct_time representation
			localrepeatedbegindate = localtime(self.repeatedbegindate)
			localbegin = localtime(self.begin)
			localend = localtime(self.end)
			localnow = localtime(now)

			day = []
			flags = self.repeated
			for x in (0, 1, 2, 3, 4, 5, 6):
				if flags & 1 == 1:
					day.append(0)
				else:
					day.append(1)
				flags >>= 1

			# advance day by day while: the day is NOT in the list of repeated days,
			# OR the repeat start date is still ahead of the candidate begin time,
			# OR the day IS in the list but the event has already ended (when findRunningEvent is set)
			# or has already begun (when findRunningEvent is not set)
			while ((day[localbegin.tm_wday] != 0) or (mktime(localrepeatedbegindate) > mktime(localbegin))  or
				(day[localbegin.tm_wday] == 0 and (findRunningEvent and localend < localnow) or ((not findRunningEvent) and localbegin < localnow))):
				localbegin = self.addOneDay(localbegin)
				localend = self.addOneDay(localend)

			#we now have a struct_time representation of begin and end in localtime, but we have to calculate back to (gmt) seconds since epoch
			self.begin = int(mktime(localbegin))
			self.end = int(mktime(localend))
			if self.begin == self.end:
				self.end += 1

			self.timeChanged()
Example #4
def generate_sample_datasets (host_ips, metric_ids, year, month, day, hour):
    avro_schema = ''
    #load data from hdfs
    cat = subprocess.Popen(['sudo', '-u', 'hdfs', 'hadoop', 'fs', '-cat', '/user/pnda/PNDA_datasets/datasets/.metadata/schema.avsc'], stdout=subprocess.PIPE)
    for line in cat.stdout:
        avro_schema = avro_schema + line
    schema = avro.schema.parse(avro_schema)
    bytes_writer = io.BytesIO()
    encoder = avro.io.BinaryEncoder(bytes_writer)
    #create hdfs folder structure
    dir = create_hdfs_dirs (year, month, day, hour)
    filename = str(uuid.uuid4()) + '.avro'
    filepath = dir + filename
    tmp_file = '/tmp/' + filename
    
    writer = DataFileWriter(open(tmp_file, "wb"), DatumWriter(), schema)
    
    start_dt = datetime.datetime(year, month, day, hour, 0, 0) 
    start_ts = int(time.mktime(start_dt.timetuple()))
    end_dt = start_dt.replace(hour=hour+1)
    end_ts = int(time.mktime(end_dt.timetuple()))

    for ts in xrange(start_ts, end_ts, 1):
        #generate random pnda record on per host ip basis
        for host_ip in host_ips:
            record = {}
            record['timestamp'] = (ts * 1000)
            record['src'] = 'test'
            record['host_ip'] = host_ip
            record['rawdata'] = generate_random_metrics(metric_ids)
            #encode avro
            writer.append(record)
    writer.close()
    subprocess.Popen(['sudo', '-u', 'hdfs', 'hadoop', 'fs', '-copyFromLocal', tmp_file, dir])
    return filepath
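Note that start_dt.replace(hour=hour+1) raises ValueError when hour is 23; adding a timedelta avoids the overflow. A minimal sketch of the hour-window arithmetic, with arbitrary values:

import datetime
import time

start_dt = datetime.datetime(2016, 5, 1, 9, 0, 0)
end_dt = start_dt + datetime.timedelta(hours=1)  # safe even at hour 23
start_ts = int(time.mktime(start_dt.timetuple()))
end_ts = int(time.mktime(end_dt.timetuple()))
print(end_ts - start_ts)  # 3600, barring a DST transition inside the window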
Example #5
def validate_data(info):

    hack_license,pick_datetime,drop_datetime,n_passengers,trip_dist,pick_long,\
    pick_lat,drop_long,drop_lat,payment_type,fare_amount,\
    surcharge,tip_amount,mta_tax,tolls_amount,total_amount=info

    time_in_seconds = time.mktime(time.strptime(drop_datetime,'%Y-%m-%d %H:%M:%S'))-\
                      time.mktime(time.strptime(pick_datetime,'%Y-%m-%d %H:%M:%S'))
    try:
        pick_long = float(pick_long.strip())
        pick_lat = float(pick_lat.strip())
        drop_long = float(drop_long.strip())
        drop_lat = float(drop_lat.strip())
        trip_dist = float(trip_dist.strip())
        total_amount = float(total_amount.strip())
        n_passengers = int(n_passengers.strip())
    except ValueError:
        sys.stderr.write('CASTING TO FLOATS FAILED')
        return False
    # Is the straight-line distance shorter than the reported distance?
    euclidean = validate_euclidean(trip_dist,pick_long,pick_lat,drop_long,drop_lat)
    gps_pickup = validate_gps(pick_long,pick_lat) # Are the GPS coordinates within Manhattan?
    gps_dropoff = validate_gps(drop_long,drop_lat)
    distance = validate_distance(trip_dist,pick_long,pick_lat,drop_long,drop_lat) # Is the distance too large?
    val_time = validate_time(time_in_seconds) # Is the duration zero, negative, or too long?
    velocity = validate_velocity(time_in_seconds,trip_dist) # Is the velocity implausibly high?
    amount = validate_amount(total_amount)
    pass_validate = validate_passengers(n_passengers)

    return(euclidean and gps_pickup and gps_dropoff and distance and val_time and velocity and amount and pass_validate)
Example #6
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""
    expiry = morsel["max-age"] or morsel["expires"]
    try:
        # Is the expiry a date string (expires) or number (max-age)?
        float(expiry)
    except ValueError:
        # The expiry is a date string, use strptime
        try:
            expiry = mktime(datetime.strptime(expiry, "%a, %d-%b-%Y %H:%M:%S %Z").timetuple())
        except ValueError:
            # There is no expiry
            expiry = None
    else:
        # The expiry is a max-age, use numbers
        expiry = int(mktime(gmtime()) + float(expiry))
    c = create_cookie(
        name=morsel.key,
        value=morsel.value,
        version=morsel['version'] or 0,
        domain=morsel['domain'],
        path=morsel['path'],
        secure=bool(morsel['secure']),
        expires=expiry,
        discard=False,
        rest={'HttpOnly': morsel['httponly']}
    )
    return c
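Both expiry branches can be tried in isolation; a small sketch with made-up header values (a numeric max-age and a date-string expires):

from datetime import datetime
from time import gmtime, mktime

for expiry in ("3600", "Thu, 01-Jan-2026 00:00:00 GMT"):
    try:
        value = int(mktime(gmtime()) + float(expiry))  # max-age branch
    except ValueError:
        # expires branch: parse the date string instead
        value = mktime(datetime.strptime(
            expiry, "%a, %d-%b-%Y %H:%M:%S %Z").timetuple())
    print(expiry, "->", value)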
Example #7
    def loadAccountInfo(self, user, req):
        validuntil = None
        trafficleft = None
        premium = None

        html = req.load("http://uploading.com/")

        premium = re.search(self.PREMIUM_PATTERN, html) is None

        m = re.search(self.VALID_UNTIL_PATTERN, html)
        if m:
            expiredate = m.group(1).strip()
            self.logDebug("Expire date: " + expiredate)

            try:
                validuntil = time.mktime(time.strptime(expiredate, "%b %d, %Y"))

            except Exception, e:
                self.logError(e)

            else:
                if validuntil > time.mktime(time.gmtime()):
                    premium = True
                else:
                    premium = False
                    validuntil = None
Example #8
 def to_dict(self):
     return {
         "DeliveryStreamDescription": {
             "CreateTimestamp": time.mktime(self.create_at.timetuple()),
             "DeliveryStreamARN": self.arn,
             "DeliveryStreamName": self.name,
             "DeliveryStreamStatus": self.status,
             "Destinations": [
                 {
                     "DestinationId": "string",
                     "RedshiftDestinationDescription": {
                         "ClusterJDBCURL": self.redshift_jdbc_url,
                         "CopyCommand": self.redshift_copy_command,
                         "RoleARN": self.redshift_role_arn,
                         "S3DestinationDescription": {
                             "BucketARN": self.redshift_s3_bucket_arn,
                             "BufferingHints": self.redshift_s3_buffering_hings,
                             "CompressionFormat": self.redshift_s3_compression_format,
                             "Prefix": self.redshift_s3_prefix,
                             "RoleARN": self.redshift_s3_role_arn,
                         },
                         "Username": self.redshift_username,
                     },
                 }
             ],
             "HasMoreDestinations": False,
             "LastUpdateTimestamp": time.mktime(self.last_updated.timetuple()),
             "VersionId": "string",
         }
     }
Example #9
def log_query(request, requestOptions, requestContext, renderingTime):
  timeRange = requestOptions['endTime'] - requestOptions['startTime']
  logdata = {
    'graphType': requestOptions['graphType'],
    'graphClass': requestOptions.get('graphClass'),
    'format': requestOptions.get('format'),
    'start': int(mktime(requestOptions['startTime'].utctimetuple())),
    'end': int(mktime(requestOptions['endTime'].utctimetuple())),
    'range': timeRange.days * 24 * 3600 + int(round(timeRange.seconds/60.)),
    'localOnly': requestOptions['localOnly'],
    'useCache': 'noCache' not in requestOptions,
    'cachedResponse': requestContext.get('cachedResponse', False),
    'cachedData': requestContext.get('cachedData', False),
    'maxDataPoints': requestOptions.get('maxDataPoints', 0),
    'renderingTime': renderingTime,
  }

  if 'HTTP_X_REAL_IP' in request.META:
    logdata['source'] = request.META['HTTP_X_REAL_IP']
  else:
    logdata['source'] = request.get_host()

  for target,retrievalTime in requestContext['targets']:
    if isinstance(target, list):
      for t in target:
        logdata['target'] = t
        logdata['retrievalTime'] = retrievalTime
        query_log.info(logdata)
    else:
      logdata['target'] = target
      logdata['retrievalTime'] = retrievalTime
      query_log.info(logdata)
Example #10
    def setup(self):
        self.record = ACROSCORE + strftime('%Y-%m-%d-%H%M') + '.game'
        open(self.record, "w")

        self.active = True

        self.cumulative = {}
        self.start = mktime(localtime())
        self.mark = mktime(localtime())
        self.round = 1

        self.stage = "waiting"

        self.matchlast = False
        self.killgame = False
        self.warned = False
        self.bypass = False
        self.displayed = False

        self.voters = []
        self.players = []
        self.gimps = {}
        self.selfsubbed = False
        self.paused = False
        self.killgame = False
Example #11
    def add_separator(self, timestamp):
        '''Add whitespace and timestamp between chat sessions.'''
        time_with_current_year = \
            (time.localtime(time.time())[0], ) + \
            time.strptime(timestamp, '%b %d %H:%M:%S')[1:]

        timestamp_seconds = time.mktime(time_with_current_year)
        if timestamp_seconds > time.time():
            time_with_previous_year = \
                (time.localtime(time.time())[0] - 1, ) + \
                time.strptime(timestamp, '%b %d %H:%M:%S')[1:]
            timestamp_seconds = time.mktime(time_with_previous_year)

        message = TextBox(self,
                          style.COLOR_BUTTON_GREY, style.COLOR_BUTTON_GREY,
                          style.COLOR_WHITE, style.COLOR_BUTTON_GREY, False,
                          None, timestamp_to_elapsed_string(timestamp_seconds))
        self._message_list.append(message)
        box = Gtk.HBox()
        align = Gtk.Alignment.new(
            xalign=0.5, yalign=0.0, xscale=0.0, yscale=0.0)
        box.pack_start(align, True, True, 0)
        align.show()
        align.add(message)
        message.show()
        self._conversation.attach(box, 0, self._row_counter, 1, 1)
        box.show()
        self._row_counter += 1
        self.add_log_timestamp(timestamp)
        self._last_msg_sender = None
Example #12
def do_offset(tuples_list, filename, format='%b %d, %Y %H:%M:%S', offset_val=0):
    new_tuples_list = []
    firstval = time.strptime(tuples_list[0][0], format)
    if filename != "slide_timestamps.txt":
        def_time = 'Apr 01, 2000 00:00:00'
    else :
        def_time = 'Apr 01 2000 00:00:00'
    conversion_timer = time.mktime(time.strptime(def_time, format))

    for item in tuples_list:
        t = item[0]
        timer = time.strptime(t, format)
        timer = time.mktime(timer) - time.mktime(firstval) + conversion_timer + offset_val
        timer = time.strftime("%H:%M:%S", time.localtime(timer))
        if filename == "spectrum.txt":
            line_list = [timer]
            for i in json.loads(item[1]):
                line_list.append(i)
            #print line_list
            new_tuples_list.append(tuple(line_list))
            
        else:
            line_list = [timer]
            for i in item[1:]:
                line_list.append(i)
            #print line_list
            new_tuples_list.append(tuple(line_list))       
    return new_tuples_list
Example #13
    def _createSearchRequest(self, search=None, tags=None,
                             notebooks=None, date=None,
                             exact_entry=None, content_search=None):

        request = ""
        if notebooks:
            for notebook in tools.strip(notebooks.split(',')):
                if notebook.startswith('-'):
                    request += '-notebook:"%s" ' % tools.strip(notebook[1:])
                else:
                    request += 'notebook:"%s" ' % tools.strip(notebook)

        if tags:
            for tag in tools.strip(tags.split(',')):

                if tag.startswith('-'):
                    request += '-tag:"%s" ' % tag[1:]
                else:
                    request += 'tag:"%s" ' % tag

        if date:
            date = tools.strip(date.split('-'))
            try:
                dateStruct = time.strptime(date[0] + " 00:00:00", "%d.%m.%Y %H:%M:%S")
                request += 'created:%s ' % time.strftime("%Y%m%d", time.localtime(time.mktime(dateStruct)))
                if len(date) == 2:
                    dateStruct = time.strptime(date[1] + " 00:00:00", "%d.%m.%Y %H:%M:%S")
                request += '-created:%s ' % time.strftime("%Y%m%d", time.localtime(time.mktime(dateStruct) + 60 * 60 * 24))
            except ValueError, e:
                out.failureMessage('Incorrect date format in --date attribute. '
                                   'Format: %s' % time.strftime("%d.%m.%Y", time.strptime('19991231', "%Y%m%d")))
                return tools.exitErr()
Example #14
def _calculate_offset(date, local_tz):
    """
    input :
    date : date type
    local_tz : if true, use system timezone, otherwise return 0

    return the date of UTC offset.
    If date does not have any timezone info, we use local timezone,
    otherwise return 0
    """
    if local_tz:
        # Handle years before 1970: most systems have no timezone information before 1970.
        if date.year < 1970:
            # Use 1972 because 1970 doesn't have a leap day
            t = time.mktime(date.replace(year=1972).timetuple())
        else:
            t = time.mktime(date.timetuple())

        # handle daylight saving: use altzone while DST is in effect, otherwise timezone
        if time.localtime(t).tm_isdst:
            return -time.altzone
        else:
            return -time.timezone
    else:
        return 0
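A usage sketch for _calculate_offset as defined above; the printed offsets depend on the system timezone:

import datetime

print(_calculate_offset(datetime.datetime(2020, 7, 1), True))   # DST-aware local offset
print(_calculate_offset(datetime.datetime(1960, 6, 1), True))   # pre-1970 date mapped to 1972
print(_calculate_offset(datetime.datetime(2020, 7, 1), False))  # always 0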
Example #15
def get_feed_entries(feeds):
    entries = []

    for feed in feeds:
        d = feedparser.parse(feed.get('feed_url'))

        for entry in d.entries:
            entry.publisher = feed['title']
            # entry.publisher_icon = feed['icon']

            if 'media_content' in entry:
                if entry.media_content[0]['medium'] == 'image':
                    entry.image = entry.media_content[0]['url']
            elif 'content' in entry:
                soup = BeautifulSoup(entry.content[0]['value'], 'html.parser')
                image = soup.find_all('img')[0]
                entry.image = image.get('src')

            published = datetime.fromtimestamp(mktime(entry.published_parsed))
            updated = datetime.fromtimestamp(mktime(entry.updated_parsed))

            entry.published = published
            entry.updated = updated

            entries.append(entry)

    return sorted(entries, key=attrgetter('published'), reverse=True)
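feedparser exposes published_parsed and updated_parsed as time.struct_time values, and datetime.fromtimestamp(mktime(...)) is the stock conversion used above. It can be checked standalone (the struct_time is built here for illustration):

import time
from datetime import datetime
from time import mktime

parsed = time.strptime("2021-03-01 08:30:00", "%Y-%m-%d %H:%M:%S")
print(datetime.fromtimestamp(mktime(parsed)))  # 2021-03-01 08:30:00, interpreted as local time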
Example #16
 def next_reset(self, t=None):
     """ Determine next reset time """
     t = t or time.time()
     tm = time.localtime(t)
     if self.q_period == 'd':
         nx = (tm[0], tm[1], tm[2], self.q_hour, self.q_minute, 0, 0, 0, tm[8])
         if (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute):
             # If today's moment has passed, it will happen tomorrow
             t = time.mktime(nx) + 24 * 3600
             tm = time.localtime(t)
     elif self.q_period == 'w':
         if self.q_day < tm.tm_wday + 1 or (self.q_day == tm.tm_wday + 1 and (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute)):
             tm = time.localtime(next_week(t))
         dif = abs(self.q_day - tm.tm_wday - 1)
         t = time.mktime(tm) + dif * 24 * 3600
         tm = time.localtime(t)
     elif self.q_period == 'm':
         if self.q_day < tm.tm_mday or (self.q_day == tm.tm_mday and (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute)):
             tm = time.localtime(next_month(t))
         day = min(last_month_day(tm), self.q_day)
         tm = (tm[0], tm[1], day, self.q_hour, self.q_minute, 0, 0, 0, tm[8])
     else:
         return
     tm = (tm[0], tm[1], tm[2], self.q_hour, self.q_minute, 0, 0, 0, tm[8])
     self.q_time = time.mktime(tm)
     logging.debug('Will reset quota at %s', tm)
Example #17
    def _add_event(self, title, date, start, end, all_day, 
                   url=None, description=None):
        if isinstance(title, unicode):
            title = title.encode('utf8')
        values = dict(title=title,
                      all_day=all_day and 'true' or 'false',
                      guid=self.guid,
                      )
        if date:
            values['date'] = mktime(date.timetuple())
        else:
            values['start'] = mktime(start.timetuple())
            values['end'] = mktime(end.timetuple())

        if url is not None:
            values['url'] = url
        if description is not None:
            values['description'] = description
        data = urlencode(values)
        url = self.base_url + '/api/events'
        req = urllib2.Request(url, data)
        response = urllib2.urlopen(req)
        content = response.read()
        event = anyjson.deserialize(content)['event']
        self._massage_event(event)
        return event, response.code == 201
Example #18
 def test_successfully_sets_utime(self):
     now = datetime.datetime.now(tzlocal())
     epoch_now = time.mktime(now.timetuple())
     with temporary_file("w") as f:
         set_file_utime(f.name, epoch_now)
         _, update_time = get_file_stat(f.name)
         self.assertEqual(time.mktime(update_time.timetuple()), epoch_now)
Example #19
 def test_find_by_email(self):
     user = User.find_by_email('*****@*****.**')
     self.assertEqual(None, user.user_id)
     self.assertEqual('*****@*****.**', user.email)
     self.assertEqual('Joe', user.name)
     self.assertEqual('192.168.1.100', user.last_seen_ip)
     self.assertEqual('Mozilla/5.0', user.last_seen_user_agent)
     self.assertEqual(50, user.relationship_score)
     self.assertTrue(isinstance(user.last_impression_at, datetime))
     self.assertEqual(1331834352, 
             time.mktime(user.last_impression_at.timetuple()))
     self.assertTrue(isinstance(user.created_at, datetime))
     self.assertEqual(1331764344, 
             time.mktime(user.created_at.timetuple()))
     self.assertEqual(1, len(user.social_profiles))
     profile = user.social_profiles[0]
     self.assertTrue(isinstance(profile, SocialProfile))
     self.assertEqual('twitter', profile.type)
     self.assertEqual('foo', profile.username)
     self.assertEqual('http://twitter.com/foo', profile.url)
     self.assertEqual('1234567', profile.id)
     self.assertEqual('Santiago', user.location_data['city_name'])
     self.assertEqual('Santiago', user.location_data.city_name)
     self.assertTrue(isinstance(user.location_data, LocationData))
     self.assertEqual('johnny', user.custom_data['nick'])
Example #20
    def get_all_bw_usage(self, instances, start_time, stop_time=None):
        """Return bandwidth usage info for each interface on each
           running VM"""

        # we only care about VMs that correspond to a nova-managed
        # instance:
        imap = dict([(inst.name, inst.uuid) for inst in instances])

        bwusage = []
        start_time = time.mktime(start_time.timetuple())
        if stop_time:
            stop_time = time.mktime(stop_time.timetuple())

        # get a dictionary of instance names.  values are dictionaries
        # of mac addresses with values that are the bw stats:
        # e.g. {'instance-001' : { 12:34:56:78:90:12 : {'bw_in': 0, ....}}
        iusages = self._vmops.get_all_bw_usage(start_time, stop_time)
        for instance_name in iusages:
            if instance_name in imap:
                # yes these are stats for a nova-managed vm
                # correlate the stats with the nova instance uuid:
                iusage = iusages[instance_name]

                for macaddr, usage in iusage.iteritems():
                    bwusage.append(dict(mac_address=macaddr,
                                        uuid=imap[instance_name],
                                        bw_in=usage['bw_in'],
                                        bw_out=usage['bw_out']))
        return bwusage
Example #21
def quotes_yahoo(ticker, begin, end):
    cookies = dict(B='79bclatd788ib&b=3&s=vt')
    crumb = 'x.eNt0GsePI'
    period1 = int(time.mktime(begin.timetuple()))
    period2 = int(time.mktime(end.timetuple()))
    url = '''https://query1.finance.yahoo.com/v7/finance/download/{0}?period1={1}&period2={2}&interval=1d&events=history&crumb={3}'''
    s = requests.Session()
    r = s.get(url.format(ticker, period1, period2, crumb), cookies=cookies)
    if r.text.startswith('{"chart":{"result":null,"error"'):
        raise IOError(r.text)

    quote = {}
    lines = r.text.split('\n')
    items = [item.lower() for item in lines[0].split(',')]
    for item in items:
        quote[item] = []
    for line in lines[1:-1]:
        for i, data in enumerate(line.split(',')):
            data = data.replace("'", "")
            try:
                quote[items[i]].append(float(data))
            except ValueError:
                quote[items[i]].append(data)
    return quote
Example #22
    def pop_second(self):
        parsed_sec = AbstractReader.pop_second(self)
        if parsed_sec:
            self.pending_second_data_queue.append(parsed_sec)
        else:
            self.log.debug("No new seconds present")   
            
        if not self.pending_second_data_queue:
            self.log.debug("pending_second_data_queue empty")
            return None
        else:
            self.log.debug("pending_second_data_queue: %s", self.pending_second_data_queue)


        next_time = int(time.mktime(self.pending_second_data_queue[0].time.timetuple()))
            
        if self.last_sample_time and (next_time - self.last_sample_time) > 1:
            self.last_sample_time += 1
            self.log.debug("Adding phantom zero sample: %s", self.last_sample_time)
            res = self.get_zero_sample(datetime.datetime.fromtimestamp(self.last_sample_time))
        else:
            res = self.pending_second_data_queue.pop(0)
        
        self.last_sample_time = int(time.mktime(res.time.timetuple()))
        res.overall.planned_requests = self.__get_expected_rps()
        self.log.debug("Pop result: %s", res)
        return res
Example #23
def __validateArgs():
#===============================================================================
  if len(sys.argv) < 5:
    print "python",sys.argv[0], "CIK ALIAS SINCE UNTIL"
    print "where CIK: one platform client key"
    print "    ALIAS: dataport alias"
    print "    SINCE: MM/DD/YYYY"
    print "    UNTIL: MM/DD/YYYY"
    sys.exit(1)
  cik, alias, since, until = sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]
  if len(cik) != 40:
    print "Invalid cik"
    sys.exit(1)
  since = since + " 00:00:00"
  until = until + " 23:59:59"
  try:
    start = datetime.strptime(since, "%m/%d/%Y %H:%M:%S")
    end   = datetime.strptime(until, "%m/%d/%Y %H:%M:%S")
  except ValueError as err:
    print "Invalid time format."
    sys.exit(1)
  start_timestamp = int(time.mktime(start.timetuple()))
  end_timestamp = int(time.mktime(end.timetuple()))
  if start_timestamp > end_timestamp:
    print "SINCE must not be greater than UNTIL"
    sys.exit(1)
  return cik, alias, start_timestamp, end_timestamp
Example #24
 def get(self):
     query = self.get_argument('q')
     client = tornado.httpclient.AsyncHTTPClient()
     # The yield keyword plus a tornado.gen.Task instance performs the call and passes in its arguments.
     # Using yield returns control to Tornado, allowing other tasks to run while the HTTP request is in flight.
     # When the HTTP request completes, the RequestHandler method resumes where it left off.
     response = yield tornado.gen.Task(client.fetch,
             "http://search.twitter.com/search.json?" + \
                     urllib.urlencode({"q": query, "result_type": "recent", "rpp": 100}))
     body = json.loads(response.body)
     result_count = len(body['results'])
     now = datetime.datetime.utcnow()
     raw_oldest_tweet_at = body['results'][-1]['created_at']
     oldest_tweet_at = datetime.datetime.strptime(raw_oldest_tweet_at,
             "%a, %d %b %Y %H:%M:%S +0000")
     seconds_diff = time.mktime(now.timetuple()) - \
             time.mktime(oldest_tweet_at.timetuple())
     tweets_per_second = float(result_count) / seconds_diff
     self.write("""
     <div style="text-align: center">
         <div style="font-size: 72px">%s</div>
         <div style="font-size: 144px">%.02f</div>
         <div style="font-size: 24px">tweets per second</div>
     </div>""" % (query, tweets_per_second))
     self.finish()
Example #25
 def findSchedule(self):
     for event in self.new_events:
         is_scheduled = False
         curr_time = self.end_date - event.duration
         while not is_scheduled and not curr_time < self.start_date:
             event.start = curr_time
             event.end = curr_time + event.duration
             is_valid = True
             # check conflicts with the current schedule
             for component in self.ical.walk():
                 if component.name == 'VEVENT':
                     dc = component.decoded
                     dtstart = time.mktime(time.strptime(str(dc('dtstart')), '%Y-%m-%d %H:%M:%S+00:00'))/60
                     dtend = time.mktime(time.strptime(str(dc('dtend')), '%Y-%m-%d %H:%M:%S+00:00'))/60
                     if ((dtstart < curr_time < dtend) or
                             (dtstart < curr_time + event.duration < dtend) or
                             (curr_time < dtstart and curr_time + event.duration > dtend) or
                             (curr_time > dtstart and curr_time + event.duration < dtend) or
                             curr_time == dtstart or
                             curr_time + event.duration == dtend):
                         is_valid = False
                         break
             if is_valid:
                 for constraint in event.constraints:
                     if not constraint.isValid(event, self.ical):
                         is_valid = False
                         break
             if is_valid:
                 self.addToCalendar(event)
                 is_scheduled = True
             else:
                 curr_time -= 30
Example #26
def cont_position(posdata):
	#returns a list with a continious position of the user for every minute, None if unkown position
	cont_pos = []
	day_start = time.mktime(config.SAMPLE_DAY.timetuple())
	for t in range(24*60):
		prev = prevpos(posdata, day_start+60*t)
		next = nextpos(posdata, day_start+60*t)

		closest = None
		if prev != None and next != None:
			if abs(prev[0]-(day_start+60*t)) <= abs(next[0]-(day_start+60*t)): #select the closest position
				closest = prev
			else:
				closest = next
		elif prev != None:
			closest = prev
		elif next != None:
			closest = next
		else:
			closest = None

		if closest == None: #no position found
			cont_pos.append((None, None, 0.0)) #lat, lon, confidence
		elif abs(closest[0]-(day_start+60*t)) < 10*60: #known position
			cont_pos.append((closest[1], closest[2], 1.0)) #lat, lon, confidence
		elif prev != None and next != None and (prev[1:2] == next[1:2]) and abs(prev[0]-next[0]) < 3*60*60: #probable position, if previous and next cell are the same
			cont_pos.append((closest[1], closest[2], 0.2)) #lat, lon, confidence
		else: #position too old
			cont_pos.append((None, None, 0.0)) #lat, lon, confidence
	
	assert(len(cont_pos) == 24*60)
	return cont_pos
Example #27
  def runUntil(self, stopDate=None, **kw):
     """Runs the EventLoop until the given time plus interval have been
     reached or it runs out of things to monitor. This method
     should not be called when the EventLoop is already running.

     The current time is assumed if no datetime is passed in.

     Examples (note: these aren't real doctests yet):

     Run until a given date, say St. Patrick's Day
     >> stopDate = datetime.datetime(2007, 3, 17, 17, 0)
     >> EventLoop.currentEventLoop().runUntil(stopDate)

     Additionally you can pass in any keyword argument normally
     taken by dateutil.relativedelta to derive the date. These
     include:

     years, months, weeks, days, hours, minutes, seconds, microseconds

     These are most useful when you want to compute the relative
     offset from now. For example to run the EventLoop for 5 seconds
     you could do this.

     >> EventLoop.currentEventLoop().runUntil(seconds=5)

     Or, probably not as practical but still possible, wait one
     year and 3 days

     >> EventLoop.currentEventLoop().runUntil(years=1, days=3)
     

     """

     if self.running:
        raise RuntimeError("EventLoop is already running.")
     else:
        self.running = True

     delta = relativedelta(**kw)
     now = datetime.datetime.now()
     
     if stopDate is None:
        stopDate = now

     stopDate = stopDate + delta

     # convert the time back into seconds since the epoch,
     # subtract now from it, and this will then be the delay we
     # can use

     seconds2Run = time.mktime(stopDate.timetuple()) - time.mktime(now.timetuple())
     self.waitBeforeCalling(seconds2Run, self.stop)
     
     while self._shouldRun(1):
        try:
           self.runOnce()
        except:
           self.log.exception("Caught unexpected error in RunOnce.")
           
     self.reset()
Example #28
    def __init__(
            self, timestr=None, timezone=LOCALTZ,
            allowpast=True, allowfuture=True):
        """ Converts input to UTC timestamp. """

        if timestr is None:
            timestr = time.time()
        self.timezone = timezone

        if type(timestr) == str:
            self.__timestamp__ = self.__fromstring__(timestr)

        elif type(timestr) in [int, float]:
            self.__timestamp__ = timestr + self.timezone

        elif type(timestr) in [
                datetime.datetime,
                datetime.date,
                datetime.time
            ]:
            self.__timestamp__ = time.mktime(timestr.timetuple())

        elif type(timestr) == time.struct_time:
            self.__timestamp__ = time.mktime(timestr) + self.timezone

        else:
            raise TypeError("Invalid type specified.")

        if not allowpast and self.__timestamp__ < currentutc():
            raise DateRangeError("Values from the past are not allowed.")
        if not allowfuture and self.__timestamp__ > currentutc():
            raise DateRangeError("Values from the future are not allowed.")
Example #29
    def get_actions(self,coordinator):
        accumulator=dict()
        accumulator['total']=0
        try:
            url = "http://" + self.host + ":" + str(self.port) + self.api_url['actions_from_coordinator'] % (coordinator,0,0)
            response = requests.get(url, auth=self.html_auth)
            if not response.ok:
                return {}
            total_actions=json.loads(response.content)['total']

            url = "http://" + self.host + ":" + str(self.port) + self.api_url['actions_from_coordinator'] % (coordinator,total_actions-self.query_size,self.query_size)
            response = requests.get(url, auth=self.html_auth)
            if not response.ok:
                return {}

            actions = json.loads(response.content)['actions']

            for action in actions:
                created=time.mktime(self.time_conversion(action['createdTime']))
                modified=time.mktime(self.time_conversion(action['lastModifiedTime']))
                runtime=modified-created
                if accumulator.get(action['status']) is None:
                    accumulator[action['status']]=defaultdict(int)
                accumulator[action['status']]['count']+=1
                accumulator[action['status']]['runtime']+=runtime
                accumulator['total']+=1
        except:
            logging.error('http request error: "%s"' % url)
            return {} 
        return accumulator
Example #30
def create_id_token(user, aud, nonce):
    """
    Receives a user object and aud (audience).
    Then creates the id_token dictionary.
    See: http://openid.net/specs/openid-connect-core-1_0.html#IDToken

    Returns a dict.
    """
    sub = settings.get('OIDC_IDTOKEN_SUB_GENERATOR')(user=user)

    expires_in = settings.get('OIDC_IDTOKEN_EXPIRE')

    # Convert datetimes into timestamps.
    now = timezone.now()
    iat_time = int(time.mktime(now.timetuple()))
    exp_time = int(time.mktime((now + timedelta(seconds=expires_in)).timetuple()))
    user_auth_time = user.last_login or user.date_joined
    auth_time = int(time.mktime(user_auth_time.timetuple()))

    dic = {
        'iss': get_issuer(),
        'sub': sub,
        'aud': str(aud),
        'exp': exp_time,
        'iat': iat_time,
        'auth_time': auth_time,
    }

    if nonce:
        dic['nonce'] = str(nonce)

    return dic
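The iat/exp arithmetic in isolation, assuming a 600-second token lifetime:

import time
from datetime import datetime, timedelta

now = datetime.now()
iat = int(time.mktime(now.timetuple()))
exp = int(time.mktime((now + timedelta(seconds=600)).timetuple()))
print(exp - iat)  # 600, except across a DST transition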
Example #31
    def get_data_agents_resports_total_merchart(self,
                                                args=None,
                                                critern_name=None,
                                                critern_parent=None):

        # Query the overall totals
        res_total = db.session.query(
            OnlinetradesDao.user_name.label('user_name'),
            func.sum(OnlinetradesDao.real_cost_agent).label('cost_agent'),
            func.count(OnlinetradesDao.id).label('agents_number')).group_by(
                OnlinetradesDao.user_name).filter(*args)
        res_total = res_total.subquery()

        res_total_amount = db.session.query(
            OnlinetradesDao.user_name.label('user_name'),
            func.sum(
                OnlinetradesDao.amount).label('cost_agent_amount')).group_by(
                    OnlinetradesDao.user_name).filter(*args).filter(
                        OnlinetradesDao.state == 2)
        res_total_amount = res_total_amount.subquery()

        # Query today's figures
        today = datetime.date.today()
        zeroPointToday = int(time.mktime(today.timetuple()))
        endPointToday = zeroPointToday + SECONDS_PER_DAY
        res_day = db.session.query(
            OnlinetradesDao.user_name.label('user_name'),
            func.sum(OnlinetradesDao.amount).label('sum_amount_day')).filter(
                and_(OnlinetradesDao.audit_time >= zeroPointToday,
                     OnlinetradesDao.audit_time <= endPointToday)).group_by(
                         OnlinetradesDao.user_name)
        res_day = res_day.outerjoin(
            MerchantDao, MerchantDao.code == OnlinetradesDao.mer_code)
        res_day = res_day.subquery()

        # Query yesterday's figures
        endPointToday = zeroPointToday + SECONDS_PER_DAY
        endPointYestoday = zeroPointToday
        # print('yesterday %s' % endPointYestoday)
        zeroPointYestoday = endPointYestoday - 60 * 60 * 24
        res_yes = db.session.query(
            OnlinetradesDao.user_name.label('user_name'),
            func.sum(OnlinetradesDao.amount).label('sum_amount_yes')).filter(
                and_(OnlinetradesDao.audit_time >= zeroPointYestoday,
                     OnlinetradesDao.audit_time <= endPointYestoday)).group_by(
                         OnlinetradesDao.user_name)
        res_yes = res_yes.outerjoin(
            MerchantDao, MerchantDao.code == OnlinetradesDao.mer_code)
        res_yes = res_yes.subquery()

        q = db.session.query(
            res_total.c.user_name.label('user_name'),
            res_total.c.cost_agent.label('cost_agent'),
            res_total_amount.c.cost_agent_amount.label('cost_agent_amount'),
            res_total.c.agents_number.label('agents_number'),
            res_day.c.sum_amount_day.label('sum_amount_day'),
            res_yes.c.sum_amount_yes.label('sum_amount_yes'))
        q = q.outerjoin(res_day, res_day.c.user_name == res_total.c.user_name)
        q = q.outerjoin(res_yes, res_yes.c.user_name == res_total.c.user_name)
        q = q.outerjoin(res_total_amount,
                        res_total_amount.c.user_name == res_total.c.user_name)

        q = q.subquery()

        res = db.session.query(
            MerchantDao.parent_name.label('username'),
            func.sum(q.c.cost_agent).label('cost_agent'),
            func.sum(q.c.cost_agent_amount).label('cost_agent_amount'),
            func.sum(q.c.agents_number).label('agents_number'),
            func.sum(q.c.sum_amount_day).label('sum_amount_day'),
            func.sum(q.c.sum_amount_yes).label('sum_amount_yes'),
        ).group_by(MerchantDao.parent_name).filter(*critern_name)
        res = res.outerjoin(q, q.c.user_name == MerchantDao.username)
        res = res.subquery()

        res_res = db.session.query(
            MerchantDao.username.label('username'),
            MerchantDao.amount.label('amount'),
            res.c.cost_agent.label('cost_agent'),
            res.c.cost_agent_amount.label('cost_agent_amount'),
            res.c.agents_number.label('agents_number'),
            res.c.sum_amount_day.label('sum_amount_day'),
            res.c.sum_amount_yes.label('sum_amount_yes'),
        ).filter(*critern_parent)
        res_res = res_res.outerjoin(res,
                                    res.c.username == MerchantDao.username)
        res_res = res_res.all()
        return res_res
Example #32
def set(text, context):
    """
    Allows individual settings to be created
    """
    if len(text) == 0 or text.strip() == '':
        return slack_response(FN_RESPONSE_SET)

    parts, setting = validate_input(text, FN_RESPONSE_SET)

    if setting == 'url':
        if len(parts) != 3:
            return slack_response(ERR_SET_SETTING_3_ARGS + FN_RESPONSE_SET)
        value = {parts[1]: parts[2]}
        val_type = "M"

    if setting == 'urlpattern':
        if len(parts) != 2:
            return slack_response(ERR_SET_SETTING_2_ARGS + FN_RESPONSE_SET)
        value = parts[1]
        if ('{environment}' not in value and '{urlseparator}' not in value
                and '{basedomain}' not in value):
            return slack_response(ERR_SET_URLPATTERN_SPECIFIC_VARS)
        val_type = "S"

    if setting == 'basedomain':
        if len(parts) != 2:
            return slack_response(ERR_SET_SETTING_2_ARGS + FN_RESPONSE_SET)
        value = parts[1]
        val_type = "S"

    if setting == 'urlseparator':
        if len(parts) != 2:
            return slack_response(ERR_SET_SETTING_2_ARGS + FN_RESPONSE_SET)
        value = parts[1]
        val_type = "S"

    setting = 'setting_{}'.format(setting)

    table = dynamodb.Table(os.environ['DYNAMODB_TABLE_PROJECT'])
    entries = table.scan()
    for entry in entries['Items']:
        if entry['slack_channelid'] == context['channel_id']:
            entry['updatedAt'] = int(time.mktime(datetime.now().timetuple()))
            if setting in entry and val_type == "M":
                # only override the key we've changed
                value = entry[setting]
                value[parts[1]] = parts[2]

            table.update_item(
                Key={
                    'repository': entry['repository'],
                    'slack_channelid': entry['slack_channelid']
                },
                UpdateExpression='SET {} = :v'.format(setting),
                ExpressionAttributeValues={
                    ':v': value,  # surely {val_type: value} for maps ?
                },
                ReturnValues="ALL_NEW")
            return slack_response(FN_RESPONSE_SET_CONFIRM %
                                  (setting[8:], " ".join(parts[1::])))

    return slack_response(ERR_SET)
Example #33
    def import_item(self, item, out_folder=None):
        """Takes an item from the feed and creates a post file."""
        if out_folder is None:
            out_folder = 'posts'

        # link is something like http://foo.com/2012/09/01/hello-world/
        # So, take the path, utils.slugify it, and that's our slug
        link = item.link
        link_path = urlparse(link).path

        title = item.title

        # blogger supports empty titles, which Nikola doesn't
        if not title:
            LOGGER.warn("Empty title in post with URL {0}. Using NO_TITLE "
                        "as placeholder, please fix.".format(link))
            title = "NO_TITLE"

        if link_path.lower().endswith('.html'):
            link_path = link_path[:-5]

        slug = utils.slugify(link_path)

        if not slug:  # should never happen
            LOGGER.error("Error converting post:", title)
            return

        description = ''
        post_date = datetime.datetime.fromtimestamp(
            time.mktime(item.published_parsed))

        for candidate in item.content:
            if candidate.type == 'text/html':
                content = candidate.value
                break
                #  FIXME: handle attachments

        tags = []
        for tag in item.tags:
            if tag.scheme == 'http://www.blogger.com/atom/ns#':
                tags.append(tag.term)

        if item.get('app_draft'):
            tags.append('draft')
            is_draft = True
        else:
            is_draft = False

        self.url_map[link] = self.context['SITE_URL'] + '/' + \
            out_folder + '/' + slug + '.html'

        if is_draft and self.exclude_drafts:
            LOGGER.notice('Draft "{0}" will not be imported.'.format(title))
        elif content.strip():
            # If no content is found, no files are written.
            content = self.transform_content(content)

            self.write_metadata(
                os.path.join(self.output_folder, out_folder, slug + '.meta'),
                title, slug, post_date, description, tags)
            self.write_content(
                os.path.join(self.output_folder, out_folder, slug + '.html'),
                content)
        else:
            LOGGER.warn('Not going to import "{0}" because it seems to contain'
                        ' no content.'.format(title))
Example #34
def getCurrentEpochTime() -> int:
    return int(time.mktime(datetime.datetime.now().timetuple()))
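The round-trip through datetime.now().timetuple() is equivalent to truncating time.time(), which skips the conversion entirely; a quick check:

import datetime
import time

a = int(time.mktime(datetime.datetime.now().timetuple()))
b = int(time.time())
print(a, b)  # differ by at most one second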
Example #35
def get_week_start_timestamp(time_check, time_split=0):
    dt = datetime.datetime.fromtimestamp(time_check).replace(hour=0, minute=0, second=0, microsecond=0)
    monday = dt - datetime.timedelta(days=dt.weekday())
    return int(time.mktime((monday.year, monday.month, monday.day, 0, 0, 0, 0, 0, 0))) + time_split
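A usage sketch for get_week_start_timestamp as defined above. Note the tuple passed to mktime pins tm_isdst to 0; passing -1 in that slot would let mktime infer daylight saving instead:

import time

print(get_week_start_timestamp(time.time()))                       # Monday 00:00 of the current week
print(get_week_start_timestamp(time.time(), time_split=6 * 3600))  # boundary shifted by six hours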
Example #36
def parse_date_to_float(date):
    try:
        return time.mktime(email.utils.parsedate(date))
    except TypeError:
        return 0.0
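Note that email.utils.parsedate discards the numeric UTC offset, so mktime interprets the parsed struct_time in local time; email.utils.parsedate_tz with email.utils.mktime_tz gives an offset-correct value. A usage sketch for the function above:

print(parse_date_to_float("Tue, 10 Jun 2003 04:00:00 +0000"))  # interpreted in local time
print(parse_date_to_float("not a date"))                       # 0.0 via the TypeError branch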
Example #37
    def history(self, period="1mo", interval="1d",
                start=None, end=None, prepost=False, actions=True,
                auto_adjust=True, back_adjust=False,
                proxy=None, rounding=True, tz=None, **kwargs):
        """
        :Parameters:
            period : str
                Valid periods: 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max
                Either Use period parameter or use start and end
            interval : str
                Valid intervals: 1m,2m,5m,15m,30m,60m,90m,1h,1d,5d,1wk,1mo,3mo
                Intraday data cannot extend past the last 60 days
            start: str
                Download start date string (YYYY-MM-DD) or _datetime.
                Default is 1900-01-01
            end: str
                Download end date string (YYYY-MM-DD) or _datetime.
                Default is now
            prepost : bool
                Include Pre and Post market data in results?
                Default is False
            auto_adjust: bool
                Adjust all OHLC automatically? Default is True
            back_adjust: bool
                Back-adjusted data to mimic true historical prices
            proxy: str
                Optional. Proxy server URL scheme. Default is None
            rounding: bool
                Round values to 2 decimal places?
                Optional. Default is True; pass False to keep the precision suggested by Yahoo!
            tz: str
                Optional timezone locale for dates.
                (default data is returned as non-localized dates)
            **kwargs: dict
                debug: bool
                    Optional. If passed as False, will suppress
                    error message printing to console.
        """

        if start or period is None or period.lower() == "max":
            if start is None:
                start = -2208988800
            elif isinstance(start, _datetime.datetime):
                start = int(_time.mktime(start.timetuple()))
            else:
                start = int(_time.mktime(
                    _time.strptime(str(start), '%Y-%m-%d')))
            if end is None:
                end = int(_time.time())
            elif isinstance(end, _datetime.datetime):
                end = int(_time.mktime(end.timetuple()))
            else:
                end = int(_time.mktime(_time.strptime(str(end), '%Y-%m-%d')))

            params = {"period1": start, "period2": end}
        else:
            period = period.lower()
            params = {"range": period}

        params["interval"] = interval.lower()
        params["includePrePost"] = prepost
        params["events"] = "div,splits"

        # 1) fix weird bug with Yahoo! - returning 60m for 30m bars
        if params["interval"] == "30m":
            params["interval"] = "15m"

        # setup proxy in requests format
        if proxy is not None:
            if isinstance(proxy, dict) and "https" in proxy:
                proxy = proxy["https"]
            proxy = {"https": proxy}

        # Getting data from json
        url = "{}/v8/finance/chart/{}".format(self._base_url, self.ticker)
        data = _requests.get(url=url, params=params, proxies=proxy)
        if "Will be right back" in data.text:
            raise RuntimeError("*** YAHOO! FINANCE IS CURRENTLY DOWN! ***\n"
                               "Our engineers are working quickly to resolve "
                               "the issue. Thank you for your patience.")
        data = data.json()

        # Work with errors
        debug_mode = True
        if "debug" in kwargs and isinstance(kwargs["debug"], bool):
            debug_mode = kwargs["debug"]

        err_msg = "No data found for this date range, symbol may be delisted"
        if "chart" in data and data["chart"]["error"]:
            err_msg = data["chart"]["error"]["description"]
            shared._DFS[self.ticker] = utils.empty_df()
            shared._ERRORS[self.ticker] = err_msg
            if "many" not in kwargs and debug_mode:
                print('- %s: %s' % (self.ticker, err_msg))
            return shared._DFS[self.ticker]

        if "chart" not in data or data["chart"]["result"] is None or \
                not data["chart"]["result"]:
            shared._DFS[self.ticker] = utils.empty_df()
            shared._ERRORS[self.ticker] = err_msg
            if "many" not in kwargs and debug_mode:
                print('- %s: %s' % (self.ticker, err_msg))
            return shared._DFS[self.ticker]

        # parse quotes
        try:
            quotes = utils.parse_quotes(data["chart"]["result"][0], tz)
        except Exception:
            shared._DFS[self.ticker] = utils.empty_df()
            shared._ERRORS[self.ticker] = err_msg
            if "many" not in kwargs and debug_mode:
                print('- %s: %s' % (self.ticker, err_msg))
            return shared._DFS[self.ticker]

        # 2) fix weird bug with Yahoo! - returning 60m for 30m bars
        if interval.lower() == "30m":
            quotes2 = quotes.resample('30T')
            quotes = _pd.DataFrame(index=quotes2.last().index, data={
                'Open': quotes2['Open'].first(),
                'High': quotes2['High'].max(),
                'Low': quotes2['Low'].min(),
                'Close': quotes2['Close'].last(),
                'Adj Close': quotes2['Adj Close'].last(),
                'Volume': quotes2['Volume'].sum()
            })
            try:
                quotes['Dividends'] = quotes2['Dividends'].max()
            except Exception:
                pass
            try:
                quotes['Stock Splits'] = quotes2['Stock Splits'].max()
            except Exception:
                pass

        if auto_adjust:
            quotes = utils.auto_adjust(quotes)
        elif back_adjust:
            quotes = utils.back_adjust(quotes)

        if rounding:
            quotes = _np.round(quotes, data[
                "chart"]["result"][0]["meta"]["priceHint"])
        quotes['Volume'] = quotes['Volume'].fillna(0).astype(_np.int64)

        quotes.dropna(inplace=True)

        # actions
        dividends, splits = utils.parse_actions(data["chart"]["result"][0], tz)

        # combine
        df = _pd.concat([quotes, dividends, splits], axis=1, sort=True)
        df["Dividends"].fillna(0, inplace=True)
        df["Stock Splits"].fillna(0, inplace=True)

        # index eod/intraday
        df.index = df.index.tz_localize("UTC").tz_convert(
            data["chart"]["result"][0]["meta"]["exchangeTimezoneName"])

        if params["interval"][-1] in {"m", "h"}:
            df.index.name = "Datetime"
        else:
            df.index = _pd.to_datetime(df.index.date)
            if tz is not None:
                df.index = df.index.tz_localize(tz)
            df.index.name = "Date"

        self._history = df.copy()

        if not actions:
            df.drop(columns=["Dividends", "Stock Splits"], inplace=True)

        return df
Example #38
    def setSearchDisplay(self, pages):

        shControl = self.getControl(PANEL_LIST)

        shControl.reset()

        items = utils.parseValue(pages, ['items', 'item'], False)

        if items:
            if not isinstance(items, list):
                items = list([items])

            for item in items:

                try:

                    button = None
                    plPath = None
                    plParams = None

                    if self.myQueue is None:
                        self.myQueue = dict()

                    shTitle = utils.parseValue(item, ['title'])
                    shPath = utils.parseValue(item, ['pointer', 'path'])
                    shParams = utils.parseValue(item, ['pointer', 'params'])
                    shThumbnail = utils.parseValue(
                        item, ['thumbnail', 'alternate'], True,
                        ['parseAlternateImg', '@platforms', 'firetv'])
                    shThumbnail = shThumbnail if shThumbnail is not None else utils.parseValue(
                        item, ['thumbnail', '#text'])
                    shThumbnail = funimationnow.formatImgUrl(shThumbnail,
                                                             theme='show')
                    shStarRating = utils.parseValue(item,
                                                    ['starRating', 'rating'])
                    shTitleimg = os.path.join(self.shows_search_title,
                                              ('s_%s.png' % shParams))
                    shRecentContentItem = utils.parseValue(
                        item, ['content', 'metadata', 'recentContentItem'])
                    shRecentlyAdded = utils.parseValue(
                        item, ['content', 'metadata', 'recentlyAdded'])
                    buttons = utils.parseValue(item, ['legend', 'button'],
                                               False)

                    if buttons:

                        if not isinstance(buttons, list):
                            buttons = list([buttons])

                        for btn in buttons:

                            bTarget = utils.parseValue(btn, ['pointer'], False)

                            if utils.parseValue(
                                    bTarget, ['target']) == 'togglewatchlist':

                                button = btn

                            elif utils.parseValue(bTarget,
                                                  ['target']) == 'player':

                                plPath = utils.parseValue(bTarget, ['path'])
                                plParams = utils.parseValue(
                                    bTarget, ['params'])

                    shToggleParams = utils.parseValue(
                        button, ['pointer', 'toggle', 'data', 'params'])
                    shMyQueuePath = utils.parseValue(
                        self.myQueue, [shToggleParams, 'myQueuePath'])
                    shTogglePath = utils.parseValue(
                        btn, ['pointer', 'toggle', 'data', 'path'])
                    shMyQueueParams = utils.parseValue(
                        self.myQueue, [shToggleParams, 'myQueueParams'])
                    shInQueue = str((0 if shMyQueuePath is not None else 1))

                    shListitem = xbmcgui.ListItem(shTitle, '', shThumbnail,
                                                  shThumbnail)

                    titles = [shTitle]

                    shTitleimg = utils.text2Title(list(titles),
                                                  self.details_search_title,
                                                  shTitleimg)

                    if shTitleimg:
                        shListitem.setProperty('ctitle', shTitleimg)

                    if shInQueue is not None:
                        shListitem.setProperty('qtexture', str(shInQueue))

                    if shRecentContentItem:

                        if shRecentContentItem == 'Episode':
                            shRecentContentItem = 'Movie'

                        tempImg = os.path.join(
                            self.shows_search_subtitle, ('%s.png' % re.sub(
                                r'[^\w\d]+', '_', shRecentContentItem, re.I)))

                        if not os.path.isfile(tempImg):
                            utils.text2Display(shRecentContentItem,
                                               'RGB', (255, 255, 255),
                                               (0, 0, 0),
                                               26,
                                               'Regular',
                                               tempImg,
                                               multiplier=1,
                                               sharpen=False,
                                               bgimage=None)

                        shListitem.setProperty('subtitle', tempImg)

                    if shRecentlyAdded:

                        tfname = re.sub(r'[^\d]+', '', shRecentlyAdded, flags=re.I)
                        ttname = 'added 0d ago'

                        try:

                            import time
                            import dateutil.parser

                            from time import mktime
                            from datetime import datetime

                            ttdate = datetime.fromtimestamp(
                                mktime(time.gmtime(float(tfname))))
                            ttday = (datetime.utcnow() - ttdate).days

                            if ttday >= 365:
                                ttname = 'added %sy ago' % int(
                                    round(float(ttday) / 365))

                            elif ttday >= 1:
                                ttname = 'added %sd ago' % ttday

                            else:

                                ttday = (datetime.utcnow() -
                                         ttdate).total_seconds()

                                if (ttday / 60) <= 59:
                                    ttname = 'added %sm ago' % int(
                                        round(float(ttday) / 60))

                                else:
                                    ttname = 'added %sh ago' % int(
                                        round((float(ttday) / 60) / 60))

                        except Exception as inst:
                            self.logger.error(inst)
                            ttname = 'added 0d ago'

                        tempImg = os.path.join(self.shows_search_added,
                                               ('%s.png' % tfname))

                        #if not os.path.isfile(tempImg):
                        utils.text2Display(ttname,
                                           'RGB', (255, 255, 255), (0, 0, 0),
                                           26,
                                           'Italic',
                                           tempImg,
                                           multiplier=1,
                                           sharpen=False,
                                           bgimage=None)

                        shListitem.setProperty('addedon', tempImg)

                    if shStarRating:

                        shStarRating = str(
                            utils.roundQuarter(str(shStarRating)))
                        shListitem.setProperty('starrating', shStarRating)

                    else:
                        shListitem.setProperty('starrating', '0.0')

                    shListitem.setProperty('title', shTitle)
                    shListitem.setProperty('thumbnail', shThumbnail)
                    shListitem.setProperty('path', shPath)
                    shListitem.setProperty('params', shParams)
                    shListitem.setProperty('titleimg', shTitleimg)
                    shListitem.setProperty('recentContentItem',
                                           shRecentContentItem)
                    #shListitem.setProperty('recentlyAdded', shRecentlyAdded);
                    shListitem.setProperty('togglePath', shTogglePath)
                    shListitem.setProperty('toggleParams', shToggleParams)
                    shListitem.setProperty('myQueuePath', shMyQueuePath)
                    shListitem.setProperty('myQueueParams', shMyQueueParams)
                    shListitem.setProperty('inQueue', shInQueue)
                    #shListitem.setProperty('starRating', shStarRating);

                    shControl.addItem(shListitem)

                except Exception as inst:
                    self.logger.error(inst)
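The "added … ago" branch above is just bucketing an elapsed-seconds value into years, then days, then minutes, then hours (in that checking order). A standalone sketch of the same idea, with a hypothetical helper name and assuming the input is epoch seconds in UTC:

def format_added_ago(added_ts):
    # Hypothetical rewrite of the bucketing above; not part of the addon.
    from datetime import datetime
    delta = datetime.utcnow() - datetime.utcfromtimestamp(added_ts)
    if delta.days >= 365:
        return 'added %sy ago' % int(round(delta.days / 365.0))
    if delta.days >= 1:
        return 'added %sd ago' % delta.days
    seconds = delta.total_seconds()
    if seconds / 60 <= 59:
        return 'added %sm ago' % int(round(seconds / 60))
    return 'added %sh ago' % int(round(seconds / 3600))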
Example #39
 def convert_ts(self, ts):
     return time.mktime(ts.timetuple()) + (ts.microsecond * 1e-6)
Example #40
 def to_python(self, value):
     time_array = time.strptime(value, "%Y-%m-%d-%H-%M-%S")
     time_stamp = int(time.mktime(time_array))
     return time_stamp
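A quick round-trip check of the format used above; strftime over localtime inverts strptime plus mktime, for unambiguous local times (i.e., away from DST transitions):

import time

ts = int(time.mktime(time.strptime('2016-05-01-12-30-00', '%Y-%m-%d-%H-%M-%S')))
assert time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(ts)) == '2016-05-01-12-30-00'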
Example #41
def associationView(request, site=None, group=None):

    group_req = group
    site_req = site

    #Get the data to generate the table of datasets to clean
    runDate = None
    blockTable = False
    datasets = victorDao.getDatasetsToClean('%s^%s' % (site, group_req))
    if datasets:
        runDate = datasets[0][6]
        blockTable = True

    #Get the data to generate the space evolution plot for the ASSOCIATION
    rows = victorDao.getAssociationEvolution('%s^%s' % (site, group_req))
    data_used = []
    data_total = []
    used = total = None  # guard against an empty result set

    for row in rows:
        total, used, runDate = row
        timestamp = time.mktime(
            runDate.timetuple()) * 1000  #multiply by 1000 for javascript
        if used is not None:
            data_used.append([timestamp, int(used)])
        if total is not None:
            data_total.append([timestamp, int(total)])

    full = False
    if used and total:
        if float(used) / float(total) > 0.9:
            full = True

    #------------------------------------------------------------------------
    #Get the data to generate the space evolution plot for the SITE with all groups
    #------------------------------------------------------------------------
    rows_groups = victorDao.getGroupsOnSiteEvolutions(site_req)
    group_accounting = {}
    for row in rows_groups:
        association, used, run_date = row
        if not used:
            continue
        group = association.split('^')[1]
        group_accounting.setdefault(group, [])
        group_accounting[group].append(
            [time.mktime(run_date.timetuple()) * 1000,
             float(used)])

    group_accounting_flat = []
    for group in group_accounting:
        group_accounting_flat.append([group, group_accounting[group]])

    rows_total = victorDao.getTotalEvolutionSite(site_req)
    total_processed_group = []
    for row in rows_total:
        total, run_date = row
        total_processed_group.append(
            [time.mktime(run_date.timetuple()) * 1000,
             float(total)])

    #------------------------------------------------------------------------
    #Get the data to generate the space evolution plot for the GROUP on all sites
    #------------------------------------------------------------------------
    rows_sites = victorDao.getSitesForGroupEvolutions(group_req)
    site_accounting = {}
    for row in rows_sites:
        association, used, run_date = row
        if not used:
            continue
        site = association.split('^')[0]
        site_accounting.setdefault(site, [])
        site_accounting[site].append(
            [time.mktime(run_date.timetuple()) * 1000,
             float(used)])

    site_accounting_flat = []
    for site in site_accounting:
        site_accounting_flat.append([site, site_accounting[site]])

    rows_total = victorDao.getTotalEvolutionGroup(group_req)
    total_processed_site = []
    for row in rows_total:
        total, run_date = row
        total_processed_site.append(
            [time.mktime(run_date.timetuple()) * 1000,
             float(total)])

    #Get the date and the menu
    today = str(date.today())
    menu = generateAssociationMenu()

    return render_to_response(
        'space_evolution_view.html', {
            'site': site_req,
            'full': full,
            'group': group_req,
            'today': today,
            'run_date': runDate,
            'datasets': datasets,
            'block_table': blockTable,
            'data_used': str(data_used),
            'data_total': str(data_total),
            'data_site': group_accounting_flat,
            'totals_site': total_processed_group,
            'data_group': site_accounting_flat,
            'totals_group': total_processed_site,
            'regions': menu
        })
Example #42
 def _timestamp_to_prefix(self, ts):
     ts = time.mktime(ts.timetuple()) + (ts.microsecond * 1e-6)
     return '%012x' % int(ts * 1000)
Example #43
def get_device_event_data(request, *args, **kwargs):
    """
    Get a list of event data according to the request and the argument giving.

    @param member: Member object to whom vehicule belongs to or the owner
    @param device_id: Id of the vehicle object in the database
    @param string_date: string format date sent from the client eg: 01/05/2016 12:00 - 07/05/2016 11:00
    @param string_start_date: building from the string format date
    @param string_end_date: building from the string format date
    @param positions: queryset of even data happened during the period choosen by the client
    @param data_count: number of data event of the current user; this is use during the live dislay to know if the device sent a new event data or not
    this function return a JSON objet of: event data and data count
    """
    member = request.user
    device_id = request.GET.get('device_id')
    string_date = request.GET.get('string_date')
    string_start_date = None
    string_end_date = None
    device = Device.objects.get(pk=device_id)
    if string_date:
        dates_list = retrieve_dates_from_interval(string_date)
        string_start_date = dates_list[0]
        string_end_date = dates_list[1]
    vehicle = Vehicle.objects.get(device=device)
    positions = EventData.objects.using('opengts').filter(deviceID=device_id)
    # positions = EventData.objects.using('opengts')
    data_count = positions.count()
    start_date, end_date = None, None
    if string_start_date is not None:
        start_date = int(time.mktime(datetime.strptime(string_start_date, '%d-%m-%Y %H:%M').timetuple()))
    if string_end_date is not None:
        end_date = int(time.mktime(datetime.strptime(string_end_date, '%d-%m-%Y %H:%M').timetuple()))

    if start_date and end_date:
        positions = positions.filter(Q(creationTime__gte=start_date) & Q(creationTime__lt=end_date))
    elif start_date and not end_date:
        now = datetime.now()
        end_date = time.mktime(now.timetuple())
        positions = positions.filter(Q(creationTime__gte=start_date) & Q(creationTime__lt=end_date))
    elif end_date and not start_date:
        end_date_dtime = datetime.strptime(string_end_date, '%d-%m-%Y %H:%M')
        end_date_dt = datetime(end_date_dtime.year, end_date_dtime.month, end_date_dtime.day, 0)
        start_date = int(time.mktime(end_date_dt.timetuple()))
        positions = positions.filter(Q(creationTime__gte=start_date) & Q(creationTime__lt=end_date))
    if not start_date and not end_date and len(positions)>0:
        positions = [positions.order_by('-creationTime')[0]]
    else:
        # Order by -creationTime and grab the first 1000,
        # that is equivalent to grab the 1000 latest eventData
        positions = list(positions.order_by('-creationTime')[:1000])
        positions = reversed(positions)
    event_data = []

    late_lat = 0.0
    late_lng = 0.0
    for position in positions:
        if position.speedKPH > 0:
            icon_url = get_the_right_icon(position, device)
        else:
            icon_url = vehicle.type.static_icon_img.url
        if late_lat != position.latitude and late_lng != position.longitude:
            if position.latitude != 0.0 and position.longitude != 0.0:
                pos = {
                    'latitude': position.latitude,
                    'longitude': position.longitude,
                    'displayName': device.displayName,
                    'dateTime': change_date_to_string(datetime.fromtimestamp(position.creationTime)),
                    'speed': position.speedKPH,
                    'heading': position.heading,
                    'address': position.address,
                    'description': vehicle.name + " / " + device.displayName,
                    'icon': icon_url
                }
                event_data.append(pos)
        late_lat = position.latitude
        late_lng = position.longitude
    return HttpResponse(json.dumps({'event_data': event_data, 'data_count': data_count}), 'content-type: text/json', **kwargs)
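retrieve_dates_from_interval is not shown in this example. Judging from the '%d-%m-%Y %H:%M' strptime format used above, a plausible sketch (an assumption, not the project's actual helper) is:

def retrieve_dates_from_interval(string_date):
    # Assumed behavior: split '01/05/2016 12:00 - 07/05/2016 11:00' on ' - '
    # and normalize each half to the 'dd-mm-YYYY HH:MM' shape parsed above.
    start, end = [part.strip() for part in string_date.split(' - ', 1)]
    return [start.replace('/', '-'), end.replace('/', '-')]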
Example #44
 def date(self):
     if self.exists:
         return time.mktime(
             default_storage.get_modified_time(self.path).timetuple())
     return None
Example #45
    '_', '').isalnum()
is_name_eq = lambda x: '=' in x and (
    lambda a, b: is_name(a) and not b.startswith('='))(*x.split('=', 1))
#-----------------------------------------------------------------------------
#   OUT
#-----------------------------------------------------------------------------
str_len = lambda S: int(
    reduce(lambda r, c: r + 1. / (1 + (ord(c) > 127)) + 4 * (c == '\t'), S, 0))
time2string = lambda x, precision=3: "%i:%02i:%0*.*f" % (
    int(x / 3600), int(x / 60) % 60,
    2 + bool(precision) + precision, precision, x % 60)
#string2time = lambda x: reduce(lambda S, v: S+float(v[0])*v[1], map(None, x.split(':'), (3600, 60, 1)), 0.)
string2time = lambda x: reduce(lambda S, v: S + float(v[0]) * v[1],
                               zip(x.split(':'), (3600, 60, 1)), 0.)
date2string = lambda t: time.strftime("%Y.%m.%d-%X", time.localtime(t))
string2date = lambda s: time.mktime(time.strptime(s, "%Y.%m.%d-%X"))


def string2bool(value):
    if value in 'Y y YES Yes yes ON On on TRUE True true V v 1'.split():
        return True
    if value in 'N n NO No no OFF Off off FALSE False false X x 0'.split():
        return False
    raise Exception(
        'incorrect value=%s for convert to bool, Y|y|YES|Yes|yes|ON|On|on|TRUE|True|true|V|v|1'
        ' or N|n|NO|No|no|OFF|Off|off|FALSE|False|false|X|x|0 expected' %
        value)


def size2string(sz):
    for d, p in ((2**40, '%.2fT'), (2**30, '%.1fG'), (2**20, '%.1fM'),
Example #46
 def _get_timestamp(self, entry):
     """Convert the given RSS entry timestamp into a Python datetime compatible with our DB"""
     return datetime.datetime.fromtimestamp(
         time.mktime(entry.updated_parsed)).replace(tzinfo=utc)
Example #47
sys.path.insert(0, os.path.join(folder_path, "Library"))
sys.path.insert(0, os.path.join(folder_path, "Syslibrary"))
sys.path.insert(0, os.path.join(folder_path, "Data"))
sys.path.insert(0, os.path.join(folder_path, "Object"))
from launcheTender import LauncheTenderclass
from eTenderUpdateProjectDetails import updatedetails
from Tenderplan import Tenderplans
from tenderDetails import Tenderdetails
from datadriven import DataDriver
from setupenviron import setupValue
from logouteTender import Userprofilemenu
from logdriver import logvalue
from TenderModification import TenderClass
logs = logvalue.logger
logclose = logvalue()
ftime = time.mktime(time.localtime())
ptime = time.strftime("%d-%m-%Y_%H%M%S", time.localtime(ftime))
#filename = 'TestCase-100287-{0}.png'.format(ptime)
tf = 'test_Plantemplateactualforecastdates'
filename = 'Testcase-%s.png' % (tf)
path = setupValue().screenpath
fullpath = os.path.join(path, filename)


#Test case Number = 100287
class Plantemplateactualforecastdates(unittest.TestCase):
    def test_Plantemplateactualforecastdates(self):
        try:
            browserInstance = setupValue()
            browser = browserInstance.setupfunction()
            browser.implicitly_wait(5)
Example #48
from xml.etree import ElementTree as ET
import codecs
import time, datetime

members_xml = ET.parse('all-members-2010.xml')

attrs = ['firstname', 'lastname', 'party', 'constituency', 'fromdate', 'todate', 'house']

members = []
for member in members_xml.findall('member'):
	member = { attr: member.get(attr) for attr in attrs }
	try:
		member['fromdate'] = int(time.mktime(datetime.datetime.strptime(member['fromdate'], "%Y-%m-%d").timetuple()))
		member['todate'] = int(time.mktime(datetime.datetime.strptime(member['todate'], "%Y-%m-%d").timetuple()))
		member['fullname'] = "%s %s" % (member['firstname'], member['lastname'])
		members.append(member)
	except Exception:
		print('error', member)

attrs.append('fullname')

with codecs.open('members.csv', 'w', 'utf-8') as outfile:
	outfile.write(','.join(attrs) + '\n')
	for member in members:
		if member['party'] != 'unknown':
			outfile.write(','.join(['"%s"' % member[attr] for attr in attrs]) + '\n')
Example #49
    async def sendWxMessage(self, openId, bid, tempId, msg, fromType, tourl):
        if openId is None:
            return None
        templateInfo = self.db.query(online_message_template).filter(
            online_message_template.bid == bid,
            online_message_template.sendtype == 2,
            online_message_template.weixinId == tempId).first()
        if not templateInfo:
            return {'info': '发送失败', 'status': -1, 'msg': '没有相关模版'}  # 'send failed' / 'no matching template'
        else:
            sendContent = ''
            config_wt = online_config_weixin_template
            weixinTempInfo = self.db.query(config_wt).filter(
                config_wt.id == templateInfo.weixinTemplateId).first()
            if weixinTempInfo is None:
                return {'info': '发送失败', 'status': -1, 'msg': '没有微信默认配置模板'}  # 'send failed' / 'no default WeChat template configured'
            fieldList = weixinTempInfo.fieldList
            # print(weixinTempInfo)
            fieldListArr = fieldList.split(',')  # pull out the WeChat template fields for formatting
            params = {}
            if len(msg) != len(fieldListArr):
                return {'info': '发送失败', 'status': -1, 'msg': '所传内容字段和模板不符'}  # 'send failed' / 'message fields do not match the template'
            else:
                for key, val in enumerate(fieldListArr):
                    if bid == 86:  # 万菱汇 (bid 86) template messages require black text
                        # assumed WeChat template payload shape: a value plus a color code
                        params[val] = {'value': msg[key], 'color': '#000000'}
                        sendContent += msg[key] + "|"
                    else:
                        params[val] = msg[key]
            if tourl:
                params['tourl'] = tourl

            returnArr = await self.sendWeixinTemplateMessage(
                openId, bid, templateInfo.weixinTemplateId, params)
            if returnArr['errcode'] == 0:
                # insert the sent content into the message_list table
                # $message_list = M('message_list');
                data = {}  # assemble the row to insert
                data['bid'] = bid
                data['valueType'] = 3  # 1 = plain text; 2 = html; 3 = template
                data['fromType'] = fromType  # e.g. registration
                data['sendType'] = 2  # 2 = WeChat
                data['openId'] = openId
                if bid == 86:  # 万菱汇 template messages require black text
                    data['sendContent'] = sendContent.strip('|')  # WeChat notification content
                else:
                    data['sendContent'] = '|'.join(params)  # WeChat notification content

                data['errorCode'] = returnArr['errcode']
                data['errorMsg'] = returnArr['errmsg']
                data['sendTime'] = int(time.time())  # send time
                data['intime'] = int(time.time())
                data['indate'] = int(
                    time.mktime(
                        time.strptime(
                            time.strftime('%Y-%m-%d', time.localtime()),
                            '%Y-%m-%d')))

                dbreturn = None
                try:
                    dbreturn = self.db.execute(
                        online_message_list.__table__.insert(), data)
                    self.db.commit()
                    self.AppLogging.info('online_message_list, insert to db %s',
                                         data)
                except Exception:
                    # print('----db insert online_message_log error-----')
                    self.AppLogging.warning(
                        '----db insert online_message_log error-----')
                if dbreturn is not None:
                    lastId = dbreturn.lastrowid
                else:
                    lastId = None
                # lastId=session.query(func.max(User.tid)).one()[0]
                # $lastId = $message_list->add($data);
                if lastId is not None:  # insert a row into the message_log table
                    # $message_log = M('message_log');
                    obj = {}
                    obj['mlistId'] = lastId
                    obj['bid'] = bid
                    obj['valueType'] = 3  # 1 = plain text; 2 = html; 3 = template
                    obj['fromType'] = fromType  # e.g. registration
                    obj['sendType'] = 2  # 2 = WeChat
                    obj['errorCode'] = data['errorCode']
                    obj['errorMsg'] = data['errorMsg']
                    obj['intime'] = int(time.time())
                    obj['indate'] = int(
                        time.mktime(
                            time.strptime(
                                time.strftime('%Y-%m-%d', time.localtime()),
                                '%Y-%m-%d')))

                    try:
                        rr = self.db.execute(
                            online_message_log.__table__.insert(), obj)
                        self.db.commit()
                        self.AppLogging.info(
                            'online_message_log, insert to db %s', obj)
                    except Exception:
                        self.AppLogging.warning(
                            '----db insert online_message_log error-----')
                        # print('----db insert online_message_log error-----')

                return {'info': '发送成功', 'status': returnArr['errcode']}  # 'send succeeded'
            else:
                return {
                    'info': '发送失败',  # 'send failed'
                    'status': returnArr['errcode'],
                    'msg': returnArr['errmsg']
                }

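The indate expression used twice above (mktime over a strptime of today's formatted date) is simply "epoch seconds at local midnight today". An equivalent form that may read more clearly:

import time
from datetime import date, datetime

# Same value as int(time.mktime(time.strptime(time.strftime('%Y-%m-%d', time.localtime()), '%Y-%m-%d')))
midnight = int(time.mktime(datetime.combine(date.today(), datetime.min.time()).timetuple()))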
Example #50
	def getTimestamp(self, date, mytime):
		d = localtime(date)
		dt = datetime(d.tm_year, d.tm_mon, d.tm_mday, mytime[0], mytime[1])
		return int(mktime(dt.timetuple()))
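Standalone usage sketch of the same combine-date-and-time pattern (hypothetical names; date_ts is epoch seconds, mytime an (hour, minute) pair):

import time
from datetime import datetime

def get_timestamp(date_ts, mytime):
    d = time.localtime(date_ts)
    dt = datetime(d.tm_year, d.tm_mon, d.tm_mday, mytime[0], mytime[1])
    return int(time.mktime(dt.timetuple()))

noon_today = get_timestamp(time.time(), (12, 0))  # today at 12:00 local time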
Example #51

def v(text, args):
    if args.verbose:
        print(text)


while True:
    v("Starting iteration " + str(iteration_count), args)
    feed = feedparser.parse(args.address)  # Obtain the feed
    if iteration_count == 0:  # Iteration 0: either send posts from a recent window (preload) or just record every post currently in the feed into seen_posts
        if args.preload != 0:  # Preload is on.
            for post in feed['items']:
                # struct_time doesn't support subtraction, so we convert it to datetime with a bit of a hack
                post_time_delta = datetime.datetime.fromtimestamp(
                    time.mktime(
                        time.gmtime())) - datetime.datetime.fromtimestamp(
                            time.mktime(post['published_parsed']))
                if post_time_delta < datetime.timedelta(
                        seconds=int(args.preload)):
                    # We don't really need to check if the post is in seen_posts as this is the first iteration, but we might dump seen_posts to a file in the future.
                    if post.id not in seen_posts:
                        v(
                            'Publishing message with title "' + post.title +
                            '" as part of preload', args)
                        telegram.send_message(args.token, args.destination_id,
                                              post, args.localtz,
                                              args.sourcetz, args.msgformat,
                                              args.parsemode, args.timefmt)
                        seen_posts.append(post.id)
                else:
                    if post.id not in seen_posts:  # Prevent duplicates.
Example #52
def mktime(timestamp):
    return time.mktime(timestamp.timetuple())
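This wrapper interprets the datetime as local wall-clock time and drops sub-second precision; datetime.fromtimestamp is its inverse for unambiguous local times:

import time
from datetime import datetime

def mktime_dt(timestamp):  # same as above, renamed so it does not shadow time.mktime
    return time.mktime(timestamp.timetuple())

dt = datetime(2016, 5, 1, 12, 30, 45)
assert datetime.fromtimestamp(mktime_dt(dt)) == dt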
Example #53
        utc_ts = int(t)
        dt = datetime.fromtimestamp(utc_ts)
    except ValueError:
        ## Pytimes earlier than the epoch are a pain in the rear end. 
        dt = datetime(year=t.year,
                      month=t.month,
                      day=t.day,
                      hour=t.hour,
                      minute=t.minute,
                      second=t.second)

    d = dt + utc_off
    if ret_dt:
        return d
    else:
        return _time.mktime(d.timetuple())

def classify_email_addr (addr, domains):
    """Return a tuple of (home, work, other) booleans classifying if the
    specified address falls within one of the domains."""

    res = {'home' : False, 'work' : False, 'other' : False}

    for cat in res.keys():
        try:
            for domain in domains[cat]:
                if re.search((domain + '$'), addr):
                    res[cat] = True
        except KeyError:
            logging.warning('Invalid email_domains specification.')
Example #54
    if "saas_store" in sys.argv:
        def getAppkey():
            datatype_list = []
            try:
                from MysqlClient import MysqlClient
                client = MysqlClient("saas_meta")
                result = client.getAppkey_app()
                datatype_list = [item["appkey"] for item in result]
                client.closeMysql()
            except Exception:
                import traceback
                traceback.print_exc()
            datatype_list.append("hbtv") if "hbtv" not in datatype_list else None
            return datatype_list
        datatype_list = ["huiyue_ad", "huiyue_ios"]
        startstamp = time.mktime(time.strptime('20170320+000000', '%Y%m%d+%H%M%S'))
        endstamp = time.mktime(time.strptime('20170324+110600', '%Y%m%d+%H%M%S'))
        while startstamp <= endstamp:
            try:
                collectFiles(timestamp=endstamp, remote_dir_format="/data1/logs/transformsaaslogs/%(datatype)s/%(yyyymmdd)s/%(hhmm)s.log.gz",
                             local_dir_part_format="/data1/logs/transformsaaslogs/%(datatype)s/%(yyyymmdd)s/%(hhmm)s_%(part)d.log.gz",
                             local_dir_format="/data1/logs/transformsaaslogs/%(datatype)s/%(yyyymmdd)s/%(hhmm)s.log.gz",
                             datatypeList=datatype_list, is_store=True)
            except Exception:
                import traceback
                traceback.print_exc()
            endstamp -= 60


    if 'store' in sys.argv:
        def getAppkey():
Example #55
    def test_03_stats(self):
        """Test STATS stats method works"""
        today = str(datetime.date.today())
        hour = int(datetime.datetime.utcnow().strftime('%H'))
        date_ms = time.mktime(time.strptime(today, "%Y-%m-%d")) * 1000
        anon = 0
        auth = 0
        TaskRunFactory.create(task=self.project.tasks[0])
        TaskRunFactory.create(task=self.project.tasks[1])
        dates_stats, hours_stats, user_stats = stats.get_stats(self.project.id)
        for item in dates_stats:
            if item['label'] == 'Anon + Auth':
                assert item['values'][-1][0] == date_ms, item['values'][0][0]
                assert item['values'][-1][1] == 10, "There should be 10 answers"
            if item['label'] == 'Anonymous':
                assert item['values'][-1][0] == date_ms, item['values'][0][0]
                anon = item['values'][-1][1]
            if item['label'] == 'Authenticated':
                assert item['values'][-1][0] == date_ms, item['values'][0][0]
                auth = item['values'][-1][1]
            if item['label'] == 'Total Tasks':
                assert item['values'][-1][0] == date_ms, item['values'][0][0]
                assert item['values'][-1][1] == 4, "There should be 4 tasks"
            if item['label'] == 'Expected Answers':
                assert item['values'][0][0] == date_ms, item['values'][0][0]
                for i in item['values']:
                    assert i[1] == 100, "Each date should have 100 answers"
                assert item['values'][0][1] == 100, "There should be 100 answers"
        assert auth + anon == 10, "date stats sum of auth and anon should be 10"

        max_hours = 0
        for item in hours_stats:
            if item['label'] == 'Anon + Auth':
                max_hours = item['max']
                print(item)
                assert item['max'] == 10, "Max hours value should be 10"
                for i in item['values']:
                    if i[0] == hour:
                        assert i[1] == 10, "There should be 10 answers"
                        assert i[2] == 5, "The size of the bubble should be 5"
                    else:
                        assert i[1] == 0, "There should be 0 answers"
                        assert i[2] == 0, "The size of the bubble should be 0"
            if item['label'] == 'Anonymous':
                anon = item['max']
                for i in item['values']:
                    if i[0] == hour:
                        assert i[1] == anon, "There should be anon answers"
                        assert i[2] == (anon * 5) / max_hours, "Bubble size should scale with anon answers"
                    else:
                        assert i[1] == 0, "There should be 0 answers"
                        assert i[2] == 0, "The size of the bubble should be 0"
            if item['label'] == 'Authenticated':
                auth = item['max']
                for i in item['values']:
                    if i[0] == hour:
                        assert i[1] == auth, "There should be auth answers"
                        assert i[2] == (auth * 5) / max_hours, "Bubble size should scale with auth answers"
                    else:
                        assert i[1] == 0, "There should be 0 answers"
                        assert i[2] == 0, "The size of the bubble should be 0"
        assert auth + anon == 10, "hour stats sum of auth and anon should be 10"

        err_msg = "user stats sum of auth and anon should be 7"
        assert user_stats['n_anon'] + user_stats['n_auth'] == 7, err_msg
Example #56
    sys.exit(1)

file = ''
for key, val in optlist:
    if key == '--file':
        file = val

if file == '' or os.path.exists(file) == 0:
    print(ts() + 'no file assigned.')
    sys.stdout.flush()
    sys.exit(1)

lines = open(file).readlines()

output = []
for line in lines:
    if 'RUN_START_TIME:' in line:
        tstart = line.split()[1] + ' ' + line.split()[2].split('.')[0]
    elif 'RUN_END_TIME:' in line:
        tend = line.split()[1] + ' ' + line.split()[2].split('.')[0]

t1 = time.mktime(
    datetime.datetime.strptime(tstart, "%Y-%m-%d %H:%M:%S").timetuple())
t2 = time.mktime(
    datetime.datetime.strptime(tend, "%Y-%m-%d %H:%M:%S").timetuple())

print(tstart, tend)
print(t1, t2, t2 - t1)

sys.exit(0)
Example #57
def timestamp(dt):
    """Returns the timestamp of a datetime object."""
    if not dt: return None
    return time.mktime(dt.timetuple())
Example #58
 def get_file_stats(self, path):
     # type: (str) -> FileStats
     return FileStats(
         size=self._size,
         mtime=time.mktime(datetime.now().timetuple())
     )
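mktime over datetime.now().timetuple() truncates to whole seconds; if sub-second mtimes matter, the microsecond term can be added back, as Example #39 does:

import time
from datetime import datetime

now = datetime.now()
mtime = time.mktime(now.timetuple()) + now.microsecond * 1e-6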
Example #59
def parse_all_edges(inputfile, outputfile, node_map, noencode):
    """
    Parse all edges with their timestamps to a file.
    Format: <source_node_id> \t <destination_node_id> \t <hashed_source_type>:<hashed_destination_type>:<hashed_edge_type>:<edge_timestamp>
    """
    # Scan through the file to validate edges (i.e., both end nodes must exist) and find the smallest timestamp.
    description = '\x1b[6;30;43m[i]\x1b[0m Edge Scanning Progress of File: \x1b[6;30;42m{}\x1b[0m'.format(
        inputfile)
    pb = tqdm.tqdm(desc=description, mininterval=1.0, unit="recs")
    total_edges = 0
    smallest_timestamp = None
    with open(inputfile, 'r') as f:
        for line in f:
            pb.update()
            json_object = json.loads(line)

            if "used" in json_object:
                used = json_object["used"]
                for uid in used:
                    if "prov:type" not in used[uid]:
                        logging.debug(
                            "Edge (used) record without type. UUID: %s", uid)
                        continue
                    if "cf:date" not in used[uid]:
                        logging.debug(
                            "Edge (used) record without date. UUID: %s", uid)
                        continue
                    if "prov:entity" not in used[uid]:
                        logging.debug(
                            "Edge (used/{}) record without srcUUID. UUID: {}".
                            format(used[uid]["prov:type"], uid))
                        continue
                    if "prov:activity" not in used[uid]:
                        logging.debug(
                            "Edge (used/{}) record without dstUUID. UUID: {}".
                            format(used[uid]["prov:type"], uid))
                        continue
                    srcUUID = used[uid]["prov:entity"]
                    dstUUID = used[uid]["prov:activity"]
                    if srcUUID not in node_map:
                        logging.debug(
                            "Edge (used/{}) record with an unmatched srcUUID. UUID: {}"
                            .format(used[uid]["prov:type"], uid))
                        continue
                    if dstUUID not in node_map:
                        logging.debug(
                            "Edge (used/{}) record with an unmatched dstUUID. UUID: {}"
                            .format(used[uid]["prov:type"], uid))
                        continue
                    total_edges += 1
                    timestamp_str = used[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            timestamp_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    if smallest_timestamp is None or ts < smallest_timestamp:
                        smallest_timestamp = ts
            if "wasGeneratedBy" in json_object:
                wasGeneratedBy = json_object["wasGeneratedBy"]
                for uid in wasGeneratedBy:
                    if "prov:type" not in wasGeneratedBy[uid]:
                        logging.debug(
                            "Edge (wasGeneratedBy) record without type. UUID: %s",
                            uid)
                        continue
                    if "cf:date" not in wasGeneratedBy[uid]:
                        logging.debug(
                            "Edge (wasGeneratedBy) record without date. UUID: %s",
                            uid)
                        continue
                    if "prov:entity" not in wasGeneratedBy[uid]:
                        logging.debug(
                            "Edge (wasGeneratedBy/{}) record without srcUUID. UUID: {}"
                            .format(wasGeneratedBy[uid]["prov:type"], uid))
                        continue
                    if "prov:activity" not in wasGeneratedBy[uid]:
                        logging.debug(
                            "Edge (wasGeneratedBy/{}) record without dstUUID. UUID: {}"
                            .format(wasGeneratedBy[uid]["prov:type"], uid))
                        continue
                    srcUUID = wasGeneratedBy[uid]["prov:activity"]
                    dstUUID = wasGeneratedBy[uid]["prov:entity"]
                    if srcUUID not in node_map:
                        logging.debug(
                            "Edge (wasGeneratedBy/{}) record with an unmatched srcUUID. UUID: {}"
                            .format(wasGeneratedBy[uid]["prov:type"], uid))
                        continue
                    if dstUUID not in node_map:
                        logging.debug(
                            "Edge (wasGeneratedBy/{}) record with an unmatched dstUUID. UUID: {}"
                            .format(wasGeneratedBy[uid]["prov:type"], uid))
                        continue
                    total_edges += 1
                    timestamp_str = wasGeneratedBy[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            timestamp_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    if smallest_timestamp is None or ts < smallest_timestamp:
                        smallest_timestamp = ts
            if "wasInformedBy" in json_object:
                wasInformedBy = json_object["wasInformedBy"]
                for uid in wasInformedBy:
                    if "prov:type" not in wasInformedBy[uid]:
                        logging.debug(
                            "Edge (wasInformedBy) record without type. UUID: %s",
                            uid)
                        continue
                    if "cf:date" not in wasInformedBy[uid]:
                        logging.debug(
                            "Edge (wasInformedBy) record without date. UUID: %s",
                            uid)
                        continue
                    if "prov:informant" not in wasInformedBy[uid]:
                        logging.debug(
                            "Edge (wasInformedBy/{}) record without srcUUID. UUID: {}"
                            .format(wasInformedBy[uid]["prov:type"], uid))
                        continue
                    if "prov:informed" not in wasInformedBy[uid]:
                        logging.debug(
                            "Edge (wasInformedBy/{}) record without dstUUID. UUID: {}"
                            .format(wasInformedBy[uid]["prov:type"], uid))
                        continue
                    srcUUID = wasInformedBy[uid]["prov:informant"]
                    dstUUID = wasInformedBy[uid]["prov:informed"]
                    if srcUUID not in node_map:
                        logging.debug(
                            "Edge (wasInformedBy/{}) record with an unmatched srcUUID. UUID: {}"
                            .format(wasInformedBy[uid]["prov:type"], uid))
                        continue
                    if dstUUID not in node_map:
                        logging.debug(
                            "Edge (wasInformedBy/{}) record with an unmatched dstUUID. UUID: {}"
                            .format(wasInformedBy[uid]["prov:type"], uid))
                        continue
                    total_edges += 1
                    timestamp_str = wasInformedBy[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            timestamp_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    if smallest_timestamp is None or ts < smallest_timestamp:
                        smallest_timestamp = ts
            if "wasDerivedFrom" in json_object:
                wasDerivedFrom = json_object["wasDerivedFrom"]
                for uid in wasDerivedFrom:
                    if "prov:type" not in wasDerivedFrom[uid]:
                        logging.debug(
                            "Edge (wasDerivedFrom) record without type. UUID: %s",
                            uid)
                        continue
                    if "cf:date" not in wasDerivedFrom[uid]:
                        logging.debug(
                            "Edge (wasDerivedFrom) record without date. UUID: %s",
                            uid)
                        continue
                    if "prov:usedEntity" not in wasDerivedFrom[uid]:
                        logging.debug(
                            "Edge (wasDerivedFrom/{}) record without srcUUID. UUID: {}"
                            .format(wasDerivedFrom[uid]["prov:type"], uid))
                        continue
                    if "prov:generatedEntity" not in wasDerivedFrom[uid]:
                        logging.debug(
                            "Edge (wasDerivedFrom/{}) record without dstUUID. UUID: {}"
                            .format(wasDerivedFrom[uid]["prov:type"], uid))
                        continue
                    srcUUID = wasDerivedFrom[uid]["prov:usedEntity"]
                    dstUUID = wasDerivedFrom[uid]["prov:generatedEntity"]
                    if srcUUID not in node_map:
                        logging.debug(
                            "Edge (wasDerivedFrom/{}) record with an unmatched srcUUID. UUID: {}"
                            .format(wasDerivedFrom[uid]["prov:type"], uid))
                        continue
                    if dstUUID not in node_map:
                        logging.debug(
                            "Edge (wasDerivedFrom/{}) record with an unmatched dstUUID. UUID: {}"
                            .format(wasDerivedFrom[uid]["prov:type"], uid))
                        continue
                    total_edges += 1
                    timestamp_str = wasDerivedFrom[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            timestamp_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    if smallest_timestamp is None or ts < smallest_timestamp:
                        smallest_timestamp = ts
            if "wasAssociatedWith" in json_object:
                wasAssociatedWith = json_object["wasAssociatedWith"]
                for uid in wasAssociatedWith:
                    if "prov:type" not in wasAssociatedWith[uid]:
                        logging.debug(
                            "Edge (wasAssociatedWith) record without type. UUID: %s",
                            uid)
                        continue
                    if "cf:date" not in wasAssociatedWith[uid]:
                        logging.debug(
                            "Edge (wasAssociatedWith) record without date. UUID: %s",
                            uid)
                        continue
                    if "prov:agent" not in wasAssociatedWith[uid]:
                        logging.debug(
                            "Edge (wasAssociatedWith/{}) record without srcUUID. UUID: {}"
                            .format(wasAssociatedWith[uid]["prov:type"], uid))
                        continue
                    if "prov:activity" not in wasAssociatedWith[uid]:
                        logging.debug(
                            "Edge (wasAssociatedWith/{}) record without dstUUID. UUID: {}"
                            .format(wasAssociatedWith[uid]["prov:type"], uid))
                        continue
                    srcUUID = wasAssociatedWith[uid]["prov:agent"]
                    dstUUID = wasAssociatedWith[uid]["prov:activity"]
                    if srcUUID not in node_map:
                        logging.debug(
                            "Edge (wasAssociatedWith/{}) record with an unmatched srcUUID. UUID: {}"
                            .format(wasAssociatedWith[uid]["prov:type"], uid))
                        continue
                    if dstUUID not in node_map:
                        logging.debug(
                            "Edge (wasAssociatedWith/{}) record with an unmatched dstUUID. UUID: {}"
                            .format(wasAssociatedWith[uid]["prov:type"], uid))
                        continue
                    total_edges += 1
                    timestamp_str = wasAssociatedWith[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            timestamp_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    if smallest_timestamp is None or ts < smallest_timestamp:
                        smallest_timestamp = ts
    f.close()
    pb.close()

    output = open(outputfile, "w+")
    description = '\x1b[6;30;43m[i]\x1b[0m Progress of Generating Output of File: \x1b[6;30;42m{}\x1b[0m'.format(
        inputfile)
    pb = tqdm.tqdm(desc=description, mininterval=1.0, unit="recs")
    with open(inputfile, 'r') as f:
        for line in f:
            pb.update()
            json_object = json.loads(line)

            if "used" in json_object:
                used = json_object["used"]
                for uid in used:
                    if "prov:type" not in used[uid]:
                        continue
                    else:
                        edgetype = valgencf(used[uid])
                    if "cf:id" not in used[uid]:
                        logging.debug(
                            "Edge (used) record without timestamp. UUID: %s",
                            uid)
                        continue
                    else:
                        timestamp = used[uid][
                            "cf:id"]  # Can be used as timestamp
                    if "prov:entity" not in used[uid]:
                        continue
                    if "prov:activity" not in used[uid]:
                        continue
                    srcUUID = used[uid]["prov:entity"]
                    dstUUID = used[uid]["prov:activity"]
                    if srcUUID not in node_map:
                        continue
                    else:
                        srcVal = node_map[srcUUID]
                    if dstUUID not in node_map:
                        continue
                    else:
                        dstVal = node_map[dstUUID]

                    ts_str = used[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            ts_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    adjusted_ts = ts - smallest_timestamp

                    if noencode:
                        output.write(str(srcUUID) + '\t' \
                         + str(dstUUID) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
                    else:
                        output.write(str(hashgen([srcUUID])) + '\t' \
                         + str(hashgen([dstUUID])) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
            if "wasGeneratedBy" in json_object:
                wasGeneratedBy = json_object["wasGeneratedBy"]
                for uid in wasGeneratedBy:
                    if "prov:type" not in wasGeneratedBy[uid]:
                        continue
                    else:
                        edgetype = valgencf(wasGeneratedBy[uid])
                    if "cf:id" not in wasGeneratedBy[uid]:
                        logging.debug(
                            "Edge (wasGeneratedBy) record without timestamp. UUID: %s",
                            uid)
                        continue
                    else:
                        timestamp = wasGeneratedBy[uid]["cf:id"]
                    if "prov:entity" not in wasGeneratedBy[uid]:
                        continue
                    if "prov:activity" not in wasGeneratedBy[uid]:
                        continue
                    srcUUID = wasGeneratedBy[uid]["prov:activity"]
                    dstUUID = wasGeneratedBy[uid]["prov:entity"]
                    if srcUUID not in node_map:
                        continue
                    else:
                        srcVal = node_map[srcUUID]
                    if dstUUID not in node_map:
                        continue
                    else:
                        dstVal = node_map[dstUUID]

                    ts_str = wasGeneratedBy[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            ts_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    adjusted_ts = ts - smallest_timestamp

                    if noencode:
                        output.write(str(srcUUID) + '\t' \
                         + str(dstUUID) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
                    else:
                        output.write(str(hashgen([srcUUID])) + '\t' \
                         + str(hashgen([dstUUID])) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
            if "wasInformedBy" in json_object:
                wasInformedBy = json_object["wasInformedBy"]
                for uid in wasInformedBy:
                    if "prov:type" not in wasInformedBy[uid]:
                        continue
                    else:
                        edgetype = valgencf(wasInformedBy[uid])
                    if "cf:id" not in wasInformedBy[uid]:
                        logging.debug(
                            "Edge (wasInformedBy) record without timestamp. UUID: %s",
                            uid)
                        continue
                    else:
                        timestamp = wasInformedBy[uid]["cf:id"]
                    if "prov:informant" not in wasInformedBy[uid]:
                        continue
                    if "prov:informed" not in wasInformedBy[uid]:
                        continue
                    srcUUID = wasInformedBy[uid]["prov:informant"]
                    dstUUID = wasInformedBy[uid]["prov:informed"]
                    if srcUUID not in node_map:
                        continue
                    else:
                        srcVal = node_map[srcUUID]
                    if dstUUID not in node_map:
                        continue
                    else:
                        dstVal = node_map[dstUUID]

                    ts_str = wasInformedBy[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            ts_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    adjusted_ts = ts - smallest_timestamp

                    if noencode:
                        output.write(str(srcUUID) + '\t' \
                         + str(dstUUID) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
                    else:
                        output.write(str(hashgen([srcUUID])) + '\t' \
                         + str(hashgen([dstUUID])) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
            if "wasDerivedFrom" in json_object:
                wasDerivedFrom = json_object["wasDerivedFrom"]
                for uid in wasDerivedFrom:
                    if "prov:type" not in wasDerivedFrom[uid]:
                        continue
                    else:
                        edgetype = valgencf(wasDerivedFrom[uid])
                    if "cf:id" not in wasDerivedFrom[uid]:
                        logging.debug(
                            "Edge (wasDerivedFrom) record without timestamp. UUID: %s",
                            uid)
                        continue
                    else:
                        timestamp = wasDerivedFrom[uid]["cf:id"]
                    if "prov:usedEntity" not in wasDerivedFrom[uid]:
                        continue
                    if "prov:generatedEntity" not in wasDerivedFrom[uid]:
                        continue
                    srcUUID = wasDerivedFrom[uid]["prov:usedEntity"]
                    dstUUID = wasDerivedFrom[uid]["prov:generatedEntity"]
                    if srcUUID not in node_map:
                        continue
                    else:
                        srcVal = node_map[srcUUID]
                    if dstUUID not in node_map:
                        continue
                    else:
                        dstVal = node_map[dstUUID]

                    ts_str = wasDerivedFrom[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            ts_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    adjusted_ts = ts - smallest_timestamp

                    if noencode:
                        output.write(str(srcUUID) + '\t' \
                         + str(dstUUID) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
                    else:
                        output.write(str(hashgen([srcUUID])) + '\t' \
                         + str(hashgen([dstUUID])) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
            if "wasAssociatedWith" in json_object:
                wasAssociatedWith = json_object["wasAssociatedWith"]
                for uid in wasAssociatedWith:
                    if "prov:type" not in wasAssociatedWith[uid]:
                        continue
                    else:
                        edgetype = valgencfe(wasAssociatedWith[uid])
                    if "cf:id" not in wasAssociatedWith[uid]:
                        logging.debug(
                            "Edge (wasAssociatedWith) record without timestamp. UUID: %s",
                            uid)
                        continue
                    else:
                        timestamp = wasAssociatedWith[uid]["cf:id"]
                    if "prov:agent" not in wasAssociatedWith[uid]:
                        continue
                    if "prov:activity" not in wasAssociatedWith[uid]:
                        continue
                    srcUUID = wasAssociatedWith[uid]["prov:agent"]
                    dstUUID = wasAssociatedWith[uid]["prov:activity"]
                    if srcUUID not in node_map:
                        continue
                    else:
                        srcVal = node_map[srcUUID]
                    if dstUUID not in node_map:
                        continue
                    else:
                        dstVal = node_map[dstUUID]

                    ts_str = wasAssociatedWith[uid]["cf:date"]
                    ts = time.mktime(
                        datetime.datetime.strptime(
                            ts_str, "%Y:%m:%dT%H:%M:%S").timetuple())
                    adjusted_ts = ts - smallest_timestamp

                    if noencode:
                        output.write(str(srcUUID) + '\t' \
                         + str(dstUUID) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
                    else:
                        output.write(str(hashgen([srcUUID])) + '\t' \
                         + str(hashgen([dstUUID])) + '\t' \
                         + str(srcVal) + ':' + str(dstVal) \
                         + ':' + str(edgetype) + ':' + str(timestamp) \
                         + ':' + str(adjusted_ts) + '\t' + '\n')
    f.close()
    output.close()
    pb.close()
    return total_edges
Example #60
def ahora():
    then = datetime.datetime.now()
    return (time.mktime(then.timetuple())*1e3 + then.microsecond/1e3)/1000
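ahora ("now" in Spanish) rebuilds an epoch-seconds float from the local wall clock, so it tracks time.time() closely for unambiguous local times:

import time

assert abs(ahora() - time.time()) < 0.01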