예제 #1
0
 def test_parse_datetime_timezone(self):
     """UTC offsets of any precision (+hh, +hh:mm, +hh:mm:ss) are
     parsed but discarded: every variant yields the same naive value."""
     from mx.DateTime import DateTime
     expected = DateTime(2007, 1, 1, 13, 30, 29)
     offsets = ("+01", "-01", "+01:15", "-01:15", "+01:15:42", "-01:15:42")
     for offset in offsets:
         parsed = self.DATETIME("2007-01-01 13:30:29" + offset, self.curs)
         self.assertEqual(expected, parsed)
예제 #2
0
파일: shared.py 프로젝트: dendriel/SENAS
    def mountTime(self, time_str):
        """Parse a fixed-width 'DDMMYYYYHHMM' string into a DateTime.

        Returns the DateTime on success, or INVALID when
        self._validateTime() rejects the parsed fields.  Raises
        ValueError if any slice is not numeric.
        """
        # NOTE(review): the initializer declares a "minute" key, but the
        # code below stores and reads "min" instead, so "minute" stays 0
        # and is never used -- confirm which key _validateTime() expects.
        dic_time = {"day": 0, "month": 0, "year": 0, "hour": 0, "minute": 0}

        # Fixed-width slices: DD MM YYYY HH MM.
        dic_time["day"] = int(time_str[0:2])
        dic_time["month"] = int(time_str[2:4])
        dic_time["year"] = int(time_str[4:8])
        dic_time["hour"] = int(time_str[8:10])
        dic_time["min"] = int(time_str[10:12])

        ret = self._validateTime(dic_time)

        if ret == OK:
            time = DateTime(dic_time["year"], dic_time["month"],
                            dic_time["day"], dic_time["hour"], dic_time["min"],
                            0)
            return time
        else:
            return INVALID
예제 #3
0
    def createPerson(self, birth=DateTime(1970,2,3), first_name=None,
                     last_name=None, system=None, gender=True, fnr=None,
                     ansnr=None, studnr=None):
        """Shortcut for creating a test person in the db.

        Populates a Person entity with the given birth date and gender
        (True -> gender_female, False -> gender_male), optionally adds
        first/last names (SAP source system) and external ids
        (fnr/studnr/ansnr), committing after each id.  Returns the
        populated Person object.

        NOTE(review): the 'system' parameter is accepted but never used.
        """
        pe = Factory.get('Person')(self.db)
        co = Factory.get('Constants')(self.db)
        # Map the boolean flag onto the constant the db layer expects.
        if gender:
            gender = co.gender_female
        else:
            gender = co.gender_male
        pe.populate(birth, gender=gender)
        pe.write_db()
        pe_id = pe.entity_id

        if first_name or last_name:
            # Both name variants must be "affected" before populate_name()
            # calls are accepted by the entity.
            pe.affect_names(co.system_sap, co.name_first, co.name_last)
            pe.populate_name(co.name_first, first_name)
            pe.populate_name(co.name_last, last_name)
            pe.write_db()

        if fnr:
            pe.affect_external_id(co.system_sap, co.externalid_fodselsnr)
            pe.populate_external_id(co.system_sap, co.externalid_fodselsnr, fnr)
            pe.write_db()
            self.db.commit()
            # Re-fetch so the entity reflects the committed external id.
            pe.clear()
            pe.find(pe_id)
        if studnr:
            pe.affect_external_id(co.system_fs, co.externalid_studentnr)
            pe.populate_external_id(co.system_fs, co.externalid_studentnr, studnr)
            pe.write_db()
            self.db.commit()
            pe.clear()
            pe.find(pe_id)
        if ansnr:
            pe.affect_external_id(co.system_sap, co.externalid_sap_ansattnr)
            pe.populate_external_id(co.system_sap, co.externalid_sap_ansattnr, ansnr)
            pe.write_db()
            self.db.commit()
        pe.write_db()
        self.db.commit()
        return pe
예제 #4
0
 def date(self, alternative_source=False, return_str=False):
     """return a datetime object for the email's date or None if no date is
     set or if it can't be parsed

     When *alternative_source* is true and there is no Date header, the
     date is taken from the Unix "From " envelope line.  For an
     unparseable date, None is returned unless *return_str* is true, in
     which case the raw header string is returned instead.
     """
     value = self.get('date')
     if value is None and alternative_source:
         unix_from = self.message.get_unixfrom()
         if unix_from is not None:
             fields = unix_from.split(" ", 2)
             if len(fields) == 3:
                 value = fields[2]
     if value is None:
         return None
     datetuple = parsedate(value)
     if datetuple:
         factory = DateTime if lgc.USE_MX_DATETIME else datetime
         return factory(*datetuple[:6])
     if return_str:
         return value
     return None
예제 #5
0
def fromDecimalYear(decimalyear):
    """
    return mx.DateTime object corresponding to (floating point) decimal year

    mx.DateTime provides:
     .absdate - integer days since epoch
     .abstime - (fractional) seconds from beginning (time 00:00:00) of day (no leap seconds)
     .absdays - (fractional) days since epoch
    """
    # Split into the integer year and the elapsed fraction of that year.
    # NOTE: a value very close to the next integer year rounds upward.
    fraction, whole_year = math.modf(decimalyear)
    year_start = DateTime(int(whole_year), 1, 1)

    # Seconds elapsed within the year, honouring leap years.
    days_in_year = 366 if year_start.is_leapyear else 365
    elapsed_seconds = fraction * 86400.0 * days_in_year

    return year_start + DateTimeDeltaFromSeconds(elapsed_seconds)
예제 #6
0
        def CheckDateTypes(self):
            """Round-trip DateTime and DateTimeDelta values through the db.

            Inserts each value into a temporary table, selects it back,
            and checks equality: a timestamp column for DateTime and an
            interval column for DateTimeDelta.
            """
            dt = DateTime(2002, 6, 15)
            # DateTimeDelta(days, hours, minutes, seconds) -> one second.
            dtd = DateTimeDelta(0, 0, 0, 1)

            self.cur.execute("create table test (t timestamp)")
            self.cur.execute("insert into test(t) values (?)", (dt, ))
            self.cur.execute("select t from test")
            res = self.cur.fetchone()

            self.failUnlessEqual(
                dt, res.t, "DateTime object should have been %s, was %s" %
                (repr(dt), repr(res.t)))

            # Reuse the table name for the interval round-trip.
            self.cur.execute("drop table test")
            self.cur.execute("create table test(i interval)")
            self.cur.execute("insert into test(i) values (?)", (dtd, ))
            self.cur.execute("select i from test")
            res = self.cur.fetchone()

            self.failUnlessEqual(
                dtd, res.i,
                "DateTimeDelta object should have been %s, was %s" %
                (repr(dtd), repr(res.i)))
예제 #7
0
 def __init__(self,
              surname,
              given_names,
              sex=None,
              DOB=None,
              DOB_prec=0,
              street_address=None,
              locality=None,
              state=None,
              postcode=None,
              country=None,
              alt_street_address=None,
              alt_locality=None,
              alt_state=None,
              alt_postcode=None,
              alt_country=None,
              work_street_address=None,
              work_locality=None,
              work_state=None,
              work_postcode=None,
              work_country=None,
              passport_number=None,
              passport_country=None,
              passport_number_2=None,
              passport_country_2=None,
              home_phone=None,
              work_phone=None,
              mobile_phone=None,
              fax_phone=None,
              e_mail=None,
              last_update=None):
     """Create a Person record, storing every ctor argument as an attribute.

     DOB, when given, is a '-'-separated string (e.g. 'YYYY-MM-DD') and is
     converted to an mx DateTime.  Each instance gets a unique person_id
     from the Person.id class counter.
     """
     # Capture all constructor arguments as attributes, but drop 'self'
     # first: the original update(vars()) also stored a circular
     # self-reference attribute (self.self is self).
     attrs = vars()
     del attrs['self']
     self.__dict__.update(attrs)
     if self.DOB:
         self.DOB = DateTime(*[int(d) for d in self.DOB.split('-')])
     Person.id += 1
     self.person_id = self.id
     # NOTE(review): this discards any caller-supplied last_update value --
     # it is always reset to None; confirm that is intentional.
     self.last_update = None
예제 #8
0
        def __init__(self, locked=False):
            """Build attributes from self.__init_dict__.

            String values matching reDateTime become DateTime/Date/Time
            objects (swapping day and month when the month field is
            impossible), dicts become nested makeFMData objects, lists
            are converted element-wise, and anything else is stored
            verbatim.  When *locked* is true the instance is marked
            locked via __modified__.
            """
            init_dict = self.__init_dict__
            for key in init_dict:
                value = init_dict[key]
                date, mo, da, ye, time, ho, mi, se = [None] * 8
                # Was "type(value) in [str, str]" -- a 2to3 leftover of
                # [str, unicode]; an exact type check is equivalent.
                if type(value) is str:
                    date, da, mo, ye, time, ho, mi, se = reDateTime.match(
                        value).groups()
                    # Day-first string: swap when "month" is impossible.
                    if mo and int(mo) > 12:
                        mo, da = da, mo

                if type(init_dict[key]) == dict:
                    setattr(self, key, makeFMData(
                        init_dict[key],
                        locked=False))  # lock all substructures??
                elif type(init_dict[key]) == list:
                    converted = []
                    for element in init_dict[key]:
                        if type(element) == dict:
                            converted.append(makeFMData(element))  # lock ??
                        else:
                            converted.append(element)
                    setattr(self, key, converted)
                elif date and time:
                    setattr(
                        self, key,
                        DateTime(int(ye), int(mo), int(da), int(ho), int(mi),
                                 int(se)))
                elif date:
                    setattr(self, key, Date(int(ye), int(mo), int(da)))
                elif time:
                    setattr(self, key, Time(int(ho), int(mi), int(se)))
                else:
                    setattr(self, key, init_dict[key])
            if locked:
                self.__modified__.add('__locked__')
예제 #9
0
def main(argv):
    """Update per-file download counts from an Apache access log.

    argv: [prog, configfile, logfile].  The log may be plain, .gz or
    .bz2.  Entries older than the stored 'http' timestamp are skipped;
    afterwards the timestamp is advanced and counts committed.
    """
    if len(argv) != 3:
        print "Usage: apache_count.py configfile logfile"
        raise SystemExit
    # Read config file
    p = ConfigParser.ConfigParser()
    p.read(argv[1])
    # Read server-relative URI prefix
    files_url = urlparse.urlsplit(p.get('webui', 'files_url'))[2]
    # Setup database connection
    dbname = p.get('database', 'name')
    dbuser = p.get('database', 'user')
    dbpass = p.get('database', 'password')
    dbconn = psycopg.connect(database=dbname, user=dbuser, password=dbpass)
    cursor = dbconn.cursor()

    # Open the log, transparently handling compression by extension.
    filename = argv[2]
    if filename.endswith(".gz"):
        f = gzip.open(filename)
    elif filename.endswith(".bz2"):
        f = bz2.BZ2File(filename)
    else:
        f = open(filename)

    # High-water mark: only entries newer than this are counted.
    cursor.execute("select value from timestamps where name='http'")
    last_http = cursor.fetchone()[0]

    downloads = {}
    for line in f:
        m = logre.search(line)
        if not m:
            continue
        path = m.group('path')
        if not path.startswith(files_url):
            continue
        # Build a timestamp for the entry from the matched fields.
        day = int(m.group('day'))
        month = m.group('month').lower()
        month = month_index[month]
        year = int(m.group('year'))
        hour = int(m.group('hour'))
        minute = int(m.group('min'))
        sec = int(m.group('sec'))
        date = DateTime(year, month, day, hour, minute, sec)
        zone = utc_offset(m.group('zone'))
        date = date - zone  # subtract the offset (presumably to get UTC)

        if date < last_http:
            continue

        filename = os.path.basename(path)
        # see if we have already read the old download count
        if not downloads.has_key(filename):
            cursor.execute(
                "select downloads from release_files "
                "where filename=%s", (filename, ))
            record = cursor.fetchone()
            if not record:
                # No file entry. Could be a .sig file
                continue
            # make sure we're working with a number
            downloads[filename] = record[0] or 0
        # add a download
        downloads[filename] += 1

    if not downloads:
        return

    # Update the download counts
    for filename, count in downloads.items():
        cursor.execute(
            "update release_files set downloads=%s "
            "where filename=%s", (count, filename))
    # Update the download timestamp
    # NOTE(review): 'date' is the timestamp of the last line parsed, not
    # necessarily the newest entry counted -- confirm this is intended.
    date = psycopg.TimestampFromMx(date)
    cursor.execute("update timestamps set value=%s "
                   "where name='http'", (date, ))
    dbconn.commit()
예제 #10
0
def main(config_file, logfile):
    """Populate the download counts.

    Merges yesterday's local Apache log with per-mirror stats files,
    updates release_files.downloads and the 'http' timestamp, then
    writes the daily local and global stats files.
    """
    # Read config file
    p = ConfigParser.ConfigParser()
    p.read(config_file)

    # Read mirror infos
    mirrors = p.get('mirrors', 'folder')

    # Read server-relative URI prefix
    files_url = urlparse.urlsplit(p.get('webui', 'files_url'))[2]
    # Setup database connection
    dbconn, cursor = get_cursor(p)

    # create a log reader, that filters on files_url
    # build an iterator here with chain and all distant files
    cursor.execute("select * from mirrors")
    def read_distant_stats(mirror, filename):
        # mirror row: [0] = base URL, [3] = stats path on the mirror.
        mirror_domain = urlparse.urlparse(mirror[0])[1]
        mirror_domain = os.path.join(mirrors, mirror_domain)
        distant_reader = ApacheDistantLocalStats(mirror_domain)
        stat_file_url = '%s/%s/%s' % (mirror[0], mirror[3], filename)
        return distant_reader.read_stats(stat_file_url)

    # it supposes it runs the program at day + 1
    yesterday = datetime.datetime.now() - datetime.timedelta(1)
    filename = yesterday.strftime('%Y-%m-%d.bz2')
    mirrors = [read_distant_stats(mirror, filename)
               for mirror in cursor.fetchall()]

    logs = chain(*[ApacheLogReader(logfile, files_url)] + mirrors)
    _log('Working with local stats and %d mirror(s)' % len(mirrors))

    # get last http access
    cursor.execute("select value from timestamps where name='http'")
    last_http = cursor.fetchone()[0]
    _log('Last time stamp was : %s' % last_http)

    downloads = {}

    # let's read the logs in the apache file
    for line in logs:
        day = int(line.get('day', yesterday.day))
        month = line.get('month', yesterday.month)
        year = int(line.get('year', yesterday.year))
        hour = int(line.get('hour', 0))
        minute = int(line.get('min', 0))
        sec = int(line.get('sec', 0))
        date = DateTime(year, month, day, hour, minute, sec)
        zone = utc_offset(line.get('zone', 0))
        date = date - zone
        count = int(line.get('count', 1))
        if date < last_http:
            continue

        # BUGFIX: the original rebound 'filename' here, clobbering the
        # date-stamped stats-file name that is reused further down when
        # the daily stats files are written.
        log_filename = line['filename']

        _dotlog('.')
        # see if we have already read the old download count
        if log_filename not in downloads:
            cursor.execute("select downloads from release_files "
                           "where filename=%s", (log_filename,))
            record = cursor.fetchone()
            if not record:
                # No file entry. Could be a .sig file
                continue
            # make sure we're working with a number
            downloads[log_filename] = record[0] or 0
        # add a download
        downloads[log_filename] += count

    # BUGFIX: 'downloads' is a dict; the original compared it to [] which
    # is always True, so the timestamp advanced even with no downloads.
    if downloads:
        for release_filename, count in downloads.items():
            # Update the download counts in the DB
            _log('Updating download count for %s: %s'
                 % (release_filename, count))
            cursor.execute("update release_files set downloads=%s "
                        "where filename=%s", (count, release_filename))

        # Update the download timestamp
        date = psycopg.TimestampFromMx(datetime.datetime.now())
        cursor.execute("update timestamps set value=%s "
                    "where name='http'", (date,))

        dbconn.commit()

    # now creating the local stats file
    _log('Building local stats file')
    stats = ApacheLocalStats()
    stats_dir = p.get('mirrors', 'local-stats')
    if not os.path.exists(stats_dir):
        raise ValueError('"%s" folder not found (local-stats in config.ini)' \
                    % stats_dir)
    stats_file = os.path.join(stats_dir, filename)
    stats.build_daily_stats(yesterday.year, yesterday.month, yesterday.day,
                            logfile, stats_file, files_url, 'bz2')

    # now creating the global stats file
    # which is built with the latest database counts
    _log('Building global stats file')
    globalstats_dir = p.get('mirrors', 'global-stats')
    if not os.path.exists(globalstats_dir):
        raise ValueError('"%s" folder not found (global-stats in config.ini)' \
                % globalstats_dir)
    cursor.execute("select name, filename, downloads from release_files")

    def get_line(files_url):
        # Adapt db rows to the dict format build_daily_stats expects.
        for line in cursor:
            data = {}
            data['day'] = yesterday.day
            data['month'] = yesterday.month
            data['year'] = yesterday.year
            data['filename'] = line[1]
            data['useragent'] = 'Unkown' # not stored yet
            data['packagename'] = line[0]
            data['count'] = line[2]
            yield data

    gstats = LocalStats()
    stats_file = os.path.join(globalstats_dir, filename)
    gstats.build_daily_stats(yesterday.year, yesterday.month, yesterday.day,
                             get_line, stats_file, files_url, 'bz2')
 def test_early_date(self):
     "This test illustrates inability to handle dates earlier than 100 AD."
     convert = SourceDataTypes.get_conversion('date')
     # Year 0000 should ideally yield DateTime(0, 12, 25), but does not.
     self.assertNotEqual(convert('25/12/0000'), DateTime(0, 12, 25))
 def test_date(self):
     """dd/mm/yy[yy] strings convert; None/'' give None; other
     separators raise ValueError."""
     conv = SourceDataTypes.get_conversion('date')
     cases = [
         ('23/12/1970', (1970, 12, 23)),
         ('23/12/70', (1970, 12, 23)),
         ('25/12/00', (2000, 12, 25)),
         ('25/12/1900', (1900, 12, 25)),
         ('25/12/900', (900, 12, 25)),
         ('25/12/9', (2009, 12, 25)),
         ('3/2/2004', (2004, 2, 3)),
         ('3/2/04', (2004, 2, 3)),
         ('03/2/04', (2004, 2, 3)),
         ('3/02/04', (2004, 2, 3)),
         ('03/02/04', (2004, 2, 3)),
         ('29/02/04', (2004, 2, 29)),
     ]
     for text, ymd in cases:
         self.assertEqual(conv(text), DateTime(*ymd))
     self.assertEqual(conv(None), None)
     self.assertEqual(conv(''), None)
     self.assertRaises(ValueError, conv, '2 3 04')
     self.assertRaises(ValueError, conv, '23-12-1970')
예제 #13
0
파일: pgdb.py 프로젝트: zkbt/MITTENS
def Timestamp(year, month, day, hour, minute, second):
	"""DB-API 2.0 Timestamp constructor, delegating to mx.DateTime."""
	return DateTime(year, month, day, hour, minute, second)
예제 #14
0
 def test_type_roundtrip_datetime_array(self):
     """Round-trip an array containing an mx DateTime value."""
     from mx.DateTime import DateTime
     sample = DateTime(2010, 5, 3, 10, 20, 30)
     self._test_type_roundtrip_array(sample)
예제 #15
0
    def maintenance_qty_change(self,
                               cr,
                               uid,
                               ids,
                               maintenance_product_qty=False,
                               maintenance_month_qty=False,
                               maintenance_start_date=False,
                               maintenance_end_date=False,
                               is_maintenance=False,
                               fleet_id=False):
        """on_change handler validating maintenance dates and quantities.

        Normalizes the start date to day `fixed_month_init_day`, the end
        date to `fixed_days_before_month_end` days before month end,
        recomputes maintenance_month_qty and the product quantities, and
        collects every correction into a warning message.

        Returns an on_change-style dict with 'value' (field updates) and,
        when corrections were made, 'warning'.
        """
        result = {}
        if not is_maintenance:
            return result

        result['value'] = {}
        warning_messages = ""

        if maintenance_start_date:
            start = DateTime.strptime(maintenance_start_date, '%Y-%m-%d')
            if start.day != fixed_month_init_day:
                # (typo fix: message previously read "should should")
                warning_messages += "- Start date should ideally start at day %s of the month; corrected to day %s\n" % (
                    fixed_month_init_day, fixed_month_init_day)
                start = DateTime.DateTime(start.year, start.month,
                                          fixed_month_init_day)

            result['value'].update(
                {'maintenance_start_date': start.strftime('%Y-%m-%d')})

        if maintenance_end_date:
            end = DateTime.strptime(maintenance_end_date, '%Y-%m-%d')
            # A correct end date plus (fixed_days_before_month_end + 1)
            # days lands exactly on the 1st of the following month.
            en_date_check = end + DateTime.RelativeDateTime(
                days=fixed_days_before_month_end + 1)

            if end.month == en_date_check.month or en_date_check.day != 1:
                # (typo fix: message previously read "should should")
                warning_messages += "- End date should end %s days before the end of the month! It has been reset to the correct value.\n" % fixed_days_before_month_end
                day = end.days_in_month - fixed_days_before_month_end
                end = DateTime.DateTime(end.year, end.month, day, 0, 0, 0.0)
                result['value'].update(
                    {'maintenance_end_date': end.strftime('%Y-%m-%d')})

        if maintenance_start_date and maintenance_end_date:
            if end < start:
                warning_messages += "- End date should be AFTER Start date!\n"
                day = start.days_in_month - fixed_days_before_month_end  #then we set the minimal end date
                end = DateTime.DateTime(start.year, start.month, day, 0, 0,
                                        0.0)
                result['value'].update(
                    {'maintenance_end_date': end.strftime('%Y-%m-%d')})

            maintenance_month_qty = self._get_maintenance_month_qty_from_start_end(
                cr, uid, start, end)
            result['value'].update(
                {'maintenance_month_qty': maintenance_month_qty})
            if maintenance_month_qty < min_maintenance_months:
                warning_messages += "- we usually try to sell %s months at least!\n" % min_maintenance_months

            if fleet_id:
                fleet = self.pool.get('stock.location').browse(
                    cr, uid, fleet_id)
                theoretic_end = self._get_end_date_from_start_date(
                    cr, uid, start, fleet)
                if theoretic_end.year != end.year or theoretic_end.month != end.month or theoretic_end.day != end.day:
                    warning_messages += "- Theoretic Maintenance End Date was: %s !\n" % theoretic_end.strftime(
                        '%Y-%m-%d')

        if maintenance_product_qty and maintenance_month_qty:  #only set the default fleet at init
            result['value'].update({
                'product_uom_qty':
                maintenance_product_qty * maintenance_month_qty
            })
            result['value'].update({
                'product_uos_qty':
                maintenance_product_qty * maintenance_month_qty
            })  # TODO * product_obj.uos_coeff

        if warning_messages:
            result['warning'] = {
                'title': 'Maintenance Dates Warning',
                'message': warning_messages
            }
        return result
예제 #16
0
def betalight(names, verbose = False):
# Read Images and their headers and reorder in case they were taken with
# some other BIAS script than the correct one
    nimages = len(names)
    if nimages != 4:
        print "I need four images. You provided me with %d" % nimages
        return

    if verbose: print 'start reading of MEF images'
    # files is a list of FITS files, already open for use
    files = [(pyfits.open(x), x) for x in names]

    # We want to sort the files attending to their decreasing mean value
    # To to this, we prepare a list of "tuples". Each tuple will contain
    # three values: the mean value of the file, a reference to the file
    # itself, and its name. It seems redundant to have the files listed
    # in two places, but it is not. Python copy references, not whole
    # values, so it's cheap to do like this.
    #
    # You can use a FITS object like a sequence. Each element of the
    # sequence is a FITS HDU. In ALFOSC files, file[0] is the Primary HDU
    # and file[1] is the first image (the only one).
    l = [ (fitsfile[1].data.mean(), fitsfile, name)
               for (fitsfile, name)
               in files ]

    # And now we get the list of sorted files. How? Well, when you sort
    # a list of tuples, what Python does is: sort using the first element,
    # and if there's a coincidence, use the second element, and if ... You
    # get the idea. "l" is a list of tuples having the mean value of the
    # file as a first element and thus "sorted(l)" will return the tuples
    # of "l" sorted by mean value.
    # Then we discard that mean value to create sortedfits.
    sl = sorted(l, reverse = True)
    sortednames = [x[2] for x in sl]
    sortedfits = [x[1] for x in sl]

    # The we produce a list of the first image (fistfile[1]) data, for
    # everyone of those sorted FITS files. Alse a list of primary headers.
    # We assign them also to discrete variables (f1, f2, ... hf1, hf2, ...)
    # for later use.
    datalist = [fitsfile[1].data for fitsfile in sortedfits]
    f1, f2, b1, b2 = datalist
    headerlist = [fitsfile[1].header for fitsfile in sortedfits]
    hf1, hf2, hb1, hb2 = headerlist

    if verbose: print 'end reading of MEF images'

    # Test that the images are of the same size
    # We could do it, for example, comparing the shape of the first
    # data with the second one, and then with the third one, and then
    # with the fourth one.
    # That's a direct but a bit cumbersome way to do it. Instead, we
    # use "set". set is a native type to Python that behaves as... a
    # set ;). That means it will contain only one copy of a value. Eg:
    #
    #  >>> set([1, 2, 3, 1, 2, 4, 1, 2, 5])
    #  set([1, 2, 3, 4, 5])
    #
    # so... if this set of image shapes has more than one element...
    # at least one of them is different to the others.
    if len(set(x.shape for x in datalist)) > 1:
        print 'Images not of same size! Aborted!'
        return
    if verbose: print 'Images are the same size'

    # Cut edges out of the images
    #
    # Images should be 101 x 101 pixels, since that is the size of the
    # image of betalight on alfosc
    bsize	= 16
    nareas	= int(float(f1.shape[1])/bsize)

    ysize, xsize = f1.shape
    c1 = c3 = nareas - 1
    c2 = xsize
    c4 = ysize

    if xsize < 200 or ysize < 200:
        cf1 = f1[c3:c4, c1:c2]
        cf2 = f2[c3:c4, c1:c2]
        cb1 = b1[c3:c4, c1:c2]
        cb2 = b2[c3:c4, c1:c2]
    else:
        cf1 = f1[50:ysize-50, 50:xsize-50]
        cf2 = f2[50:ysize-50, 50:xsize-50]
        cb1 = b1[50:ysize-50, 50:xsize-50]
        cb2 = b2[50:ysize-50, 50:xsize-50]

    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
    # ; Measure some easy statistical properties for the user to see ;
    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

    # This empty class is just for convenience. That way we can set arbitrary
    # attributes to its instances. See below
    class Dummy(object): pass
    stats = []
        
    if verbose:
        print ("%14s" + "%13s"*5) % ("Name", "Min", "Max",
                                     "StDev", "Mean", "Median")
    frmt = "%-14s%13.2f%13.2f%13.2f%13.2f%13.2f"
    for img, name in zip((cf1, cf2, cb1, cb2), sortednames):
        st = Dummy()
        st.min, st.max = img.min(), img.max()
        st.stdev, st.mean, st.median, st.nrejects = scstdev(img)
        stats.append(st)

        if verbose:
            print frmt % (name, st.min, st.max, st.stdev, st.mean, st.median)

    if verbose: print

    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
    # ; Check difference of bias frames, should be smaller than the stdev ;
    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

    biasdiff = abs(stats[2].mean - stats[3].mean)
    if biasdiff > stats[2].stdev or biasdiff > stats[3].stdev:
        print
        print " Difference of averages of bias frames", biasdiff
        print " is larger than the standard deviation"
        print " of either of the bias images         ", stats[2].stdev, stats[3].stdev
        print " Aborted! "
        return

    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
    # ; Divide image to areas (subimages) of 16x16 pix and ;
    # ; calculate statistics of individual areas           ;
    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

    class ImgPiece(object):
        def __init__(self, frame):
            st, ma, me, nrej = scstdev(frame)
            self.frame  = frame
            self.min    = frame.min()
            self.max    = frame.max()
            self.std    = st
            self.mean   = ma
            self.median = me

    # The original script did this on three loops, what of course is
    # the obvious way. Python is not lightning fast when it comes to
    # long loops, but for this small 101x101  (36 16x16 squares)

    # The original also creates a 3D array shaped (nareas*nareas, 5, 4).
    # Instead, I create a dict structured like this:
    #
    #    pieces[n] -> [ImgPiece(flat1[n]), ..., ImgPiece(bias2[n])]
    #
    # Where "n" is the number for a 16x16 area

    pieces = defaultdict(list)
    for img in (cf1, cf2, cb1, cb2):
        for ycoord in range(0, nareas):
            vert = ycoord * bsize
            row = img[vert: vert + bsize]
            base = ycoord * nareas
            for xcoord in range(0, nareas):
                horiz = xcoord * bsize
                pieces[base + xcoord].append(ImgPiece(row[:,horiz:horiz + bsize]))

    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
    # ; Calculate COUNTS, BIAS, RON and GAIN for individual areas    ;
    # ;                                                              ;
    # ; gain = ( ( flat1 + flat2 ) - ( bias1 + bias2) ) /            ;
    # ;	( STDEV( flat1 - flat2 )^2 - STDEV(bias1 - bias2 )^2 ) ;
    # ;                                                              ;
    # ; ron  = gain * STDEV( bias1 - bias2 ) / SQRT( 2 )             ;
    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

    gaintbl = []
    rontbl  = []

    sqrt2 = sqrt(2)
    for l in range(0, nareas*nareas):
        pf1, pf2, pb1, pb2 = pieces[l]
        stdFlats = (scstdev(pf1.frame - pf2.frame)[0])
        stdBias  = (scstdev(pb1.frame - pb2.frame)[0])
        gaintbl.append( ((pf1.mean+pf2.mean) - (pb1.mean+pb2.mean)) /
                        (stdFlats**2 - stdBias**2) )
        rontbl.append( stdBias / sqrt2 )

    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
    # ; Take the individual measurements of the subimages and ;
    # ; do sigma clipping on them                             ;
    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

    std, gain, _, nr1 = scstdev(gaintbl)
    gainerr  = std / sqrt(numpy.array(gaintbl).size - nr1)

    std, mean, _, nr2 = scstdev(rontbl)
    ron      = gain * mean
    ronerr   = gain * std / sqrt(numpy.array(rontbl).size - nr2)

    # Ok
    fltmean = numpy.array([(x[0].mean, x[1].mean) for x in pieces.values()])
    std, counts, _, nr3 = scstdev(fltmean)
    counterr = std / sqrt(fltmean.size - nr3)

    # Ok
    biasmean = numpy.array([(x[2].mean, x[3].mean) for x in pieces.values()])
    std, bias, _, nr4 = scstdev(biasmean)
    biaserr  = std / sqrt(biasmean.size - nr3)

    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
    # ; Print results to screen, these values are the ones going to DB ;
    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

    if verbose:
        print "  COUNTS:%7.1f +/- %6.2f" % (counts, counterr)
        print "  BIAS:  %7.1f +/- %6.2f" % (bias, biaserr)
        print "  GAIN:  %7.4f +/- %6.4f" % (gain, gainerr)
        print "  RON:   %7.4f +/- %6.4f" % (ron, ronerr)
        print

    results = Dummy()
    results.counts   = counts
    results.counterr = counterr
    results.bias     = bias  
    results.biaserr  = biaserr 
    results.gain     = gain
    results.gainerr  = gainerr
    results.ron      = ron
    results.ronerr   = ronerr

    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
    # ; extract required keywords from the FITS header ;
    # ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

    # Obtain the primary HDU headers from the first file
    hf0 = files[0][0][0].header

    #                      012345678901234567890
    # Format for DATE-AVG: 2008-01-22T14:53:12.5
    date_avg = hf0['DATE-AVG']
    results.date  = DateTime(int(date_avg[0:4]), # year
                             int(date_avg[5:7]), # month
                             int(date_avg[8:10]) # day
                             ).mjd
    results.time  = hf0['UT']
    results.amp   = hf0['AMPLMODE']
    results.gmode = hf0['GAINM']
    if verbose:
        print "amp   ", results.amp
        print "gmode ", results.gmode

    return results
예제 #17
0
 def default_maintenance_start_date(self, cr, uid, context=None):
     """Default maintenance start date as a '%Y-%m-%d' string: day
     `fixed_month_init_day` of the month lying
     `default_maintenance_start_delta` months from now.

     BUGFIX: 'context' previously defaulted to a mutable {} shared
     across calls; it is unused here, so None is a safe default.
     """
     now = DateTime.now()
     date = DateTime.DateTime(now.year, now.month, fixed_month_init_day, 0,
                              0, 0.0) + DateTime.RelativeDateTime(
                                  months=default_maintenance_start_delta)
     return date.strftime('%Y-%m-%d')
예제 #18
0
def test_mxdatetime():
    """mx.DateTime JSON encoding: a time-of-day serializes as a full
    ISO timestamp with offset, while midnight collapses to a bare date."""
    noon = DateTime(2018, 1, 1, 12, 0, 0)
    midnight = DateTime(2018, 1, 1, 0, 0, 0)
    assert json.dumps(noon) == '"2018-01-01T12:00:00+01:00"'
    assert json.dumps(midnight) == '"2018-01-01"'
예제 #19
0
                        lon_0=radarLon)
# Geographic coordinate system helper (class imported outside this view).
geoProj = GeographicSystem()

# Grid cell size in meters (8 km square cells).
dx, dy = (8.0e3, ) * 2
# dx, dy = 500.0, 500.0
# Accumulation frame length(s), in minutes.
minute_intervals = [2.0]
# Scales counts with cell size (1.0 at 1-km spacing) so results stay
# comparable across grid resolutions.
count_scale_factor = dx / 1000.0

b = Bounds()
#Crude limits to the domain of the storm
# x/y/z extents in meters — presumably relative to the radar location
# set up just above (radarLon) — TODO confirm origin.
b.x = (-60e3, 140e3)
b.y = (-150e3, 50e3)
b.z = (-20e3, 20e3)
# Accepted range of chi2 solution-quality values.
b.chi2 = (0, 1.0)
# Accepted range of contributing-station counts.
b.stations = (7, 99)
# Analysis time window: 2050-2100 UTC, 10 June 2009.
start_time = DateTime(2009, 6, 10, 20, 50, 0)
end_time = DateTime(2009, 6, 10, 21, 00, 0)
max_count_baseline = 450 * count_scale_factor  #/ 10.0

pad = 0  #-25e3

# approximate velocity of reference frame. used to adjust the viewport.
# is average speed of LMA density center between 830 and 945 UTC
u = 0  #17.8    # m/s
v = 0  #15.6
# Viewport spans the full x/y bounds.
view_dx = b.x[1] - b.x[0]  #200.0e3
view_dy = b.y[1] - b.y[0]  #200.0e3
# Position at some initial time
x0 = b.x[1] - view_dx / 2.0  #-150.0e3
y0 = b.y[1] - view_dy / 2.0  #-150.0e3
t0 = DateTime(2009, 6, 10, 22, 40, 0)
예제 #20
0
 def test_datetime(self):
     """Round-trip a mix of DateTime values (including None) through
     soomarray.ArrayDateTime."""
     samples = [
         DateTime(2004, 1, 1, 0, 0),
         DateTime(1900, 12, 31, 23, 59, 59),
         DateTime(2050, 2, 28),
         None,
     ]
     self._test_array(soomarray.ArrayDateTime, samples)
예제 #21
0
 def test_type_roundtrip_datetime_array(self):
     """A DateTime carrying a full time-of-day survives the array round-trip."""
     self._test_type_roundtrip_array(DateTime(2010, 5, 3, 10, 20, 30))
 def test_iso_date(self):
     """ISO-style (Y-M-D) conversion: 1/2/4-digit years and 1/2-digit
     month/day are accepted; None and '' pass through as None; other
     separators are rejected."""
     conv = SourceDataTypes.get_conversion('iso-date')
     cases = [
         ('1970-12-23', DateTime(1970, 12, 23)),
         ('70-12-23', DateTime(1970, 12, 23)),
         ('2004-2-3', DateTime(2004, 2, 3)),
         ('04-2-3', DateTime(2004, 2, 3)),
         ('00-12-25', DateTime(2000, 12, 25)),
         ('1900-12-25', DateTime(1900, 12, 25)),
         ('900-12-25', DateTime(900, 12, 25)),
         ('9-12-25', DateTime(2009, 12, 25)),
         ('04-2-03', DateTime(2004, 2, 3)),
         ('04-02-3', DateTime(2004, 2, 3)),
         ('04-02-03', DateTime(2004, 2, 3)),
         ('04-02-29', DateTime(2004, 2, 29)),
         (None, None),
         ('', None),
     ]
     for raw, expected in cases:
         self.assertEqual(conv(raw), expected)
     # Space- and slash-separated inputs must be rejected.
     self.assertRaises(ValueError, conv, '2 3 04')
     self.assertRaises(ValueError, conv, '1970/12/23')
예제 #23
0
 def test_adapt_datetime(self):
     """An mx DateTime with fractional seconds adapts to a timestamp
     that preserves full microsecond precision."""
     from mx.DateTime import DateTime
     ts = DateTime(2007, 1, 1, 13, 30, 29.123456)
     rendered = self.execute('select (%s)::timestamp::text', [ts])
     self.assertEqual(rendered, '2007-01-01 13:30:29.123456')
 def test_us_date(self):
     """US-style (M/D/Y) conversion: 1/2/4-digit years and 1/2-digit
     month/day are accepted; None and '' pass through as None; other
     separators are rejected."""
     conv = SourceDataTypes.get_conversion('us-date')
     cases = [
         ('12/23/1970', DateTime(1970, 12, 23)),
         ('12/23/70', DateTime(1970, 12, 23)),
         ('12/25/00', DateTime(2000, 12, 25)),
         ('12/25/1900', DateTime(1900, 12, 25)),
         ('12/25/900', DateTime(900, 12, 25)),
         ('12/25/9', DateTime(2009, 12, 25)),
         ('2/3/2004', DateTime(2004, 2, 3)),
         ('2/3/04', DateTime(2004, 2, 3)),
         ('2/03/04', DateTime(2004, 2, 3)),
         ('02/3/04', DateTime(2004, 2, 3)),
         ('02/03/04', DateTime(2004, 2, 3)),
         ('02/29/04', DateTime(2004, 2, 29)),
         (None, None),
         ('', None),
     ]
     for raw, expected in cases:
         self.assertEqual(conv(raw), expected)
     # Space- and dash-separated inputs must be rejected.
     self.assertRaises(ValueError, conv, '3 2 04')
     self.assertRaises(ValueError, conv, '12-23-1970')
예제 #25
0
    def _set_hms(self, base_id, value):
        """Fill the '<base_id>-hours/-minutes/-seconds' text entries.

        *value* is expected to stringify to "H:M:S[.frac]"; a falsy value
        clears all three entries.  Fractional seconds are stripped from
        the seconds field.
        """
        parts = str(value).split(":") if value else ('', '', '')
        self.builder.get_object(base_id + '-hours').set_text(parts[0])
        self.builder.get_object(base_id + '-minutes').set_text(parts[1])
        self.builder.get_object(base_id + '-seconds').set_text(
            parts[2].split('.')[0])

    def reload_configuration(self):
        """Push every value from self.config into its corresponding widget.

        Also forces a handful of options (mode, policies, clock, start
        time) to the values required for simulated runs.
        """
        # Loglevel
        loglevel_value = self.config.get("loglevel")
        loglevel = self.builder.get_object('loglevel')
        loglevel.set_active_id(loglevel_value)

        # Mode always simulated for now
        self.config._options["mode"] = "simulated"
        self.config._options["policy-host-selection"] = "greedy"
        self.config._options["policy-matchmaking"] = "condor"
        self.config._options['clock'] = 'simulated'
        self.config._options['override-memory'] = -1  #no memory override
        now = datetime.now()

        # Start the simulated clock at the current wall-clock time.
        self.config._options['starttime'] = DateTime(now.year, now.month,
                                                     now.day, now.hour,
                                                     now.minute, now.second)

        # Lease preparation
        lease_preparation_value = self.config.get("lease-preparation")
        lease_prep = self.builder.get_object('lease_preparation')
        lease_prep.set_active_id(lease_preparation_value)

        # Lease failure handling
        self.builder.get_object('lease_failure_handling').set_active_id(
            self.config.get("lease-failure-handling"))

        # Mapper
        self.builder.get_object('mapper').set_active_id(
            self.config.get("mapper"))

        # Admission policy
        self.builder.get_object('policy-admission').set_active_id(
            self.config.get("policy.admission"))

        # Preemption policy
        self.builder.get_object('policy-preemption').set_active_id(
            self.config.get("policy.preemption"))

        # Wakeup interval.  NOTE(review): this option was previously split
        # unconditionally, so a missing value raised IndexError; the shared
        # helper now clears the fields instead.
        self._set_hms('wakeup-interval', self.config.get("wakeup-interval"))

        # Backfilling
        self.builder.get_object('backfilling').set_active_id(
            self.config.get("backfilling"))

        # Backfilling reservations (only shown for 'intermediate' mode)
        backfilling_reservations = self.builder.get_object(
            'backfilling-reservations')
        backfilling_reservations.set_text(
            self.config.get("backfilling-reservations") or '')

        if self.config.get("backfilling") != 'intermediate':
            self.builder.get_object('backfilling-reservations-label').hide()
            backfilling_reservations.hide()

        # Suspension
        self.builder.get_object('suspension').set_active_id(
            self.config.get("suspension"))

        # Suspend rate
        self.builder.get_object('suspend-rate').set_text(
            str(self.config.get("suspend-rate")))

        # Resume rate
        self.builder.get_object('resume-rate').set_text(
            str(self.config.get("resume-rate")))

        # Suspend/resume exclusion
        self.builder.get_object('suspendresume-exclusion').set_active_id(
            self.config.get("suspendresume-exclusion"))

        # Scheduling threshold factor
        self.builder.get_object('scheduling-threshold-factor').set_text(
            str(self.config.get("scheduling-threshold-factor")))

        # Override suspend time (blank when unset)
        self.builder.get_object('override-suspend-time').set_text(
            str(self.config.get("override-suspend-time") or ''))

        # Override resume time (blank when unset)
        self.builder.get_object('override-resume-time').set_text(
            str(self.config.get("override-resume-time") or ''))

        # Migration
        self.builder.get_object('migration').set_active_id(
            self.config.get("migration"))

        # Force scheduling threshold
        self._set_hms('force-scheduling-threshold',
                      self.config.get("force-scheduling-threshold"))

        # Non-schedulable interval.  BUGFIX: the value was previously
        # str()-ed *before* the truthiness test, so a missing value became
        # the truthy string "None" and raised IndexError on the minutes
        # field; the helper tests the raw value first.
        self._set_hms('non-schedulable-interval',
                      self.config.get("non-schedulable-interval"))

        # Shutdown time
        self._set_hms('shutdown-time', self.config.get("shutdown-time"))

        # Enactment overhead
        self._set_hms('enactment-overhead',
                      self.config.get("enactment-overhead"))

        # Image transfer bandwidth
        self.builder.get_object('imagetransfer-bandwidth').set_text(
            str(self.config.get("imagetransfer-bandwidth")))

        # Stop-when
        self.builder.get_object('stop-when').set_active_id(
            self.config.get("stop-when"))

        # Status message interval (blank when unset)
        self.builder.get_object('status-message-interval').set_text(
            str(self.config.get("status-message-interval") or ''))

        # Accounting probes: tick the checkbox for each enabled probe.
        # The flag suppresses toggle handlers while initial state is set.
        self.probe_initializing = True
        enabled_probes = self.config.get("accounting-probes")
        for probe in ("ar", "best-effort", "immediate", "cpu_pnodes",
                      "disk_pnodes", "memory_pnodes", "net_in_pnodes",
                      "net_out_pnodes"):
            if probe in enabled_probes:
                self.builder.get_object(probe).set_active(True)
        self.probe_initializing = False

        # Datafile folder (directory portion of the configured path)
        db_folder = os.sep.join(
            os.path.expanduser(self.config.get("datafile")).split(os.sep)[:-1])
        self.builder.get_object('db_folder').set_filename(db_folder)

        # Transfer mechanism
        self.builder.get_object('transfer-mechanism').set_active_id(
            self.config.get("transfer-mechanism"))

        # Avoid redundant transfers (boolean mapped to a combo id string)
        val = "True" if self.config.get(
            "avoid-redundant-transfers") else "False"
        self.builder.get_object('avoid-redundant-transfers').set_active_id(val)

        # Disk image reuse (cache-size widgets only apply when enabled)
        self.builder.get_object('diskimage-reuse').set_active_id(
            self.config.get("diskimage-reuse"))

        if self.config.get("diskimage-reuse") == 'none':
            self.builder.get_object('diskimage-cache-size').hide()
            self.builder.get_object('diskimage-cache-size-label').hide()
            self.builder.get_object('diskimage-cache-size-label2').hide()

        # Disk image cache size
        self.builder.get_object('diskimage-cache-size').set_text(
            str(self.config.get("diskimage-cache-size")))

        # Force image transfer time
        self._set_hms('force-imagetransfer-time',
                      self.config.get("force-imagetransfer-time"))

        # Runtime slowdown overhead (blank when unset)
        self.builder.get_object('runtime-slowdown-overhead').set_text(
            str(self.config.get("runtime-slowdown-overhead") or ''))

        # Add-overhead (slowdown widgets only apply when not 'none')
        val = self.config.get("add-overhead")
        self.builder.get_object('add-overhead').set_active_id(val)

        if val == 'none':
            self.builder.get_object('runtime-slowdown-overhead').hide()
            self.builder.get_object('runtime-slowdown-overhead-label').hide()
            self.builder.get_object('runtime-slowdown-overhead-label2').hide()

        # Boot/shutdown overhead
        self._set_hms('bootshutdown-overhead',
                      self.config.get("bootshutdown-overhead"))
예제 #26
0
파일: pgdb.py 프로젝트: zkbt/MITTENS
def Date(year, month, day):
	"""DB-API 2.0 date constructor: build a date value from year, month, day."""
	return DateTime(year, month, day)