Exemplo n.º 1
0
    def __init__(self, dirName=None, net=None, station=None, fullName=None):
        """Create a keyfile descriptor for one station.

        dirName  -- keyfile folder; defaults to Globals.KeyfileFolder()
        net      -- network code (ignored when fullName is given)
        station  -- station code (ignored when fullName is given)
        fullName -- 'NET_STA' string; overrides net/station when given
        """
        # Fall back to the global keyfile folder when no directory is given.
        # (Local renamed from 'dir', which shadowed the builtin.)
        if dirName is None:
            folder = Globals.KeyfileFolder()
        else:
            folder = dirName

        # A full name overrides any separately supplied net/station pair.
        if fullName is not None:
            net = DataTypes.toNetwork(fullName)
            station = DataTypes.toStation(fullName)

        self.dirName = folder
        self.fullName = net + '_' + station
        self.key = None
Exemplo n.º 2
0
def getSite(stationName):
    """Return '<site>(<provider>)' for a station, read from its keyfile.

    Returns None when the station has no readable keyfile entry.
    """
    keyfolder = Globals.KeyfileFolder()
    net = DataTypes.toNetwork(stationName)
    sta = DataTypes.toStation(stationName)
    keyfile = KeyFileObj(keyfolder, net, sta)
    sta = keyfile.read()

    # keyfile.read() returns None when the file is missing or empty.
    if sta is None:
        return None

    return sta.site + '(' + sta.provider + ')'
Exemplo n.º 3
0
def group_anomalies(loaded_data, window_size, window_step):
    """Count, per tank and per sliding window, the anomalies that fall
    inside that window.

    loaded_data -- data object with .measures and .anomaly_indexes
                   (each anomaly index is a (sample_id, anomalies) pair)
    window_size -- number of samples per window
    window_step -- step between consecutive window starts

    Returns a tanks_number x windows_number matrix of anomaly counts.
    """
    tanks_number = 3
    samples_number = loaded_data.measures.size()
    assert (samples_number > window_size)
    windows_number = ((samples_number - window_size) // window_step) + 1
    # BUG FIX: the original used [[0]*n]*tanks_number, which repeats the
    # SAME inner list for every tank, so incrementing one tank's counter
    # incremented all of them.  Build an independent row per tank.
    retval = [[0] * windows_number for _ in range(tanks_number)]
    # Extract the list of window_ids to which each sample belongs.
    map_sample_to_windows = [None] * samples_number
    for sample_id in range(0, samples_number):
        window_ids = []
        # For every possible position of the sample in the window:
        for window_index in range(0, min(sample_id, window_size), window_step):
            window_id = (sample_id - window_index) // window_step
            # Guard: samples near the end of the series can map to window
            # ids beyond the last real window; the original indexed out of
            # range for those.  NOTE(review): the offset enumeration above
            # is approximate (floor division) - confirm against intent.
            if 0 <= window_id < windows_number:
                window_ids.append(window_id)
        map_sample_to_windows[sample_id] = window_ids
    for index in range(0, loaded_data.anomaly_indexes.size()):
        sample_id = loaded_data.anomaly_indexes[index][0]
        windows = map_sample_to_windows[sample_id]
        anomalies = DataTypes.AnomaliesList(
            loaded_data.anomaly_indexes[index][1])
        for anomaly_idx in range(0, anomalies.size()):
            anomaly = anomalies[anomaly_idx]
            for tank_id in range(0, anomaly.tanks.size()):
                tank = anomaly.tanks[tank_id]
                for window in windows:
                    retval[tank][window] += 1
    return retval
Exemplo n.º 4
0
    def printTable(self, headLine, names, maxNr=-1):  # ??? not used yet
        """Log *names* grouped by network, several entries per line.

        headLine -- caption written before the table
        names    -- list of station names ('NET_STA' style)
        maxNr    -- optional total count shown as 'n / maxNr' (-1 = omit)
        """

        s = headLine + ': ' + str(len(names))

        if maxNr != -1: s += ' / ' + str(maxNr)

        Logfile.add(' ', s, ' ')

        sameNet = []
        line = ''

        # Collect consecutive names of the same network; a group is flushed
        # to the logfile when a name from a different network is reached.
        for i in range(0, len(names)):
            s = names[i]
            sameNet.append(s)

            # NOTE(review): with this 'continue' the flush only happens on
            # a network CHANGE; the trailing group is written by the final
            # Logfile.add below - confirm this is the intended layout.
            if DataTypes.isSameNetwork(sameNet[0], s): continue

            line = ''

            for j in range(len(sameNet)):
                line += ("%-10s" % sameNet[j])

                # Break the logfile line after every five entries.
                if j != 0 and (j % 5) == 0:
                    Logfile.add(line)
                    line = ''
            #endfor

            sameNet = []
        #endfor

        # NOTE(review): this compares the Logfile MODULE against '' and is
        # therefore always True; presumably meant to guard non-empty 'line'.
        if Logfile != '': Logfile.add(line)

        print '--------------------------------'
        self.printTable2(headLine, names)
Exemplo n.º 5
0
def startServer(stationList, options):
    """Start the data-retrieval server(s) for the given stations.

    options.network selects 'iris', 'geofon', a concrete network name, or
    (when empty/None) both services.  Returns True when a server was
    started (or the iris server was aborted with Ctrl-C), False when
    nothing was left to do.
    """
    network = options.network
    # Split the stations into IRIS- and GEOFON-serviced lists.
    mask = KeyFile.getIrisMask(None, stations=stationList)
    irisList = Basic.selectStrings(stationList, mask)
    geofonList = Basic.selectStrings(stationList, Basic.Not(mask))

    Conf = Globals.ConfigDict
    args = Server.joinClientArgs([Conf['pwd'], Conf['mail']])

    if not network or network == 'iris':
        if len(irisList) == 0:
            Logfile.add('All iris entries set')

        else:
            if not startIrisServer(irisList, args):
                return True  # aborted with ctrl c

    if not network or network == 'geofon':
        # BUG FIX: the original tested len(irisList) here, so the geofon
        # branch was entered/skipped based on the WRONG list.
        if len(geofonList) == 0:
            Logfile.add('All geofon entries set')

        else:
            startGeofonServer(geofonList, args)
            return True

    # A concrete network name: serve it via whichever service covers it.
    if network and network != 'iris' and network != 'geofon':
        if not DataDir.isNetwork(network):
            return Logfile.error('Illegal network name <' + network + '>')

        list2 = DataTypes.selectNetwork(irisList, network)

        if len(list2) > 0:
            startIrisServer(list2, args)
            return True

        list2 = DataTypes.selectNetwork(geofonList, network)

        if len(list2) > 0:
            startGeofonServer(list2, args)
            return True

        Logfile.add('All network enties set')
    #endif

    return False  # nothing done
Exemplo n.º 6
0
    def poll(self):
        """Handle pending pygame key events, move the position one step per
        arrow key while staying inside the tank bounds, redraw, and return
        the new position.

        Returns a DataTypes.FishPosition with coordinates divided by the
        half tank size (roughly -1..1 at the walls).
        """
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN:
                # One unit per key press; stop exactly at each tank wall.
                if event.key == K_LEFT and self.pos_x != -self.tank_width / 2:
                    self.pos_x -= 1
                if event.key == K_RIGHT and self.pos_x != self.tank_width / 2:
                    self.pos_x += 1
                if event.key == K_UP and self.pos_y != -self.tank_height / 2:
                    self.pos_y -= 1
                if event.key == K_DOWN and self.pos_y != self.tank_height / 2:
                    self.pos_y += 1

        self.sim.redraw()
        # NOTE(review): if tank_width/tank_height are ints under Python 2,
        # these are integer divisions - confirm float output is intended.
        return DataTypes.FishPosition(x=self.pos_x / (self.tank_width / 2),
                                      y=self.pos_y / (self.tank_height / 2))
Exemplo n.º 7
0
def filterStations(StationList, Config, Origin):
    """Return copies of the stations whose angular distance from the origin
    lies strictly between the configured 'mindist' and 'maxdist'."""
    cfg = ConfigObj(dict=Config)

    minDist, maxDist = cfg.FloatRange('mindist', 'maxdist')
    origin = DataTypes.dictToLocation(Origin)

    Logfile.red('Filter stations with configured parameters')

    # Rebuild each surviving station as a fresh Station object.
    selected = [
        Station(s.net, s.sta, s.loc, s.comp, s.lat, s.lon, s.ele,
                s.dip, s.azi, s.gain)
        for s in StationList
        if minDist < loc2degrees(origin, s) < maxDist
    ]

    Logfile.red('%d STATIONS LEFT IN LIST' % len(selected))
    return selected
Exemplo n.º 8
0
def getAllClients():
    """Read all customers from client_info.xlsx (sheet 'clients') and
    return a populated DataTypes.AllCustomers object.

    Raises SystemExit when the workbook cannot be opened.
    """
    try:
        clientTable = openpyxl.load_workbook('client_info.xlsx',
                                             data_only=True)
        clientSheet = clientTable['clients']
    # BUG FIX: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; catch Exception instead.
    except Exception:
        print("ERROR: Could not access the Client Table! Shutting down!")
        Logger.Log("{}: {}".format(
            __name__,
            "ERROR: Could not access the Client Table! Shutting down!"))
        raise SystemExit

    allCustomerInfo = DataTypes.AllCustomers()

    # Cell (1, 10) holds the number of customer rows in the sheet.
    numberofCustomers = clientSheet.cell(row=1, column=10).value
    print("-> Getting information for ", numberofCustomers, " customers:")
    Logger.Log("{}: {} {} {}".format(__name__, "-> Getting information for ",
                                     numberofCustomers, " customers:"))
    for it in range(0, numberofCustomers):
        # Column 1: customer name.
        allCustomerInfo.name.insert(
            it,
            clientSheet.cell(row=(2 + it), column=1).value)
        print(allCustomerInfo.name[it], end='  ')
        Logger.Log("{}: {}".format(__name__, allCustomerInfo.name[it]))
        # Column 2: company flag.
        allCustomerInfo.isCompany.insert(
            it, checkIfCompany(clientSheet.cell(row=(2 + it), column=2).value))

        if (allCustomerInfo.isCompany[it]):
            # Columns 3-4: fiscal code (CUI) and registration number,
            # present only for companies.
            allCustomerInfo.CUI.insert(
                it,
                clientSheet.cell(row=(2 + it), column=3).value)
            allCustomerInfo.nrInreg.insert(
                it,
                clientSheet.cell(row=(2 + it), column=4).value)
            print(allCustomerInfo.CUI[it], allCustomerInfo.nrInreg[it])
            Logger.Log("{}: {} {}".format(__name__, allCustomerInfo.CUI[it],
                                          allCustomerInfo.nrInreg[it]))
        # Column 5: address.
        allCustomerInfo.address.insert(
            it, ("Adresa: " + clientSheet.cell(row=(2 + it), column=5).value))
        print(allCustomerInfo.address[it])
        Logger.Log("{}: {}".format(__name__, allCustomerInfo.address[it]))

    return allCustomerInfo
Exemplo n.º 9
0
def getAllProducts():
    """Read all products from product_info.xlsx (sheet 'products') and
    return a populated DataTypes.AllProducts object.

    Raises SystemExit when the workbook cannot be opened.
    """
    try:
        productTable = openpyxl.load_workbook('product_info.xlsx',
                                              data_only=True)
        productSheet = productTable['products']
    # BUG FIX: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; catch Exception instead.
    except Exception:
        print("ERROR: Could not access the Product Table! Shutting down!")
        Logger.Log("{}: {}".format(
            __name__,
            "ERROR: Could not access the Product Table! Shutting down!"))
        raise SystemExit

    allProducts = DataTypes.AllProducts()
    # Cell (1, 6) holds the number of product rows in the sheet.
    numberOfProducts = productSheet.cell(row=1, column=6).value

    print("-> Getting information for", numberOfProducts, "products:")
    Logger.Log("{}: {} {} {}".format(__name__, "-> Getting information for ",
                                     numberOfProducts, " products:"))

    for it in range(0, numberOfProducts):
        # Columns 1-3: id, name, price.
        allProducts.productId.insert(
            it,
            productSheet.cell(row=(2 + it), column=1).value)
        allProducts.productName.insert(
            it,
            productSheet.cell(row=(2 + it), column=2).value)
        allProducts.productPrice.insert(
            it,
            productSheet.cell(row=(2 + it), column=3).value)

        print(allProducts.productId[it], allProducts.productName[it],
              allProducts.productPrice[it])
        Logger.Log("{}: {} {} {}".format(__name__, allProducts.productId[it],
                                         allProducts.productName[it],
                                         allProducts.productPrice[it]))

    return allProducts
Exemplo n.º 10
0
def load_data():
    """Load the complete dataset from ./dataset/ into a fresh Data object."""
    container = DataTypes.Data()
    DataLoader.DataLoader("./dataset/").load_all(container)
    return container
Exemplo n.º 11
0
    def printStatistic_2(self, d):
        """Log a download statistic: networks/stations with and without
        data, plus the stations that needed retries.

        d -- statistics object whose attributes (finished, withRetryFound,
             notFound, hasData, withError) are station lists.
        """

        # De-duplicated station name lists.
        finished = list(set(DataTypes.toStationNames(d.finished)))
        withRetryFound = list(set(DataTypes.toStationNames(d.withRetryFound)))
        notFound = list(set(DataTypes.toStationNames(d.notFound)))
        hasData = list(set(DataTypes.toStationNames(d.hasData)))
        withError = list(set(DataTypes.toStationNames(d.withError)))

        # Corresponding de-duplicated network name lists.
        netFinished = list(set(DataTypes.toNetworkNames(finished)))
        netWithData = list(set(DataTypes.toNetworkNames(hasData)))

        Logfile.addDelim()
        Logfile.add(' ')

        nFinished = len(finished)
        # NOTE(review): nFinished and anyDataFound are computed but never
        # used below.
        anyDataFound = False
        withoutData = []

        # Split the finished stations into with/without data.
        for station in sorted(finished):
            if station in hasData: anyDataFound = True
            else: withoutData.append(station)
        #endfor

        net = list(set(DataTypes.toNetworkNames(withoutData)))
        netWithoutData = []

        # Networks that produced no data at all.
        for s in net:
            if not s in netWithData: netWithoutData.append(s)

        self.printTable2('Networks with data', sorted(netWithData))

        if len(netWithoutData) == 0:
            Logfile.add(' ', 'No Networks without data', ' ')
        else:
            self.printTable2('Networks without data', sorted(netWithoutData),
                             len(netFinished))

        if len(withoutData) == 0:
            Logfile.add('All stations with data')
        else:
            self.printTable2('Stations without data', sorted(withoutData),
                             len(finished))

        # --------------------------------------------------------------------

        if len(withRetryFound) > 0:
            Logfile.add(' ', 'With retry: ' + str(len(withRetryFound)), ' ')

            for station in sorted(withRetryFound):
                if station in hasData: s = '(Data)'
                else: s = '       '

                printMsg(station + s, ' ', withError.count(station))
            #endfor
        #endif

        # --------------------------------------------------------------------
        Logfile.add(' ')
        # NOTE(review): everything below this early return is unreachable
        # dead code (kept deliberately? - see the '???' marker).
        return  # ???

        if len(notFound) == 0: Logfile.add('All stations found')
        else:
            n = str(len(notFound))
            Logfile.add('Not found: ' + n + ' after ' + str(N_RETRIES) +
                        ' retries')

        Logfile.add(' ')

        for station in sorted(notFound):
            Logfile.add(station)

        # --------------------------------------------------------------------

        Logfile.addDelim()
        return
Exemplo n.º 12
0
def main(argv):
    """Run SAX + random-projection anomaly detection over one sensor (or
    power) time series loaded from the configured dataset.

    All settings come from load_configuration(); argv is unused.  Prints
    the list of per-window anomaly scores at the end.
    """
    # Load configuration.
    parameters = load_configuration()

    # Dataset parameters.
    path_to_dataset = parameters['path_to_dataset']
    load_size = parameters['load_size']

    # SAX parameters.
    alphabet_size = parameters['alphabet_size']
    paa_size = parameters['paa_size']
    window_size = parameters['window_size']
    step = parameters['step']
    substring_size = parameters['substring_size']

    # Smoothing parameter.
    threshold_freq = parameters['threshold_freq']

    # Random-projection parameters.
    prj_size = parameters['prj_size']
    prj_iterations = parameters['prj_iterations']
    anomaly_threshold = parameters['anomaly_threshold']

    # Load the data subset.
    loader = DataLoader.DataLoader(path_to_dataset)
    data = DataTypes.Data()
    loader.load_subset(data, load_size, 100)

    # Period from which anomalies are extracted.
    begin_date = datetime.datetime.fromtimestamp(data.index_to_time[0])
    end_date = datetime.datetime.fromtimestamp(data.index_to_time[load_size -
                                                                  1])

    if parameters['power_type'] == -1:
        # A tank sensor series was requested.
        tank = parameters['tank']
        sensor_type = parameters['sensor_type']
        print("Loading of %i tank %i  data from %s to %s " %
              (sensor_type, tank, begin_date, end_date))
        s_values = [
            data.measures[i][0][tank][sensor_type]
            for i in range(0, len(data.measures))
        ]
    else:
        # A power measurement series was requested.
        power_type = parameters['power_type']
        print("Loading measures of power %i from %s to %s " %
              (power_type, begin_date, end_date))
        s_values = [
            data.measures[i][1][power_type]
            for i in range(0, len(data.measures))
        ]

    len_serie = len(s_values)

    # First alphabet_size letters of the alphabet.
    alphabet = get_alphabet_letters(alphabet_size)
    # Hash table indexed by all substrings of length prj_size.
    hash_table_substrings = get_hash_table(alphabet, prj_size)

    # Score of each analysed window.
    anomalies_score = []

    for index in range(0, len_serie, step):
        begin = index
        end = begin + window_size

        # Stop when the next window would run past the end of the series.
        if end >= len_serie:
            break

        window_values = s_values[begin:end]
        # BUG FIX: the original called znorm(s_values), z-normalising the
        # WHOLE series instead of the current window (window_values was
        # never used), so every window produced the same SAX string.
        window_znorm = znorm(window_values)
        window_paa = paa(window_znorm, paa_size)
        window_string = ts_to_string(window_paa,
                                     cuts_for_asize(alphabet_size))

        # Each character of the string corresponds to k values of the series.
        k = window_size // paa_size

        # Smooth out low-frequency symbols.
        window_smoothed = smoothing(window_string, threshold_freq)

        # Fill the hash table by applying random projection.
        hash_table_substrings = put_in_bucket(hash_table_substrings,
                                              window_smoothed, begin,
                                              prj_iterations, prj_size,
                                              substring_size, k)

        # Total number of entries over all buckets.
        total = 0
        for key, values in hash_table_substrings.items():
            total = total + len(values)

        buckets_with_anomalies, bucket_freq = analyzed_bucket(
            hash_table_substrings, total, anomaly_threshold)
        # Number of buckets with anomalies.
        n_buckets_anomalies = len(buckets_with_anomalies.keys())

        # Score for the current window.
        avg_window_score = getting_score(hash_table_substrings,
                                         buckets_with_anomalies,
                                         n_buckets_anomalies)
        anomalies_score.append(avg_window_score)

        # Reset the table for the next window.
        hash_table_substrings = get_hash_table(alphabet, prj_size)

    print(anomalies_score)
Exemplo n.º 13
0
    def read(self):
        """Parse this station's keyfile into a DataTypes.Station.

        Returns None when the keyfile does not exist or is empty; otherwise
        a Station whose site/lat/lon/ele/provider come from the file.
        Parsing stops at the PACKAGES key; a missing PACKAGES line is
        logged as a truncated keyfile.
        """
        net = DataTypes.toNetwork(self.fullName)
        station = DataTypes.toStation(self.fullName)
        fname = self._keyfileName(net, station)

        if not os.path.isfile(fname): return None

        lines = Basic.readTextFile(fname)

        if len(lines) == 0: return None

        sta = DataTypes.Station(net, station, loc='???', comp='???')

        try:
            END_FLAG = 'PACKAGES'
            endFound = False

            for i in range(len(lines)):
                line = lines[i].strip()

                # Each line has the shape KEY='value'; a malformed line
                # raises IndexError and aborts parsing via the handler below.
                w = line.split('=')
                key = self._String(w[0])
                _g_Key = key  # NOTE(review): local only - global intended?
                val = w[1]

                if key == 'KEY_VERSION':
                    dummy = self._Float(val)  #  0 KEY_VERSION='2.5'
                elif key == 'STAT_DESC':
                    sta.site = val  #  1 STAT_DESC='Ganja, Azerbaijan'
                elif key == 'LATITUDE':
                    sta.lat = self._Float(val, -90.0,
                                          90.0)  #  2 LATITUDE='40.6519'
                elif key == 'LONGITUDE':
                    sta.lon = self._Float(val, -180.0,
                                          360.0)  #  3 LONGITUDE='46.3297'
                elif key == 'ELEVATION':
                    sta.ele = self._Float(val)  #  4 ELEVATION='560.0'
                elif key == 'DATALOGGER':
                    dummy = self._String(val)  #  5 DATALOGGER='Q380-M'
                elif key == 'DATALOGGER_SN':
                    dummy = self._String(val)  #  6 DATALOGGER_SN='xxxx'
                elif key == 'SEISMOMETER1':
                    dummy = self._String(val)  #  7 SEISMOMETER1='STS-2N'
                elif key == 'SEISMOMETER_SN1':
                    dummy = self._String(val)  #  8 SEISMOMETER_SN1='yyyy'
                elif key == 'GAIN_MULT1':
                    dummy = self._Float(val)  #  9 GAIN_MULT1='1.0'
                elif key == 'SAMPLING1':
                    dummy = self._String(val)  # 10 SAMPLING1='20/40/80/100'
                elif key == 'DEPTH1':
                    dummy = self._Float(val)  # 11 DEPTH1='0.0'
                elif key == 'SEISMOMETER2':
                    dummy = self._String(val)  # 12 SEISMOMETER2=''
                elif key == 'SEISMOMETER_SN2':
                    dummy = self._String(val)  # 13 SEISMOMETER_SN2=''
                elif key == 'GAIN_MULT2':
                    dummy = self._String(val)  # 14 GAIN_MULT2=''
                elif key == 'SAMPLING2':
                    dummy = self._String(val)  # 15 SAMPLING2=''
                elif key == 'DEPTH2':
                    dummy = self._String(val)  # 16 DEPTH2=''
                elif key == 'START_DATE':
                    dummy = self._String(val)  # 17 START_DATE='1980/001'
                elif key == 'CONFIGURED':
                    dummy = self._String(val)  # 18 CONFIGURED='yes'
                elif key == 'PACKAGES':
                    sta.provider = self._String(val)[
                        1:-1]  # 19 PACKAGES='WEB_DC'

                else:
                    Logfile.error('Invalid key ' + key)

                if key == END_FLAG:
                    endFound = True
                    break
            #endfor

        # BUG FIX: was a bare 'except:', which also swallowed SystemExit
        # and KeyboardInterrupt; catch Exception instead.
        except Exception:
            Logfile.exception('readKeyFile', fname)

        if not endFound: Logfile.error(self.fullName + ': keyfile cut')

        return sta
Exemplo n.º 14
0
 def _handle_bundled_messages(self, widget, outcome, failure_reason=None):
     """Validate *outcome* against the bundled-messages template and
     re-raise MojoMessage.BadFormatError after logging it.

     widget and failure_reason are unused here; presumably part of a
     fixed callback signature - confirm with the caller.
     """
     try:
         DataTypes.check_template(outcome, OurMessages.BUNDLED_MESSAGES_TEMPL)
     except MojoMessage.BadFormatError, le:
         debugprint("BadFormatError: %s, stack[-4:]: %s\n", args=(le, traceback.extract_stack()[-4:],), v=0, vs="debug")
         # NOTE(review): under Python 2 'raise le' discards the original
         # traceback; a bare 'raise' would preserve it.
         raise le
Exemplo n.º 15
0
def collectSemb(SembList, Config, Origin, Folder, ntimes, arrays, switch,
                array_centers):
    '''
    Collect the semblance matrices of all processes and write one ASC grid
    file per timestep, plus the per-timestep semblance maximum (value,
    position, azimuth, distance) to sembmax_<switch>.txt.

    Returns the vector of semblance maxima, one entry per timestep.
    '''
    Logfile.add('start collect in collectSemb')
    cfg = ConfigObj(dict=Config)
    origin = ConfigObj(dict=Origin)

    dimX = cfg.dimX()  #('dimx')
    dimY = cfg.dimY()  #('dimy')
    # switch selects the filter band: 0 -> winlen/step, 1 -> the _f2 pair.
    if switch == 0:
        winlen = cfg.winlen()  #('winlen')
        step = cfg.step()  #('step')
    if switch == 1:
        winlen = cfg.winlen_f2()  #('winlen')
        step = cfg.step_f2()  #('step')

    latv = []
    lonv = []

    gridspacing = cfg.Float('gridspacing')
    migpoints = dimX * dimY
    o_lat = origin.lat()  # float(Origin['lat'])
    o_lon = origin.lon()  # float(Origin['lon'])
    oLatul = 0
    oLonul = 0

    z = 0

    # Build the flat lat/lon lists of the migration grid centred on the
    # origin; Latul/Lonul capture the grid's south-west corner for the
    # ASC file headers.
    for i in xrange(dimX):
        oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing

        if z == 0 and i == 0:
            Latul = oLatul
        o = 0

        for j in xrange(dimY):
            oLonul = o_lon - ((dimY - 1) / 2) * gridspacing + j * gridspacing

            if o == 0 and j == 0:
                Lonul = oLonul

            latv.append(oLatul)
            lonv.append(oLonul)

    # Combine all per-array semblance matrices by elementwise product.
    tmp = 1
    origin = DataTypes.dictToLocation(Origin)
    i = 0

    #for a in SembList:
    #    tmp = num.zeros(num.shape(a))
    azis = []
    for a in SembList:
        x = array_centers[i][0]
        y = array_centers[i][1]
        # delta is currently unused; the distance weighting that consumed
        # it is commented out below.
        delta = orthodrome.distance_accurate50m_numpy(x, y, origin.lat,
                                                      origin.lon)
        #a = a*((1./delta**2)*1.e+15)
        tmp *= a

        #azis.append(toAzimuth(float(Origin['lat']), float(Origin['lon']),x, y))
        i = i + 1

    #min_coor = num.zeros([i,2])
    #i = 0
    #for a in SembList:
    #    deltas = []
#        x = array_centers[i][0]
#        y = array_centers[i][1]
#        for k in range(0,len(latv)):
#            delta = orthodrome.distance_accurate50m_numpy(x, y, latv[k], lonv[k])
#            deltas.append(orthodrome.distance_accurate50m_numpy(x, y, latv[k], lonv[k]))
#            if delta <= num.min(deltas):
#                min_coor[i]= [latv[k], lonv[k]]
#        i = i+1
#    array_overlap = num.average(min_coor, axis=0)
#    delta_center = orthodrome.distance_accurate50m_numpy(array_overlap[0], array_overlap[1], origin.lat, origin.lon)

#    print(array_overlap)

#    print(delta_center)
#    diff_center_lat = origin.lat-array_overlap[0]
#    diff_center_lon = origin.lon-array_overlap[1]
#    print(diff_center_lat)
#    print(diff_center_lon)
#for a in SembList:
#if num.mean(a)>0:
#        tmp *= a

    sembmaxvaluev = num.ndarray(ntimes, dtype=float)
    sembmaxlatv = num.ndarray(ntimes, dtype=float)
    sembmaxlonv = num.ndarray(ntimes, dtype=float)

    rc = UTCDateTime(Origin['time'])
    rcs = '%s-%s-%s_%02d:%02d:%02d' % (rc.day, rc.month, rc.year, rc.hour,
                                       rc.minute, rc.second)
    d = rc.timestamp

    usedarrays = arrays
    folder = Folder['semb']
    fobjsembmax = open(os.path.join(folder, 'sembmax_%s.txt' % (switch)), 'w')
    norm = num.max(num.max(tmp, axis=1))
    max_p = 0.
    sum_i = 0.
    # NOTE(review): sum_i starts at 0.0, so 'sum_i *= i' leaves it all
    # zeros - confirm whether 'sum_i += i' (or an initial copy) was meant.
    for a, i in enumerate(tmp):
        if a < 1:
            sum_i *= i
    # Find the grid point of the strongest semblance in the first timestep.
    for a, i in enumerate(tmp):
        if a < 1:
            # NOTE(review): 'max' shadows the builtin and is never used.
            max = num.max(sum_i[:])
            for j in range(migpoints):
                if i[j] > num.max(i[:]) * 0.9 and i[j] > max_p:
                    latvmax = latv[j]
                    lonvmax = lonv[j]
                    max_p = i[j]

#    delta_lat = origin.lat-latvmax
#    delta_lon = origin.lon-lonvmax

#for a, i in enumerate(tmp):
#    max_pos = [l for l, k in enumerate(i) if k == i.max()][0]
#        delta_lat = origin.lat-latv[max_pos]
#        delta_lon = origin.lon-lonv[max_pos]
    # Self-assignment loop kept from a disabled grid-shift experiment.
    for j in range(migpoints):
        latv[j] = latv[j]  #+delta_lat
        lonv[j] = lonv[j]  #+delta_lon
    #        latv.append(latv[j]-delta_lat)
    #        lonv.append(lonv[j]-delta_lon)

    #nix = []
    #for a, i in enumerate(tmp):
    #    for j in range(migpoints):
    #            if i[j]/norm > num.max(sum_i/norm)*0.4:
    #                if j in nix:
    #                    pass
    #                else:
    #                    latv[j] = latv[j]+delta_lat
    #                    lonv[j] = lonv[j]+delta_lon
    #                    nix.append(j)
    #if i[j]/norm > num.max(sum_i/norm)*0.4:
    #    print('yes')
    #    delta_lat = origin.lat-latv[j]
    #    delta_lon = origin.lon-lonv[j]
    #    print delta_lat, delta_lon, latvmax, lonvmax
    #    print latv[j], lonv[j], origin.lat, origin.lon
    #    ix = num.where(latv[j]+delta_lat)[0][0]
    #    iy = num.where(lonv[j]+delta_lon)[0][0]
    #    lat = latv[j].copy()
    #    lon = lonv[j].copy()
    #    latv[j] = latv[ix]
    ##    lonv[j] =  lonv[iy]
    #    lonv[iy]
    #    #latv[j] = latv[j]+delta_lat
    #lonv[j] = lonv[j]+delta_lon
    #    print latv[j], lonv[j]


#

    # One ASC grid file per timestep; track the maximum per timestep.
    for a, i in enumerate(tmp):
        logger.info('timestep %d' % a)
        print(a)

        fobj = open(
            os.path.join(folder,
                         '%s-%s_%03d.ASC' % (switch, Origin['depth'], a)), 'w')

        fobj.write('# %s , %s\n' % (d, rcs))
        fobj.write('# step %ds| ntimes %d| winlen: %ds\n' %
                   (step, ntimes, winlen))
        fobj.write('# \n')
        fobj.write('# southwestlat: %.2f dlat: %f nlat: %f \n' %
                   (Latul, gridspacing, dimX))
        fobj.write('# southwestlon: %.2f dlon: %f nlon: %f \n' %
                   (Lonul, gridspacing, dimY))
        fobj.write('# ddepth: 0 ndepth: 1 \n')

        sembmax = 0
        sembmaxX = 0
        sembmaxY = 0

        uncert = num.std(i)  #maybe not std?
        for j in range(migpoints):

            x = latv[j]  #+delta_lat
            y = lonv[j]  #+delta_lon
            semb = i[j] / norm
            fobj.write('%.2f %.2f %.20f\n' % (x, y, semb))

            if semb > sembmax:
                sembmax = semb
                # search for maximum and position of maximum on semblance grid for given time step
                sembmaxX = x
                sembmaxY = y

        # NOTE(review): x, y here are the LAST grid point, not the
        # semblance maximum - confirm whether sembmaxX/sembmaxY was meant.
        delta = orthodrome.distance_accurate50m_numpy(x, y, origin.lat,
                                                      origin.lon)
        azi = toAzimuth(float(Origin['lat']), float(Origin['lon']),
                        float(sembmaxX), float(sembmaxY))

        sembmaxvaluev[a] = sembmax
        sembmaxlatv[a] = sembmaxX
        sembmaxlonv[a] = sembmaxY
        fobjsembmax.write('%d %.3f %.3f %.30f %.30f %d %03f %f %03f\n' %
                          (a * step, sembmaxX, sembmaxY, sembmax, uncert,
                           usedarrays, delta, float(azi), delta * 119.19))
        fobj.close()

    fobjsembmax.close()

    trigger.writeSembMaxValue(sembmaxvaluev, sembmaxlatv, sembmaxlonv, ntimes,
                              Config, Folder)
    inspect_semb = cfg.Bool('inspect_semb')
    if inspect_semb is True:
        trigger.semblancestalta(sembmaxvaluev, sembmaxlatv, sembmaxlonv)
    return sembmaxvaluev
Exemplo n.º 16
0
#!/usr/bin/python3

import DataTypes
import DataLoader

# Smoke-test driver: load a subset of the dataset with default parameters.
loader = DataLoader.DataLoader("../../dataset/")
data = DataTypes.Data()
# loader.load_all(data) # load all data, use default log ratio
loader.load_subset(data)  # load subset of data, use default size and log ratio
Exemplo n.º 17
0
def checkProcessError(stationName, nErrors, lines, execTime):  # ??? execTime
    """Scan the output *lines* of a retrieval subprocess for one station
    and classify the result.

    stationName -- station the subprocess worked on
    nErrors     -- running error count, echoed in the MAX_REQUESTS message
    lines       -- captured stdout/stderr lines of the subprocess
    execTime    -- unused (see '???' marker)

    Returns Server.HAS_DATA when a '#meta' line is found, otherwise
    Server.RETRY_IT.  Side effects: appends to g_MetaInfo, updates
    g_LastStation, and logs notable lines.
    """
    global g_MetaInfo, g_LastStation

    # Default: retry.  (A dead 'errCode = Server.HAS_NO_DATA' store that
    # was immediately overwritten has been removed.)
    errCode = Server.RETRY_IT

    for lineNr in range(len(lines)):
        line = lines[lineNr]
        isEnd = False
        s = None

        # UserWarning: MAX_REQUESTS exceeded - breaking current request loop
        if 'MAX_REQUESTS' in line:
            errCode = Server.RETRY_IT

            s = 'UserWarning: MAX_REQUESTS exceeded - breaking current request loop'
            s += ' (' + str(nErrors) + ')'

        elif 'deprecated' in line:
            s = ' '  # ignore ObsPyDeprecation Warning   #15.7.2016

        elif Logfile.MSG_TOKEN in line:
            s = line
        elif 'python' in line:
            s = line

        elif ARCLINK_META in line or IRIS_META in line:
            # Print the station's site info once per station.
            name = DataTypes.toNetAndStation(stationName)

            if g_LastStation is None or g_LastStation != name:
                g_LastStation = name
                s = KeyFile.getSite(stationName)

        elif Server.CLIENT_ABORT_MSG in line:
            errCode = Server.RETRY_IT
            s = line

        elif '#meta' in line:  # station has data
            errCode = Server.HAS_DATA
            isEnd = True

            s = lines[lineNr + 1]
            g_MetaInfo.append(s)

        elif 'Traceback' in line:
            # Collect the traceback body (up to 300 lines), discarding it
            # entirely when it was a KeyboardInterrupt.
            sn = []

            for i in range(0, 300):
                if lineNr + i >= len(lines): break  #10.12.2015
                if 'KeyboardInterrupt' in lines[lineNr + i]:
                    sn = []
                    break

                sn.append(lines[lineNr + i])
            #endfor

            # BUG FIX: the original referenced an undefined name 'station'
            # in the two calls below (NameError at runtime); use the
            # stationName parameter.
            if Server.checkIsTimeOut(stationName, sn):  # Traceback shows timeout
                Logfile.error('Retry access later')
                errCode = Server.RETRY_IT

            else:  # Traceback --> log
                Server.printLines(stationName, sn, onlyErrorLog=True)

            isEnd = True
        #endif

        if s is not None: Server.printMsg(stationName, s)
        if isEnd: break
    #endwhile

    return errCode
Exemplo n.º 18
0
def collectSembweighted(SembList, Config, Origin, Folder, ntimes, arrays,
                        switch, weights):
    '''
    Collect semblance matrices from all processes and write them to disk.

    For every time step this writes one ASCII grid file
    (``<switch>-<depth>_<step>._weighted_semblance.ASC``) containing
    lat/lon/semblance triples, and appends one line per time step to
    ``sembmax_<switch>.txt`` with the position and value of the semblance
    maximum.  Finally the semblance-maximum series is handed to the
    trigger module.

    :param SembList: iterable of per-array semblance matrices
        (one 2-D array of shape (ntimes, migpoints) per array, presumably
        -- TODO confirm against caller)
    :param Config:  raw config dict, wrapped in ConfigObj below
    :param Origin:  raw origin dict ('lat', 'lon', 'time', 'depth', ...)
    :param Folder:  dict of output folders; only Folder['semb'] is used
    :param ntimes:  number of time steps
    :param arrays:  unused in this function
    :param switch:  filter-setup index, used only in output file names
    :param weights: per-array weights -- NOTE(review): read in the zip()
        below but never applied; the combination is a plain product.
        Verify this is intended for the "weighted" variant.
    '''
    Logfile.add('start collect in collectSemb')

    cfg = ConfigObj(dict=Config)
    origin = ConfigObj(dict=Origin)

    dimX = cfg.dimX()  # ('dimx')
    dimY = cfg.dimY()  # ('dimy')
    winlen = cfg.winlen()  # ('winlen')
    step = cfg.step()  # ('step')

    latv = []
    lonv = []

    gridspacing = cfg.Float('gridspacing')
    migpoints = dimX * dimY
    o_lat = origin.lat()  # float (Origin['lat'])
    o_lon = origin.lon()  # float (Origin['lon'])
    oLatul = 0
    oLonul = 0

    z = 0

    # Build the flat lat/lon vectors of the migration grid, centred on the
    # origin.  Latul/Lonul keep the south-west corner for the file header.
    for i in xrange(dimX):
        oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing

        if z == 0 and i == 0:
            Latul = oLatul
        o = 0

        for j in xrange(dimY):
            oLonul = o_lon - ((dimY - 1) / 2) * gridspacing + j * gridspacing

            if o == 0 and j == 0:
                Lonul = oLonul

            latv.append(oLatul)
            lonv.append(oLonul)

    # Combine the per-array semblances into one matrix.
    # NOTE(review): 'w' (the weight) is unpacked but never used -- the
    # result is the element-wise product of the arrays only.
    tmp = 1
    for a, w in zip(SembList, weights):
        tmp *= a
    #sys.exit()

    # Per-time-step maxima (value and grid position of the maximum).
    sembmaxvaluev = num.ndarray(ntimes, dtype=float)
    sembmaxlatv = num.ndarray(ntimes, dtype=float)
    sembmaxlonv = num.ndarray(ntimes, dtype=float)

    rc = UTCDateTime(Origin['time'])
    rcs = '%s-%s-%s_%02d:%02d:%02d' % (rc.day, rc.month, rc.year, rc.hour,
                                       rc.minute, rc.second)
    d = rc.timestamp
    usedarrays = 5

    folder = Folder['semb']
    fobjsembmax = open(os.path.join(folder, 'sembmax_%s.txt' % (switch)), 'w')

    # a = time-step index, i = semblance row (one value per grid point).
    for a, i in enumerate(tmp):
        logger.info('timestep %d' % a)

        fobj = open(
            os.path.join(
                folder, '%s-%s_%03d._weighted_semblance.ASC' %
                (switch, Origin['depth'], a)), 'w')
        #fobj = open (os.path.join (folder, '%03d.ASC'    % a),'w')

        # Header: reference time plus grid geometry (south-west corner,
        # spacing, extent).
        fobj.write('# %s , %s\n' % (d, rcs))
        fobj.write('# step %ds| ntimes %d| winlen: %ds\n' %
                   (step, ntimes, winlen))
        fobj.write('# \n')
        fobj.write('# southwestlat: %.2f dlat: %f nlat: %f \n' %
                   (Latul, gridspacing, dimX))
        fobj.write('# southwestlon: %.2f dlon: %f nlon: %f \n' %
                   (Lonul, gridspacing, dimY))
        fobj.write('# ddepth: 0 ndepth: 1 \n')

        sembmax = 0
        sembmaxX = 0
        sembmaxY = 0

        origin = DataTypes.dictToLocation(Origin)
        uncert = num.std(i)  #maybe not std?
        for j in range(migpoints):
            x = latv[j]
            y = lonv[j]
            semb = i[j]

            fobj.write('%.2f %.2f %.20f\n' % (x, y, semb))

            if semb > sembmax:
                sembmax = semb
                # search for maximum and position of maximum on semblance grid for given time step
                sembmaxX = x
                sembmaxY = y

        # Distance and azimuth from the origin to the semblance maximum.
        delta = loc2degrees(Location(sembmaxX, sembmaxY), origin)
        azi = toAzimuth(float(Origin['lat']), float(Origin['lon']),
                        float(sembmaxX), float(sembmaxY))

        sembmaxvaluev[a] = sembmax
        sembmaxlatv[a] = sembmaxX
        sembmaxlonv[a] = sembmaxY

        # NOTE(review): delta * 119.19 looks like a degrees->km conversion;
        # the usual factor is ~111.19 km/deg -- confirm whether 119.19 is
        # intentional.
        fobjsembmax.write('%d %.2f %.2f %.20f %.20f %d %03f %f %03f\n' %
                          (a * step, sembmaxX, sembmaxY, sembmax, uncert,
                           usedarrays, delta, float(azi), delta * 119.19))
        fobj.close()

    fobjsembmax.close()

    durationpath = os.path.join(folder, "duration.txt")
    trigger.writeSembMaxValue(sembmaxvaluev, sembmaxlatv, sembmaxlonv, ntimes,
                              Config, Folder)
    print 'DD2: ', durationpath
    trigger.semblancestalta(sembmaxvaluev, sembmaxlatv, sembmaxlonv)
Exemplo n.º 19
0
         #print "   ", p.val, q.val
         return p.val == q.val and self.isSameTree(p.left, q.left) and self.isSameTree(p.right, q.right)
      #print "Checking if p is q: ", p, q
      return p is q

   def isSameTreeTuple(self, p, q):
      """Compare two binary trees by serialising each into nested tuples.

      Each node becomes a (val, left_tuple, right_tuple) triple via the
      inner helper t(); a missing child serialises to its falsy value
      (None).  The trees are equal iff the two serialisations compare
      equal.  Debug output is printed along the way (Python 2 print
      statements).

      NOTE(review): assumes p and q are both non-None, since p.val/q.val
      are dereferenced unconditionally -- confirm callers guarantee this.
      """
      def t(n):
         # Serialise the subtree rooted at n; returns None for an empty
         # subtree (n falsy short-circuits the 'and').
         if n:
            print "   ", n.val, n.left, n.right
         else:
            print "    n is None"
         val = n and (n.val, t(n.left), t(n.right))
         print "         val = ", val
         return val
      print "Check if equal p and q ", p.val, q.val
      r1val = t(p)
      r2val = t(q)
      print r1val, r2val
      return r1val == r2val

# Driver: build two identical trees from the same level-order spec and
# check that isSameTree reports them equal.
# "null" marks a missing node in the level-order list (presumably the
# convention of constructTree -- defined elsewhere, TODO confirm).
inputTree = [3,5, "null", 2,1,4,6,7,8,9,10,11,12,13,14]
r1 = constructTree( inputTree )
r2 = constructTree( inputTree )
# Round-trip check: serialising r1 back to a list should reproduce the input.
l = DataTypes.printTreeAsList( r1 )
print l, l == inputTree

#Util.startFuncTracing( "isSameTree" )
same = SameTreeSolution().isSameTree( r1, r2 )
#Util.stopFuncTracing( "isSameTree" )
print "%s" % ( "same" if same else "not same" )
Exemplo n.º 20
0
def run_parallel(options, pwdDict):
    """Run waveform acquisition, either as a client or as the server.

    Client mode (options.station set): initialise, run a WaveformClient
    for that single station, and return True.

    Server mode: validate the keyfile directory, build the sorted station
    waiting list, split it into IRIS and GEOFON subsets via the IRIS mask,
    and start the matching server(s).  options.network narrows the run to
    'iris', 'geofon', or a specific network name looked up in both
    subsets.

    Returns False on any initialisation/validation failure, True when a
    server run finishes (or was aborted with Ctrl-C), and falls through
    without an explicit return in the remaining cases (preserved from the
    original behaviour).
    """
    # --- Client part -------------------------------------------------
    if options.station:
        if not init(True):
            return False

        client = WaveformClient(options, pwdDict)
        client.run()
        return True

    # --- Server part -------------------------------------------------
    if not init(False):
        return False

    keyfileDir = os.path.join(Globals.EventDir(), options.keyfolder)

    # Cannot find keyfile directory -> nothing to do.
    if not Basic.checkExistsDir(keyfileDir):
        return False

    # Build the station waiting list.
    stationList = sorted(initWaitingList(options))

    if not stationList:
        return Logfile.error('No stations found')

    if not KeyFile.checkVersion(keyfileDir, fullName=stationList[0]):
        return False

    saveUrl(' ', None)  # init debug service
    network = options.network

    # Partition stations into IRIS vs. GEOFON via the IRIS mask.
    mask = KeyFile.getIrisMask(None, stations=stationList)
    irisList = Basic.selectStrings(stationList, mask)
    geofonList = Basic.selectStrings(stationList, Basic.Not(mask))

    if not network or network == 'iris':
        if not startIrisServer(irisList):
            return True  # aborted with ctrl c

    if not network or network == 'geofon':
        if not startGeofonServer(geofonList):
            return True  # aborted with ctrl c

    # A specific network was requested (neither 'iris' nor 'geofon'):
    # validate it, then look it up in both station subsets.
    if network and network not in ('iris', 'geofon'):
        if not KeyFile.isNetwork(network):
            return Logfile.error(
                'Illegal network name <' + network + '>',
                'Network not found in directory ' +
                Globals.KeyfileFolder())

        for subset, starter in ((irisList, startIrisServer),
                                (geofonList, startGeofonServer)):
            selected = DataTypes.selectNetwork(subset, network)

            if selected:
                starter(selected)
                return True
Exemplo n.º 21
0
	   .create()
	qq = builder.setTableName('test')\
		.setAction(QueryConstants.ACTION_UPDATE)\
		.setKVMap({'col1':1,'col2':2,'col3':3})\
		.setwhereArgs([{'col1':0},'O',{'col2':10}])\
		.create()

	insertQueryTest = builder.setTableName('Test')\
                .setKVMap({'col1':1, 'col2': 2, 'name':'Mike He'})\
                .setAction(QueryConstants.ACTION_INSERT)\
                .create()
	print(insertQueryTest)
	q1 = builder.setTableName('T')\
				.setAction(QueryConstants.ACTION_SELECT)\
				.setwhereArgs([{'col1':1},'A',{'col2':2},'O',{'name':'hdy'}])\
                .create()
    '''
	query = builder.setAction(QueryConstants.ACTION_CREATE_TABLE)\
        .setTableName('TableTest')\
        .setKVMap({'col1':DataTypes.MYSQL_TXT, 'col2':DataTypes.MYSQL_INT(1024),'UserName': DataTypes.MYSQL_STXT})\
        .create()
	print(query)
        
	qq = builder.setAction(QueryConstants.ACTION_DROP_TABLE)\
				.setTableName("Drop Test")\
				.create()
	print(qq)
	# print(qq)
	# print(DataTypes.MYSQL_FLOAT(1,3))