Example #1
    def run(self):
        global threads
        docs = get_docs(CORPUS_PATH)
        mapper = Mapper()
        i = 1

        for files in self.__chunk(docs):
            doc_contents = []
            for f in files:
                with open(f, 'r') as d:
                    doc_contents.append(d.read())

            doc_contents = ''.join(doc_contents)
            self.__run_batch(parse(doc_contents), i, mapper)
            i += 1

        print "Writing the mapper to file -------------------------------------"
        mapper.write(self.name)
        print "Writing DocLengths to file --------------------------------------"
        self.__writeDocLengths()

        while len(self.catalogs) != 1:
            print(self.chunk_catalog())
            for pair in self.chunk_catalog():
                print(pair)
                if len(pair) != 2:
                    break
                else:
                    cat1 = self.catalogs[pair[0]]
                    cat2 = self.catalogs[pair[1]]
                    self.__merge(cat1, cat2, pair)

        print("Writing the catalog to file for later use -----------------------")
        Catalog.write(self.catalogs, self.name)
Example #2
    def __dirSearch1(self, _rootCatalog, _ndeep=10):
        """
        Traverse a directory tree; the results are stored in a custom catalog
        object (CatalogList) that holds one CatalogItem for every file the
        directory contains.
        :param _rootCatalog: catalog object (CatalogList)
        :param _ndeep: maximum allowed recursion depth for indexing
        :return:
        """
        if isinstance(_rootCatalog, Catalog.CatalogList):
            dirPath = _rootCatalog.dirPath()
            dirlist = []
            if os.path.isdir(dirPath) and (dirPath != '/'):
                dirlist = os.listdir(dirPath)
            if len(dirlist) <= 0:
                return False
            dirHaveRightDoc = False
            for t_file in dirlist:
                # print(os.path.join(dirPath, t_file))
                if not os.path.isdir(os.path.join(dirPath, t_file)):
                    if self.isFIleInFilter(t_file):
                        _rootCatalog.addItem(Catalog.CatalogItem(t_file))
                        dirHaveRightDoc = True
                else:
                    t_childrenlist = Catalog.CatalogList(os.path.join(dirPath, t_file))
                    t_catalogItem = Catalog.CatalogItem(t_file, _children=t_childrenlist)
                    _rootCatalog.addItem(t_catalogItem)
                    if _ndeep >= 0:
                        a = self.__dirSearch1(t_childrenlist, _ndeep=_ndeep-1)
                        dirHaveRightDoc = dirHaveRightDoc or a
            return dirHaveRightDoc
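The same depth-limited traversal can be sketched without the project's Catalog classes. This is a minimal self-contained illustration of the technique; all names below are illustrative and not part of the Catalog API:

import os

def dir_tree(path, depth=10):
    # Depth-limited recursive index: files map to None, subdirectories
    # map to their own (possibly truncated) subtree.
    tree = {}
    for name in os.listdir(path):
        full = os.path.join(path, name)
        if os.path.isdir(full):
            tree[name] = dir_tree(full, depth - 1) if depth >= 0 else {}
        else:
            tree[name] = None
    return tree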
Example #3
def insert(table_name: str, values: list):
    time_start = time.time()
    Catalog.not_exists_table(table_name)
    Catalog.check_types_of_table(table_name, values)
    linenum = Buffer.insert_record(table_name, values)
    Index.insert_into_table(table_name, values, linenum)
    time_end = time.time()
    print(" time elapsed : %fs." % (time_end - time_start))
Example #4
def delete(table_name: str, where: list = None):
    time_start = time.time()
    Catalog.not_exists_table(table_name)
    Catalog.check_select_statement(table_name, ['*'], where)  # method borrowed from insert
    col = Catalog.get_column_dic(table_name)
    pklist = Buffer.delete_record(table_name, col, where)
    Index.delete_from_table(table_name, pklist)
    time_end = time.time()
    print(" time elapsed : %fs." % (time_end - time_start))
Example #5
def create_table(table_name: str, attributes: list, pk: str):
    time_start = time.time()
    Catalog.exists_table(table_name)
    Index.create_table(table_name)
    Catalog.create_table(table_name, attributes, pk)
    Buffer.create_table(table_name)
    time_end = time.time()
    print("Successfully create table '%s', time elapsed : %fs." %
          (table_name, time_end - time_start))
Example #6
def drop_table(table_name: str):
    time_start = time.time()
    Catalog.not_exists_table(table_name)
    Catalog.drop_table(table_name)
    Buffer.drop_table(table_name)
    Index.delete_table(table_name)
    time_end = time.time()
    print("Successfully drop table '%s', time elapsed : %fs." %
          (table_name, time_end - time_start))
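The commands in Examples #3 through #6 (plus select in Example #10 below) form the front end of a small DBMS. A hypothetical session, assuming the Catalog/Buffer/Index modules they wrap; the attribute-list format passed to create_table is an assumption, since the parser is not shown:

# Hypothetical usage of the command layer above
create_table("students", [("id", "int"), ("name", "char(16)")], "id")
insert("students", [1, "Alice"])
select("students", ["*"])
delete("students", where=None)
drop_table("students")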
Example #7
def test_old_catalog_init(tmpdir, old_catalog_file):
    c = Catalog.from_old_catalog(old_catalog_file, catalog_path=tmpdir)
    # Verify the catalog is nonempty and contains the expected data
    assert len(c) == 4
    for dsname in ["wine_reviews_130k", "wine_reviews_150k", "wine_reviews_130k_varietals_75", "wine_reviews"]:
        assert dsname in c

    # Should fail, as it already exists
    with pytest.raises(FileExistsError):
        c = Catalog.from_old_catalog(old_catalog_file, catalog_path=tmpdir)

    # Should succeed, as replace is set
    c = Catalog.from_old_catalog(old_catalog_file, catalog_path=tmpdir, replace=True)
Example #8
def catalog(tmpdir):
    """Create a test catalog"""

    # Setup
    # tmpdir should be empty when we get here
    c = Catalog.create(catalog_path=tmpdir)
    yield c
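A minimal sketch of a test that consumes this fixture; the emptiness assertion assumes a freshly created Catalog has length zero, which Example #7 suggests but does not show:

def test_new_catalog_is_empty(catalog):
    # the fixture yields a freshly created Catalog backed by tmpdir
    assert len(catalog) == 0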
Example #9
	def receiveCommand(self):
		while not self._interruptFlag:
			# Wait for two lines (so the VideoClient gets only one catalog per request)
			self._commandSocket.nextLine()
			self._commandSocket.nextLine()
			self._commandSocket.send(Catalog.asHttp())
		self.kill()
Example #10
def select(table_name: str, attributes: list, where: list = None):
    time_start = time.time()
    Catalog.not_exists_table(table_name)
    Catalog.check_select_statement(table_name, attributes, where)
    #Index.select_from_table(table_name, attributes, where)
    col_dic = Catalog.get_column_dic(table_name)
    results = Buffer.find_record(table_name, col_dic, where)
    numlist = []
    if attributes == ['*']:
        attributes = list(col_dic.keys())
        numlist = list(col_dic.values())
    else:
        for att in attributes:
            numlist.append(col_dic[att])

    print_select(attributes, numlist, results)
    time_end = time.time()
    print(" time elapsed : %fs." % (time_end - time_start))
Example #11
    def GetCatalogTree(self, dirPath):
        """
        Search the directory with the __dirSearch1() method, producing a
        CatalogList object stored in the dedicated member self.catalogTree.
        :param dirPath:
        :return:
        """
        self.catalogTree = Catalog.CatalogList(dirPath)
        self.__dirSearch1(self.catalogTree)
        return self.catalogTree
Example #12
def analysis( args ):
    from Timer import Timer
    import Catalog

    with Timer('analysis') as t:
        if args['image_fits_input'] == 'none':
            with Timer('events generated') as t:
                events = Simulation.simulate_events( args )
            
            if args['image_fits_output'] != 'none':
                with Timer('fits image saved at ' + args['image_fits_output'] ) as t:
                    events.generate_image( events, args['image_fits_output'] )
            
            args['image_fits_input'] = args['image_fits_output']
        
        image = Image.read_image_from_file( args['image_fits_input'] )
            
        with Timer('hits extracted') as t:
            hits = image.extract_hits( mode = 'cluster', threshold = args['extraction_threshold'], border = args['extraction_border'] )
            n = args['extraction_border']+1
            hits = hits.add_number_of_pixel_levels( range(n) )
            hits = hits.add_energy_levels( range(n) )
            hits = hits.add_barycenter_levels( range(n) )
            hits = hits.add_variance_levels( range(n) )
            print( 'hits', hits.dtype.names )

        if args['image_fits_input'] == 'none':
            hits = hits.match_simulated_events( events, lvl=1, length=3 )

        if args['catalog_root_output'] != 'none':
            with Timer('root catalog at ' + args['catalog_root_output'] ) as t:
                Catalog.build_new_catalog_from_recarray( hits, args['catalog_root_output'], None )
        
        #with Timer('size like') as t:
            #hits.add_sizelike( 3, args['charge_gain'], args['readout_noise'], fast = False, tol = 1e-3 )
            #t( len(hits) )

        #with Timer('write catalog sizelike') as t:
            #Catalog.build_new_catalog_from_recarray( hits, 'catalog_test.root', None )
            
        if args['reconstruction_image'] != 'none':
            with Timer('plot reconstruction') as t:
                show_reconstruction( args['reconstruction_image'], events, image, hits, zoom=[[0,200],[0,200]] )
Example #13
    def SetCatalogObj(self, dirPath, _ndeep=10):
        """
        Search the directory with the __dirSearch() method and return the result.
        :param dirPath:
        :param _ndeep: indexing depth
        :return: self.__CatalogObj, the catalog object (CatalogList)
        """
        self.__CatalogObj = Catalog.CatalogList(dirPath)
        self.__dirSearch(self.__CatalogObj, _ndeep)
        return self.GetCatalogObj()
Example #14
def mainProgram():
    print("- Welcome to version 0.6 of Filofax")
    print(
        "- Type the first letter of the alternative in the menu to execute it."
    )
    try:
        filofax = Catalog(FILENAME)
    except UnboundLocalError:
        print('File ' + FILENAME + ' not found!')
        return
    printCurrent(filofax)
    printMenu()
    choice = choose()
    while choice != 'Q':
        if choice == 'N':
            filofax.changeDate(1)
        if choice == 'P':
            filofax.changeDate(-1)
        if choice == 'D':
            addNote(filofax)
        if choice == 'E':
            removeNote(filofax)
        if choice == 'A':
            addPage(filofax)
        if choice == 'R':
            removePage(filofax)
        if choice == 'S':
            showPages(filofax)
        if choice == 'M':
            showMonth(filofax)
        if choice == 'J':
            date = chooseDate()
            filofax.setDate(date)
        if choice == 'C':
            date = chooseDate()
            showPage(filofax, date)
        printCurrent(filofax)
        printMenu()
        choice = choose()
    filofax.save(FILENAME)
Example #16
def create_index(index_name: str, table_name: str, indexed_attr: str):
    Catalog.exists_index(index_name)
    Catalog.create_index(index_name, table_name, indexed_attr)
    Index.create_index(index_name, table_name, indexed_attr)
Example #17
# -*-coding:Utf-8 -*

# Y:\3IF\RE\TP-1\src

import Catalog
from handlers import HandlerFactory
from handlers.MultiCastCatalogHandler import *
from sockets.TcpSocket import *
from sockets.SocketManager import *

(serverAddress, catalogPort) = Catalog.parse('catalog/startup.txt')
Catalog.addMediaToResourceManager()

connectionProperties = Catalog.getConnectionProperties()
HandlerFactory.setConnectionProperties(connectionProperties)

# Instantiate one SocketManager per media + one for the catalog
# (these will accept all new control connections)
servers = []
for properties in connectionProperties:
	server = SocketManager(serverAddress, properties['port'], properties['protocol'])
	server.start()
	servers.append(server)

# Instantiate one more to serve the catalog via multicast
multiCastCatalogHandler = MultiCastCatalogHandler('225.6.7.8', 4567)
multiCastCatalogHandler.start()

input("Press enter to shutdown server...\r\n")
# Once any input was given, we start closing down the connections
# The script will end when all the connections are released
Example #18
                self.catalog.remove_inactive_ws()

                # remove inactive fridges
                self.catalog.remove_inactive_fridge()
                
                print("Inactive fridges and web services removed.")

                time.sleep(5*60)


########################################## MAIN FUNCTION ############################################

if __name__ == '__main__':
    conf = {
        '/': {
            'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
            'tools.sessions.on': True,
        }
    }

    catalog = Catalog("Catalog.json")

    inactiveThread = RemoveInactiveThread(catalog)
    inactiveThread.start()

    cherrypy.tree.mount(Catalog_REST(catalog), '/', conf)
    cherrypy.config.update({'server.socket_host': '0.0.0.0'})
    cherrypy.config.update({'server.socket_port': 8080})
    cherrypy.engine.start()
    cherrypy.engine.block()
Example #19
    def setup_method(self, method):
        print("Entered setup")
        print(method)
        self.location = "location"
        self.webserver = "webserver"
        self.dut = Catalog(self.location, self.webserver)
Example #20
def Icol_corr(cat,
              catname,
              catIDs,
              RAo,
              DECo,
              radius,
              insMags,
              insMagerrs,
              catMags,
              catMagerrs,
              plot=True):
    #essential extra imports
    from scipy.optimize import curve_fit
    from SNAP.Analysis.LCFitting import linfunc
    import Catalog as ctlg
    #fetch V band magnitudes
    if cat == 'phot':
        IDV, RAV, DECV, catMV, catMerrV = ctlg.catPhot(catname, band='V')
    elif cat == 'dprs':
        IDV, RAV, DECV, catMV, catMerrV = ctlg.catDPRS(catname, band='V')
    elif cat == 'diff':
        IDV, RAV, DECV, catMV, catMerrV = ctlg.catDiff(catname, band='V')
    elif cat == 'aavso':
        fovam = 2.0 * radius * 0.4 / 60.0  #arcmin radius in KMT scaling
        IDV, RAV, DECV, catMV, catMerrV = ctlg.catAAVSO(RAo[0],
                                                        DECo[0],
                                                        fovam,
                                                        'V',
                                                        out=catname)
    #compute V-I
    I, Ierr = [], []
    KI, KIerr = [], []
    V, Verr = [], []
    for i in range(len(catMV)):
        if IDV[i] in catIDs:
            Iid = list(catIDs).index(IDV[i])
            I.append(catMags[Iid])
            Ierr.append(catMagerrs[Iid])
            KI.append(insMags[Iid])
            KIerr.append(insMagerrs[Iid])
            V.append(catMV[i])
            Verr.append(catMerrV[i])
    I, Ierr = np.array(I), np.array(Ierr)
    KI, KIerr = np.array(KI), np.array(KIerr)
    V, Verr = np.array(V), np.array(Verr)
    VI = V - I
    VI_err = np.sqrt(np.square(Verr) + np.square(Ierr))
    #photometric solution color dependence
    dI = I - KI
    dI_err = np.sqrt(Ierr**2 + KIerr**2)
    #average V-I color
    VI_err = [VI_err[i] if VI_err[i] > 0 else 0.0005 for i in range(len(VI))]
    w = 1 / np.square(VI_err)
    VI_mean = np.sum(VI * w) / np.sum(w)
    VI_merr = np.sqrt(1 / np.sum(w))
    print "Average color (V-I):", VI_mean, "+/-", VI_merr

    #make color correlation plot
    if plot:
        #essential additional import
        import matplotlib.pyplot as plt

        #fit color dependence
        plt.title("I band dependence on V-I")
        plt.errorbar(VI, dI, xerr=VI_err, yerr=dI_err, fmt='k+', zorder=1)
        popt, pcov = curve_fit(linfunc,
                               VI,
                               dI,
                               p0=[0.27, 27.8],
                               sigma=dI_err,
                               absolute_sigma=True)
        perr = np.sqrt(np.diag(pcov))
        colsol = linfunc(VI, *popt)
        #mask out 3sig deviators
        mask = np.absolute(dI - colsol) < 3 * np.std(dI - colsol)
        plt.scatter(VI[mask], dI[mask], c='r')
        popt, pcov = curve_fit(linfunc,
                               VI[mask],
                               dI[mask],
                               p0=[0.27, 27.8],
                               sigma=dI_err[mask],
                               absolute_sigma=True)
        perr = np.sqrt(np.diag(pcov))
        colsol = linfunc(VI[mask], *popt)
        print "Color correlation:", popt, perr
        print "Nstar:", len(VI[mask])
        print "Pearson:", np.corrcoef(VI[mask], dI[mask])
        plt.plot(VI[mask], colsol, zorder=2)
        plt.ylabel("i - inst")
        plt.xlabel("V - i")
        plt.show()

    #return mean color of reference stars
    return VI_mean, VI_merr
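The reported color is an inverse-variance weighted mean. A tiny self-contained sketch of that computation (the numbers below are made up):

import numpy as np

colors = np.array([0.52, 0.48, 0.55])
errors = np.array([0.010, 0.020, 0.015])
w = 1 / np.square(errors)              # inverse-variance weights
mean = np.sum(colors * w) / np.sum(w)  # weighted mean
mean_err = np.sqrt(1 / np.sum(w))      # error of the weighted mean
print(mean, "+/-", mean_err)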
Example #21
	def sendCatalog(self):
		self._isTimerRunning = False
		self._dataSocket.send(Catalog.asText())
		self.startTimer()
Example #22
def show_table(table_name: str):
    Catalog.not_exists_table(table_name)
    Catalog.show_table(table_name)  # show attribute names, types, sizes (char), and uniqueness
Example #23
def save():
    Catalog.__finalize__()
    Index.__finalize__()
    Buffer.__finalize__()
    print("All tables have been saved.")
Example #24
class REST_Catalog(object):

    exposed=True
    
    def __init__(self):
         
        self.catalog = Catalog("Catalog.json", "Buyers.json", "configCatalog.json", "devices.json")
        
    def GET(self, *uri, **params):
         
          
        if (len(uri) == 2 and uri[0] == 'get' and uri[1] == 'users'):
             
            return self.catalog.getUsers()
       
        elif (len(uri) == 2 and uri[0] == 'get' and uri[1] == 'userslist'):
             
            return self.catalog.getUsersList()

        elif (len(uri) == 2 and uri[0] == 'get' and uri[1] == 'broker'):
              
            return self.catalog.getBroker()
       
        elif (len(uri) == 2 and uri[0] == 'get' and uri[1] == 'devices'):
              
            return self.catalog.getDevices()

        
    def POST(self, *uri):
        

        if len(uri) == 0:
            raise cherrypy.HTTPError(400)

        mybody = cherrypy.request.body.read()

        try:
            data = json.loads(mybody)
        except ValueError:
            raise cherrypy.HTTPError(400)


        #%% Register device
        if (len(uri) == 2 and uri[0] == 'add' and uri[1] == 'device'):
             
            return self.catalog.addDevice(data["NAME"], data["IP"], data["PORT"])
        
          
        #%% Get user
        if (len(uri) == 2 and uri[0] == 'get' and uri[1] == 'user'):
             
            return self.catalog.getUser(data["ID"])


        #%% Add user
        if (len(uri) == 2 and uri[0] == 'add' and uri[1] == 'user'):
          
            out = self.catalog.addUser(data)
            if out == "Error":
                raise cherrypy.HTTPError(400)
            else:
                return out
                
                
        #%% Set nickname            
        elif (len(uri) == 2 and uri[0] == 'set' and uri[1] == 'nickname'):
             
            out = self.catalog.setNickname(data)
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out
           
               
        #%% verify buyer            
        elif (len(uri) == 2 and uri[0] == 'verify' and uri[1] == 'buyer'):
            
            out = self.catalog.verifyBuyer(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out
           
               
        #%% verify password
        elif (len(uri) == 2 and uri[0] == 'verify' and uri[1] == 'pw'):
            
            out = self.catalog.verifyPassword(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out
               
               
        #%% set flag                             
        elif (len(uri) == 2 and uri[0] == 'set' and uri[1] == 'flag'):
            
            out = self.catalog.setFlag(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out   
            
               
         #%% remove user                             
        elif (len(uri) == 2 and uri[0] == 'remove' and uri[1] == 'user'):
            
            out = self.catalog.removeUser(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out      


         #%% set SMS                            
        elif (len(uri) == 2 and uri[0] == 'set' and uri[1] == 'sms'):
            
            out = self.catalog.setSMS(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out   
            
               
         #%% set number                            
        elif (len(uri) == 2 and uri[0] == 'set' and uri[1] == 'number'):
            
            out = self.catalog.setNumber(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out 


         #%% set new mail               
        elif (len(uri) == 2 and uri[0] == 'set' and uri[1] == 'mail'):
            
            out = self.catalog.setMail(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out                
               
               
         #%% set new pw               
        elif (len(uri) == 2 and uri[0] == 'set' and uri[1] == 'pw'):
            
            out = self.catalog.setPw(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out
            
        elif (len(uri) == 2 and uri[0] == 'remove' and uri[1] == 'device'):
            
            out = self.catalog.removeDevice(data)
            
            if out == "Error":
                raise cherrypy.HTTPError(400)     
            else:
                return out  
Example #25
    def render_mainColumn(self, context):
        return [
            Counters(self.target),
            Catalog.CatalogSection(self.target),
            RecentMessages(self.target),
        ]
Example #26
                        t_file, _children=t_childrenlist)
                    _rootCatalog.addItem(t_catalogItem)
                    a = self.__dirSearch(t_childrenlist, _ndeep=_ndeep - 1)
                    dirHaveRightDoc = dirHaveRightDoc or a
            return dirHaveRightDoc

    def SetCatalogObj(self, dirPath, _ndeep=10):
        """
        使用__dirSearch()方法搜索目录,并返回结果
        :param dirPath:
        :param _ndeep:索引深度
        :return:self.__CatalogObj:目录对象CatalogList
        """
        self.__CatalogObj = Catalog.CatalogList(dirPath)
        self.__dirSearch(self.__CatalogObj, _ndeep)
        return self.GetCatalogObj()

    def GetCatalogObj(self):
        return self.__CatalogObj


if __name__ == '__main__':
    dirPath = '/home/yangzheng/myPrograms/MLTools_PyQt4'
    test = DocManagerTree()
    test.SetCatalogObj(dirPath, 1)
    ct = test.GetCatalogObj()
    try:
        print(Catalog.getTree(ct))
    except DocException as e:
        e.what()
Example #27
from Catalog import *
from Dialog import Dialog
from Controller import Controller
from View import View

question = Question(
    1, 'Hello',
    [Answer(1, 'Hello', 2), Answer(2, 'Bye', 0)])
question2 = Question(
    2, 'How are you?',
    [Answer(1, 'Good', 3), Answer(3, 'Fine', 3)])
question3 = Question(3, 'Bye', [Answer(1, 'Bye', 0)])

catalog = Catalog([question, question2, question3])

dialog = Dialog(catalog)
view = View()
controller = Controller(dialog, view)
view.set_controller(controller)

controller.start()
Example #28
def Bcol_corr(cat,
              catname,
              catIDs,
              RAo,
              DECo,
              radius,
              insMags,
              insMagerrs,
              catMags,
              catMagerrs,
              plot=True):
    #essential extra imports
    from scipy.optimize import curve_fit
    from SNAP.Analysis.LCFitting import linfunc
    import Catalog as ctlg
    #load V band data
    if cat == 'aavso':
        fovam = 2.0 * radius * 0.4 / 60.0  #arcmin radius in KMT scaling
        IDBV, RABV, DECBV, catBV, catBVerr = ctlg.catAAVSO(RAo[0],
                                                           DECo[0],
                                                           fovam,
                                                           'B-V',
                                                           out=catname)
        B, Berr = [], []
        KB, KBerr = [], []
        BV, BV_err = [], []
        for i in range(len(catBV)):
            if IDBV[i] in catIDs:
                Bid = list(catIDs).index(IDBV[i])
                B.append(catMags[Bid])
                Berr.append(catMagerrs[Bid])
                KB.append(insMags[Bid])
                KBerr.append(insMagerrs[Bid])
                BV.append(catBV[i])
                BV_err.append(catBVerr[i])
        B, Berr = np.array(B), np.array(Berr)
        KB, KBerr = np.array(KB), np.array(KBerr)
        BV, BV_err = np.array(BV), np.array(BV_err)
    else:
        #fetch V band magnitudes
        if cat == 'phot':
            IDV, RAV, DECV, catMV, catMerrV = ctlg.catPhot(catname, band='V')
        elif cat == 'dprs':
            IDV, RAV, DECV, catMV, catMerrV = ctlg.catDPRS(catname, band='V')
        elif cat == 'diff':
            IDV, RAV, DECV, catMV, catMerrV = ctlg.catDiff(catname, band='V')
        #compute B-V
        B, Berr = [], []
        KB, KBerr = [], []
        V, Verr = [], []
        for i in range(len(catMV)):
            if IDV[i] in catIDs:
                Bid = list(catIDs).index(IDV[i])
                B.append(catMags[Bid])
                Berr.append(catMagerrs[Bid])
                KB.append(insMags[Bid])
                KBerr.append(insMagerrs[Bid])
                V.append(catMV[i])
                Verr.append(catMerrV[i])
        B, Berr = np.array(B), np.array(Berr)
        KB, KBerr = np.array(KB), np.array(KBerr)
        V, Verr = np.array(V), np.array(Verr)
        BV = B - V
        BV_err = np.sqrt(np.square(Berr) + np.square(Verr))
    #photometric solution color dependence
    dI = B - KB
    dI_err = np.sqrt(Berr**2 + KBerr**2)
    #average B-V color
    BV_err = [BV_err[i] if BV_err[i] > 0 else 0.0005 for i in range(len(BV))]
    w = 1 / np.square(BV_err)
    BV_mean = np.sum(BV * w) / np.sum(w)
    BV_merr = np.sqrt(1 / np.sum(w))
    print "Average color (B-V):", BV_mean, "+/-", BV_merr

    #make color correlation plot
    if plot:
        #essential additional import
        import matplotlib.pyplot as plt

        #fit color dependence
        plt.title("B band dependence on B-V")
        plt.errorbar(BV, dI, xerr=BV_err, yerr=dI_err, fmt='k+', zorder=1)
        popt, pcov = curve_fit(linfunc,
                               BV,
                               dI,
                               p0=[0.27, 27.8],
                               sigma=dI_err,
                               absolute_sigma=True)
        perr = np.sqrt(np.diag(pcov))
        colsol = linfunc(BV, *popt)
        #mask out 3sig deviators
        mask = np.absolute(dI - colsol) < 3 * np.std(dI - colsol)
        plt.scatter(BV[mask], dI[mask], c='r')
        popt, pcov = curve_fit(linfunc,
                               BV[mask],
                               dI[mask],
                               p0=[0.27, 27.8],
                               sigma=dI_err[mask],
                               absolute_sigma=True)
        perr = np.sqrt(np.diag(pcov))
        colsol = linfunc(BV[mask], *popt)
        print "Color correlation:", popt, perr
        print "Nstar:", len(BV[mask])
        print "Pearson:", np.corrcoef(BV[mask], dI[mask])
        plt.plot(BV[mask], colsol, zorder=2)
        plt.ylabel("B - inst")
        plt.xlabel("B - V")
        plt.show()

    #return mean color of reference stars
    return BV_mean, BV_merr
Example #29
def drop_index(index_name: str):
    Catalog.not_exists_index(index_name)
    Catalog.drop_index(index_name)
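A hypothetical pairing of the index commands (create_index is Example #16 above; the index and table names are illustrative):

create_index("idx_students_name", "students", "name")
drop_index("idx_students_name")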
Example #30
def ClickMag(image,
             wcs,
             cat,
             catname,
             radius=500,
             band='V',
             fwhm=5.0,
             limsnr=3.0,
             satmag=14.0,
             refmag=19.0,
             satpix=40000.0,
             verbosity=0,
             diagnosis=False):
    """
    #####################################################################
    # Desc: Compute magnitude of objects in image using ref catalog.    #
    #       Select objects by clicking on them.                         #
    # ----------------------------------------------------------------- #
    # Imports:                                                          #
    # ----------------------------------------------------------------- #
    # Input                                                             #
    # ----------------------------------------------------------------- #
    #     image: numpy array containing image data on which to measure  #
    #            source and reference star photometry.                  #
    #       wcs: astropy wcs object, world coordinate system on image.  #
    #       cat: str catalog type (phot, dprs, or diff from Catalog.py) #
    #   catname: str catalog name.                                      #
    #    radius; float radius around object in which to take ref stars. #
    #      band; char observational filter of data.                     #
    #      fwhm; float estimate of FWHM on image.                       #
    #    limsnr; float signal to noise ratio defining detection limit,  #
    #            if 0.0, then no detection limits are calculated.       #
    #    satmag; float magnitude below which reference stars are        #
    #            considered to be saturated and hence not used.         #
    #    refmag; float magnitude above which reference stars are        #
    #            considered to be reliable, and therefore used.         #
    # verbosity; int counts verbosity level.                            #
    # ----------------------------------------------------------------- #
    # Output                                                            #
    # ----------------------------------------------------------------- #
    #  RAo, DECo: float measured equatorial coordinates of sources.     #
    #    Io, SNo: float measured intensities and SNRs of sources.       #
    # mo, mo_err: float calibrated magnitudes and errors of sources.    #
    #       mlim; float detection limits at source positions.           #
    #####################################################################
    """
    #essential imports
    import matplotlib.pyplot as plt
    import numpy as np

    #essential functions
    import Catalog as ctlg
    import PSFlib as plib
    import Photometry as pht
    from MagCalc import PSFError, magnitude
    from Analysis.Cosmology import bands, flux_0

    #missing
    #(RAo,DECo) CHECK
    #aperture=None
    #psf='1'
    #name='object' CHECK
    #fitsky=True

    #steps

    #load all reference stars on image DONE
    #plot image DONE
    #plot reference stars in blue DONE
    #centroid clicked sources -> replot DONE
    #after each click, ask for photometry method
    #evaluate ra and dec coordinates DONE
    #evaluate magnitudes of each using MagCalc DONE

    #load photometric reference stars catalog
    if verbosity > 0:
        print "loading catalog"
    if cat == 'phot':
        ID, RA, DEC, catM, catMerr = ctlg.catPhot(catname, band=band)
    elif cat == 'dprs':
        ID, RA, DEC, catM, catMerr = ctlg.catDPRS(catname, band=band)
    elif cat == 'diff':
        ID, RA, DEC, catM, catMerr = ctlg.catDiff(catname, band=band)
    elif cat == 'aavso':
        fovam = 4.0 * radius * 0.4 / 60.0  #arcmin radius in KMT scaling
        RAo, DECo = [wcs.wcs.crval[0]], [wcs.wcs.crval[1]]
        if band == 'I':
            if verbosity > 0:
                print "Performing AAVSO i -> I band conversion (Jodri 2006)"
            IDi, RAi, DECi, catMi, catMierr = ctlg.catAAVSO(RAo[0],
                                                            DECo[0],
                                                            fovam,
                                                            'i',
                                                            out=catname)
            IDr, RAr, DECr, catMr, catMrerr = ctlg.catAAVSO(RAo[0],
                                                            DECo[0],
                                                            fovam,
                                                            'r',
                                                            out=catname)
            ID, RA, DEC, catM, catMerr = [], [], [], [], []
            for i in range(len(IDi)):
                #for each ID in i band
                if IDi[i] in IDr:
                    #if also in r band
                    j = list(IDr).index(IDi[i])  #here it is
                    #get I band from i and r
                    ID.append(IDi[i])
                    RA.append(RAi[i])
                    DEC.append(DECi[i])
                    #Jordi 2006 general stars transform
                    catMI = 1.083 * catMi[i] - 0.083 * catMr[j] - 0.376
                    catMIerr = np.sqrt(((catMr[j] - catMi[i]) * 0.006)**2 +
                                       (0.004)**2 + (1.083 * catMierr[i])**2 +
                                       (0.083 * catMrerr[j])**2)
                    catM.append(catMI)
                    catMerr.append(catMIerr)
            ID, RA, DEC, catM, catMerr = np.array(ID), np.array(RA), np.array(
                DEC), np.array(catM), np.array(catMerr)
        else:
            ID, RA, DEC, catM, catMerr = ctlg.catAAVSO(RAo[0],
                                                       DECo[0],
                                                       fovam,
                                                       band,
                                                       out=catname)

    #convert position of catalog stars to pixels
    catX, catY = wcs.all_world2pix(RA, DEC, 0)
    catX, catY = catX.astype(float), catY.astype(float)
    print(max(catM))
    #select catalog stars within edges
    index = np.logical_and(catX > 80, image.shape[1] - catX > 80)
    index = np.logical_and(
        index, np.logical_and(catY > 80, image.shape[0] - catY > 80))
    #select unsaturated catalog stars
    index = np.logical_and(index, catM > satmag)
    #select bright enough catalog stars
    index = np.logical_and(index, catM < refmag)
    #crop values to mask
    ID, catX, catY, catRA, catDEC, catM, catMerr = ID[index], catX[
        index], catY[index], RA[index], DEC[index], catM[index], catMerr[index]
    if len(ID) == 0:
        raise PSFError('No reference stars in image.')
    if verbosity > 0:
        #output selected catalog stars
        print "Showing catalog star IDs:"
        for i in range(len(ID)):
            print ID[int(i)], catX[int(i)], catY[int(i)]
            print catRA[int(i)], catDEC[int(i)], catM[int(i)], catMerr[int(i)]
    #number of selected catalog stars
    Ncat = len(ID)
    print Ncat

    #plot image of field
    fig = plt.figure()
    plt.imshow(image, cmap='Greys', vmax=0.0001 * np.amax(image), vmin=0)
    plt.scatter(catX, catY, c='b', marker='+')
    plt.tight_layout()

    x_cens, y_cens = [], []
    psfs = []
    apers = []
    fit_skys = []

    #function: record click position on matplotlib image
    def onclick(event):
        #output click position
        if verbosity > 0:
            print('Clicked pixel: x=%d, y=%d' % (event.xdata, event.ydata))
        #record clicked position
        x_click, y_click = event.xdata, event.ydata
        #obtain an aperture around clicked star
        intens, x, y = pht.ap_get(image, x_click, y_click, 0, 1 * fwhm)
        #compute centroid of clicked star
        x_cen = np.sum(intens * x) / intens.sum()
        y_cen = np.sum(intens * y) / intens.sum()
        print('Centroid pixel: x=%d, y=%d' % (x_cen, y_cen))
        #Should we keep the star?
        keep = input("Keep? (y/n)")
        if keep == 'y' or keep == "":
            #record centroid position
            x_cens.append(x_cen)
            y_cens.append(y_cen)
            #plot centroid position
            plt.scatter(x_cen, y_cen, marker='+', c='r', s=80)
            fig.canvas.draw()

            if len(x_cens) == 1:
                #First source -> prompt parameters
                use_prev = 'n'
            else:
                #Prompt for previous parameter selection
                use_prev = input(
                    "Use previous photometry parameters? (y/n)")

            if use_prev == 'y' or use_prev == "":
                #Use previous parameters
                apers.append(apers[-1])
                psfs.append(psfs[-1])
                fit_skys.append(fit_skys[-1])
            else:
                #Prompt for aperture size
                aper = input("Aperture=? (float / empty for PSF)")
                if aper == "":
                    #PSF photometry parameters
                    psf = input(
                        "PSF type=? ('1', '2', '3', 's<sersic index>')")
                    apers.append(None)
                    psfs.append(psf)
                else:
                    #Aperture photometry parameters
                    aper = float(aper)
                    apers.append(aper)
                    psfs.append('1')
                #Prompt for planar sky fitting
                fit_sky = input("Fit planar background? (y/n)")
                if fit_sky == 'y' or fit_sky == "":
                    fit_skys.append(True)
                else:
                    fit_skys.append(False)
        else:
            #Drop clicked position and keep listening
            print "Dropping"
        print ""
        print "Click another source"

    #plot image and listen for a click
    cid = fig.canvas.mpl_connect('button_press_event', onclick)
    if verbosity > 0:
        print ""
        print "Click on the sources"
        print '---------------------------------------'
    plt.show()
    #convert source centroid positions to ra, dec degrees
    x_cens = np.array(x_cens)
    y_cens = np.array(y_cens)
    ra_cens, dec_cens = wcs.all_pix2world(x_cens, y_cens, 0)

    #compute magnitude of each source
    RAs, DECs = np.zeros(len(x_cens)), np.zeros(len(x_cens))
    Is, SNs = np.zeros(len(x_cens)), np.zeros(len(x_cens))
    Ms, Merrs = np.zeros(len(x_cens)), np.zeros(len(x_cens))
    if limsnr != 0:
        #also compute limiting magnitudes
        Mlims = np.zeros(len(x_cens))
        for i in range(len(ra_cens)):
            RA, DEC, I, SN, M, Merr, Mlim = magnitude(
                image, image, wcs, cat, catname, (ra_cens[i], dec_cens[i]),
                radius, apers[i], psfs[i], "Source" + str(i), band, fwhm,
                limsnr, satmag, refmag, fit_skys[i], satpix, verbosity)
            #output position, magnitude
            print "Output for Source " + str(i)
            print RA, DEC, I, SN, M, Merr, Mlim
            RAs[i], DECs[i], Is[i], SNs[i], Ms[i], Merrs[i], Mlims[i] = RA[
                0], DEC[0], I[0], SN[0], M[0], Merr[0], Mlim
        retlist = [RAs, DECs, Is, SNs, Ms, Merrs, Mlims]
    else:
        #don't compute limiting magnitudes
        for i in range(len(ra_cens)):
            RA, DEC, I, SN, M, Merr = magnitude(
                image, image, wcs, cat, catname, (ra_cens[i], dec_cens[i]),
                radius, apers[i], psfs[i], "Source" + str(i), band, fwhm,
                limsnr, satmag, refmag, fit_skys[i], satpix, verbosity)
            #output position, magnitude
            print "Output for Source " + str(i)
            print RA, DEC, I, SN, M, Merr
            RAs[i], DECs[i], Is[i], SNs[i], Ms[i], Merrs[i] = RA[0], DEC[0], I[
                0], SN[0], M[0], Merr[0]
        retlist = [RAs, DECs, Is, SNs, Ms, Merrs]

    #plot diagnostic
    if verbosity > 0:
        #plot image of field
        fig = plt.figure()
        plt.imshow(image, cmap='Greys', vmax=0.0001 * np.amax(image), vmin=0)
        plt.scatter(catX, catY, c='b', marker='+', label="catalog")
        plt.scatter(x_cens, y_cens, marker='+', c='r', s=80, label="centroids")
        x_meas, y_meas = wcs.all_world2pix(RAs, DECs, 0)
        plt.scatter(x_meas, y_meas, marker='+', c='g', s=80, label="measured")
        print(len(x_cens), len(x_meas))
        plt.tight_layout()
        plt.show()

    #return source measurements
    return retlist
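The clicked-star position is refined with an intensity-weighted centroid, as in onclick() above; a self-contained sketch of just that step, with made-up aperture data:

import numpy as np

intens = np.array([1.0, 4.0, 2.0, 1.0])    # pixel intensities in the aperture
x = np.array([10, 11, 12, 13])             # pixel x coordinates
y = np.array([20, 20, 21, 21])             # pixel y coordinates
x_cen = np.sum(intens * x) / intens.sum()  # intensity-weighted mean x
y_cen = np.sum(intens * y) / intens.sum()  # intensity-weighted mean y
print(x_cen, y_cen)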
Example #31
def show_tables():
    Catalog.show_tables()
Example #32
class FileSystem(Fuse):

    null_stat = fuse.Stat(
        st_mode=stat.S_IFDIR | 0o755,
        st_ino=0,
        st_dev=0,
        st_nlink=2,
        st_uid=0,
        st_gid=0,
        st_size=0,
        st_atime=0,
        st_mtime=0,
        st_ctime=0,
        st_blksize=0,
        st_rdev=0)

    bacula_stat_fields = [
        'st_dev', 'st_ino', 'st_mode', 'st_nlink', 'st_uid', 'st_gid',
        'st_rdev', 'st_size', 'st_blksize', 'st_blocks', 'st_atime',
        'st_mtime', 'st_ctime', 'st_linkfi', 'st_flags', 'st_streamid'
    ]

    fuse_stat_fields = [
        attr for attr in dir(null_stat) if attr.startswith('st_')
    ]

    xattr_prefix = 'user.baculafs.'

    xattr_fields = ['FileIndex', 'JobId', 'LStat', 'MD5']

    xattr_fields_root = [
        'client', 'fileset', 'datetime', 'joblist', 'cache_prefix'
    ]

    xattr_fields_bextract = [
        'path', 'volume', 'retries', 'state', 'pending', 'failures'
    ]

    bextract_done = {
        'path': None,
        'volume': None,
        'retries': 0,
        'state': 'idle'
    }

    def __init__(self, *args, **kw):
        '''
        Initialize filesystem
        '''

        self._extract_lock = threading.Lock()
        self._getattr_lock = threading.Lock()
        self._bextract_status_lock = threading.Lock()
        self._bextract_user_intervention_event = threading.Event()

        self._initialized = False

        # default option values
        self.logging = 'info'
        self.syslog = False
        self.driver = SQL.SQLITE3
        self.database = None
        self.host = 'localhost'
        self.port = 0
        self.username = '******'
        self.password = None
        self.conf = '/etc/bacula/bacula-sd.conf'
        self.client = ''
        self.fileset = None
        self.device = 'FileStorage'
        self.datetime = None
        self.recent_job = False
        self.joblist = None
        self.cache_prefix = None
        self.user_cache_path = None
        self.cleanup = False
        self.move_root = False
        self.prefetch_attrs = False
        self.prefetch_regex = None
        self.prefetch_symlinks = False
        self.prefetch_recent = False
        self.prefetch_diff = None
        self.prefetch_difflist = None
        self.prefetch_list = None
        self.prefetch_everything = False
        self.batch_mode = False
        self.batch_list = False
        self.batch_bsr = False
        self.batch_extract = False
        self.use_ino = False
        self.max_ino = 0
        self.dirs = {'/': {'': (FileSystem.null_stat, )}}

        self._bextract_status = copy.deepcopy(FileSystem.bextract_done)
        self._bextract_status['pending'] = 0
        self._bextract_status['failures'] = 0

        class File(FileSystem._File):
            def __init__(self2, *a, **kw):
                FileSystem._File.__init__(self2, self, *a, **kw)

        self.file_class = File

        Fuse.__init__(self, *args, **kw)

    def _split(self, path):
        '''
        os.path.split wrapper
        '''
        head, tail = os.path.split(path)
        if head and not head.endswith('/'):
            head += '/'
        return head, tail

    def _bacula_stat(self, base64):
        '''
        Parse base64 encoded lstat info.
        Returns fuse.Stat object with subset of decoded values,
        and dictionary with full list of decoded values
        '''
        st = fuse.Stat()
        lst = dict(
            zip(FileSystem.bacula_stat_fields,
                map(self.base64.decode, base64.split())))
        for k in FileSystem.bacula_stat_fields:
            if k in FileSystem.fuse_stat_fields:
                setattr(st, k, lst[k])
        return lst, st

    def _add_parent_dirs(self, path):
        '''
        add parent directories of path to dirs dictionary
        '''
        head, tail = self._split(path[:-1])
        if not head or head == path:
            return
        if not head in self.dirs:
            self.dirs[head] = {tail: (FileSystem.null_stat, )}
        elif not tail in self.dirs[head]:
            self.dirs[head][tail] = (FileSystem.null_stat, )
        self._add_parent_dirs(head)

    def _update_inodes(self, head):
        '''
        generate unique st_ino for each missing st_ino
        '''
        for tail in self.dirs[head]:
            if self.dirs[head][tail][-1].st_ino == 0:
                if len(self.dirs[head][tail]) == 1:
                    self.dirs[head][tail] = \
                        (copy.deepcopy(FileSystem.null_stat),)
                self.max_ino += 1
                self.dirs[head][tail][-1].st_ino = self.max_ino
            subdir = '%s%s/' % (head, tail)
            if subdir in self.dirs:
                self._update_inodes(subdir)

    def _extract(self, path_list):
        '''
        extract path list from storage, returns path list of extracted files
        '''

        nitems = len(path_list)
        self._bextract_increment_counter('pending', nitems)

        # serialize extractions
        self._extract_lock.acquire()

        items = []
        realpath_list = []
        hardlink_targets = []

        for path in path_list:
            realpath, symlinkinfo, volumes = self._find_volumes(path)
            realpath_list.append(realpath)
            if volumes:
                items.append((symlinkinfo, path, volumes))
                # collect hard link targets
                hardlink_target = self._hardlink_target(path)
                if (hardlink_target and hardlink_target not in path_list
                        and hardlink_target not in hardlink_targets):
                    hardlink_targets.append(hardlink_target)

        # add hardlink targets to list
        # (bextract will fail to extract the hardlink
        # if its target does not exist)
        for path in hardlink_targets:
            realpath, symlinkinfo, volumes = self._find_volumes(path)
            if volumes:
                items.append((symlinkinfo, path, volumes))

        if len(items) > 0:
            rc, sig = self._bextract(items)
            # it seems that bextract does not restore mtime for symlinks
            # so we create a normal file with same mtime as stored symlink
            if rc == 0 and not self.batch_mode:
                for item in items:
                    if item[0]:
                        symlinkfile = item[0][0]
                        symlinktime = item[0][1:]
                        makedirs(os.path.dirname(symlinkfile))
                        touch(symlinkfile, symlinktime)

        self._extract_lock.release()
        self._bextract_increment_counter('pending', -nitems)

        return realpath_list

    def _hardlink_target(self, path):
        '''
        return hard link target of path if it is a hard link
        '''
        head, tail = self._split(path)
        bs = self.dirs[head][tail][-2]
        jobid = self.dirs[head][tail][1]
        if bs['st_nlink'] > 1 and bs['st_linkfi'] > 0:
            st_linkfi = bs['st_linkfi']
            for file in self.catalog.files:
                if jobid == file[3] and st_linkfi == file[2]:
                    hardlink_target = ('/' if not file[0].startswith('/') else
                                       '') + file[0] + file[1]
                    return hardlink_target
        return None

    def _find_volumes(self, path):
        '''
        return list of volumes that contain path to be extracted,
        if the path has not been extracted yet
        '''
        realpath = os.path.normpath(self.cache_path + path)
        symlinkpath = os.path.normpath(self.cache_symlinks + path)
        head, tail = self._split(path)
        # sanity check: path should not be a directory
        if tail == '':
            raise RuntimeError('trying to extract a directory %s' % path)
        # check that path exists in catalog
        if head not in self.dirs or tail not in self.dirs[head]:
            return None, None, None
        # sanity check: path entry is incomplete
        if len(self.dirs[head][tail]) == 1:
            raise RuntimeError('incomplete entry for path %s' % path)
        # return if file has already been extracted
        bs = self.getattr(path)
        is_symlink = stat.S_ISLNK(bs.st_mode)
        found = False
        if os.path.exists(realpath) or os.path.lexists(realpath):
            # make sure that stat info of realpath matches path
            s = os.lstat(realpath)
            conds = [
                getattr(s, attr) == getattr(bs, attr) for attr in
                ['st_mode', 'st_uid', 'st_gid', 'st_size', 'st_mtime']
            ]
            if is_symlink:
                conds[-1] = (os.path.exists(symlinkpath)
                             and bs.st_mtime == os.stat(symlinkpath).st_mtime)
            if all(conds):
                return realpath, None, None
        # generate list of volumes for path
        fileindex, jobid = self.dirs[head][tail][0:2]
        jobs = [job for job in self.catalog.jobs if job[0] == jobid]
        volumes = [
            [
                volume[1],  # 0-Volume
                volume[2],  # 1-MediaType
                self.device,  # 2-Device
                jobs[0][0],  # 3-JobId
                jobs[0][1],  # 4-VolSessionId
                jobs[0][2],  # 5-VolSessionTime
                (volume[5] << 32) | volume[7],  # 6-VolAddr: StartAddr
                (volume[6] << 32) | volume[8],  # 7-VolAddr: EndAddr
                fileindex
            ]  # 8-FileIndex
            for volume in self.catalog.volumes
            if (volume[0] == jobid and volume[3] <= fileindex
                and fileindex <= volume[4])
        ]

        return (realpath, ((symlinkpath, bs.st_atime,
                            bs.st_mtime) if is_symlink else None), volumes)

    def _bextract_set_status(self, status):
        '''
        thread safe modification of bextract status dict
        '''
        self._bextract_status_lock.acquire()
        for key in status:
            self._bextract_status[key] = status[key]
        self._bextract_status_lock.release()

    def _bextract_increment_counter(self, counter, n):
        '''
        thread safe modification of bextract counters
        '''
        self._bextract_status_lock.acquire()
        self._bextract_status[counter] += n
        self._bextract_status_lock.release()

    def _bextract_get_status(self):
        '''
        thread safe access to bextract status dict
        '''
        self._bextract_status_lock.acquire()
        status = copy.deepcopy(self._bextract_status)
        self._bextract_status_lock.release()
        return status

    def _bextract_flock(self):
        '''
        lock the storage daemon configuration file
        '''
        # we allow locking to fail, so as to allow
        # at least a single instance of baculafs,
        # even if we can't lock the sd conf file
        try:
            f = open(self.conf, 'r')
            fcntl.flock(f, fcntl.LOCK_EX)
            return f
        except:
            self.logger.warning(traceback.format_exc())
            return None

    def _bextract_funlock(self, f):
        '''
        unlock the file f
        '''
        if not f:
            return
        try:
            fcntl.flock(f, fcntl.LOCK_UN)
            f.close()
        except:
            self.logger.warning(traceback.format_exc())

    def _bextract(self, items):
        '''
        extract list of items from Bacula storage device
        '''
        if self.batch_list:
            for item in items:
                print(item[1])
            if (not self.batch_bsr and not self.batch_extract):
                return (0, 0)

        bsrpath = self._write_bsr(items)

        if self.batch_bsr:
            bsrfile = open(bsrpath, 'rt')
            for line in bsrfile:
                sys.stdout.write(line)
            sys.stdout.flush()
            bsrfile.close()
            if not self.batch_extract:
                return (0, 0)

        if self.batch_extract:
            makedirs(self.fuse_args.mountpoint)

        cmd = ('bextract -b "%s" -c "%s" "%s" "%s"' %
               (bsrpath,
                self.conf,
                self.device,
                self.cache_path \
                    if not self.batch_extract else self.fuse_args.mountpoint))
        self.logger.debug(cmd)

        self._bextract_set_status({
            'path': items[0][1],
            'volume': items[0][-1][0][0],
            'retries': 0,
            'state': 'run'
        })

        # we serialize calls to bextract across instances of baculafs
        # by locking the storage daemon configuration file
        # (note that this may not work over NFS)
        f = self._bextract_flock()

        child = pexpect.spawn(cmd)
        child.logfile = self.logfile  # sys.stdout

        attempt = 0
        missing = ''
        while True:
            # bextract either finishes or waits for a missing volume
            i = child.expect([self.fail_pattern, pexpect.EOF],
                             timeout=None,
                             searchwindowsize=200)
            self.logfile.flush(flush_tail=True)
            if i == 0:
                # count retries
                if missing == child.match.groups()[0]:
                    attempt += 1
                    self._bextract_set_status({
                        'retries': attempt,
                        'state': '*user intervention required*'
                    })
                else:
                    attempt = 1
                    missing = child.match.groups()[0]
                    self._bextract_set_status({
                        'volume': missing,
                        'retries': attempt,
                        'state': '*user intervention required*'
                    })
                # wait for user
                if not self._initialized:
                    if self.loglevel != logging.DEBUG:
                        sys.stdout.write(
                            'Mount Volume "%s" on device "%s" %s '
                            'and press return when ready: ' %
                            (missing, self.device, child.match.groups()[1]))
                        sys.stdout.flush()
                    sys.stdin.read(1)
                else:
                    self.logger.error(
                        'Mount volume "%s" on device "%s" %s and run '
                        '"attr -s baculafs.bextract.state -V run %s"'
                        ' when ready' % (missing, self.device,
                                         child.match.groups()[1],
                                         self.fuse_args.mountpoint))
                    self._bextract_user_intervention_event.clear()
                    self._bextract_user_intervention_event.wait()
                    self._bextract_user_intervention_event.clear()
                # retry
                self._bextract_set_status({'state': 'run'})
                child.sendline('')
            else:
                child.close()
                break

        # unlock the sd configuration file
        self._bextract_funlock(f)

        self._bextract_set_status(FileSystem.bextract_done)
        if child.exitstatus or child.signalstatus:
            self.logger.error('extraction failed (bsr file: %s)' % bsrpath)
            self._bextract_increment_counter('failures', 1)
        return (child.exitstatus, child.signalstatus)

    def _group_by_volume(self, items):
        '''
        return items grouped by volume
        '''
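        # layout of a volume record (cf. _write_bsr below): [0]=VolumeName,
        # [1]=MediaType, [2]=Device, [4]=VolSessionId, [5]=VolSessionTime,
        # [6]-[7]=VolAddr range; the last field starts out as a single
        # FileIndex and is turned below into a list of (start, end) runs
        # plus a Count; field [3] appears to be used only for ordering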
        # group volumes
        volumes = []
        for item in items:
            for v in item[-1]:
                found = False
                findex = v[-1]
                for vindex in xrange(0, len(volumes)):
                    volume = volumes[vindex]
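                    # map(cmp, ...) yields all zeros iff every field except
                    # the file index matches, i.e. the same volume record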
                    if not any(map(cmp, v[:-1], volume[:-1])):
                        volume[-1].append(findex)
                        found = True
                        break
                if not found:
                    volumes.append(v[:-1] + [[v[-1]]])

        # compact list of file indices
        for volume in volumes:
            volume[-1] = list(set(volume[-1]))
            volume[-1].sort()
            count = len(volume[-1])  # becomes the BSR Count field
            findex = volume[-1][0]
            findices = [(findex, findex)]
            for idx in volume[-1][1:]:
                next_idx = findices[-1][-1] + 1
                if idx == next_idx:
                    findices[-1] = (findices[-1][0], idx)
                else:
                    findices.append((idx, idx))
            volume[-1] = findices
            volume.append(count)

        # reorder volumes to ensure correct handling of
        # files spanning multiple volumes
        volumes.sort(cmp=lambda a, b: \
                         (cmp(a[3], b[3]) or
                          cmp(a[8][-1][-1], b[8][0][0])))

        return volumes

    def _write_bsr(self, items):
        '''
        generate bsr for items to be extracted
        '''
        bsrfd, bsrpath = tempfile.mkstemp(
            suffix='.bsr', dir=self.cache_bsrpath, text=True)
        volumes = self._group_by_volume(items)
        for volume in volumes:
            os.write(bsrfd, 'Volume="%s"\n' % volume[0])
            os.write(bsrfd, 'MediaType="%s"\n' % volume[1])
            os.write(bsrfd, 'Device="%s"\n' % volume[2])
            os.write(bsrfd, 'VolSessionId=%d\n' % volume[4])
            os.write(bsrfd, 'VolSessionTime=%d\n' % volume[5])
            if not self.bsr_compat:
                os.write(bsrfd, 'VolAddr=%d-%d\n' % (volume[6], volume[7]))
            for findex in volume[8]:
                if findex[0] == findex[1]:
                    os.write(bsrfd, 'FileIndex=%d\n' % findex[0])
                else:
                    os.write(bsrfd, 'FileIndex=%d-%d\n' % findex)
            os.write(bsrfd, 'Count=%d\n' % volume[9])
        os.close(bsrfd)
        return bsrpath

    def _match_stat(self, path, bs):
        '''
        determine if stat of path matches bs
        '''
        found = False
        if os.path.exists(path) or os.path.lexists(path):
            s = os.lstat(path)
            found = all([
                getattr(s, attr) == getattr(bs, attr) for attr in
                ['st_mode', 'st_uid', 'st_gid', 'st_size', 'st_mtime']
            ])
        return found

    def _setup_logging(self):
        '''
        initialize logging facility
        '''
        # log messages are sent to both console and syslog
        # use -o logging=level to set the log level
        # use -o syslog to enable logging to syslog
        self.logger = logging.getLogger('BaculaFS')
        self.loglevel = LOGGING_LEVELS.get(self.logging, logging.NOTSET)
        self.logger.setLevel(self.loglevel)
        h = logging.StreamHandler()
        h.setLevel(self.loglevel)
        formatter = logging.Formatter("%(message)s")
        h.setFormatter(formatter)
        self.logger.addHandler(h)
        if self.syslog:
            try:
                h = logging.handlers.SysLogHandler('/dev/log')
                h.setLevel(self.loglevel)
                formatter = logging.Formatter(
                    "%(name)s: %(levelname)-8s - %(message)s")
                h.setFormatter(formatter)
                self.logger.addHandler(h)
            except Exception:
                self.logger.warning(traceback.format_exc())
        self.logfile = LogFile(self.logger, logging.DEBUG)

    def initialize(self, version):
        '''
        initialize database, catalog
        '''

        self._setup_logging()

        # batch mode
        self.batch_mode = (self.batch_list or self.batch_bsr
                           or self.batch_extract)
        # disable INFO level logging in batch mode
        if self.batch_mode and self.loglevel == logging.INFO:
            self.loglevel = logging.WARNING
            self.logger.setLevel(self.loglevel)

        self.logger.info('Populating file system ... ')

        # setup cache
        if self.user_cache_path:
            self.cache_prefix = self.user_cache_path
        else:
            self.cache_prefix = tempfile.mkdtemp(prefix='baculafs-')
        self.cache_path = os.path.normpath(self.cache_prefix + '/files')
        makedirs(self.cache_path)
        self.cache_bsrpath = os.path.normpath(self.cache_prefix + '/bsr')
        makedirs(self.cache_bsrpath)
        self.cache_symlinks = os.path.normpath(self.cache_prefix + '/symlinks')
        makedirs(self.cache_symlinks)

        # test for old version (2.x) of bacula
        self.bsr_compat = int(version[0]) < 3
        if self.bsr_compat:
            self.logger.debug('Detected old Bacula: %s' % version)
        # test access to sd conf file
        open(self.conf, 'r').close()
        # init bextract failure pattern
        self.fail_pattern = ('Mount Volume "([^"]+)" on device "%s" (.*) '
                             'and press return when ready:' % self.device)
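        # note: self.device is interpolated verbatim, so a device name
        # containing regex metacharacters would need re.escape()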
        # init database and catalog
        self.db = Database(self.driver, self.host, self.port, self.database,
                           self.username, self.password, self.logger)
        self.catalog = Catalog(self.db)
        self.base64 = Base64()
        files = self.catalog.query(self.client, self.fileset, self.datetime,
                                   self.recent_job, self.joblist)
        # validated values
        self.client = self.catalog.client
        self.fileset = self.catalog.fileset[1]
        self.datetime = self.catalog.datetime
        # we don't need the database anymore
        self.db.close()

        prefetches = []
        difflist = {}

        # validate prefetch conditions
        if self.prefetch_everything:
            self.prefetch_recent = False
            self.prefetch_regex = None
            self.prefetch_diff = None
            self.prefetch_difflist = None
            self.prefetch_list = None
            self.prefetch_symlinks = True
        if self.prefetch_regex:
            try:
                regex = re.compile(self.prefetch_regex)
                self.prefetch_attrs = True
            except Exception:
                # bad regex: show traceback and ignore
                self.logger.warning(traceback.format_exc())
                self.prefetch_regex = None
        if self.prefetch_diff:
            self.prefetch_diff = os.path.normpath(
                os.path.expanduser(self.prefetch_diff))
            try:
                if os.path.isdir(self.prefetch_diff):
                    self.prefetch_symlinks = True
                else:
                    self.prefetch_diff = None
            except Exception:
                # can't access target directory: show traceback and ignore
                self.logger.warning(traceback.format_exc())
                self.prefetch_diff = None
        if self.prefetch_difflist:
            self.prefetch_difflist = os.path.normpath(
                os.path.expanduser(self.prefetch_difflist))
            try:
                difflistfile = (sys.stdin if self.prefetch_difflist == '-' else
                                open(self.prefetch_difflist, 'rt'))
                for line in difflistfile.readlines():
                    date = ' '.join(line.split()[:5])
                    difflist[line[(len(date) + 1):].strip()] = \
                                time.strptime(date, '%a %b %d %H:%M:%S %Y')
                difflistfile.close()
                self.prefetch_symlinks = True
            except Exception:
                # can't access/parse difflist: show traceback and ignore
                self.logger.warning(traceback.format_exc())
                self.prefetch_difflist = None
        if self.prefetch_list:
            self.prefetch_list = os.path.normpath(
                os.path.expanduser(self.prefetch_list))
            try:
                listfile = (sys.stdin if self.prefetch_list == '-' else open(
                    self.prefetch_list, 'rt'))
                matchlist = [line.strip() for line in listfile.readlines()]
                listfile.close()
                self.prefetch_symlinks = True
            except Exception:
                # can't access/parse list: show traceback and ignore
                self.logger.warning(traceback.format_exc())
                self.prefetch_list = None
        if self.prefetch_recent:
            self.prefetch_symlinks = True
        if self.prefetch_symlinks:
            self.prefetch_attrs = True
        if 'use_ino' in self.fuse_args.optlist:
            self.use_ino = True
            self.prefetch_attrs = True  # must figure out max st_ino

        for file in files:
            head = file[0]
            tail = file[1]
            # handle windows directories
            if not head.startswith('/'):
                head = '/' + head
            # make file entry
            if self.prefetch_attrs:
                entry = file[2:] + self._bacula_stat(file[-2])
                # find max st_ino
                if self.use_ino:
                    if entry[-1].st_ino > self.max_ino:
                        self.max_ino = entry[-1].st_ino
                # determine if we need to prefetch this entry
                filepath = head + tail
                if (not stat.S_ISDIR(entry[-1].st_mode) and
                    (self.prefetch_everything or
                     (self.prefetch_recent and
                      file[3] == self.catalog.most_recent_jobid) or
                     (self.prefetch_regex and
                      regex.search(filepath)) or
                     (self.prefetch_diff and
                      not self._match_stat(self.prefetch_diff + filepath,
                                           entry[-1])) or
                     (self.prefetch_difflist and
                      (filepath[1:] not in difflist or
                       difflist[filepath[1:]][:-1] != \
                           time.localtime(entry[-1].st_mtime)[:-1])) or
                     (self.prefetch_list and
                      filepath in matchlist) or
                     (self.prefetch_symlinks and
                      stat.S_ISLNK(entry[-1].st_mode)))):
                    prefetches.append(filepath)
            else:
                entry = file[2:] + (None, )  # stat info placeholder
            # new directory
            if head not in self.dirs:
                self.dirs[head] = {}
            # add parent directories
            self._add_parent_dirs(head)
            # directories are added to their parents
            if head != '/' and tail == '':
                head, tail = self._split(head[:-1])
            # and finally
            self.dirs[head][tail] = entry

        # fix st_ino
        if self.use_ino:
            self._update_inodes('/')

        npf = len(prefetches)
        if npf > 0:
            self.logger.info('Prefetching %d objects ... ' % npf)
            self._extract(prefetches)
        self.logger.debug('Cache directory is: %s' % self.cache_prefix)
        self.joblist = ' '.join([str(job[0]) for job in self.catalog.jobs])
        self.logger.debug('Job ids in file system: %s' % self.joblist)
        self.logger.info('BaculaFS ready (%d files).' % len(files))

        self._initialized = True

    def shutdown(self):
        '''
        remove cache directory if required
        '''
        if self.cleanup and not self.user_cache_path and self.cache_prefix:
            self.logger.info(
                'removing cache directory: %s' % self.cache_prefix)
            shutil.rmtree(self.cache_prefix, ignore_errors=True)

    def setxattr(self, path, name, value, flags):
        '''
        set value of extended attribute
        (we only allow setting user.baculafs.bextract.state on
        the root directory)
        '''
        if (path == '/' and name == FileSystem.xattr_prefix + 'bextract.state'
                and value == 'run'):
            self._bextract_user_intervention_event.set()
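            # unblocks the bextract loop in _extract, which waits on this
            # event after prompting for a missing volume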
        else:
            return -errno.EOPNOTSUPP

    def getxattr(self, path, name, size):
        '''
        get value of extended attribute
        baculafs exposes some filesystem attributes for the root directory
        (e.g. joblist, cache_prefix - see FileSystem.xattr_fields_root)
        and several other attributes for each file/directory that appears
        in the catalog (e.g. MD5, JobId - see FileSystem.xattr_fields)
        '''
        head, tail = self._split(path)
        val = None
        n = name.replace(FileSystem.xattr_prefix, '')
        if path == '/':
            if n in FileSystem.xattr_fields_root:
                val = str(getattr(self, n))
            elif n.startswith('bextract.'):
                n = n.replace('bextract.', '')
                if n in FileSystem.xattr_fields_bextract:
                    val = str(self._bextract_get_status()[n])
        if (not val and head in self.dirs and tail in self.dirs[head]
                and len(self.dirs[head][tail]) != 1
                and n in FileSystem.xattr_fields):
            val = str(self.dirs[head][tail][FileSystem.xattr_fields.index(n)])
            if n == 'MD5' and val != '0':
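                # the catalog stores the digest base64-encoded without '='
                # padding; restore the padding before decoding to hex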
                vlen = len(val)
                val = binascii.b2a_hex(
                    binascii.a2b_base64(val + '=' * (
                        (vlen * 3 + 8) / 3 - vlen) + '\n'))  # padding
        # attribute not found
        if val is None:
            return -errno.ENODATA
        # We are asked for the size of the value.
        if size == 0:
            return len(val)
        return val

    def listxattr(self, path, size):
        '''
        list extended attributes
        '''
        head, tail = self._split(path)
        xattrs = []
        if path == '/':
            xattrs += [
                FileSystem.xattr_prefix + a
                for a in FileSystem.xattr_fields_root
            ]
            xattrs += [
                FileSystem.xattr_prefix + 'bextract.' + a
                for a in FileSystem.xattr_fields_bextract
            ]
        if (head in self.dirs and tail in self.dirs[head]
                and len(self.dirs[head][tail]) != 1):
            xattrs += [
                FileSystem.xattr_prefix + a for a in FileSystem.xattr_fields
            ]
        # We are asked for the size of the attr list, i.e. the joint size
        # of the attrs plus null separators.
        if size == 0:
            return len("".join(xattrs)) + len(xattrs)
        return xattrs

    def getattr(self, path):
        '''
        Retrieve file attributes.
        Notes:
        1) Bacula does not store attributes for parent directories
           that are not being explicitly backed up, so we provide
           a default set of attributes FileSystem.null_stat
        2) file attributes are base64-encoded and stored by Bacula
           in the catalog. These attributes are decoded when first
           needed and then cached for subsequent requests.
        3) python fuse expects atime/ctime/mtime to be positive
        '''
        head, tail = self._split(path)
        if head in self.dirs and tail in self.dirs[head]:
            self._getattr_lock.acquire()
            attrs = self.dirs[head][tail][-1]
            # decode and cache stat info
            if not attrs:
                self.dirs[head][tail] = self.dirs[head][tail][:-1] + \
                                self._bacula_stat(self.dirs[head][tail][-3])
                attrs = self.dirs[head][tail][-1]
            # zero negative timestamps
            for a in ['st_atime', 'st_mtime', 'st_ctime']:
                t = getattr(attrs, a)
                if t < 0:
                    self.logger.warning(
                        '%s has negative timestamp %s=%d, will use 0' % (path,
                                                                         a, t))
                    setattr(attrs, a, 0)
            self._getattr_lock.release()
            return attrs
        else:
            return -errno.ENOENT

    def readdir(self, path, offset):
        '''
        read directory entries
        '''
        path = path if path.endswith('/') else path + '/'
        for key in ['.', '..']:
            yield fuse.Direntry(key)
        for key in self.dirs[path].keys():
            if len(key) > 0:
                if self.use_ino:
                    bs = self.getattr(path + key)
                    ino = bs.st_ino
                else:
                    ino = 0
                yield fuse.Direntry(key, ino=ino)

    def readlink(self, path):
        '''
        read link contents
        '''
        realpath = self._extract([path])[0]
        if realpath:
            link = os.readlink(realpath)
            if self.move_root and link.startswith('/'):
                link = os.path.normpath(self.fuse_args.mountpoint + link)
            return link
        return -errno.ENOENT

    class _File(object):
        def __init__(self, fs, path, flags, *mode):
            self.fs = fs
            accmode = os.O_RDONLY | os.O_WRONLY | os.O_RDWR
            if (flags & accmode) != os.O_RDONLY:
                raise IOError(errno.EACCES, '')
            self.path = path
            self.realpath = fs._extract([path])[0]
            self.file = os.fdopen(
                os.open(self.realpath, flags, *mode), flag2mode(flags))
            self.fd = self.file.fileno()
            self.direct_io = False
            self.keep_cache = True

        def read(self, length, offset):
            self.file.seek(offset)
            return self.file.read(length)

        def release(self, flags):
            self.file.close()
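
The FileIndex compaction in _group_by_volume above collapses a sorted set of
file indices into inclusive (start, end) runs, so that _write_bsr can emit
compact FileIndex=a-b records. A standalone sketch of the same idea (the name
compact_ranges is illustrative, not part of BaculaFS):

def compact_ranges(indices):
    '''Collapse an iterable of ints into sorted, inclusive (start, end) runs.'''
    runs = []
    for idx in sorted(set(indices)):
        if runs and idx == runs[-1][1] + 1:
            runs[-1] = (runs[-1][0], idx)  # extend the current run
        else:
            runs.append((idx, idx))        # start a new run
    return runs

# compact_ranges([1, 2, 3, 7, 9, 10]) == [(1, 3), (7, 7), (9, 10)]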
Beispiel #33
0
def initialize(path: str):
    Catalog.__initialize__(path)
    Index.__initialize__(path)
    Buffer.__initialize__()
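
A minimal usage sketch for the helper above, assuming it lives in a module
named minisql (the module name and the data path are illustrative
assumptions, not part of the original code):

import minisql

# point the Catalog, Index and Buffer subsystems at one data directory
# before issuing any other statements
minisql.initialize('./data')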
Beispiel #35
0
    def __init__(self):
        self.catalog = Catalog("Catalog.json", "Buyers.json", "configCatalog.json", "devices.json")
Beispiel #36
0
from Catalog import *
from User import *
from pdb import set_trace

catalog = Catalog()
# catalog.displayAllBooks()
# set_trace()
librarian = Librarian("Awantik", "Bangalore", 34, 'asljlkj22', 'zeke101')
librarian.catalogAccess(catalog)

book = librarian.addBook('Shoe Dog', 'Phil Knight', '2015', 312)
librarian.addBookItem(book, '123hg', 'H1B2')
librarian.addBookItem(book, '123hg', 'H1B4')
librarian.addBookItem(book, '123hg', 'H1B5')

book = catalog.addBook('Moonwalking with Einstein', 'J Foer', '2017', 318)
librarian.addBookItem(book, '463hg', 'K1B2')
librarian.addBookItem(book, '463hg', 'K1B3')
librarian.addBookItem(book, '463hg', 'K1B4')

# catalog.displayAllBooks()

m1 = Member("Vishal", "Bangalore", 23, 'asljlkj22', 'std1233')
# m1.availableBooks(catalog)
m1.issueBook(catalog, 'Moonwalking with Einstein', '463hg', 5)
# m1.availableBooks(catalog)
#
# m1.issued_books_details()
# m1.availableBooks(catalog)
# m1.returnBook(catalog,"Moonwalking with Einstien",'463hg','K1B4')
# m1.availableBooks(catalog)
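
The Catalog and User modules this script imports are not shown; the following
is a minimal, illustrative sketch of classes that would satisfy the calls
above (every name and signature is inferred from usage, not taken from the
project's real implementation):

class Book(object):
    def __init__(self, title, author, year, pages):
        self.title, self.author, self.year, self.pages = title, author, year, pages
        self.items = []  # physical copies as (barcode, rack) pairs

class Catalog(object):
    def __init__(self):
        self.books = []

    def addBook(self, title, author, year, pages):
        book = Book(title, author, year, pages)
        self.books.append(book)
        return book

    def displayAllBooks(self):
        for b in self.books:
            print('%s by %s (%s): %d copies' % (b.title, b.author, b.year, len(b.items)))

class User(object):
    def __init__(self, name, city, age, password, user_id):
        self.name, self.city, self.age = name, city, age
        self.password, self.user_id = password, user_id

class Librarian(User):
    def catalogAccess(self, catalog):
        self.catalog = catalog

    def addBook(self, title, author, year, pages):
        return self.catalog.addBook(title, author, year, pages)

    def addBookItem(self, book, barcode, rack):
        book.items.append((barcode, rack))

class Member(User):
    def __init__(self, name, city, age, password, user_id):
        User.__init__(self, name, city, age, password, user_id)
        self.issued = []  # (Book, (barcode, rack), days) triples

    def availableBooks(self, catalog):
        catalog.displayAllBooks()

    def issueBook(self, catalog, title, barcode, days):
        # check out the first copy of the matching title and barcode
        for b in catalog.books:
            if b.title == title:
                for item in b.items:
                    if item[0] == barcode:
                        b.items.remove(item)
                        self.issued.append((b, item, days))
                        return True
        return False

    def returnBook(self, catalog, title, barcode, rack):
        # put a previously issued copy back into the catalog
        for i, (b, item, _days) in enumerate(self.issued):
            if b.title == title and item[0] == barcode:
                b.items.append(item)
                del self.issued[i]
                return True
        return False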