Code example #1
File: scanheaderdef.py Project: binchensolar/eovsa
def rd_sfdef(sh=None):
    ''' Create initial list of dictionaries representing each variable
        in the given scan header dictionary sh.  If not given, sh is
        obtained from the current version.
    '''
    if sh is None:
        sh, version = read_xml2.xml_ptrs('/tmp/scan_header.xml')

    mylist = walk_keys(sh,'Stateframe')
    return mylist
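
A minimal usage sketch, assuming scanheaderdef and its read_xml2 dependency are importable and that either /tmp/scan_header.xml exists or a scan header dictionary sh is passed in:

import scanheaderdef

# Build the list of dictionaries describing each variable in the current scan header
mylist = scanheaderdef.rd_sfdef()
print 'Number of scan header variables:', len(mylist)
print mylist[0]   # inspect the first entry
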
Code example #2
File: scanheaderdef.py Project: sjyu1988/eovsa
def rd_sfdef(sh=None):
    ''' Create initial list of dictionaries representing each variable
        in the given scan header dictionary sh.  If not given, sh is
        obtained from the current version.
    '''
    if sh is None:
        sh, version = read_xml2.xml_ptrs('/tmp/scan_header.xml')

    mylist = walk_keys(sh, 'Stateframe')
    return mylist
Code example #3
File: stateframedef.py Project: jack-h/eovsa
def load_deftable(xml_file=None,sdict=None,version=None):
    ''' Loads the named xml_file (or equivalent descriptive dictionary) 
        into the stateframe or scan header definition table, depending on 
        the type of xml file or dictionary provided.  Returns True if success, 
        or False if an error. It reads the current definition table to see 
        if this definition is already in there, and bails with a warning if so
    '''
    if xml_file and (not os.path.isfile(xml_file)):
        print 'Error: Named xml file',xml_file,'not found.'
        return False
    elif xml_file:
        try:
            sfname = os.path.basename(xml_file).split('.')[0][:10] == 'stateframe'
        except:
            sfname = False
        try:
            shname = os.path.basename(xml_file).split('.')[0][:11] == 'scan_header'
        except:
            shname = False
        if sfname or shname:
            sdict, version = rxml.xml_ptrs(xml_file)
    if sdict and version:
        brange, outlist = sfdef(sdict)
        startbyte(outlist)
        tbl = outlist2table(outlist,version)
        # Everything worked so far, so connect to database
        cnxn = pyodbc.connect("DRIVER={FreeTDS};SERVER=192.168.24.106,1433; \
                             DATABASE=eOVSA06;UID=aaa;PWD=I@bsbn2w;")
        cursor = cnxn.cursor()
        if 'Schedule' in sdict:
            # Case of a stateframe
            tblname = 'StateFrameDef'
        else:
            # Case of a scanheader
            tblname = 'ScanHeaderDef'
        # Check if this version is already entered
        print "select * from " + tblname + " where Version=" + str(int(version))
        cursor.execute("select * from " + tblname + " where Version=" + str(int(version)))
        rows = cursor.fetchall()
        if len(rows) == 0:
            for params in tbl:
                cursor.execute("insert into " + tblname + " (Status, Version, Dimension, "
                                  + "DataType, FieldBytes, DimOffset, StartByte, FieldNum, " 
                                  + "FieldName) values ( 0, ?, ?, ?, ?, ?, ?, ?, ?)", params)
            cursor.execute("update "+tblname+" set status=1 where Version='" + str(int(version)) + "'")
            cnxn.commit()
        else:
            print 'Warning: The definition for version',version,'already exists in',tblname
        cursor.close()
        del cursor
        cnxn.close()
        return True
    else:
        print 'Error: Bad sdict=',sdict,'or version=',version
        return False
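
A hedged usage sketch for load_deftable, assuming stateframedef is importable, the SQL Server at 192.168.24.106 is reachable, and a stateframe XML definition file exists; the file name below is illustrative:

import stateframedef

# Load a stateframe definition into the StateFrameDef table
# (a scan_header XML file would go to ScanHeaderDef instead)
ok = stateframedef.load_deftable(xml_file='/tmp/stateframe_v61.00.xml')
if not ok:
    print 'Definition load failed (bad file name or bad sdict/version).'
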
Code example #4
File: cal_header.py Project: natsuhakuroda/eovsa
def read_cal_xml(type,t=None):
    ''' Read the calibration type definition xml record of the given type, for the 
        given time (as a Time() object), or for the current time if None.
        
        Returns a dictionary of look-up information and its internal version.  A side-effect
        is that a file /tmp/type<n>.xml is created, where <n> is the type.
    '''
    import dbutil, read_xml2, sys
    if t is None:
        t = util.Time.now()
    timestamp = int(t.lv)  # Given (or current) time as LabVIEW timestamp
    typdict = cal_types()
    try:
        typinfo = typdict[type]
    except:
        print 'Type',type,'not found in type definition dictionary.'
        return {}, None
    cursor = dbutil.get_cursor()
    # Read type definition XML from abin table
    query = 'select top 1 * from abin where Version = '+str(type)+'.0 and Timestamp <='+str(timestamp)+' order by Timestamp desc'
    sqldict, msg = dbutil.do_query(cursor,query)
    if msg == 'Success':
        if len(sqldict) == 0:
            # This type of xml file does not yet exist in the database, so mark it for adding
            print 'Type',type,'not defined in abin table.'
            cursor.close()
            return {}, None
        else:
            # There is one, so read it and the corresponding binary data
            buf = sqldict['Bin'][0]   # Binary representation of xml file
            xmlfile = '/tmp/type'+str(type)+'.xml'
            f = open(xmlfile,'wb')
            f.write(buf)
            f.close()
            xmldict, thisver = read_xml2.xml_ptrs(xmlfile)
            cursor.close()
            return xmldict, thisver
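
A usage sketch for read_cal_xml, assuming cal_header and util are importable and the abin table is reachable; the calibration type (1) and the date are purely illustrative:

import cal_header
from util import Time

# Look up the type-1 calibration definition in effect at the given time
xmldict, ver = cal_header.read_cal_xml(1, t=Time('2016-05-01 00:00:00'))
if xmldict:
    print 'Found definition; internal version is', ver
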
Code example #5
File: cal_header.py Project: natsuhakuroda/eovsa
def send_xml2sql(type=None,t=None,test=False,nant=None,nfrq=None):
    ''' Routine to send any changed calibration xml definitions to the 
        SQL Server.  The latest definition (if any) for a given type is
        checked to see if the version matches.  If not, an update is 
        stored.  This routine will typically be run whenever a definition
        is added or changed.  If type is provided (i.e. not None), only 
        the given type will be updated (and only if its internal version 
        number has changed).
        
        The timestamp of the new record will be set according to the Time()
        object t, if provided, or the current time if not.
        
        As a debugging tool, if test is True, this routine goes through the
        motions but does not write to the abin table.
    '''
    import dbutil, read_xml2, sys
    if t is None:
        t = util.Time.now()
    timestamp = int(t.lv)  # Current time as LabVIEW timestamp
    cursor = dbutil.get_cursor()
    typdict = cal_types()
    if type:
        # If a particular type is specified, limit the action to that type
        typdict = {type:typdict[type]}
    for key in typdict.keys():
        #print 'Working on',typdict[key][0]
        # Execute the code to create the xml description for this key
        if key == 1:
            # Special case for TP calibration
            if nant is None or nfrq is None:
                print 'For',typdict[key][0],'values for both nant and nfrq are required.'
                cursor.close()
                return
            exec 'buf = '+typdict[key][1]+'(nant='+str(nant)+',nfrq='+str(nfrq)+')'
        else:
            exec 'buf = '+typdict[key][1]+'()'
        # Resulting buf must be written to a temporary file and reread
        # by xml_ptrs()
        f = open('/tmp/tmp.xml','wb')
        f.write(buf)
        f.close()
        mydict, xmlver = read_xml2.xml_ptrs('/tmp/tmp.xml')
        defn_version = float(key)+xmlver/10.  # Version number expected
        # Retrieve most recent key.0 record and check its version against the expected one
        query = 'select top 1 * from abin where Version = '+str(key)+'.0 and Timestamp <= '+str(timestamp)+' order by Timestamp desc'
        #print 'Executing query'
        outdict, msg = dbutil.do_query(cursor,query)
        #print msg
        if msg == 'Success':
            if len(outdict) == 0:
                # This type of xml file does not yet exist in the database, so mark it for adding
                add = True
            else:
                # There is one, so see if they differ
                buf2 = outdict['Bin'][0]   # Binary representation of xml file
                if buf == buf2:
                    # This description is already there so we will skip it
                    add = False
                else:
                    add = True
        else:
            # Some kind of error occurred, so print the message and skip adding the xml description
            #print 'Query',query,'resulted in error message:',msg
            add = False
        if add:
            # This is either new or updated, so add the xml description
            # to the database
            #print 'Trying to add',typdict[key][0]
            try:
                if test:
                    print 'Would have updated',typdict[key][0],'to version',defn_version
                else:
                    cursor.execute('insert into aBin (Timestamp,Version,Description,Bin) values (?,?,?,?)',
                   timestamp, key, typdict[key][0], dbutil.stateframedef.pyodbc.Binary(buf))
                    print 'Type definition for',typdict[key][0],'successfully added/updated to version',defn_version,'--OK'
                    cursor.commit()
            except:
                print 'Unknown error occurred in adding',typdict[key][0]
                print sys.exc_info()[1]
        else:
            print 'Type definition for',typdict[key][0],'version',defn_version,'exists--OK'
    cursor.close()
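
A usage sketch for this version of send_xml2sql, assuming cal_header is importable and the database is reachable; with test=True nothing is written to the abin table, and the nant/nfrq values below are illustrative:

import cal_header

# Dry run for type 1 (total-power calibration), which requires nant and nfrq
cal_header.send_xml2sql(type=1, nant=16, nfrq=500, test=True)

Note that a run over all types (type=None) also needs nant and nfrq, since type 1 is included in the loop.
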
Code example #6
File: stateframedef.py Project: jack-h/eovsa
def log2sql(log_file=None):
    ''' Transfers the named stateframe log file to the SQL database.  This transfer can
        take a long time, so this should allow interruption of the transfer, and then
        a subsequent call on the same log file should find the place where it left off to
        resume the transfer.
    '''
    from util import Time
    
    if log_file is None:
        print 'Error: a stateframe log filename must be provided.'
        return False
    if not os.path.isfile(log_file):
        print 'Error: Named stateframe log file',log_file,'not found.'
        return False
    # Log file basename is expected to be in format 'sf_yyyymmdd_vxx.0.log', 
    # where xx is the version number
    basename = os.path.basename(log_file) 
    logname = basename.split('_')
    if logname[0] == 'sf':
        sfdate = logname[1]
        try:
            sftime = datetime.datetime.strptime(logname[1],'%Y%m%d')
            t = Time(str(sftime))
            sftimestamp = int(t.lv + 0.5)  # Start timestamp, as nearest integer
            sfver = int(logname[2].split('.')[0][1:])
        except:
            print 'Error: File ',basename,'does not have expected basename format sf_yyyymmdd_vxx.0.log'
            return False
    else:
        return False
    
    # At this point, the log file exists and the name is of the right format
    # Connect to the database and see if there are any data already for this date, and if so
    # determine the time range.
    with pyodbc.connect("DRIVER={FreeTDS};SERVER=192.168.24.106,1433; \
                             DATABASE=eOVSA06;UID=aaa;PWD=I@bsbn2w;") as cnxn:
        cursor = cnxn.cursor()
        tblname = 'fV'+str(sfver)+'_vD1'
        cursor.execute("select top 1 Timestamp from "+tblname+" where Timestamp between "+str(sftimestamp)+" and "+str(sftimestamp+86400-2)+" order by Timestamp desc")
        rows = cursor.fetchall()
        if len(rows) == 1:
            # There are data for this date, and rows[1].Timestamp should be the last time entry,
            # so start at last time entry + 1 s
            try:
                sftimestamp2 = int(rows[0].Timestamp + 1)
            except:
                print 'Error: Unexpected data returned from database.  Returned value:',rows[0]
                return False
        elif len(rows) > 1:
            print 'Error: Unexpected data returned from database.'
            return False
        else:
            # No data returned from call, which means we should start with current value of sftimestamp
            pass
    
        # We now know where to start, so open log file and read to start of data
        # First need to find out record length
        f = open(log_file,'rb')
        buf = f.read(32)
        recsize = struct.unpack_from('i', buf, 16)[0]
        version = struct.unpack_from('d', buf, 8)[0]
        f.close()
        if int(version) != sfver:
            print 'Error: Version in file name is',sfver,'but version in file itself is',int(version)
            return False
            
        # We need the "brange" variable, which is used by transmogrify() to reformat the binary data.
        # Therefore, the defining stateframe XML file is needed.        
        # The correct XML file for this version must exist in the same directory as the log file
        xml_file = os.path.dirname(log_file)+'/'+'stateframe_v'+str(sfver)+'.00.xml'
        if not os.path.isfile(xml_file):
            print 'Error: Stateframe xml file',xml_file,'not found.'
            return False        
        sf, version = rxml.xml_ptrs(xml_file)
        brange, outlist = sfdef(sf)
        lineno = 0
        with open(log_file,'rb') as f:
            bufin = f.read(recsize)
            while len(bufin) == recsize:
                lineno += 1
                if struct.unpack_from('d', bufin, 0)[0] >= sftimestamp:
                    # This is new data, so write to database
                    bufout = transmogrify(bufin, brange)
                    try:
                        cursor.execute('insert into fBin (Bin) values (?)',pyodbc.Binary(bufout))
                        print 'Record '+str(lineno)+' successfully written\r',
                        cnxn.commit()
                    except:
                        # An exception could be an error, or just that the entry was already inserted
                        pass
                bufin = f.read(recsize)
    print '\n'
    return True
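
A usage sketch for log2sql, assuming stateframedef is importable, the named log file exists, and the matching stateframe_vNN.00.xml file sits in the same directory as the log; the path below is illustrative:

import stateframedef

# Transfer one day's stateframe log to SQL; rerunning the same call resumes
# after the last timestamp already present in the database
ok = stateframedef.log2sql(log_file='/data/sf_20160501_v61.0.log')
print 'Transfer complete' if ok else 'Transfer failed'
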
Code example #7
File: stateframedef.py Project: jack-h/eovsa
def old_version_test(sflog=None,sfxml=None,outbinfile=None,outtabfile=None):
    ''' Read stateframe log files of older versions and
        create output file of rearranged binary data, and
        corresponding stateframedef table as text file.
           sflog = file name of stateframe log to read
           sfxml = file name of corresponding XML file
           outbinfile = file name of output binary data file
           outtabfile = file name of output table text file
    '''
    if sfxml:
        sf, version = rxml.xml_ptrs(sfxml)
    else:
        sf = None
        version = 0.0

    if sflog:
        try:
            f = open(sflog,'rb')
        except:
            print 'Could not open file',sflog,'-- Exiting.'
            return
    
        # Get binary size and check version number
        data = f.read(100)
        if stateframe.extract(data,['d',8]) != version:
            print 'Stateframe log file version does not match XML version. -- Exiting'
            return
        recsize = stateframe.extract(data,sf['Binsize'])
        f.close()
    else:
        # No log file specified, so we will try to read directly from ACC once per second
        # Read one as a test and get its version number
        # Read from ACC
        accini = stateframe.rd_ACCfile()
        data, msg = stateframe.get_stateframe(accini)
        version = stateframe.extract(data,['d',8])
        

    # Parse the stateframe dictionary and generate the brange and outlist dicts
    brange, outlist = sfdef(sf)
    # Calculate the startbytes in the list -- modifies outlist in place
    startbyte(outlist)

    stdout = sys.stdout  # Save current stdout
    if outtabfile:
        # Write the table info to the given file name -- just sets stdout to the file,
        # writes it, and resets stdout
        try:
            sys.stdout = open(outtabfile,'w')
        except:
            print 'Could not redirect STDOUT to',outtabfile,' -- Will print to screen'
            sys.stdout = stdout

    outlist2table(outlist,version)
    sys.stdout = stdout   # Reset to standard stdout

    if outbinfile:
        try:
            g = open(outbinfile,'wb')
        except:
            print 'Could not open file',outbinfile,'for writing. -- Exiting.'
            return
        if sflog:
            # Read from log file
            f = open(sflog,'rb')
            while 1:
                # Read and rearrange 1000 records
                try:
                    indata = f.read(recsize)
                    outdata = transmogrify(indata,brange)
                    g.write(outdata)
                except:
                    f.close()
                    g.close()
                    return
        else:
            # Read from ACC
            accini = stateframe.rd_ACCfile()
            for i in range(60):
                # Read from ACC and rearrange 60 records -- takes 1 min
                indata, msg = stateframe.get_stateframe(accini)
                outdata = transmogrify(indata,brange)
                g.write(outdata)
                time.sleep(1)
            g.close()

    return            
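
A usage sketch for old_version_test, assuming stateframedef and its stateframe/rxml dependencies are importable; the file names are illustrative, and the log and XML files must carry the same version number:

import stateframedef

# Rearrange an old-format stateframe log and write both the reformatted binary
# data and the corresponding stateframedef table as a text file
stateframedef.old_version_test(sflog='sf_20140101_v55.0.log',
                               sfxml='stateframe_v55.00.xml',
                               outbinfile='sf_v55_rearranged.dat',
                               outtabfile='sf_v55_table.txt')
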
Code example #8
def rd_ACCfile():
    '''Reads key variables from ACC.ini file on ACC (using urllib2)
    '''
    # List of strings to search for
    s0 = '[Stateframe]'
    s1 = 'bin size = '
    s2 = 'template path = '
    n0 = '[Network]'
    n1 = 'TCP.schedule.port = '
    n2 = 'TCP.stateframe.port = '
    n3 = 'TCP.schedule.stateframe.port = '
    r0 = '[ROACH]'
    r1 = 'boffile = '
    
    userpass = '******'
    ACCfile = None
    if socket.getfqdn().find('solar.pvt') != -1:
        try:
            ACCfile = urllib2.urlopen('ftp://'+userpass+'acc.solar.pvt/ni-rt/startup/acc.ini',timeout=0.5)
        except:
            # Timeout error
            print Time.now().iso,'FTP connection to ACC timed out'
        # Since this is the HELIOS machine, make a disk copy of ACC.ini in the
        # current (dropbox) directory.  This will be used by other instances of
        # sf_display() on other machines that do not have access to acc.solar.pvt.
        try:
            lines = ACCfile.readlines()
            o = open('acc.ini','w')
            for line in lines:
                o.write(line+'\n')
            o.close()
            ACCfile.close()
            ACCfile = urllib2.urlopen('ftp://'+userpass+'acc.solar.pvt/ni-rt/startup/acc.ini',timeout=0.5)
            # Also read XML file for stateframe from ACC, and decode template for later use
            sf, version = xml_ptrs()
        except:
            pass
    if ACCfile is None:
        # ACC not reachable?  Try reading static files.
        print 'Cannot ftp ACC.ini.  Reading static acc.ini and stateframe.xml from current directory instead.'
        ACCfile = open('acc.ini','r')
        # Also read XML file for stateframe from static file, and decode template for later use
        sf, version = xml_ptrs('stateframe.xml')

    for line in ACCfile:
        if s0 in line:    # String s0 ([Stateframe]) found
            for line in ACCfile:
                if s1 in line:
                    binsize = int(line[len(s1):])
                elif s2 in line:
                    xmlpath = line[len(s2):]
                    break
                elif line == '':
                    break
        if n0 in line:    # String n0 ([Network]) found
            for line in ACCfile:
                if n1 in line:
                    scdport = int(line[len(n1):])
                elif n2 in line:
                    sfport = int(line[len(n2):])
                    print '\nConnecting to ACC at port:',sfport
                elif n3 in line:
                    scdsfport = int(line[len(n3):])
                    break
                elif not line:
                    break
        if r0 in line:    # String r0 ([ROACH]) found
            for line in ACCfile:
                if r1 in line:
                    boffile = line[len(r1):].strip()
                elif not line:
                    break
    ACCfile.close()
    accdict = {'host':'acc.solar.pvt','binsize':binsize,'xmlpath':xmlpath,
               'scdport':scdport,'sfport':sfport,'scdsfport':scdsfport,'sf':sf,'version':version,'boffile':boffile}
    #if socket.gethostname() != 'helios':
        # The host is not OVSA, so assume port forwarding of stateframe port
        # to localhost port 6341
        #accdict['host'] = 'localhost'
    return accdict
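
A usage sketch, assuming the module is importable as stateframe and that either acc.solar.pvt is reachable or static acc.ini and stateframe.xml copies exist in the current directory:

import stateframe

# Read ACC configuration (ports, record size, stateframe template) into a dictionary
accini = stateframe.rd_ACCfile()
print 'Stateframe version:', accini['version']
print 'Record size (bytes):', accini['binsize']
print 'Stateframe port:', accini['sfport']
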
Code example #9
File: cal_header.py Project: jack-h/eovsa
def send_xml2sql():
    ''' Routine to send any changed calibration xml definitions to the 
        SQL Server.  The latest definition (if any) for a given type is
        checked to see if the version matches.  If not, an update is 
        stored.  This routine will typically be run whenever a definition
        is added or changed.
    '''
    import dbutil, read_xml2, sys
    t = util.Time.now()
    timestamp = t.lv  # Current time as LabVIEW timestamp
    cursor = dbutil.get_cursor()
    typdict = cal_types()
    for key in typdict.keys():
        #print 'Working on',typdict[key][0]
        # Execute the code to create the xml description for this key
        exec 'buf = '+typdict[key][1]+'()'
        # Resulting buf must be written to a temporary file and reread
        # by xml_ptrs()
        f = open('/tmp/tmp.xml','wb')
        f.write(buf)
        f.close()
        mydict, xmlver = read_xml2.xml_ptrs('/tmp/tmp.xml')
        defn_version = float(key)+xmlver/10.  # Version number expected
        # Retrieve most recent key.0 record and check its version against the expected one
        query = 'select top 1 * from abin where Version = '+str(key)+'.0 order by Timestamp desc'
        #print 'Executing query'
        outdict, msg = dbutil.do_query(cursor,query)
        #print msg
        if msg == 'Success':
            if len(outdict) == 0:
                # This type of xml file does not yet exist in the database, so mark it for adding
                add = True
            else:
                # There is one, so see if it agrees with the version
                buf = outdict['Bin'][0]   # Binary representation of xml file
                f = open('/tmp/tmp.xml','wb')
                f.write(buf)
                f.close()
                mydict, thisver = read_xml2.xml_ptrs('/tmp/tmp.xml')
                #print 'Versions:',float(key)+thisver/10.,defn_version
                if (float(key)+thisver/10.) == defn_version:
                    # This description is already there so we will skip it
                    add = False
                else:
                    add = True
        else:
            # Some kind of error occurred, so print the message and skip adding the xml description
            #print 'Query',query,'resulted in error message:',msg
            add = False
        if add:
            # This is either new or updated, so add the xml description
            # to the database
            #print 'Trying to add',typdict[key][0]
            try:
                cursor.execute('insert into aBin (Timestamp,Version,Description,Bin) values (?,?,?,?)',
                   timestamp, float(key), typdict[key][0], dbutil.stateframedef.pyodbc.Binary(buf))
                print typdict[key][0],'successfully added/updated to version',defn_version
            except:
                print 'Unknown error occurred in adding',typdict[key][0]
                print sys.exc_info()[1]
        else:
            print typdict[key][0],'version',defn_version,'already exists--not updated'
    cursor.commit()
    cursor.close()
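
The definition version used above combines the integer type key with the XML file's internal version. A small worked illustration of the convention, assuming xmlver is the value returned by read_xml2.xml_ptrs():

# Type key 3 with internal XML version 1.0 yields definition version 3.1
key, xmlver = 3, 1.0
defn_version = float(key) + xmlver/10.
print defn_version    # prints 3.1
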
            
            
            
            
Code example #10
File: stateframe.py Project: jack-h/eovsa
def rd_ACCfile():
    """Reads key variables from ACC.ini file on ACC (using urllib2)
    """
    # List of strings to search for
    s0 = "[Stateframe]"
    s1 = "bin size = "
    s2 = "template path = "
    n0 = "[Network]"
    n1 = "TCP.schedule.port = "
    n2 = "TCP.stateframe.port = "
    n3 = "TCP.schedule.stateframe.port = "
    r0 = "[ROACH]"
    r1 = "boffile = "

    userpass = "******"
    ACCfile = None
    if socket.getfqdn().find("solar.pvt") != -1:
        try:
            ACCfile = urllib2.urlopen("ftp://" + userpass + "acc.solar.pvt/ni-rt/startup/acc.ini", timeout=0.5)
        except:
            # Timeout error
            print Time.now().iso, "FTP connection to ACC timed out"
        # Since this is the HELIOS machine, make a disk copy of ACC.ini in the
        # current (dropbox) directory.  This will be used by other instances of
        # sf_display() on other machines that do not have access to acc.solar.pvt.
        try:
            lines = ACCfile.readlines()
            o = open("acc.ini", "w")
            for line in lines:
                o.write(line + "\n")
            o.close()
            ACCfile.close()
            ACCfile = urllib2.urlopen("ftp://" + userpass + "acc.solar.pvt/ni-rt/startup/acc.ini", timeout=0.5)
            # Also read XML file for stateframe from ACC, and decode template for later use
            sf, version = xml_ptrs()
        except:
            pass
    if ACCfile is None:
        # ACC not reachable?  Try reading static files.
        print "Cannot ftp ACC.ini.  Reading static acc.ini and stateframe.xml from current directory instead."
        ACCfile = open("acc.ini", "r")
        # Also read XML file for stateframe from static file, and decode template for later use
        sf, version = xml_ptrs("stateframe.xml")

    for line in ACCfile:
        if s0 in line:  # String s0 ([Stateframe]) found
            for line in ACCfile:
                if s1 in line:
                    binsize = int(line[len(s1) :])
                elif s2 in line:
                    xmlpath = line[len(s2) :]
                    break
                elif line == "":
                    break
        if n0 in line:  # String n0 ([Network]) found
            for line in ACCfile:
                if n1 in line:
                    scdport = int(line[len(n1) :])
                elif n2 in line:
                    sfport = int(line[len(n2) :])
                    print "\nConnecting to ACC at port:", sfport
                elif n3 in line:
                    scdsfport = int(line[len(n3) :])
                    break
                elif not line:
                    break
        if r0 in line:  # String r0 ([ROACH]) found
            for line in ACCfile:
                if r1 in line:
                    boffile = line[len(r1) :].strip()
                elif not line:
                    break
    ACCfile.close()
    accdict = {
        "host": "acc.solar.pvt",
        "binsize": binsize,
        "xmlpath": xmlpath,
        "scdport": scdport,
        "sfport": sfport,
        "scdsfport": scdsfport,
        "sf": sf,
        "version": version,
        "boffile": boffile,
    }
    # if socket.gethostname() != 'helios':
    # The host is not OVSA, so assume port forwarding of stateframe port
    # to localhost port 6341
    # accdict['host'] = 'localhost'
    return accdict