Example No. 1
0
    def _get_all_taskpaper_files(self, workspaceRoot):
        """*get a list of all the taskpaper filepaths in the workspace (excluding the sync directory)*

        **Key Arguments:**
            - ``workspaceRoot`` -- path to the root folder of a workspace containing taskpaper files

        **Return:**
            - ``taskpaperFiles`` -- a list of paths to all the taskpaper files within the workspace (files living under ``self.syncFolder`` are excluded)
        """
        self.log.info('starting the ``_get_all_taskpaper_files`` method')

        # BUGFIX: the original body ignored the ``workspaceRoot`` argument and
        # always listed ``self.workspaceRoot``. Honour the argument instead --
        # callers that pass ``self.workspaceRoot`` see identical behaviour.
        theseFiles = recursive_directory_listing(
            log=self.log,
            baseFolderPath=workspaceRoot,
            whatToList="files"  # all | files | dirs
        )

        # KEEP ONLY .taskpaper FILES THAT DO NOT LIVE UNDER THE SYNC FOLDER
        taskpaperFiles = [
            f for f in theseFiles
            if os.path.splitext(f)[1] == ".taskpaper" and self.syncFolder not in f
        ]

        self.log.info('completed the ``_get_all_taskpaper_files`` method')
        return taskpaperFiles
Example No. 2
0
    def _get_all_taskpaper_files(self):
        """*get a list of all the taskpaper filepaths in the workspace*

        **Return:**
            - ``taskpaperFiles`` -- a list of paths to all the taskpaper files within the workspace
        """
        self.log.info('starting the ``_get_all_taskpaper_files`` method')

        if self.workspaceRoot:
            from fundamentals.files import recursive_directory_listing
            theseFiles = recursive_directory_listing(
                log=self.log,
                baseFolderPath=self.workspaceRoot,
                whatToList="files"  # all | files | dirs
            )

            taskpaperFiles = []
            taskpaperFiles[:] = [
                f for f in theseFiles if os.path.splitext(f)[1] == ".taskpaper"
            ]
        else:
            taskpaperFiles = [self.taskpaperPath]

        self.log.info('completed the ``_get_all_taskpaper_files`` method')
        return taskpaperFiles
Example No. 3
0
    def test_recursive_directory_listing_function(self):
        """*exercise ``recursive_directory_listing`` with each of its three ``whatToList`` modes*"""
        # IMPORT ONCE -- the original repeated this identical import before
        # every call
        from fundamentals.files import recursive_directory_listing

        # SMOKE-TEST ALL THREE LISTING MODES AGAINST /tmp
        for mode in ["all", "files", "dirs"]:
            theseFiles = recursive_directory_listing(log,
                                                     baseFolderPath="/tmp",
                                                     whatToList=mode)
            # print(theseFiles)
Example No. 4
0
def get_pdf_paths(
        log,
        settings):
    """*generate a dictionary of pdf-names and their paths*

    **Key Arguments:**
        - ``log`` -- the logger
        - ``settings`` -- the settings dictionary; must contain ``settings["read"]["reading_list_root_path"]``

    **Return:**
        - ``pdfDict`` -- the dictionary of pdf paths (keys are pdf names)

    **Usage:**

        To generate a dictionary of ``{pdfName: pdfPath}`` for PDFs in the reading-list folder, use the code:

        .. code-block:: python

            from headjack.read import get_pdf_paths
            pdfDict = get_pdf_paths(
                log=log,
                settings=settings)
            print(pdfDict)
    """
    import os
    log.info('starting the ``get_pdf_paths`` method')

    # RECURSIVELY LIST EVERY FILE BENEATH THE READING-LIST ROOT
    fileList = recursive_directory_listing(
        log=log,
        baseFolderPath=settings["read"]["reading_list_root_path"],
        whatToList="files"  # all | files | dirs
    )

    # KEEP ONLY FILES WITH A GENUINE .pdf EXTENSION (case-insensitive).
    # BUGFIX: the old ``f.split(".")[-1]`` test also matched a file literally
    # named ``pdf`` with no extension; ``os.path.splitext`` does not.
    pdfDict = {}
    for f in fileList:
        if os.path.splitext(f)[1].lower() == ".pdf":
            pdfDict[os.path.basename(f)] = f

    log.info('completed the ``get_pdf_paths`` method')
    return pdfDict
Example No. 5
0
def _download_one_night_of_atlas_data(mjd, log, archivePath):
    """*rsync one night of ATLAS ``.dph``/``.meta`` files into the local
    archive and parse any meta files not yet recorded in the database*

    NOTE(review): this is legacy Python 2 code (``print`` statements,
    ``except IOError, e``) and relies on module-level names (``dbSettings``,
    ``database``, ``Popen``, ``PIPE``, ``readquery``, ``codecs``, ``os``,
    ``recursive_directory_listing``) defined elsewhere in the file.
    The function body continues beyond this excerpt.

    **Key Arguments:**
        - ``mjd`` -- the mjd of the night of data to download
        - ``log`` -- the logger
        - ``archivePath`` -- the path to the root of the local archive         
    """

    # SETUP A DATABASE CONNECTION FOR THE remote database
    global dbSettings

    # SETUP ALL DATABASE CONNECTIONS
    atlasMoversDBConn = database(log=log, dbSettings=dbSettings).connect()

    # RSYNC THE 02a NIGHT FOLDER -- only .dph and .meta files (plus the
    # directory tree) are pulled; everything else is excluded
    cmd = "rsync -avzL --include='*.dph' --include='*.meta' --include='*/' --exclude='*' [email protected]:/atlas/red/02a/%(mjd)s %(archivePath)s/02a/" % locals(
    )

    p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    stdout, stderr = p.communicate()
    # A "No such file or directory" error means the night folder does not
    # exist remotely -- create the empty local folder and continue; any other
    # rsync error aborts
    if len(stderr):
        if "No such file or directory" in stderr:
            baseFolderPath = "%(archivePath)s/02a/%(mjd)s" % locals()
            # Recursively create missing directories
            if not os.path.exists(baseFolderPath):
                os.makedirs(baseFolderPath)
            print 'MJD %(mjd)s data: %(stderr)s' % locals()
        else:
            print 'error in rsyncing MJD %(mjd)s data: %(stderr)s' % locals()
            return None

    # RSYNC THE 01a NIGHT FOLDER -- same filter and error handling as above
    cmd = "rsync -avzL --include='*.dph' --include='*.meta' --include='*/' --exclude='*' [email protected]:/atlas/red/01a/%(mjd)s %(archivePath)s/01a/" % locals(
    )
    p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    stdout, stderr = p.communicate()
    if len(stderr):
        if "No such file or directory" in stderr:
            baseFolderPath = "%(archivePath)s/01a/%(mjd)s" % locals()
            # Recursively create missing directories
            if not os.path.exists(baseFolderPath):
                os.makedirs(baseFolderPath)
            print 'MJD %(mjd)s data: %(stderr)s' % locals()
        else:
            print 'error in rsyncing MJD %(mjd)s data: %(stderr)s' % locals()
            return None

    # LIST EVERY DOWNLOADED FILE FROM BOTH NIGHT FOLDERS
    theseFiles = recursive_directory_listing(
        log=log,
        baseFolderPath="%(archivePath)s/02a/%(mjd)s" % locals(),
        whatToList="files"  # all | files | dirs
    )
    theseFiles += recursive_directory_listing(
        log=log,
        baseFolderPath="%(archivePath)s/01a/%(mjd)s" % locals(),
        whatToList="files"  # all | files | dirs
    )

    # MAP exposure-name (meta filename minus extension) -> meta filepath
    metaFilenames = []
    metaFilenames[:] = [(os.path.splitext(os.path.basename(m))[0], m)
                        for m in theseFiles if "meta" in m]

    metaDict = {}
    for m in metaFilenames:
        metaDict[m[0]] = m[1]

    # EXPOSURES ALREADY KNOWN TO THE DATABASE FOR THIS NIGHT
    sqlQuery = u"""
            select expname from atlas_exposures where floor(mjd) = %(mjd)s 
        """ % locals()
    rows = readquery(log=log, sqlQuery=sqlQuery, dbConn=atlasMoversDBConn)

    dbExps = []
    dbExps[:] = [r["expname"] for r in rows]

    # META FILES ON DISK THAT THE DATABASE DOES NOT YET KNOW ABOUT
    missingMeta = []
    missingMeta[:] = [m for m in metaDict.keys() if m not in dbExps]

    # FITS-HEADER KEYWORD -> DATABASE COLUMN NAME
    fitskw = {
        "MJD-OBS": "mjd",
        "OBJECT": "atlas_object_id",
        "RA": "raDeg",
        "DEC": "decDeg",
        "FILTER": "filter",
        "EXPTIME": "exp_time",
        "OBSNAME": "expname"
    }

    # PARSE EACH MISSING META FILE INTO A ROW-DICT
    allData = []
    for m in missingMeta:

        pathToReadFile = metaDict[m]
        try:
            log.debug("attempting to open the file %s" % (pathToReadFile, ))
            readFile = codecs.open(pathToReadFile, encoding='utf-8', mode='r')
            thisData = readFile.read()
            readFile.close()
        except IOError, e:
            message = 'could not open the file %s' % (pathToReadFile, )
            log.critical(message)
            raise IOError(message)

        # EACH LINE LOOKS LIKE ``KEYWORD = value / comment``; keep the first
        # occurrence of each keyword of interest, stripping the comment and
        # any surrounding quotes
        fitsDict = {}
        for l in thisData.split("\n"):
            kw = l.split("=")[0].strip()
            if kw in fitskw.keys() and kw not in fitsDict.keys():
                fitsDict[fitskw[kw]] = l.split("=")[1].split(
                    "/")[0].strip().replace("'", "")

        # ONLY KEEP EXPOSURES WHERE ALL 7 KEYWORDS WERE FOUND
        if len(fitsDict) == 7:
            allData.append(fitsDict)