Exemplo n.º 1
0
    def _query(self, filters, **parameters):
        """Execute a SQL query and yield a MakeFile for each result row.

        Parameters
        ----------
        filters:     iterable of SQL predicate strings, joined with AND.
        fields:      optional override of self.db_fields (columns to select).
        limit:       optional row limit (converted with int()).
        path:        optional override of self.path; called as path(self, row)
                     to map a result row to a file path.

        Any error (connection, query, or row handling) is reported via
        fatal(); the cursor is always closed, and the connection is torn
        down unless keep-alive is configured.
        """
        cursor = None
        try:
            # Lazily establish the connection on first use.
            if self.db_conn is None:
                self.connect()

            # Optional per-call overrides, falling back to instance defaults.
            fields = parameters.get('fields', self.db_fields)
            path = parameters.get('path', self.path)
            try:
                limit = int(parameters['limit'])
            except KeyError:
                limit = None

            cursor = self.db_conn.cursor()
            query = self.db_query_format.format(fields=','.join(fields),
                                                table=self.db_table,
                                                filters=' AND '.join(filters))

            if limit:
                query = '{0} LIMIT {1}'.format(query, limit)

            debug(D_DATASET, 'Executing SQL query: {0}'.format(query))
            cursor.execute(query)
            for row in cursor.fetchall():
                yield MakeFile(path(self, row), self.nest)
        except Exception as e:
            fatal(D_DATASET,
                  'Unable to perform SQL query: {0}'.format(e),
                  print_traceback=True)
        finally:
            if cursor:
                cursor.close()
            if not self.db_conn_keep_alive:
                self.disconnect()
        # NOTE: no trailing `raise StopIteration` — under PEP 479 (Python
        # 3.7+) raising StopIteration inside a generator is converted to
        # RuntimeError; simply falling off the end terminates iteration.
Exemplo n.º 2
0
    def __iter__(self):
        """Iterate over the dataset, using the cache file when it is fresh.

        The cache is (re)generated when:
          1. the cache file does not exist, or
          2. it exists but predates the compile start time AND the script
             is running in force mode (otherwise this is a fatal error).
        """
        debug(D_DATASET, 'Iterating on Dataset {0}'.format(self))
        if os.path.exists(self.cache_path):
            # A cache file created after compilation started is valid, so
            # load it instead of regenerating.
            if CurrentScript().start_time <= os.stat(self.cache_path).st_ctime:
                debug(D_DATASET, 'Loading Dataset {0}'.format(self))
                # Open eagerly so an unreadable cache fails here (as the
                # original generator expression did), then hand the file to
                # a nested generator whose `with` guarantees it is closed
                # once iteration finishes — the bare open() in a genexp
                # previously leaked the handle until GC.
                cache = open(self.cache_path, 'r')

                def load_cached():
                    with cache:
                        for line in cache:
                            yield MakeFile(line.strip(), self.nest)
                return load_cached()

            message = 'Cache file {0} already exists'.format(self.cache_path)
            if CurrentScript().force:
                warn(D_DATASET, message)
            else:
                fatal(D_DATASET, message)

        debug(D_DATASET, 'Generating Dataset {0}'.format(self))
        return self._generate()
Exemplo n.º 3
0
 def _generate(self):
     """Yield a MakeFile for every filesystem path matching self.expr."""
     # Generator function instead of a returned generator expression:
     # callers still receive a lazy iterator with identical items.
     for match in glob.glob(self.expr):
         yield MakeFile(normalize_path(match.strip(), os.curdir), self.nest)
Exemplo n.º 4
0
 def __iter__(self):
     """Iterate over the dataset by reading paths from the given cache file.

     Cache existence/freshness checks are skipped because the file was
     supplied directly.
     """
     debug(D_DATASET, 'Loading Dataset: {0}'.format(self.cache_path))
     # Open eagerly so a missing/unreadable cache fails here (matching the
     # original genexp's behavior); the nested generator's `with` closes
     # the handle deterministically once iteration ends — the bare open()
     # in a generator expression previously leaked it until GC.
     cache = open(self.cache_path, 'r')

     def load_cached():
         with cache:
             for line in cache:
                 yield MakeFile(normalize_path(line.strip(), os.curdir),
                                self.nest)
     return load_cached()