Code example #1
File: mysql_data_source.py  Project: insanity/akagi
    def __iter__(self):
        self.__result = []
        c = self._connection.cursor()
        logger.debug("Executing query...")
        c.execute(self.query.body)
        logger.debug("Finished.")

        # fetchall() loads the whole result set into memory; expose it as an iterator.
        return iter(c.fetchall())
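
This __iter__ runs the query and hands back an iterator over the fully fetched rows. Below is a minimal, self-contained sketch of the same pattern; it uses the standard-library sqlite3 module instead of a MySQL connection so it runs without a server, and SQLiteDataSource is an illustrative name, not part of akagi.

import logging
import sqlite3

logger = logging.getLogger(__name__)


class SQLiteDataSource:
    """Illustrative stand-in: run a query and expose the result set as an iterator."""

    def __init__(self, connection, query_body):
        self._connection = connection
        self.query_body = query_body

    def __iter__(self):
        c = self._connection.cursor()
        logger.debug("Executing query...")
        c.execute(self.query_body)
        logger.debug("Finished.")
        # fetchall() pulls every row into memory before iteration starts.
        return iter(c.fetchall())


conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (x INTEGER)")
conn.executemany("INSERT INTO t VALUES (?)", [(1,), (2,), (3,)])
for row in SQLiteDataSource(conn, "SELECT x FROM t"):
    print(row)  # (1,), (2,), (3,)
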
Code example #2
    def activate(self):
        logger.debug("Deleting old files on s3...")
        self.bundle.clear()
        logger.debug("Executing query on Redshift")
        # Log only the credential-free query body; query.sql is the UNLOAD
        # statement that embeds raw credentials and must never be logged.
        logger.debug("\n" + self.query.body + "\n")
        self._cursor.execute(self.query.sql)
        logger.debug("Finished")
Code example #3
File: data_source.py  Project: yoheikikuta/akagi
    def save(self, tar_dir, force=False):
        paths = []

        for d in self.bundle.data_files:
            path = os.path.expanduser(os.path.join(tar_dir, d.key))
            dirname = os.path.dirname(path)

            # Create the target directory if needed, then write the file only
            # when it is not already cached at that path.
            if not os.path.isdir(dirname):
                os.makedirs(dirname)

            if os.path.isfile(path):
                logger.debug("Skipped %(key)s" % ({"key": d.key}))
            else:
                with open(path, 'wb') as f:
                    f.write(d.raw_content)
                    logger.debug("Saved %(key)s to %(path)s" % ({"key": d.key, "path": path}))

            paths.append(path)

        return paths
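
Examples #3 to #5 share one cache-to-disk pattern: make sure the target directory exists, then write the file only if it is not already present. On Python 3.2+ the directory guard collapses into os.makedirs(..., exist_ok=True). A minimal sketch under that assumption follows; save_to_cache and the DataFile tuple are illustrative names, not akagi's API.

import logging
import os
from collections import namedtuple

logger = logging.getLogger(__name__)

# Illustrative stand-in for a data file object carrying a key and raw bytes.
DataFile = namedtuple("DataFile", ["key", "raw_content"])


def save_to_cache(data_file, cache_dir):
    """Write data_file under cache_dir, skipping files that are already cached."""
    path = os.path.join(cache_dir, data_file.key)
    # exist_ok=True replaces the explicit isdir()/FileExistsError guards used
    # in examples #3 to #5 and is safe against concurrent makedirs calls.
    os.makedirs(os.path.dirname(path), exist_ok=True)

    if os.path.exists(path):
        logger.debug("Skipped %s", data_file.key)
    else:
        with open(path, "wb") as f:
            f.write(data_file.raw_content)
        logger.debug("Saved %s to %s", data_file.key, path)
    return path


print(save_to_cache(DataFile("bundle/part-0000", b"col1,col2\n1,2\n"), "/tmp/akagi-cache"))
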
Code example #4
    def _save_data_file(self, data_file):
        path = os.path.join(self._local_cache_dir, data_file.key)
        dirname = os.path.dirname(path)

        if not os.path.isdir(dirname):
            try:
                os.makedirs(dirname)
            except FileExistsError:
                pass

        if os.path.isfile(path) or os.path.isdir(path):
            logger.debug("Skipped %(key)s" % ({"key": data_file.key}))
        else:
            with open(path, 'wb') as f:
                f.write(data_file.raw_content)
                logger.debug("Saved %(key)s to %(path)s" % ({
                    "key": data_file.key,
                    "path": path
                }))

        return path
Code example #5
File: data_file.py  Project: pauchan/akagi
    def _cache_content(self):
        '''Cache the file's content and return its path on the local file system.'''

        dirname = os.path.dirname(self.path)

        try:
            os.makedirs(dirname)
        except OSError:
            if not os.path.isdir(dirname):
                raise

        if os.path.isfile(self.path) or os.path.isdir(self.path):
            logger.debug("Skipped %(key)s" % ({"key": self._content.key}))
        else:
            with open(self.path, 'wb') as f:
                f.write(self._content.raw_body.read())
                logger.debug("Saved %(key)s to %(path)s" %
                             ({
                                 "key": self._content.key,
                                 "path": self.path
                             }))

        return self.path
Code example #6
File: redshift_data_source.py  Project: pauchan/akagi
    def data_files(self):
        if self._data_files is None:
            if not self._exists_on_s3:
                # Append a UTC timestamp so each run unloads to a fresh, unique prefix.
                unload_prefix = self._unload_root_prefix + \
                    datetime.utcnow().strftime("%Y%m%d_%H%M%f")  # XXX: utcnow()?

                query = UnloadQuery(self._query_str, self._bucket_name,
                                    unload_prefix)

                logger.debug("Executing query on Redshift")
                logger.debug(
                    "\n" + query.body + "\n"
                )  # avoid logging unload query since it has raw credentials inside
                self._cursor.execute(query.sql)
                logger.debug("Finished")

            self._data_files = data_files_for_s3_prefix(
                self._bucket_name, self._latest_prefix_on_s3)

        return self._data_files
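
The unload prefix is made unique per run by appending a UTC timestamp to the configured root prefix (the concatenation above is a reconstruction; the exact joining in the original code may differ). A small sketch of that naming scheme:

from datetime import datetime


def unique_unload_prefix(root_prefix):
    # "%Y%m%d_%H%M%f" yields e.g. "20240131_1542123456" (date, hour, minute, microseconds).
    return root_prefix + datetime.utcnow().strftime("%Y%m%d_%H%M%f")


print(unique_unload_prefix("akagi/unload/"))  # placeholder root prefix
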
Code example #7
File: s3_data_file_bundle.py  Project: insanity/akagi
    def clear(self):
        # Delete every intermediate object that was unloaded under this prefix.
        for obj in self._bucket.objects.filter(Prefix=self.prefix):
            logger.debug("Deleting intermediate object on s3: %(key)s" % ({"key": obj.key}))
            obj.delete()
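
clear() assumes self._bucket is a boto3 Bucket resource: filtering by Prefix and deleting each object is the standard way to empty a prefix. A minimal standalone sketch follows; the bucket name and prefix are placeholders, and running it against a real bucket will actually delete objects.

import logging

import boto3

logger = logging.getLogger(__name__)


def clear_prefix(bucket_name, prefix):
    """Delete every object stored under prefix in the given S3 bucket."""
    bucket = boto3.resource("s3").Bucket(bucket_name)
    for obj in bucket.objects.filter(Prefix=prefix):
        logger.debug("Deleting intermediate object on s3: %s", obj.key)
        obj.delete()


# clear_prefix("my-akagi-bucket", "akagi/unload/20240131_1542123456/")  # placeholder names

When per-object logging is not needed, bucket.objects.filter(Prefix=prefix).delete() issues batched DeleteObjects requests and is usually faster.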