Example #1
def data_update(self, source_env):
    """Export data from source_env and import it into self.update_env."""
    self.log.info('Initialized data sync')
    try:
        tempdir = tempfile.mkdtemp()
        export = dbtools.export_data(self, source_env, tempdir)
        dbtools.import_data(self, self.update_env, export)
        local('rm -rf %s' % tempdir)
    except Exception:
        self.log.exception('Data sync encountered a fatal error.')
        raise
    else:
        self.log.info('Data sync successful.')
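Note that the example above only removes the temp directory on success; if export_data or import_data raises, the mkdtemp directory is left behind. A minimal variant sketch, assuming the same dbtools helpers, logger, and Fabric-style local as above, moves the cleanup into a finally block:

import shutil
import tempfile

def data_update(self, source_env):
    # dbtools, local, self.log and self.update_env come from the
    # surrounding project code shown in the examples on this page.
    self.log.info('Initialized data sync')
    tempdir = tempfile.mkdtemp()
    try:
        export = dbtools.export_data(self, source_env, tempdir)
        dbtools.import_data(self, self.update_env, export)
    except Exception:
        self.log.exception('Data sync encountered a fatal error.')
        raise
    else:
        self.log.info('Data sync successful.')
    finally:
        # Remove the scratch directory whether or not the sync succeeded.
        shutil.rmtree(tempdir, ignore_errors=True)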
Example #2
    def setup_environments(self, handler=None, working_dir=None):
        """ Send code/data/files from processing to destination (dev/test/live)
        All import and restore processing is done in temp directories. Once
        processing is complete, it is pushed out to the final destination.

        handler: 'import' or None. If 'import', perform the extra import processing.
        working_dir: Full path to the working directory; required when handler is 'import'.

        """

        # During an import, updates run against a single database ('dev').
        # Once complete, we import this 'final' database into each environment.
        if handler == 'import':
            tempdir = tempfile.mkdtemp()
            dump_file = dbtools.export_data(self, 'dev', tempdir)

        for env in self.environments:
            # Code
            destination = os.path.join(self.project_path, env)
            local('git clone -l /var/git/projects/%s -b %s %s' %
                  (self.project, self.project, destination))
            # On import, set up environment data and files.
            if handler == 'import':
                # Data (already exists in 'dev' - import into other envs)
                if env != 'dev':
                    dbtools.import_data(self, env, dump_file)

                # Files
                source = os.path.join(working_dir, 'sites/default/files')
                file_dir = os.path.join(self.project_path, env,
                                        'sites/default')
                local('rsync -av %s %s' % (source, file_dir))

        # Cleanup
        if handler == 'import':
            local('rm -rf %s' % tempdir)
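dbtools is project-specific code, and its export_data/import_data helpers are not shown on this page. Purely as an illustration of the round trip they perform, a hypothetical sketch wrapping mysqldump/mysql through Fabric's local could look like the following; the <project>_<env> database naming, the simplified signatures (the real helpers take the calling object as their first argument), and the lack of credential handling are all assumptions, not the project's actual code:

import os
from fabric.api import local

def export_data(project, source_env, destination):
    # Hypothetical: dump the <project>_<source_env> database to a file.
    dump_file = os.path.join(destination, '%s_%s.sql' % (project, source_env))
    local('mysqldump %s_%s > %s' % (project, source_env, dump_file))
    return dump_file

def import_data(project, env, dump_file):
    # Hypothetical: load the dump into the <project>_<env> database.
    local('mysql %s_%s < %s' % (project, env, dump_file))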
Example #3
    def setup_environments(self, handler=None, working_dir=None):
        """ Send code/data/files from processing to destination (dev/test/live)
        All import and restore processing is done in temp directories. Once
        processing is complete, it is pushed out to the final destination.

        handler: 'import' or None. If 'import', perform the extra import processing.
        working_dir: Full path to the working directory; required when handler is 'import'.

        """

        # During an import, updates run against a single database ('dev').
        # Once complete, we import this 'final' database into each environment.
        if handler == 'import':
            tempdir = tempfile.mkdtemp()
            dump_file = dbtools.export_data(self, 'dev', tempdir)

        for env in self.environments:
            # Code
            destination = os.path.join(self.project_path, env)
            local('git clone -l /var/git/projects/%s -b %s %s' %
                  (self.project, self.project, destination))
            # On import, set up environment data and files.
            if handler == 'import':
                # Data (already exists in 'dev' - import into other envs)
                if env != 'dev':
                    dbtools.import_data(self.project, env, dump_file)

                # Files
                source = os.path.join(working_dir, 'sites/default/files')
                file_dir = os.path.join(self.project_path, env,
                                        'sites/default')
                local('rsync -av %s %s' % (source, file_dir))

        # Cleanup
        if handler == 'import':
            local('rm -rf %s' % tempdir)
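One rsync detail the file sync above relies on: the source path ends in files with no trailing slash, so rsync copies the files directory itself into sites/default, producing sites/default/files at the destination. A trailing slash would copy only the directory's contents. A small illustration with placeholder paths (not paths from the project):

from fabric.api import local

# Copies the directory itself: destination ends up with .../sites/default/files
local('rsync -av /tmp/work/sites/default/files /var/www/project/dev/sites/default')

# A trailing slash would copy only the contents of 'files', directly into sites/default
local('rsync -av /tmp/work/sites/default/files/ /var/www/project/dev/sites/default')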
Example #4
def data_update(self, source_env):
    """Export data from source_env and import it into self.project_env."""
    tempdir = tempfile.mkdtemp()
    export = dbtools.export_data(self, source_env, tempdir)
    dbtools.import_data(self.project, self.project_env, export)
    local('rm -rf %s' % tempdir)
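If Python 3.2+ is available, the same flow can be sketched with tempfile.TemporaryDirectory, which removes the scratch directory automatically even when the export or import raises; dbtools and the attributes on self are assumed to be the same as in the example above:

import tempfile

def data_update(self, source_env):
    # The directory is deleted when the with block exits, even on error.
    with tempfile.TemporaryDirectory() as tempdir:
        export = dbtools.export_data(self, source_env, tempdir)
        dbtools.import_data(self.project, self.project_env, export)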