def initialize(self, config=None):
    """Create the Google Cloud Storage bucket backing this job store.

    :param config: the Toil configuration to initialize the base class with;
        also consulted for encryption settings.
    :raises JobStoreExistsException: if the bucket already exists.
    :raises NoSuchJobStoreException: if the bucket cannot be fetched right
        after creation.
    """
    # LBYL existence probe: an existing bucket means this job store was
    # already initialized and must not be clobbered.
    exists = True
    try:
        self.uri.get_bucket(headers=self.headerValues, validate=True)
    except GSResponseError:
        exists = False
    if exists:
        raise JobStoreExistsException(self.locator)
    # Create the bucket via the google-cloud-storage client. The returned
    # Bucket object is intentionally discarded: the bucket is re-fetched
    # below through the boto-style URI interface the rest of the class uses.
    from google.cloud import storage
    storage_client = storage.Client()
    storage_client.create_bucket(self.bucketName)
    try:
        self.files = self.uri.get_bucket(headers=self.headerValues, validate=True)
    except GSResponseError:
        raise NoSuchJobStoreException(self.locator)
    # functionally equivalent to dictionary1.update(dictionary2) but works
    # with our immutable dicts
    self.encryptedHeaders = dict(self.encryptedHeaders,
                                 **self._resolveEncryptionHeaders(config))
    super(GoogleJobStore, self).initialize(config)
def initialize(self, config):
    """Create the directory backing this job store.

    :raises JobStoreExistsException: if the job store directory already
        exists on disk.
    """
    try:
        os.mkdir(self.jobStoreDir)
    except OSError as e:
        # Anything other than "already exists" is unexpected; propagate it.
        if e.errno != errno.EEXIST:
            raise
        raise JobStoreExistsException(self.jobStoreDir)
    logger.debug('initialized')
def initialize(self, config):
    """Create the job store directory tree and run base-class setup.

    :raises JobStoreExistsException: if the job store directory already
        exists on disk.
    """
    try:
        os.mkdir(self.jobStoreDir)
    except OSError as e:
        # Re-raise unexpected filesystem errors unchanged.
        if e.errno != errno.EEXIST:
            raise
        # The directory is already there: another job store lives here.
        raise JobStoreExistsException(self.jobStoreDir)
    os.mkdir(self.tempFilesDir)
    super(FileJobStore, self).initialize(config)
def initialize(self, config=None):
    """Create the backing bucket and configure server-side encryption.

    :param config: the Toil configuration; consulted for an optional
        customer-supplied server-side encryption (SSE) key file.
    :raises JobStoreExistsException: if the bucket already exists.
    :raises ValueError: if the configured SSE key is not exactly 32 bytes.
    """
    try:
        self.bucket = self.storageClient.create_bucket(self.bucketName)
    except exceptions.Conflict:
        # A conflict means the bucket (and thus the job store) already exists.
        raise JobStoreExistsException(self.locator)
    super(GoogleJobStore, self).initialize(config)
    # Set up server-side encryption after config is established by super().
    if self.config.sseKey is not None:
        with open(self.config.sseKey) as f:
            self.sseKey = compat_bytes(f.read())
        # Explicit check instead of `assert`: asserts are stripped when
        # Python runs with -O, which would silently accept a bad key.
        if len(self.sseKey) != 32:
            raise ValueError(
                'Google Cloud Storage customer-supplied encryption keys '
                'must be exactly 32 bytes long, got %d bytes'
                % len(self.sseKey))
def initialize(self, config):
    """Lay out the on-disk directory structure for this job store.

    :raises JobStoreExistsException: if the job store directory already
        exists on disk.
    """
    try:
        os.mkdir(self.jobStoreDir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        raise JobStoreExistsException(self.jobStoreDir)
    # Create every sub-directory the store uses; mkdir_p is presumably
    # idempotent, matching its use here for possibly pre-existing paths.
    for directory in (self.jobsDir, self.statsDir, self.filesDir,
                      self.jobFilesDir, self.sharedFilesDir):
        mkdir_p(directory)
    self.linkImports = config.linkImports
    super(FileJobStore, self).initialize(config)
def initialize(self, config):
    """Create the S3 bucket backing this job store, if it does not exist.

    :raises JobStoreExistsException: if a job store already occupies this
        bucket/prefix combination.
    """
    # boto3 does not currently set the region when creating the
    # bucket. This is not expected to change any time soon.
    # https://github.com/boto/boto3/issues/781
    # So we are fetching the configured region from boto3 client.
    region = self.bucket.meta.client._client_config.region_name
    try:
        self.bucket.create(
            CreateBucketConfiguration={'LocationConstraint': region})
        self.bucket.wait_until_exists()
    except ClientError as e:
        if e.response['Error']['Code'] != 'BucketAlreadyOwnedByYou':
            # Bare `raise` preserves the original traceback rather than
            # re-raising the bound exception object.
            raise
        # Bucket already exists. Check path prefix:
        if list(self.bucket.objects.filter(Prefix=self.prefix).limit(1)):
            raise JobStoreExistsException(self.path)
def initialize(self, config):
    """Create the directory tree for this job store and record config flags.

    :raises JobStoreExistsException: if the job store directory already
        exists on disk.
    """
    try:
        os.mkdir(self.jobStoreDir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        raise JobStoreExistsException(self.jobStoreDir)
    # Sub-directories may legitimately exist already; exist_ok makes the
    # creation idempotent.
    for subdir in (self.jobsDir, self.statsDir, self.filesDir,
                   self.jobFilesDir, self.sharedFilesDir):
        os.makedirs(subdir, exist_ok=True)
    self.linkImports = config.linkImports
    self.moveExports = config.moveExports
    super().initialize(config)
def initialize(self, config):
    """Create the Azure storage entities backing this job store.

    :raises JobStoreExistsException: if the job store already exists.
    """
    if self._jobStoreExists():
        raise JobStoreExistsException(self.locator)
    # Lazy %-style arguments: the message is only formatted when the
    # DEBUG level is actually enabled.
    logger.debug("Creating job store at '%s'", self.locator)
    self._bind(create=True)
    super(AzureJobStore, self).initialize(config)