Example #1
import os

from doodad.utils import REPO_DIR, safe_import

blob = safe_import.try_import('azure.storage.blob')
azure = safe_import.try_import('azure')

AZURE_STARTUP_SCRIPT_PATH = os.path.join(
    REPO_DIR, "scripts/azure/azure_startup_script.sh")
AZURE_SHUTDOWN_SCRIPT_PATH = os.path.join(
    REPO_DIR, "scripts/azure/azure_shutdown_script.sh")
AZURE_CLOUD_INIT_PATH = os.path.join(REPO_DIR, "scripts/azure/cloud-init.txt")


def upload_file_to_azure_storage(
        filename,
        container_name,
        connection_str,
        remote_filename=None,
        dry=False,
        check_exists=True):
    if remote_filename is None:
        remote_filename = os.path.basename(filename)
    remote_path = 'doodad/mount/' + remote_filename

    if not dry:
        blob_service_client = blob.BlobServiceClient.from_connection_string(
            connection_str)
        blob_client = blob_service_client.get_blob_client(
            container=container_name, blob=remote_path)
        # Skip the upload if the blob already exists, as the GCP helper does.
        if check_exists and blob_client.exists():
            print('{remote_path} already exists'.format(remote_path=remote_path))
            return remote_path
        with open(filename, 'rb') as f:
            blob_client.upload_blob(f)
    return remote_path
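
A hypothetical call, for illustration only (the file name, container name, and environment variable are placeholders, not values from doodad):

remote = upload_file_to_azure_storage(
    filename='/tmp/model.pkl',              # placeholder local file
    container_name='doodad-container',      # placeholder container
    connection_str=os.environ['AZURE_STORAGE_CONNECTION_STRING'],
    dry=True)  # dry run: compute the remote path without uploading
print(remote)  # doodad/mount/model.pkl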
Example #2
import os
import json
import uuid
import six
import base64
import pprint
import shlex

from doodad.utils import shell
from doodad.utils import safe_import
from doodad import mount
from doodad.apis.ec2.autoconfig import Autoconfig
from doodad.credentials.ec2 import AWSCredentials

googleapiclient = safe_import.try_import('googleapiclient')
googleapiclient.discovery = safe_import.try_import('googleapiclient.discovery')
boto3 = safe_import.try_import('boto3')
botocore = safe_import.try_import('botocore')
from doodad.apis import gcp_util, aws_util


class LaunchMode(object):
    """
    A LaunchMode object is responsible for executing a shell script on a specified platform.

    Args:
        shell_interpreter (str): Interpreter command for script. Default 'sh'
        async_run (bool): If True, run the script asynchronously
            instead of blocking until it completes.
    """
    def __init__(self, shell_interpreter='sh', async_run=False):
        self.shell_interpreter = shell_interpreter
        self.async_run = async_run
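
The docstring's contract suggests how a concrete mode might dispatch on these two settings. A minimal sketch, for illustration only (the LocalShellMode subclass, its run_script method, and the subprocess calls are assumptions, not doodad's confirmed API):

import subprocess

class LocalShellMode(LaunchMode):
    # Hypothetical subclass for illustration.
    def run_script(self, script_filename):
        cmd = [self.shell_interpreter, script_filename]
        if self.async_run:
            # Fire-and-forget: return without waiting.
            subprocess.Popen(cmd)
        else:
            # Block until the script finishes.
            subprocess.check_call(cmd)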
Example #3
import os
import time

from doodad.utils import hash_file, REPO_DIR, safe_import
storage = safe_import.try_import('google.cloud.storage')

GCP_STARTUP_SCRIPT_PATH = os.path.join(REPO_DIR, "scripts/gcp/gcp_startup_script.sh")
GCP_SHUTDOWN_SCRIPT_PATH = os.path.join(REPO_DIR, "scripts/gcp/gcp_shutdown_script.sh")

def make_timekey():
    return '%d' % int(time.time() * 1000)

def upload_file_to_gcp_storage(
    bucket_name,
    file_name,
    remote_filename=None,
    dry=False,
    check_exists=True
):
    if remote_filename is None:
        remote_filename = os.path.basename(file_name)
    remote_path = 'doodad/mount/' + remote_filename
    if not dry:
        storage_client = storage.Client()
        bucket = storage_client.get_bucket(bucket_name)
        blob = bucket.blob(remote_path)
        if check_exists and blob.exists(storage_client):
            print("{remote_path} already exists".format(remote_path=remote_path))
            return remote_path
        blob.upload_from_filename(file_name)
    return remote_path
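
As with the Azure helper, a dry run only computes the destination key without touching GCS. A hypothetical call (the bucket and file names are placeholders):

remote = upload_file_to_gcp_storage(
    bucket_name='my-doodad-bucket',  # placeholder bucket
    file_name='/tmp/model.pkl',
    dry=True)                        # no GCS calls are made
print(remote)  # doodad/mount/model.pkl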
Example #4
    def test_good_import(self):
        import sys  # the real module, used as a reference
        lib = safe_import.try_import('sys')
        self.assertEqual(sys.copyright, lib.copyright)
Example #5
    def test_bad_import_submodule(self):
        lib = safe_import.try_import('bad_library')
        lib.sublib = safe_import.try_import('bad_library.sublib')
        with self.assertRaises(ImportError):
            lib.sublib.do_something()
Example #6
    def test_bad_import(self):
        lib = safe_import.try_import('bad_library')
        with self.assertRaises(ImportError):
            lib.do_something()
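
Taken together, these tests pin down try_import's contract: a successful import returns the real module, while a failed one returns a stub that raises ImportError only when it is actually used. A minimal sketch of that pattern (not doodad's actual implementation) could look like:

import importlib

class _DummyModule(object):
    """Placeholder returned for a failed import; fails lazily on use."""
    def __init__(self, name):
        self._name = name

    def __getattr__(self, attr):
        raise ImportError('could not import %s' % self._name)

def try_import(module_name):
    # Return the real module if it imports cleanly, else a lazy stub.
    try:
        return importlib.import_module(module_name)
    except ImportError:
        return _DummyModule(module_name)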