Example #1
def deploy(repo_name):
    for s in config:
        if s['repo_name'] == repo_name:
            git_repo = GitRepo(remote_url=s['repo_url'],
                               local_folder=s['repo_name'])

            if s['deploy_type'] == 's3':
                server = S3Bucket(s['aws_key_id'], s['aws_key'],
                                  s['s3_bucket'])

                prev_hash = server.get_value(VERSION_FILE)
                if '' == prev_hash:
                    files_to_upload = git_repo.all_files()
                    files_to_delete = []
                else:
                    files_to_upload, files_to_delete = \
                            git_repo.changed_files(prev_hash)

                server.upload_files(files_to_upload, all_public=True)
                server.delete_files(files_to_delete)

                server.set_value(VERSION_FILE, git_repo.head_hash())

            elif s['deploy_type'] == 'ssh':
                # SSH deployment is not implemented yet.
                print('did nothing')

            else:
                assert False, \
                        'Unsupported deploy type: %s. Only S3 and SSH are supported.' \
                        % s['deploy_type']
            return
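
As a usage sketch (the config entry below is hypothetical; its keys mirror the lookups made inside deploy(), and all values are placeholders):

# Hypothetical config entry; deploy() scans this list for a matching 'repo_name'.
config = [{
    'repo_name': 'my-site',
    'repo_url': 'https://example.com/my-site.git',
    'deploy_type': 's3',
    'aws_key_id': 'AKIA...',
    'aws_key': '<secret>',
    's3_bucket': 'my-site-bucket',
}]

deploy('my-site')  # uploads only the files changed since the hash stored in VERSION_FILE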
Example #2
def upload_log_files(guid, logs,
                     bucket_name=None, access_key_id=None, access_secret_key=None):
    """Upload all specified logs to Amazon S3.

    :param guid: Unique ID which is used as subfolder name for all log files.
    :param logs: List of log files to upload.
    :param bucket_name: Name of the S3 bucket.
    :param access_key_id: Client ID used for authentication.
    :param access_secret_key: Secret key for authentication.

    """
    # If no AWS credentials are given we don't upload anything.
    if not bucket_name:
        logger.info('No AWS Bucket name specified - skipping upload of artifacts.')
        return {}

    s3_bucket = S3Bucket(bucket_name, access_key_id=access_key_id,
                         access_secret_key=access_secret_key)

    uploaded_logs = {}

    for log in logs:
        try:
            if os.path.isfile(logs[log]):
                remote_path = '{dir}/{filename}'.format(dir=str(guid),
                                                        filename=os.path.basename(log))
                url = s3_bucket.upload(logs[log], remote_path)

                uploaded_logs.update({log: {'path': logs[log], 'url': url}})
                logger.info('Uploaded {path} to {url}'.format(path=logs[log], url=url))

        except Exception:
            logger.exception('Failure uploading "{path}" to S3'.format(path=logs[log]))

    return uploaded_logs
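
A minimal call sketch (all names below are placeholders; logs maps a log's name to its local path, matching the logs[log] lookups above):

# Hypothetical values for illustration only.
uploaded = upload_log_files(
    guid='3f2c9d1e-0000-4000-8000-000000000000',
    logs={'gecko.log': '/tmp/logs/gecko.log'},
    bucket_name='my-log-bucket',
    access_key_id='AKIA...',
    access_secret_key='<secret>')
# uploaded would look like {'gecko.log': {'path': '/tmp/logs/gecko.log', 'url': '...'}}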
Example #3
def get_s3_bucket(credentials_path):
    try:
        with open(credentials_path) as f:
            config_string = f.read()
            s3_config = json.loads(config_string)
            return S3Bucket(s3_config['s3_bucket_name'],
                            s3_config['aws_access_key_id'],
                            s3_config['aws_access_key'], logger)
    except IOError:
        msg = ('S3 credentials file not '
               'found at {0}.'.format(credentials_path))
        logger.error(msg)
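
The credentials file is expected to be JSON containing exactly the three keys read above; a sketch with a hypothetical path and placeholder values:

# Hypothetical credentials.json consumed by get_s3_bucket():
# {
#     "s3_bucket_name": "my-bucket",
#     "aws_access_key_id": "AKIA...",
#     "aws_access_key": "<secret>"
# }
bucket = get_s3_bucket('credentials.json')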
Example #4
def downloadS3Object(bucket_name, key, obj):
    if not bucket_name or not key:
        msg = "Can't get bucket: {}".format(bucket_name)
        log.error(msg)
        return None
    bucket = S3Bucket(bucket_name, log)
    if not bucket:
        msg = "Can't get bucket: {}".format(bucket_name)
        log.error(msg)
        return None
    bucket.downloadFileObj(key, obj)
    obj.seek(0)
    return obj
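
Because downloadS3Object() rewinds the passed file object before returning it, any writable, seekable binary buffer works; a hedged usage sketch (bucket name and key are placeholders):

import io

# Hypothetical bucket/key; obj can be any binary file-like object.
buf = downloadS3Object('my-bucket', 'results/output.csv', io.BytesIO())
if buf is not None:
    data = buf.read()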
Example #5
    def _get_s3_bucket(self):
        """ Returns S3Bucket instance populated based on config.

        Prerequisite: A venv has been created and necessary packages have been
        installed.
        """
        self.info("Setting up S3Bucket.")
        from s3 import S3Bucket
        c = self.config
        dirs = self.query_abs_dirs()
        credentials_path = os.path.join(dirs['base_work_dir'],
                                        c['s3_credentials_path'])
        try:
            with open(credentials_path) as f:
                config_string = f.read()
                s3_config = json.loads(config_string)
                return S3Bucket(s3_config['s3_bucket_name'],
                                s3_config['aws_access_key_id'],
                                s3_config['aws_access_key'],
                                self.log_obj.logger)
        except IOError:
            msg = ('S3 credentials file not '
                   'found at {0}.'.format(credentials_path))
            self.warning(msg)
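
This harness-style variant reads everything from self.config rather than a fixed path; a sketch of the config value it relies on (the path is a placeholder):

# Hypothetical config fragment consumed by _get_s3_bucket(); the credentials
# file itself has the same JSON layout as in Example #3.
config = {
    's3_credentials_path': 'relative/path/to/s3credentials.json',
}
# 'base_work_dir' is provided by query_abs_dirs() at runtime.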
Example #6
def runModel():
    log.info('POST /run')
    try:
        if request.form and request.form['jsonData']:
            parameters = json.loads(request.form['jsonData'])
        else:
            message = "Missing input jsonData!"
            log.error(message)
            return buildFailure(message, 400)

        sendToQueue = parameters.get('sendToQueue', False)

        inputFileName = None
        id = str(uuid.uuid4())
        if (len(request.files) > 0):
            inputCSVFile = request.files['csvFile']
            ext = os.path.splitext(inputCSVFile.filename)[1]
            if sendToQueue:
                bucket = S3Bucket(INPUT_BUCKET, log)
                object = bucket.uploadFileObj(getInputFileKey(id, ext),
                                              inputCSVFile)
                if object:
                    parameters['inputCSVFile'] = {
                        'originalName': inputCSVFile.filename,
                        'bucket_name': object.bucket_name,
                        'key': object.key
                    }
                else:
                    message = "Upload CSV file to S3 failed!"
                    log.error(message)
                    return buildFailure(message, 500)

            else:
                parameters['inputCSVFile'] = inputCSVFile.filename
                inputFileName = getInputFilePath(id, ext)
                inputCSVFile.save(inputFileName)
                if not os.path.isfile(inputFileName):
                    message = "Upload file failed!"
                    log.error(message)
                    return buildFailure(message, 500)
                outputRdsFileName = getOutputFilePath(id, '.rds')
                outputSSFileName = getOutputFilePath(
                    id, extensionMap[SS_FILE_TYPE])
                outputFileName = getOutputFilePath(id, '.out')
                parameters['filename'] = inputFileName
                parameters['outputRdsFilename'] = outputRdsFileName
                parameters['outputFilename'] = outputFileName
        else:
            message = 'No input data (CSV) file, please upload a data file!'
            log.warning(message)
            return buildFailure(message, 400)

        columns = [
            parameters['outcomeC'], parameters['outcomeL'],
            parameters['outcomeR']
        ]
        if 'design' in parameters and parameters['design'] == 1:
            columns += [parameters['strata'], parameters['weight']]
            parameters['weightInfo'] = [{
                'samp.weight': parameters['weight'],
                'strata': parameters['strata']
            }]
        if parameters['covariatesSelection']:
            covariateNameMap = mapCategoricalCovariates(
                parameters['covariatesArr'])
            columns += parameters['covariatesSelection']
            covariates = ' + '.join([
                covariateNameMap[x] for x in parameters['covariatesSelection']
            ])

            if 'effects' in parameters:
                effects = [
                    covariateNameMap[x[0]] + ' * ' + covariateNameMap[x[1]]
                    for x in parameters['effects']
                ]
                effectsPlain = [
                    x[0] + ' * ' + x[1] for x in parameters['effects']
                ]
                if effects:
                    covariates += ' + ' + ' + '.join(effects)
                    parameters['effectsString'] = ' + '.join(effectsPlain)
            parameters['covariates'] = covariates
        parameters['columns'] = columns

        if sendToQueue:
            # Send parameters to queue
            sqs = Queue(log)
            sqs.sendMsgToQueue(
                {
                    'parameters': parameters,
                    'jobId': id,
                    'extension': ext,
                    'jobType': 'fitting'
                }, id)
            return buildSuccess({
                'enqueued': True,
                'jobId': id,
                'message': 'Job "{}" has been added to queue successfully!'.format(
                    parameters.get('jobName', 'PIMixture'))
            })
        else:
            fittingResult = fitting(parameters,
                                    outputSSFileName,
                                    SS_FILE_TYPE,
                                    log,
                                    timeout=FITTING_TIMEOUT)
            if fittingResult['status']:
                return buildSuccess(fittingResult['results'])
            else:
                return buildFailure(fittingResult)

    except Exception as e:
        exc_type, exc_obj, tb = sys.exc_info()
        f = tb.tb_frame
        lineno = tb.tb_lineno
        inputFileName = f.f_code.co_filename
        linecache.checkcache(inputFileName)
        line = linecache.getline(inputFileName, lineno, f.f_globals)
        log.exception("Exception occurred")
        return buildFailure({
            "status": False,
            "message": "An unknown error occurred"
        })
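
runModel() expects a multipart POST to /run with a jsonData form field and a csvFile upload; a client-side sketch using the requests library (URL, parameter values, and file path are assumptions):

import json
import requests

# Hypothetical endpoint and inputs for the /run route handled by runModel().
payload = {'jsonData': json.dumps({
    'sendToQueue': False,
    'outcomeC': 'C', 'outcomeL': 'L', 'outcomeR': 'R',
    'covariatesSelection': [],
})}
with open('input.csv', 'rb') as f:
    resp = requests.post('http://localhost:5000/run',
                         data=payload,
                         files={'csvFile': ('input.csv', f)})
print(resp.status_code, resp.json())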
Example #7
    args = parser.parse_args()

    print('migrate disk storage files to s3')
    print('Clowder dburl: %s, dbname: %s' % (args.dburl, args.dbname))
    print('upload files to S3: region: %s, service endpoint: %s' %
          (args.s3REGION, args.s3endpoint))
    print('S3 bucket: %s' % args.s3bucket)
    print("Clowder Upload folder: %s, diskstorage folder: %s" %
          (args.clowderupload, args.clowderprefix))
    f = None
    total_bytes_uploaded = 0
    collections = [
        'logo', 'uploads', 'thumbnails', 'titles', 'textures', 'previews'
    ]
    try:
        s3bucket = S3Bucket(args.s3bucket, args.s3endpoint, args.s3ID,
                            args.s3KEY, args.s3REGION)
        now = datetime.now()
        dt_string = now.strftime("%d-%m-%YT%H:%M:%S")
        file_path = "%s/migrates-filelist-%s.txt" % (args.outputfolder,
                                                     dt_string)
        directory = os.path.dirname(file_path)
        if not os.path.exists(directory):
            os.mkdir(directory)
        f = open(file_path, "w")
        client = MongoClient(args.dburl)
        db = client.get_database(name=args.dbname)

        for collection in collections:
            try:
                num = db[collection].count_documents({})
                num_not_disk_storage = 0