Example #1
def create_function(self):
    try:
        response = self.client.create_function(self.get_property("name"),
                                               self.get_property("runtime"),
                                               self.get_property("iam", "role"),
                                               self.get_property("handler"),
                                               self.get_property("code"),
                                               self.get_property("environment"),
                                               self.get_property("description"),
                                               self.get_property("time"),
                                               self.get_property("memory"),
                                               self.get_property("tags"))
        if response and 'FunctionArn' in response:
            self.properties["function_arn"] = response['FunctionArn']
        response_parser.parse_lambda_function_creation_response(response,
                                                                self.get_function_name(),
                                                                self.client.get_access_key(),
                                                                self.get_output_type())
    except ClientError as ce:
        error_msg = "Error initializing lambda function."
        # 'logger' is the project's logging wrapper; it takes a short
        # user-facing message and a detailed log message.
        logger.error(error_msg, error_msg + ": %s" % ce)
        utils.finish_failed_execution()
    finally:
        # Remove the files created in the operation
        utils.delete_file(self.properties["zip_file_path"])
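
Every example on this page delegates cleanup to some variant of a delete_file helper (the exact signature differs between projects). A minimal sketch of the single-argument variant, assuming missing files should be tolerated rather than treated as errors:

import logging
import os

logger = logging.getLogger(__name__)

def delete_file(path):
    """Remove 'path' if it exists; ignore missing files, log anything else."""
    try:
        if path and os.path.isfile(path):
            os.remove(path)
    except OSError as e:
        # Cleanup is best-effort: report the problem, do not raise.
        logger.warning("Could not delete '%s': %s", path, e)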
Example #2
def upload_file_to_S3_bucket(image_file, deployment_bucket, file_key):
    # 'scar_temporal_folder', 'zip_file_path' and MAX_S3_PAYLOAD_SIZE
    # are defined elsewhere in the module.
    if utils.get_tree_size(scar_temporal_folder) > MAX_S3_PAYLOAD_SIZE:
        error_msg = "Uncompressed image size greater than 250MB.\nPlease reduce the uncompressed image and try again."
        logger.error(error_msg)
        utils.delete_file(zip_file_path)
        exit(1)

    logger.info("Uploading '%s' to the '%s' S3 bucket" % (image_file, deployment_bucket))
    file_data = utils.read_file(image_file, 'rb')
    S3().upload_file(deployment_bucket, file_key, file_data)
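
The size guard relies on utils.get_tree_size. A plausible sketch of such a helper, walking a directory tree and summing file sizes (illustrative; the project's actual implementation may differ):

import os

def get_tree_size(path):
    """Return the total size in bytes of all regular files under 'path'."""
    total = 0
    for dirpath, _dirnames, filenames in os.walk(path):
        for name in filenames:
            file_path = os.path.join(dirpath, name)
            if not os.path.islink(file_path):
                total += os.path.getsize(file_path)
    return total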
Example #3
def face_detection():
    filename = upload_file(request, UPLOAD_FOLDER)
    if filename:
        try:
            result = find_faces(filename)
            delete_file(filename, UPLOAD_FOLDER)
            return result
        except Exception:
            # Clean up the upload before reporting the failure.
            delete_file(filename, UPLOAD_FOLDER)
            return {'message': "Bad request"}, status.HTTP_400_BAD_REQUEST
    else:
        return {'message': "Bad request"}, status.HTTP_400_BAD_REQUEST
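
The duplicated delete_file call can be folded into a finally clause, which guarantees cleanup on both the success and failure paths. A sketch of that variant (same helpers as above):

def face_detection():
    filename = upload_file(request, UPLOAD_FOLDER)
    if not filename:
        return {'message': "Bad request"}, status.HTTP_400_BAD_REQUEST
    try:
        # The return value is computed before 'finally' runs, so the
        # file is deleted after detection but before the function exits.
        return find_faces(filename)
    except Exception:
        return {'message': "Bad request"}, status.HTTP_400_BAD_REQUEST
    finally:
        delete_file(filename, UPLOAD_FOLDER)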
Example #4
def create_function(self):
    try:
        response = self.client.create_function(**self.get_creations_args())
        if response and 'FunctionArn' in response:
            self.properties["function_arn"] = response['FunctionArn']
        response_parser.parse_lambda_function_creation_response(
            response, self.get_function_name(),
            self.client.get_access_key(), self.get_output_type())
    except ClientError as ce:
        error_msg = "Error initializing lambda function."
        logger.error(error_msg, error_msg + ": %s" % ce)
    finally:
        # Remove the files created in the operation
        utils.delete_file(self.properties["zip_file_path"])
Example #5
def put(self, data):
    UPLOADS_FOLDER = current_app.config['UPLOADS_FOLDER']
    user = current_user

    # 'req' is the incoming request object (e.g. Flask's request).
    if 'picture' in req.files:
        file = req.files['picture']
        filename = save_file(file, UPLOADS_FOLDER)
        data['avatar'] = filename

        # Remove the previous avatar so orphaned files do not accumulate.
        if user.avatar:
            delete_file(UPLOADS_FOLDER, user.avatar)

    user.update(**data)

    return user
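
A plausible shape for the save_file helper used above, assuming a werkzeug-style file object (the name and signature come from the call site; the body is illustrative):

import os
from werkzeug.utils import secure_filename

def save_file(file, folder):
    """Store an uploaded file under 'folder' and return the stored filename."""
    filename = secure_filename(file.filename)
    file.save(os.path.join(folder, filename))
    return filename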
Example #6
async def upload_custom_pois(
    *,
    db: AsyncSession = Depends(deps.get_db),
    file: UploadFile,
    poi_category=Body(..., example=request_examples["poi_category"]),
    current_user: models.User = Depends(deps.get_current_active_user),
) -> Any:
    """Handle uploaded custom pois."""
    defined_uuid = uuid.uuid4().hex
    file_name = defined_uuid + os.path.splitext(file.filename)[1]
    file_dir = f"/tmp/{file_name}"

    real_file_size = 0
    temp: IO = NamedTemporaryFile(delete=False)
    for chunk in file.file:
        real_file_size += len(chunk)
        if real_file_size > MaxUploadFileSize.max_upload_poi_file_size.value:
            temp.close()
            delete_file(temp.name)
            raise HTTPException(
                status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
                detail="The uploaded file is too big; the largest allowed size is %s MB."
                % round(MaxUploadFileSize.max_upload_poi_file_size.value / 1024.0**2, 2),
            )

        temp.write(chunk)
    temp.close()

    # Move the validated upload from the temporary file to its final path
    shutil.move(temp.name, file_dir)
    await crud.upload.upload_custom_pois(
        db=db,
        poi_category=poi_category,
        file=file,
        file_dir=file_dir,
        file_name=file_name,
        current_user=current_user,
    )

    updated_settings = await dynamic_customization.build_main_setting_json(
        db=db, current_user=current_user)

    return updated_settings
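
The chunk-by-chunk size check above is a general pattern for enforcing an upload limit without trusting the Content-Length header. The same idea in isolation (a sketch; MAX_BYTES is a hypothetical limit):

import os
from tempfile import NamedTemporaryFile

from fastapi import HTTPException, UploadFile, status

MAX_BYTES = 10 * 1024 ** 2  # hypothetical 10 MB limit

def spool_upload(file: UploadFile) -> str:
    """Copy an upload to a temp file, enforcing MAX_BYTES as we read."""
    size = 0
    temp = NamedTemporaryFile(delete=False)
    try:
        for chunk in file.file:
            size += len(chunk)
            if size > MAX_BYTES:
                raise HTTPException(
                    status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
                    detail="Upload exceeds %d bytes." % MAX_BYTES,
                )
            temp.write(chunk)
    except HTTPException:
        temp.close()
        os.unlink(temp.name)  # do not leave a partial file behind
        raise
    temp.close()
    return temp.name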
Example #7
    async def upload_custom_pois(
        self,
        *,
        db: AsyncSession,
        file: UploadFile,
        file_dir: str,
        file_name: str,
        poi_category: str,
        current_user: models.User,
    ):
        """Handle uploaded custom pois."""
        # Check if poi_category is already uploaded for study area
        try:
            query_poi_features = (select(models.PoiUser.category).join(
                models.DataUpload).where(
                    and_(
                        models.DataUpload.user_id == current_user.id,
                        models.DataUpload.study_area_id ==
                        current_user.active_study_area_id,
                        models.PoiUser.data_upload_id == models.DataUpload.id,
                        models.PoiUser.category == poi_category,
                    )).limit(1))

            poi_features = await db.execute(query_poi_features)
            poi_features = poi_features.first()
        except Exception:
            delete_file(file_dir)
            raise HTTPException(
                status_code=400,
                detail="Failed reading the file.",
            )

        if poi_features is not None:
            delete_file(file_dir)
            raise HTTPException(
                status_code=400,
                detail="The chosen custom poi category already exists. Please delete the old dataset first if you want to replace it.",
            )

        required_attributes = ["geometry"]
        optional_attributes = [
            "opening_hours",
            "name",
            "street",
            "housenumber",
            "zipcode",
            "wheelchair",
        ]
        # Get active study area
        study_area_obj = await crud.study_area.get(
            db=db, id=current_user.active_study_area_id, extra_fields=["geom"])
        study_area_geom = to_shape(study_area_obj.geom)

        if UploadFileTypes.geojson.value in file_name:
            try:
                gdf = gpd_read_file(file_dir, driver="GeoJSON")
                delete_file(file_dir)
            except Exception:
                delete_file(file_dir)
                raise HTTPException(
                    status_code=400,
                    detail="Failed reading the file into a GeoDataFrame.",
                )
        elif UploadFileTypes.zip.value in file_name:
            unzipped_file_dir = (
                os.path.splitext(file_dir)[0] + "/" +
                file.filename.replace(UploadFileTypes.zip.value, ""))

            # Create directory
            try:
                shutil.unpack_archive(file_dir,
                                      os.path.splitext(file_dir)[0], "zip")
            except Exception:
                clean_unpacked_zip(zip_path=file_dir,
                                   dir_path=file_dir.replace(
                                       UploadFileTypes.zip.value, ""))
                raise HTTPException(status_code=400,
                                    detail="Could not read or process file.")

            # List shapefiles
            try:
                available_shapefiles = [
                    f for f in os.listdir(unzipped_file_dir)
                    if f.endswith(".shp")
                ]
            except Exception:
                clean_unpacked_zip(zip_path=file_dir,
                                   dir_path=file_dir.replace(
                                       UploadFileTypes.zip.value, ""))
                raise HTTPException(status_code=400,
                                    detail="No shapefiles inside folder.")

            # Read shapefiles and append to GeoDataFrame
            if len(available_shapefiles) == 1:
                gdf = gpd_read_file(
                    f"{unzipped_file_dir}/{available_shapefiles[0]}")
            elif len(available_shapefiles) > 1:
                clean_unpacked_zip(zip_path=file_dir,
                                   dir_path=file_dir.replace(
                                       UploadFileTypes.zip.value, ""))
                raise HTTPException(
                    status_code=400,
                    detail="More than one shapefile inside folder.")
            else:
                raise HTTPException(status_code=400,
                                    detail="No shapefiles inside folder.")
            clean_unpacked_zip(zip_path=file_dir,
                               dir_path=file_dir.replace(
                                   UploadFileTypes.zip.value, ""))
        else:
            raise HTTPException(status_code=400, detail="Invalid file type")

        # Convert to EPSG 4326
        gdf_schema = dict(gdf.dtypes)
        if gdf.crs.name == "unknown":
            raise HTTPException(status_code=400, detail="Invalid CRS")
        else:
            # to_crs already leaves the frame in EPSG 4326
            gdf.to_crs(epsg=4326, inplace=True)
            gdf = gdf.clip(study_area_geom)

        # Drop not needed columns
        columns_to_drop = []
        for attribute in gdf_schema:
            if attribute not in optional_attributes + required_attributes:
                columns_to_drop.append(attribute)

        gdf = gdf.drop(columns_to_drop, axis=1)
        if len(gdf) == 0:
            raise HTTPException(
                status_code=400,
                detail="No valid data in file or data outside the study area.")

        # Assign specified category to all points
        gdf["category"] = poi_category

        # Create entry in upload table
        upload_obj = models.DataUpload(
            data_type=file.content_type,
            upload_type=models.PoiUser.__table__.name,
            user_id=current_user.id,
            upload_size=int(file.file.tell() / 1000),
            study_area_id=current_user.active_study_area_id,
        )
        upload_obj = await data_upload.create(db=db, obj_in=upload_obj)

        # Write to database
        try:
            gdf["uid"] = (gdf.centroid.map(
                lambda p: str(format(round(p.x, 4), ".4f")).replace(".", "") +
                "_" + str(format(round(p.y, 4), ".4f")).replace(".", "")) +
                          "_" + str(poi_category))
            gdf["count_uid"] = gdf.groupby(["uid"]).cumcount() + 1
            gdf["uid"] = (gdf["uid"] + "_" + gdf["count_uid"].astype(str) +
                          "_u" + str(upload_obj.id))
            gdf["data_upload_id"] = upload_obj.id

            gdf.rename_geometry("geom", inplace=True)
            gdf.drop(["count_uid"], axis=1, inplace=True)

            gdf.to_postgis(
                name="poi_user",
                schema="customer",
                con=legacy_engine,
                if_exists="append",
                chunksize=1000,
            )

        except Exception:
            await db.execute(
                """DELETE FROM customer.data_upload WHERE id = :data_upload_id""",
                {"data_upload_id": upload_obj.id},
            )
            await db.commit()
            raise HTTPException(
                status_code=400,
                detail="An error occurred while writing the data to the database.",
            )

        try:
            default_poi_categories = (
                await crud.dynamic_customization.get_all_default_poi_categories(db)
            )
            if poi_category not in default_poi_categories:
                hex_color = "#%06x" % random.randint(0, 0xFFFFFF)
                new_setting = {
                    poi_category: {
                        "icon": "fas fa-question",
                        "color": [hex_color]
                    }
                }

                if not check_dict_schema(PoiCategory, new_setting):
                    raise HTTPException(status_code=400,
                                        detail="Invalid JSON-schema")

                await crud.dynamic_customization.insert_opportunity_setting(
                    db=db,
                    current_user=current_user,
                    insert_settings=new_setting,
                    data_upload_id=upload_obj.id)

        except Exception:
            await db.execute(
                """DELETE FROM customer.data_upload WHERE id = :data_upload_id""",
                {"data_upload_id": upload_obj.id},
            )
            await db.commit()
            raise HTTPException(
                status_code=400,
                detail="An error occurred while writing the new settings to the database.",
            )

        return {"msg": "Upload successful"}
Example #8
def clean_tmp_folders(self):
    if os.path.isfile(self.properties['ZipFilePath']):
        utils.delete_file(self.properties['ZipFilePath'])
    # Delete the temporary folder created during the operation
    if os.path.isdir(self.scar_temporal_folder):
        shutil.rmtree(self.scar_temporal_folder, ignore_errors=True)
Example #9
def payload_size_error(zip_file_path, message):
    logger.error(message)
    utils.delete_file(zip_file_path)
    exit(1)
Example #10
def payload_size_error(zip_file_path, message):
    logger.error(message)
    utils.delete_file(zip_file_path)
    utils.finish_failed_execution()
Example #11
def main(input_img_path, ann_path, model_type, metric):
    """
    Runs the training process and saves the model.
    """
    logger = logging.getLogger(__name__)

    # Collect image paths
    input_img_dir = Path(input_img_path)
    imgs_path_train = sorted([
        i.absolute() for i in (input_img_dir / 'train').glob("*.png")
        if i.is_file()
    ])
    imgs_path_val = sorted([
        i.absolute() for i in (input_img_dir / 'val').glob("*.png")
        if i.is_file()
    ])
    imgs_path_test = sorted([
        i.absolute() for i in (input_img_dir / 'test').glob("*.png")
        if i.is_file()
    ])

    # Annotations path
    ann_path_dir = Path(ann_path)
    ann_train_path = ann_path_dir / 'train' / ann_file_name
    ann_val_path = ann_path_dir / 'val' / ann_file_name
    ann_test_path = ann_path_dir / 'test' / ann_file_name

    # Randomly oversample the path lists
    imgs_path_train = create_random_list_of_size(
        imgs_path_train,
        len(imgs_path_train) * multiply_by)
    imgs_path_val = create_random_list_of_size(
        imgs_path_val,
        len(imgs_path_val) * multiply_by)
    imgs_path_test = create_random_list_of_size(
        imgs_path_test,
        len(imgs_path_test) * multiply_by)

    # Generate Data on the fly for train and validation
    data_generator_train = GDXrayDataGenerator(imgs_path_train,
                                               ann_train_path,
                                               labels,
                                               n_classes,
                                               batch_size=batch_size,
                                               dim=dim)
    data_generator_val = GDXrayDataGenerator(imgs_path_val,
                                             ann_val_path,
                                             labels,
                                             n_classes,
                                             batch_size=batch_size,
                                             dim=dim)
    data_generator_test = GDXrayDataGenerator(imgs_path_test,
                                              ann_test_path,
                                              labels,
                                              n_classes,
                                              batch_size=batch_size,
                                              dim=dim)

    # Model Path
    model_path = Path('models') / (model_name % model_type)
    delete_file(model_path)

    # Set-up model selected
    model = Unet(dim, n_classes, n_filters)
    conf, call_backs = get_model_configures(metric, str(model_path))
    model.build_model(**conf)

    # Fit the model (TODO: add timer)
    history = model.fit(x=data_generator_train,
                        steps_per_epoch=len(data_generator_train),
                        validation_data=data_generator_val,
                        epochs=50,
                        verbose=1,
                        callbacks=call_backs)

    # Evaluate model
    result = model.evaluate(data_generator_test)
    print(result)
    print(dict(zip(model.metrics_names, result)))
    model.save_model(str(model_path))

    # Save History
    save_history(metric, history)
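
create_random_list_of_size is not shown in this example; a minimal sketch consistent with how it is called above, assuming it oversamples with replacement:

import random

def create_random_list_of_size(paths, size):
    """Return 'size' elements drawn randomly (with replacement) from 'paths'."""
    return random.choices(paths, k=size)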