Example #1
import os
import tarfile
from datetime import datetime
from tempfile import NamedTemporaryFile

import boto3
from hashids import Hashids

# EVENT_PARSER and the get_tar / get_point_data / make_shp helpers are
# assumed to be defined elsewhere in this module.


def handler(raw_event, context):
    print(raw_event)
    event = EVENT_PARSER(raw_event)

    if event.s3:
        for s3 in event.records:
            # Extract the tree and coordinate CSV objects from the tar.gz file in S3
            tree_csv, point_csv = get_tar(s3)
            points = get_point_data(point_csv)
            # Generate a hash ID to avoid collisions in Lambda's shared /tmp directory
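            # (hashids yields a short string from today's UTC date, salted with the object key)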
            dt = datetime.utcnow()
            hashid = Hashids(s3.object_key).encode(dt.year, dt.month, dt.day)

            shp = make_shp(tree_csv, points, hashid)
            f = NamedTemporaryFile(delete=False)
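            # delete=False keeps the temp file on disk so it can be reopened for the upload below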

            tmp_file = f"{hashid}.tar.gz"
            file_name = f"shp/{tmp_file}"

            root, dirs, files = next(os.walk(os.path.dirname(shp)))
            shp_exts = ['prj', 'shp', 'shx', 'dbf', 'cpg']
            # Keep only the files for this hash ID whose extensions are shapefile components
            files = [name for name in files if name.split('.')[0] == hashid and name.split('.')[-1] in shp_exts]
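            # Bundle the filtered shapefile components into a gzipped tar written to the temp file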
            with tarfile.open(f.name, mode='w:gz') as gz:
                for name in files:
                    file_path = os.path.join(root, name)
                    gz.add(file_path, arcname=name)
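            # Carry the source object's metadata over, prefixed with origin_ to mark provenance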
            metadata = s3.object.get()["Metadata"]
            metadata = {f"origin_{key}": value for key, value in zip(metadata.keys(), metadata.values())}
            metadata['upload_by'] = 'csv2shp'
            metadata['origin_data_bucket'] = s3.bucket_arn
            metadata['origin_data_key'] = s3.object_key

            s3_resource = boto3.resource('s3')
            with open(f.name, 'rb') as result:
                s3_resource.Object(s3.bucket_name, file_name).put(Body=result.read(), ContentEncoding="gzip",
                                                                  Metadata=metadata)
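
For local testing, the handler can be driven with a hand-built event. The sketch below is a minimal example, not part of the original code: the bucket and key names are placeholders, and it assumes EVENT_PARSER accepts a standard S3 put-notification dict.

if __name__ == '__main__':
    # Minimal local-invocation sketch; bucket and key are placeholders.
    sample_event = {
        "Records": [
            {
                "s3": {
                    "bucket": {"name": "example-bucket", "arn": "arn:aws:s3:::example-bucket"},
                    "object": {"key": "upload/sample.tar.gz"},
                }
            }
        ]
    }
    handler(sample_event, None)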