def to_featureclass(geo,
                    location,
                    overwrite=True,
                    validate=False,
                    sanitize_columns=True,
                    has_m=True,
                    has_z=False):
    """
    Exports a spatially enabled DataFrame to a feature class.

    ===============     ====================================================
    **Argument**        **Description**
    ---------------     ----------------------------------------------------
    location            Required string. This is the output location for the
                        feature class. This should be the path and feature
                        class name.
    ---------------     ----------------------------------------------------
    overwrite           Optional Boolean. If overwrite is true, existing
                        data will be deleted and replaced with the spatial
                        dataframe.
    ---------------     ----------------------------------------------------
    validate            Optional Boolean. If True, the export will check if
                        all the geometry objects are correct upon export.
    ---------------     ----------------------------------------------------
    sanitize_columns    Optional Boolean. If True, column names will be
                        converted to string, invalid characters removed and
                        other checks will be performed. The default is True.
    ---------------     ----------------------------------------------------
    has_m               Optional Boolean to indicate if the data has linear
                        referencing (m) values. The default is True.
    ---------------     ----------------------------------------------------
    has_z               Optional Boolean to indicate if data has elevation
                        (z) values. Default is False.
    ===============     ====================================================


    :returns: A string with the path to the exported feature class.
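
    A minimal usage sketch (hypothetical paths; assumes ``df`` is a
    spatially enabled DataFrame and the geodatabase already exists):

    .. code-block:: python

        # export `df` to a feature class inside a file geodatabase
        fc_path = df.spatial.to_featureclass(location=r"C:\temp\data.gdb\cities")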

    """
    out_location = os.path.dirname(location)
    fc_name = os.path.basename(location)
    df = geo._data
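    # remember the original index and switch to a positional one; the
    # original index is restored before the function returns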
    old_idx = df.index
    df.reset_index(drop=True, inplace=True)
    if geo.name is None:
        raise ValueError("DataFrame must have geometry set.")
    if validate and not geo.validate(strict=True):
        raise ValueError("Mixed geometry types detected, "
                         "cannot export to feature class.")

    # sanitize the column names: coerce to strings and strip invalid characters
    if sanitize_columns:
        _sanitize_column_names(geo, inplace=True)

    # ensure every column name is a string
    columns = df.columns.tolist()
    for col in columns[:]:
        if not isinstance(col, str):
            df.rename(columns={col: str(col)}, inplace=True)

    if HASARCPY:
        # 1. Create the Save Feature Class
        #
        columns = df.columns.tolist()
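        # dummy field used by arcpy.da.ExtendTable to join the schema array
        # onto the feature class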
        join_dummy = "AEIOUYAJC81Z"
        columns.pop(columns.index(df.spatial.name))
        dtypes = [(join_dummy, np.int64)]
        if overwrite and arcpy.Exists(location):
            arcpy.Delete_management(location)
        elif not overwrite and arcpy.Exists(location):
            raise ValueError("overwrite set to False, cannot "
                             "overwrite the existing feature class.")

        notnull = geo._data[geo._name].notnull()
        idx = geo._data[geo._name][notnull].first_valid_index()
        sr = geo._data[geo._name][idx]['spatialReference']
        gt = geo._data[geo._name][idx].geometry_type.upper()
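        # JSON templates for empty geometries of each type; rows whose shape
        # is null are written with the matching template instead of failing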
        null_geom = {
            'point':
            pd.io.json.dumps({
                'x': None,
                'y': None,
                'spatialReference': sr
            }),
            'polyline':
            pd.io.json.dumps({
                'paths': [],
                'spatialReference': sr
            }),
            'polygon':
            pd.io.json.dumps({
                'rings': [],
                'spatialReference': sr
            }),
            'multipoint':
            pd.io.json.dumps({
                'points': [],
                'spatialReference': sr
            })
        }
        sr = geo._data[geo._name][idx].spatial_reference.as_arcpy
        null_geom = null_geom[gt.lower()]

        # arcpy expects "ENABLED" or None rather than booleans
        has_m = "ENABLED" if has_m else None
        has_z = "ENABLED" if has_z else None

        fc = arcpy.CreateFeatureclass_management(out_location,
                                                 spatial_reference=sr,
                                                 geometry_type=gt,
                                                 out_name=fc_name,
                                                 has_m=has_m,
                                                 has_z=has_z)[0]

        # 2. Add the Fields and Data Types
        oidfld = da.Describe(fc)['OIDFieldName']
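        # map each DataFrame column to a numpy dtype understood by ExtendTable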
        for col in columns[:]:
            if col.lower() in ['fid', 'oid', 'objectid']:
                dtypes.append((col, np.int32))
            elif df[col].dtype.name.startswith('datetime64[ns'):
                dtypes.append((col, '<M8[us]'))
            elif df[col].dtype.name == 'object':
                try:
                    u = type(df[col][df[col].first_valid_index()])
                except Exception:
                    u = pd.unique(df[col].apply(type)).tolist()[0]
                if issubclass(u, str):
                    mlen = df[col].str.len().max()
                    dtypes.append((col, '<U%s' % int(mlen)))
                else:
                    try:
                        if df[col][idx] is None:
                            dtypes.append((col, '<U254'))
                        else:
                            dtypes.append((col, type(df[col][idx])))
                    except Exception:
                        dtypes.append((col, '<U254'))
            elif df[col].dtype.name == 'int64':
                dtypes.append((col, np.int64))
            elif df[col].dtype.name == 'bool':
                dtypes.append((col, np.int32))
            else:
                dtypes.append((col, df[col].dtype.type))

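        # an empty structured array that carries only the schema; ExtendTable
        # uses it to add the new fields to the feature class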
        array = np.array([], np.dtype(dtypes))
        arcpy.da.ExtendTable(fc, oidfld, array, join_dummy, append_only=False)

        # 3. Insert the Data
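        # icols feeds the cursor (geometry is written via the SHAPE@JSON
        # token); dfcols pulls the matching values from the DataFrame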
        fields = arcpy.ListFields(fc)
        icols = [fld.name for fld in fields \
                 if fld.type not in ['OID', 'Geometry'] and \
                 fld.name in df.columns] + ['SHAPE@JSON']
        dfcols = [fld.name for fld in fields \
                  if fld.type not in ['OID', 'Geometry'] and\
                  fld.name in df.columns] + [df.spatial.name]

        with da.InsertCursor(fc, icols) as irows:
            dt_fld_idx = [irows.fields.index(col) for col in df.columns \
                          if df[col].dtype.name.startswith('datetime64[ns')]

            def _insert_row(row):
                row[-1] = pd.io.json.dumps(row[-1])
                for idx in dt_fld_idx:
                    if isinstance(row[idx], type(pd.NaT)):
                        row[idx] = None
                irows.insertRow(row)

            q = df[geo._name].isna()
            df.loc[q, geo._name] = null_geom  # fill null shapes with empty-geometry JSON
            np.apply_along_axis(_insert_row, 1, df[dfcols].values)
            df.loc[q, geo._name] = None  # reset the null shapes
        df.set_index(old_idx, inplace=True)  # restore the original index
        return fc
    elif HASPYSHP:
        if not fc_name.endswith('.shp'):
            fc_name = "%s.shp" % fc_name
        if SHPVERSION < [2]:
            res = _pyshp_to_shapefile(df=df,
                                      out_path=out_location,
                                      out_name=fc_name)
            df.set_index(old_idx, inplace=True)
            return res
        else:
            res = _pyshp2(df=df, out_path=out_location, out_name=fc_name)
            df.set_index(old_idx, inplace=True)
            return res
    elif not HASARCPY and not HASPYSHP:
        raise Exception("Cannot export the data without the ArcPy or PyShp "
                        "modules. Please install one of them and try again.")
    else:
        df.set_index(old_idx, inplace=True)
        return None


def to_table(geo, location, overwrite=True):
    """
    Exports a spatially enabled DataFrame to a table.

    ===========================     ====================================================================
    **Argument**                    **Description**
    ---------------------------     --------------------------------------------------------------------
    location                        Required string. The output location for the
                                    table; this should be the path and table name.
    ---------------------------     --------------------------------------------------------------------
    overwrite                       Optional Boolean. If True (the default) and the
                                    table exists, it will be deleted and overwritten.
                                    If False and the table exists, an exception will
                                    be raised.
    ===========================     ====================================================================

    :returns: A string with the path to the exported table.
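
    A minimal usage sketch (hypothetical path; assumes ``df`` is a
    spatially enabled DataFrame):

    .. code-block:: python

        # export the attribute data of `df` to a geodatabase table
        tbl_path = df.spatial.to_table(location=r"C:\temp\data.gdb\cities_table")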
    """
    out_location = os.path.dirname(location)
    fc_name = os.path.basename(location)
    df = geo._data
    if location.lower().endswith('.csv'):
        df.to_csv(location)
        return location
    elif HASARCPY:
        columns = df.columns.tolist()
        join_dummy = "AEIOUYAJC81Z"
        try:
            # drop the geometry column if one is present
            columns.pop(columns.index(df.spatial.name))
        except Exception:
            pass
        dtypes = [(join_dummy, np.int64)]
        if overwrite and arcpy.Exists(location):
            arcpy.Delete_management(location)
        elif not overwrite and arcpy.Exists(location):
            raise ValueError("overwrite set to False, cannot "
                             "overwrite the table.")
        fc = arcpy.CreateTable_management(out_path=out_location,
                                          out_name=fc_name)[0]
        # 2. Add the Fields and Data Types
        #
        oidfld = da.Describe(fc)['OIDFieldName']
        for col in columns[:]:
            if col.lower() in ['fid', 'oid', 'objectid']:
                dtypes.append((col, np.int32))
            elif df[col].dtype.name.startswith('datetime64[ns'):
                dtypes.append((col, '<M8[us]'))
            elif df[col].dtype.name == 'object':
                try:
                    u = type(df[col][df[col].first_valid_index()])
                except Exception:
                    u = pd.unique(df[col].apply(type)).tolist()[0]
                if issubclass(u, str):
                    mlen = df[col].str.len().max()
                    dtypes.append((col, '<U%s' % int(mlen)))
                else:
                    try:
                        dtypes.append((col, type(df[col][df[col].first_valid_index()])))
                    except Exception:
                        dtypes.append((col, '<U254'))
            elif df[col].dtype.name == 'int64':
                dtypes.append((col, np.int64))
            elif df[col].dtype.name == 'bool':
                dtypes.append((col, np.int32))
            else:
                dtypes.append((col, df[col].dtype.type))

        array = np.array([], np.dtype(dtypes))
        arcpy.da.ExtendTable(fc, oidfld, array, join_dummy, append_only=False)
        # 3. Insert the Data
        #
        fields = arcpy.ListFields(fc)
        icols = [fld.name for fld in fields \
                 if fld.type not in ['OID', 'Geometry'] and \
                 fld.name in df.columns]
        dfcols = [fld.name for fld in fields \
                  if fld.type not in ['OID', 'Geometry'] and\
                  fld.name in df.columns]
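        # rows are inserted one at a time; a row that fails to insert is
        # reported and skipped rather than aborting the export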
        with da.InsertCursor(fc, icols) as irows:
            for idx, row in df[dfcols].iterrows():
                try:
                    irows.insertRow(row.tolist())
                except Exception:
                    print("row %s could not be inserted." % idx)
        return fc

    return None