def get_data_object_txt(response_content, table_name, table_infos_geojson, database_id):
    """
    Perform a SELECT on the DB to retreive infos on associated object, txt format
    Param: table_name : name of the table
    """

    select_columns = table_infos_geojson.get(table_name).get('select_col')
    select_string = "SELECT %s FROM %s" \
                    % (select_columns, table_name)

    cursor = query_db(select_string, database_id)
    for row in cursor.fetchall():
        data = zip([column[0] for column in cursor.description], row)
        stringGeometry = ""
        stringKey = ""
        for attr in data:
            key = attr[0]
            val = attr[1]

            if key == "geom":
                stringGeometry = val
            else:
                stringKey = val

        response_content.append("%s,%s" % (stringKey, stringGeometry))
def get_data_object(response_content, table_name, where_string, complement_string, table_infos, testing, database_id):
    """
    Perform a SELECT on the DB to retreive infos on associated object
    Param: table_name : name of the table
    """
    test_string = ""
    if testing:
        test_string = " LIMIT 1"
    select_columns = table_infos.get(table_name).get('select_col')
    select_string = "SELECT %s FROM %s %s %s %s" \
                    % (select_columns, table_name, where_string, complement_string, test_string)

    cursor = query_db(select_string, database_id)
    for row in cursor.fetchall():
        data = zip([column[0] for column in cursor.description], row)
        feat_dict = SortedDict({})
        for attr in data:
            key = attr[0]
            val = attr[1]
            if type(val).__name__ == "date":
                val = val.strftime("%d/%m/%Y")

            new_key = table_infos.get(table_name).get('db_to_json_columns').get(key)

            feat_dict[new_key] = val

        response_content.append(feat_dict)
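

# Illustrative sketch (not part of the original code): how get_data_object()'s
# where_string and complement_string slots combine into the final SQL. The table,
# filter and ordering below are example values only; FAUNE_TABLE_INFOS is the
# column-mapping setting this module already relies on.
def _example_filtered_rows(database_id, testing=False):
    from django.conf import settings

    rows = []
    get_data_object(
        rows,
        'contactfaune.t_fiches_cf',        # table_name (example)
        "WHERE supprime = 'False'",        # where_string, inserted verbatim after FROM
        "ORDER BY dateobs DESC",           # complement_string, appended after the WHERE clause
        settings.FAUNE_TABLE_INFOS,
        testing,                           # True appends " LIMIT 1" for test runs
        database_id,
    )
    return rows
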
def get_data_object_geojson(response_content, table_name, database_id):
    """
    Perform a SELECT on the DB to retrieve info on the associated object, geojson format
    Param: table_name : name of the table
    """

    select_columns = settings.FAUNE_TABLE_INFOS_GEOJSON.get(table_name).get('select_col')
    select_string = "SELECT %s FROM %s" \
                    % (select_columns, table_name)

    cursor = query_db(select_string, database_id)
    i = 0  # feature index
    for row in cursor.fetchall():
        data = zip([column[0] for column in cursor.description], row)
        feat_dict = SortedDict({"type": "Feature", "id": i})
        properties_dict = SortedDict({})
        for attr in data:
            key = attr[0]
            val = attr[1]
            if type(val).__name__ == "date":
                val = val.strftime("%d/%m/%Y")

            if key == "geom":
                geom = loads(val)
                geometry_dict = dumps(geom)
            else:
                new_key = settings.FAUNE_TABLE_INFOS_GEOJSON.get(table_name).get('db_to_json_columns').get(key)
                properties_dict[new_key] = val

        feat_dict["properties"] = properties_dict
        feat_dict["geometry"] = simplejson.loads(geometry_dict)

        i = i + 1
        response_content.append(feat_dict)
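

# Illustrative sketch (not part of the original code): wrapping the features
# accumulated by get_data_object_geojson() into a GeoJSON FeatureCollection.
# The helper name is an assumption; a plain dict is used so the sketch does not
# depend on SortedDict.
def _example_feature_collection(table_name, database_id):
    features = []
    get_data_object_geojson(features, table_name, database_id)
    # A FeatureCollection is simply a wrapper object around the feature list.
    return {"type": "FeatureCollection", "features": features}
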
    def test_import(self):
        
        old_db = django.db.connections['default']
        try:
            # Rebuild the connection wrapper on a copy of its settings so that
            # changing NAME below does not leak into old_db, then point the
            # default connection at the test database.
            django.db.connections['default'] = old_db.__class__(dict(old_db.settings_dict))
            django.db.connections['default'].settings_dict['NAME'] = 'appli_faune'
            
            data = open('faune/data.json').read()
            json_data = simplejson.loads(data)
            inputData = EasyDict(json_data)
            
            resp = self.client.post('/import/', {'data': data, 'token': '666'})
            self.assertEqual(resp.status_code, 200)
            
            json_data = simplejson.loads(resp.content)
            d = EasyDict(json_data)
            
            # Check that data has been inserted
            # sheet
            selectString = "SELECT insee, dateobs,altitude_saisie, altitude_sig, altitude_retenue, supprime, pdop, the_geom_27572, the_geom_2154, saisie_initiale, id_organisme, srid_dessin, id_protocole, id_lot, the_geom_3857 FROM contactfaune.t_fiches_cf WHERE id_cf = %s" % (d.id_sheet)
            cursor = query_db(selectString)
            assertDict = {
                'insee': lambda val: self.assertEqual(val, None),
                'dateobs': lambda val: self.assertEqual(val, datetime.date(2012, 9, 19)),
                'altitude_saisie': lambda val: self.assertEqual(val, None),
                'altitude_sig': lambda val: self.assertEqual(val, 0),
                'altitude_retenue': lambda val: self.assertEqual(val,0),
                'supprime': lambda val: self.assertEqual(val, False),
                'pdop': lambda val: self.assertEqual(val, 22),
                'the_geom_27572': lambda val: self.assertEqual(val, '0101000020B46B000096855BCD99DA1241B26AFE7355364141'),
                'the_geom_2154': lambda val: self.assertEqual(val, '01010000206A080000A0C81FCB83E915418E90CC4D16875941'),
                'saisie_initiale': lambda val: self.assertEqual(val, 'pda'),
                'id_organisme': lambda val: self.assertEqual(val, 2),
                'srid_dessin': lambda val: self.assertEqual(val, 2154),
                'id_protocole': lambda val: self.assertEqual(val, 140),
                'id_lot': lambda val: self.assertEqual(val, 4),
                'the_geom_3857': lambda val: self.assertEqual(val, '0101000020110F00002A6DE372E38404C18C60D12226D15641')
            }            
            for row in cursor.fetchall():
                data = zip([column[0] for column in cursor.description], row)
                for key, val in data:
                    assertDict[key](val)

            ids_statements = d.ids_statements.split(',')
            # statement 1
            selectString = "SELECT id_releve_cf, id_taxon, id_critere_cf, am, af, ai, na, sai, jeune, yearling, cd_ref_origine, nom_taxon_saisi, commentaire, supprime, prelevement FROM contactfaune.t_releves_cf WHERE id_cf = %s and id_releve_cf = %s" % (d.id_sheet, ids_statements[0])
            cursor = query_db(selectString)
            assertDict = {
                'id_releve_cf': lambda val: self.assertEqual(val,401382208),
                'id_taxon': lambda val: self.assertEqual(val,1),
                'id_critere_cf': lambda val: self.assertEqual(val,1),
                'am': lambda val: self.assertEqual(val,1),
                'af': lambda val: self.assertEqual(val,0),
                'ai': lambda val: self.assertEqual(val,0),
                'na': lambda val: self.assertEqual(val,0),
                'sai': lambda val: self.assertEqual(val,0),
                'jeune': lambda val: self.assertEqual(val,0),
                'yearling': lambda val: self.assertEqual(val,0),
                'cd_ref_origine': lambda val: self.assertEqual(val,267),
                'nom_taxon_saisi': lambda val: self.assertEqual(val,'taxon 1'),
                'commentaire': lambda val: self.assertEqual(val,'Lorem ipsum dolor sit amet, consectetur adipiscing elit.'),
                'supprime': lambda val: self.assertEqual(val,False),
                'prelevement': lambda val: self.assertEqual(val,False)
            }            
            for row in cursor.fetchall():
                data = zip([column[0] for column in cursor.description], row)
                for key, val in data:
                    assertDict[key](val)

            # statement 2
            selectString = "SELECT id_releve_cf, id_taxon, id_critere_cf, am, af, ai, na, sai, jeune, yearling, cd_ref_origine, nom_taxon_saisi, commentaire, supprime, prelevement FROM contactfaune.t_releves_cf WHERE id_cf = %s and id_releve_cf = %s" % (d.id_sheet, ids_statements[1])
            cursor = query_db(selectString)
            assertDict = {
                'id_releve_cf': lambda val: self.assertEqual(val,401382405),
                'id_taxon': lambda val: self.assertEqual(val,2),
                'id_critere_cf': lambda val: self.assertEqual(val,2),
                'am': lambda val: self.assertEqual(val,0),
                'af': lambda val: self.assertEqual(val,2),
                'ai': lambda val: self.assertEqual(val,0),
                'na': lambda val: self.assertEqual(val,0),
                'sai': lambda val: self.assertEqual(val,0),
                'jeune': lambda val: self.assertEqual(val,0),
                'yearling': lambda val: self.assertEqual(val,0),
                'cd_ref_origine': lambda val: self.assertEqual(val,259),
                'nom_taxon_saisi': lambda val: self.assertEqual(val,'taxon 2'),
                'commentaire': lambda val: self.assertEqual(val,'Lorem ipsum dolor sit amet, consectetur adipiscing elit.'),
                'supprime': lambda val: self.assertEqual(val,False),
                'prelevement': lambda val: self.assertEqual(val,False)
            }            
            for row in cursor.fetchall():
                data = zip([column[0] for column in cursor.description], row)
                for key, val in data:
                    assertDict[key](val)
                    
            
            # Observers
            for observer in inputData.observers_id:
                selectString = "SELECT count(*) FROM contactfaune.cor_role_fiche_cf WHERE id_cf = %s AND id_role = %s" % (d.id_sheet, observer)
                cursor = query_db(selectString)
                row = cursor.fetchone()
                self.assertEqual(row[0],1)

        finally:
            # At the end, remove inserted row in database
            deleteString = "DELETE FROM contactfaune.t_releves_cf WHERE id_cf = %s" % (d.id_sheet)
            query_db(deleteString)
            deleteString = "DELETE FROM contactfaune.t_fiches_cf WHERE id_cf = %s" % (d.id_sheet)
            query_db(deleteString)
            deleteString = "DELETE FROM contactfaune.cor_role_fiche_cf WHERE id_cf = %s and id_role in (%s)" % (d.id_sheet, ','.join(map(str, inputData.observers_id)))
            query_db(deleteString)
            commit_transaction()
            
            django.db.connections['default'] = old_db         
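

# Illustrative sketch (not part of the original code): the save/patch/restore dance
# around django.db.connections['default'] in test_import() could be factored into a
# context manager. The helper name is an assumption.
from contextlib import contextmanager

@contextmanager
def _example_use_database(db_name):
    import django.db

    old_db = django.db.connections['default']
    try:
        # Rebuild the connection wrapper on a copy of its settings so the NAME
        # change below does not leak into the saved connection, then switch DB.
        django.db.connections['default'] = old_db.__class__(dict(old_db.settings_dict))
        django.db.connections['default'].settings_dict['NAME'] = db_name
        yield
    finally:
        django.db.connections['default'] = old_db

# Usage inside a test:
#     with _example_use_database('appli_faune'):
#         ...  # run queries against the appli_faune database
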
def import_data_fmi(json_data, data):
    """
    Import data for fauna, mortality and invertebrate (fmi)
    """

    response_content = {}

    if json_data['input_type'] == 'fauna' or json_data['input_type'] == 'mortality':
        table_infos = settings.FAUNE_TABLE_INFOS
        table_sheet = settings.TABLE_FAUNA_SHEET
        table_statement = settings.TABLE_FAUNA_STATEMENT
        database_id = settings.DB_FAUNA
    elif json_data['input_type'] == 'invertebrate':
        table_infos = settings.INV_TABLE_INFOS
        table_sheet = settings.TABLE_INV_SHEET
        table_statement = settings.TABLE_INV_STATEMENT
        database_id = settings.DB_INV

    d = EasyDict(json_data)

    bad_id = False
    # Check that the IDs do not already exist in the database
    count_string = "SELECT count(*) FROM %s WHERE %s='%s'" % (table_sheet, table_infos.get(table_sheet).get('id_col'), d.id)
    cursor = query_db(count_string, database_id)
    row = cursor.fetchone()
    if row:
        datarow = zip([column[0] for column in cursor.description], row)
        val = datarow[0][1]
        if val == 1:
            bad_id = True
            response_content.update({
                'status_code': _("1"),
                'status_message': _("Existing ID in database (%s) (%s)") % (table_sheet, d.id)
            })
    for taxon in d.taxons:
        count_string = "SELECT count(*) FROM %s WHERE %s='%s'" % (table_statement, table_infos.get(table_statement).get('id_col'), taxon.id)
        cursor = query_db(count_string, database_id)
        row = cursor.fetchone()
        if row:
            datarow = zip([column[0] for column in cursor.description], row)
            val = datarow[0][1]
            if val == 1:
                bad_id = True
                response_content.update({
                    'status_code': _("1"),
                    'status_message': _("Existing ID in database (%s) (%s)") % (table_statement, taxon.id)
                })

    if not bad_id:
        try:
            objects = []
            new_feature = {}
            json_to_db = table_infos.get(table_sheet).get('json_to_db_columns')

            # Insert into TABLE_SHEET
            new_feature[table_infos.get(table_sheet).get('id_col')] = d.id
            new_feature['table_name'] = table_sheet
            date_obs = d.dateobs.split(" ")
            new_feature[json_to_db.get('dateobs')] = date_obs[0]
            if json_data['input_type'] == 'invertebrate':
                new_feature[json_to_db.get('heure')] = date_obs[1].split(":")[0]
                new_feature[json_to_db.get('environment')] = d.environment

            new_feature[json_to_db.get('initial_input')] = d.initial_input
            new_feature['supprime'] = 'False'
            new_feature['id_organisme'] = settings.FAUNA_ID_ORGANISM

            if json_data['input_type'] == 'fauna':
                new_feature['id_protocole'] = settings.FAUNA_ID_PROTOCOL
                new_feature['id_lot'] = settings.FAUNA_ID_LOT
            if json_data['input_type'] == 'mortality':
                new_feature['id_protocole'] = settings.MORTALITY_ID_PROTOCOL
                new_feature['id_lot'] = settings.MORTALITY_ID_LOT
            if json_data['input_type'] == 'invertebrate':
                new_feature['id_protocole'] = settings.INV_ID_PROTOCOL
                new_feature['id_lot'] = settings.INV_ID_LOT

            # we need to transform into 2154
            new_feature[json_to_db.get('geometry')] = "st_transform(ST_GeomFromText('POINT(%s %s)', 4326),2154)" % (d.geolocation.longitude, d.geolocation.latitude)
            new_feature[json_to_db.get('accuracy')] = d.geolocation.accuracy
            objects.append(new_feature)
            cursor = sync_db(objects, table_infos, database_id)

            # Insert into TABLE_STATEMENT
            statement_ids = []
            for taxon in d.taxons:
                statement_ids.append(taxon.id)
                objects = []
                new_feature = {}
                json_to_db = table_infos.get(table_statement).get('json_to_db_columns')
                new_feature['table_name'] = table_statement
                new_feature['supprime'] = 'False'
                new_feature[table_infos.get(table_statement).get('id_col')] = taxon.id
                new_feature[table_infos.get(table_sheet).get('id_col')] = d.id
                new_feature[json_to_db.get('id')] = taxon.id_taxon
                new_feature[json_to_db.get('name_entered')] = taxon.name_entered

                if json_data['input_type'] == 'fauna':
                    new_feature[json_to_db.get('adult_male')] = taxon.counting.adult_male
                    new_feature[json_to_db.get('adult_female')] = taxon.counting.adult_female
                    new_feature[json_to_db.get('adult')] = taxon.counting.adult
                    new_feature[json_to_db.get('not_adult')] = taxon.counting.not_adult
                    new_feature[json_to_db.get('young')] = taxon.counting.young
                    new_feature[json_to_db.get('yearling')] = taxon.counting.yearling
                    new_feature[json_to_db.get('sex_age_unspecified')] = taxon.counting.sex_age_unspecified
                    new_feature[json_to_db.get('criterion')] = taxon.observation.criterion
                if json_data['input_type'] == 'mortality':
                    new_feature[json_to_db.get('adult_male')] = taxon.mortality.adult_male
                    new_feature[json_to_db.get('adult_female')] = taxon.mortality.adult_female
                    new_feature[json_to_db.get('adult')] = taxon.mortality.adult
                    new_feature[json_to_db.get('not_adult')] = taxon.mortality.not_adult
                    new_feature[json_to_db.get('young')] = taxon.mortality.young
                    new_feature[json_to_db.get('yearling')] = taxon.mortality.yearling
                    new_feature[json_to_db.get('sex_age_unspecified')] = taxon.mortality.sex_age_unspecified
                    new_feature[json_to_db.get('sample')] = taxon.mortality.sample
                    new_feature[json_to_db.get('criterion')] = taxon.observation.criterion
                if json_data['input_type'] == 'invertebrate':
                    new_feature[json_to_db.get('adult_male')] = taxon.counting.adult_male
                    new_feature[json_to_db.get('adult_female')] = taxon.counting.adult_female
                    new_feature[json_to_db.get('adult')] = taxon.counting.adult
                    new_feature[json_to_db.get('not_adult')] = taxon.counting.not_adult
                    new_feature[json_to_db.get('criterion')] = taxon.observation.criterion

                new_feature[json_to_db.get('comment')] = taxon.comment

                objects.append(new_feature)
                cursor = sync_db(objects, table_infos, database_id)

            # Insert into TABLE_SHEET_ROLE (multiple observers possible)
            for observer in d.observers_id:
                objects = []
                new_feature = {}

                if json_data['input_type'] == 'fauna' or json_data['input_type'] == 'mortality':
                    new_feature['table_name'] = settings.TABLE_FAUNA_SHEET_ROLE
                    new_feature['id_cf'] = d.id
                if json_data['input_type'] == 'invertebrate':
                    new_feature['table_name'] = settings.TABLE_INV_SHEET_ROLE
                    new_feature['id_inv'] = d.id

                new_feature['id_role'] = observer
                objects.append(new_feature)
                sync_db(objects, table_infos, database_id)

            # Commit transaction
            commit_transaction(database_id)

            response_content.update({
                'status_code': _("0"),
                'status_message': "id_sheet: %s, ids_statements: %s" % (d.id, ','.join(map(str, statement_ids)))
            })
        except Exception, e:
            # Insert rejected JSON into synchro_table (text format)
            id_failed = archive_bad_data(data, json_data)

            response_content.update({
                'status_code': _("1"),
                'status_message': _("Bad json or data (%d)") % id_failed
            })

    return response_content
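

# Illustrative sketch (not part of the original code): one plausible shape for the
# '/import/' dispatcher exercised by test_import() above. Only the dispatch on
# json_data['input_type'] follows from import_data_fmi() / import_data_flora();
# the 'flora' value and the absence of token handling here are assumptions.
def _example_import_dispatch(data):
    import simplejson

    json_data = simplejson.loads(data)
    if json_data['input_type'] in ('fauna', 'mortality', 'invertebrate'):
        return import_data_fmi(json_data, data)
    elif json_data['input_type'] == 'flora':
        return import_data_flora(json_data, data)
    # Unknown input types would need their own error response here.
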
def import_data_flora(json_data, data):
    """
    Import data for flora
    """

    response_content = {}

    table_infos = settings.FLORA_TABLE_INFOS
    table_apresence = settings.TABLE_FLORA_T_APRESENCE
    table_zprospection = settings.TABLE_FLORA_T_ZPROSPECTION
    database_id = settings.DB_FLORA

    d = EasyDict(json_data)

    bad_id = False
    # Check that the IDs do not already exist in the database
    count_string = "SELECT count(*) FROM %s WHERE %s='%s'" % (table_zprospection, table_infos.get(table_zprospection).get('id_col'), d.id)
    cursor = query_db(count_string, database_id)
    row = cursor.fetchone()
    if row:
        datarow = zip([column[0] for column in cursor.description], row)
        val = datarow[0][1]
        if val == 1:
            bad_id = True
            response_content.update({
                'status_code': _("1"),
                'status_message': _("Existing ID in database (%s) (%s)") % (table_zprospection, d.id)
            })
    for taxon in d.taxons:
        for area in taxon.areas:
            count_string = "SELECT count(*) FROM %s WHERE %s='%s'" % (table_apresence, table_infos.get(table_apresence).get('id_col'), area.id)
            cursor = query_db(count_string, database_id)
            row = cursor.fetchone()
            if row:
                datarow = zip([column[0] for column in cursor.description], row)
                val = datarow[0][1]
                if val == 1:
                    bad_id = True
                    response_content.update({
                        'status_code': _("1"),
                        'status_message': _("Existing ID in database (%s) (%s)") % (table_apresence, area.id)
                    })
        # even though the JSON offers a list, there is only one taxon
        break

    if not bad_id:
        try:
            objects = []
            new_feature = {}
            json_to_db = table_infos.get(table_zprospection).get('json_to_db_columns')
            areas_ids = []
            for taxon in d.taxons:
                # Insert into ZPROSPECTION
                new_feature[table_infos.get(table_zprospection).get('id_col')] = d.id
                new_feature['table_name'] = table_zprospection
                date_obs = d.dateobs.split(" ")
                new_feature[json_to_db.get('dateobs')] = date_obs[0]
                new_feature[json_to_db.get('initial_input')] = d.initial_input
                new_feature['supprime'] = 'False'
                new_feature[json_to_db.get('name_entered')] = taxon.name_entered
                new_feature[json_to_db.get('id_taxon')] = taxon.id_taxon
                new_feature['id_organisme'] = settings.FLORA_ID_ORGANISM
                # added by Gil
                new_feature['id_protocole'] = settings.FLORA_ID_PROTOCOL
                new_feature['id_lot'] = settings.FLORA_ID_LOT

                # we need to transform geometry into 2154
                string_geom = get_geometry_string_from_coords(taxon.prospecting_area.feature.geometry.coordinates, taxon.prospecting_area.feature.geometry.type)
                new_feature[json_to_db.get('geometry')] = string_geom

                objects.append(new_feature)
                cursor = sync_db(objects, table_infos, database_id)

                # Insert into APRESENCE
                for area in taxon.areas:
                    areas_ids.append(area.id)
                    objects = []
                    new_feature = {}
                    json_to_db = table_infos.get(table_apresence).get('json_to_db_columns')
                    new_feature['table_name'] = table_apresence
                    new_feature['supprime'] = 'False'
                    new_feature[table_infos.get(table_apresence).get('id_col')] = area.id
                    new_feature[table_infos.get(table_zprospection).get('id_col')] = d.id
                    new_feature[json_to_db.get('id')] = area.id
                    new_feature[json_to_db.get('phenology')] = area.phenology
                    new_feature[json_to_db.get('computed_area')] = area.computed_area

                    if area.frequency.type == "estimation":
                        new_feature[json_to_db.get('frequenceap')] = area.frequency.value
                        new_feature[json_to_db.get('id_frequence_methodo_new')] = settings.FLORA_FREQUENCY_ESTIMATION
                    if area.frequency.type == "transect":
                        new_feature[json_to_db.get('frequenceap')] = area.frequency.value
                        new_feature[json_to_db.get('nb_transects_frequence')] = area.frequency.transects
                        new_feature[json_to_db.get('nb_points_frequence')] = area.frequency.transect_no  # TODO check
                        new_feature[json_to_db.get('nb_contacts_frequence')] = area.frequency.transect_yes  # TODO check
                        new_feature[json_to_db.get('longueur_pas')] = area.frequency.computed_recommended_step
                        new_feature[json_to_db.get('id_frequence_methodo_new')] = settings.FLORA_FREQUENCY_TRANSECT

                    string_geom = get_geometry_string_from_coords(area.feature.geometry.coordinates, area.feature.geometry.type)
                    new_feature[json_to_db.get('geometry')] = string_geom

                    if area.counting.type == "none":
                        new_feature[json_to_db.get('id_comptage_methodo')] = settings.FLORA_COUNTING_NONE
                    if area.counting.type == "exhaustive":
                        new_feature[json_to_db.get('total_steriles')] = area.counting.total_sterile
                        new_feature[json_to_db.get('total_fertiles')] = area.counting.total_fertile
                        new_feature[json_to_db.get('id_comptage_methodo')] = settings.FLORA_COUTING_EXHAUSTIVE
                    if area.counting.type == "sampling":
                        new_feature[json_to_db.get('total_steriles')] = area.counting.total_sterile
                        new_feature[json_to_db.get('total_fertiles')] = area.counting.total_fertile
                        new_feature[json_to_db.get('nb_placettes_comptage')] = area.counting.plots
                        new_feature[json_to_db.get('surface_placette_comptage')] = area.counting.plot_surface
                        new_feature[json_to_db.get('effectif_placettes_steriles')] = area.counting.sterile
                        new_feature[json_to_db.get('effectif_placettes_fertiles')] = area.counting.fertile
                        new_feature[json_to_db.get('id_comptage_methodo')] = settings.FLORA_COUTING_SAMPLING

                    new_feature[json_to_db.get('comment')] = area.comment

                    objects.append(new_feature)
                    cursor = sync_db(objects, table_infos, database_id)

                    # Physiognomies
                    for physiognomy in area.physiognomy:
                        objects = []
                        new_feature = {}

                        new_feature['table_name'] = settings.TABLE_FLORA_COR_AP_PHYSIONOMIE
                        new_feature['indexap'] = area.id
                        new_feature['id_physionomie'] = physiognomy
                        objects.append(new_feature)
                        sync_db(objects, table_infos, database_id)

                    # Disturbances
                    for disturbance in area.disturbances:
                        objects = []
                        new_feature = {}

                        new_feature['table_name'] = settings.TABLE_FLORA_COR_AP_PERTURB
                        new_feature['indexap'] = area.id
                        new_feature['codeper'] = disturbance
                        objects.append(new_feature)
                        sync_db(objects, table_infos, database_id)

                break  # even though the JSON offers a list, there is only one taxon

            # Insert observers into TABLE_FLORA_COR_ZP_OBS (multiple observers possible)
            for observer in d.observers_id:
                objects = []
                new_feature = {}

                new_feature['table_name'] = settings.TABLE_FLORA_COR_ZP_OBS
                new_feature['indexzp'] = d.id

                new_feature['codeobs'] = observer
                objects.append(new_feature)
                sync_db(objects, table_infos, database_id)

            # Commit transaction
            commit_transaction(database_id)
            
            # Sync external DB
            cmd = "%s%s" % (settings.SYNC_DB_CMD, d.id)
            os.system(cmd)

            response_content.update({
                'status_code': _("0"),
                'status_message': "id_prospection: %s, ids_areas: %s" % (d.id, ','.join(map(str, areas_ids)))
            })
        except Exception, e:
            # Insert rejected JSON into synchro_table (text format)
            id_failed = archive_bad_data(data, json_data)

            response_content.update({
                'status_code': _("1"),
                'status_message': _("Bad json or data (%d)") % id_failed
            })

    return response_content
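

# Illustrative sketch (not part of the original code): a minimal flora payload whose
# shape is inferred from the attribute accesses in import_data_flora() above. The
# field names match what the code reads; the concrete values are invented examples.
_EXAMPLE_FLORA_PAYLOAD = {
    'input_type': 'flora',   # assumed value; import_data_flora() itself does not check it
    'id': 123456,
    'dateobs': '2012-09-19 10:30:00',
    'initial_input': 'pda',
    'observers_id': [2],
    'taxons': [{
        'id_taxon': 1,
        'name_entered': 'taxon 1',
        'prospecting_area': {'feature': {'geometry': {'type': 'Point', 'coordinates': [6.1, 44.5]}}},
        'areas': [{
            'id': 654321,
            'phenology': 1,
            'computed_area': 10,
            'comment': '',
            'frequency': {'type': 'estimation', 'value': 50},
            'counting': {'type': 'none'},
            'physiognomy': [],
            'disturbances': [],
            'feature': {'geometry': {'type': 'Point', 'coordinates': [6.1, 44.5]}},
        }],
    }],
}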