Example #1
0
    def i_download(self, dd):
        """
        Download archives from the Theia Land website.

        Builds an :class:`Archive` for the project, lists what can be
        downloaded (the count is stored in ``nb_avalaible_images``), and when
        *dd* is 1 downloads and unzips the archives into **folder_archive**.

        :param dd: Boolean variable to launch download images -> 0 or 1.

            - 0 means, not downloading
            - 1 means, launch downloading
        :type dd: int
        """
        self.folder_archive = self.captor_project + '_PoleTheia'
        theia = Archive(self.captor_project, self.classif_year, self.path_area,
                        self.path_folder_dpt, self.folder_archive)
        self.check_download = theia

        # Count the images available on the platform
        theia.listing()
        self.nb_avalaible_images = len(theia.list_archive)

        if dd == 1:
            theia.download_auto(self.user, self.password)
            theia.decompress()
Example #2
0
    def __init__(self, fileName):
        """
        Build the parser: convert *fileName* (a hex dump) to a binary file,
        load that bit string and decode the Ethernet, IP and TCP/UDP headers.
        """
        self.archive = Archive()
        self.fileName = fileName

        # Convert the raw hex dump into a '.binario' file of 0/1 characters
        self.file = self.archive.openFile(fileName, 'r')
        self.convFileToBin()

        # Load the bit string produced above
        binFile = self.archive.openFile(self.fileName + '.binario', 'r')
        self.str = binFile.readline()
        self.archive.close(binFile)

        # Decode the headers layer by layer
        self.getEthernet()
        self.getIP()

        if self.protocol != 'TCP':
            self.getUDP()
        else:
            self.getTCP()
Example #3
0
    def __init__(self, name):
        """
        Initialise the container, then attach its Archive and Headers
        facets, both keyed on *name*.
        """
        AssetContainer.__init__(self, name)

        from Archive import Archive
        self.archive = Archive(name)

        from Headers import Headers
        self._headers = Headers(name)
        # NOTE: no explicit return -- __init__ returns None implicitly
Example #4
0
 def i_glob(self):
     """
     Load images that are already present on disk instead of downloading:
     build the project :class:`Archive` and unzip whatever sits in
     **folder_archive**.
     """
     self.folder_archive = self.captor_project + '_PoleTheia'
     archives = Archive(self.captor_project, self.classif_year, self.path_area,
                        self.path_folder_dpt, self.folder_archive)
     self.check_download = archives
     archives.decompress()
Example #5
0
def main():
    """
    Capture a 10x10 grid of Google static-map satellite tiles, save them as
    PNGs under ``google_image_dump/``, bundle them into a tar.gz archive and
    upload it to the ``zepto-archive`` S3 bucket.
    """
    # Google static maps API key must be stored in api_key.txt.
    # Use a context manager so the handle is closed (the original leaked it).
    with open('api_key.txt', 'r') as key_file:
        api_key = key_file.read()

    # Define start positions for the lat and long
    lat_start = -27.458462
    long_start = 153.035735
    # Define the lat and long increments - this depends on the start position
    lat_inc = -0.0007
    long_inc = 0.0009

    # Create the output directory once, before the loop (it was re-checked on
    # every iteration in the original).
    directory = 'google_image_dump'
    if not os.path.exists(directory):
        os.makedirs(directory)

    # Make a file list to add to an archive and upload to S3
    file_list = []

    # Iterate over a 10x10 grid and capture satellite images
    for i, j in itertools.product(range(10), range(10)):
        latitude = lat_start + (i * lat_inc)
        longitude = long_start + (j * long_inc)

        # Generate the Google maps URL
        map_address = ('https://maps.googleapis.com/maps/api/staticmap?' +
                       'center={},{}'.format(latitude, longitude) +
                       '&zoom=20&size=640x640' +
                       '&maptype=satellite&key={}'.format(api_key))

        # Specify the filename to save the png to (reuse the directory
        # variable instead of repeating the literal)
        img_fname = os.path.join(directory, '{}{}.png'.format(i, j))

        # Fetch the image at the URL and save it to the filename
        with open(img_fname, 'wb') as f:
            f.write(requests.get(map_address).content)

        file_list.append(img_fname)

    # Build an archive with the image files
    archive = Archive(file_list)
    buffer = archive.streamtargz()
    # Upload the archive to the zepto-archive bucket in S3
    s3bucket = S3Bucket('zepto-archive')
    s3bucket.uploadstream(buffer, archive.name)
Example #6
0
def run_expertise(wind, exp_id, emp_id):
    """
    Open the expertise window matching *exp_id* and show it; unknown ids fall
    back to an empty QWidget.
    """
    builders = {
        11: lambda: Calculator(emp_id),
        12: lambda: AlcoholExcretion(emp_id),
        21: lambda: BodyWeight(emp_id),
        31: lambda: BMI(emp_id),
        41: lambda: BioAgeKidneys(emp_id),
        51: lambda: DipPlane(emp_id),
        61: lambda: Archive(),
    }

    # A plain QWidget is created first (exactly like the original if/elif
    # chain did) and replaced when a known expertise id is supplied.
    win = QtGui.QWidget()
    builder = builders.get(exp_id)
    if builder is not None:
        win = builder()
    win.show()
Example #7
0
class Operations:
    """
    Parse a packet hex dump: convert it to a file of binary digits, load it
    as one long bit string and decode/print the Ethernet, IPv4 and TCP/UDP
    headers it contains.
    """

    def __init__(self, fileName):
        """
        Read *fileName* (a hex dump), write its binary form to
        ``fileName + '.binario'`` and decode the headers layer by layer.

        :param fileName: path of the hex-dump file to parse.
        """
        self.archive = Archive()

        self.fileName = fileName
        self.file = self.archive.openFile(fileName, 'r')
        self.convFileToBin()

        # Load the bit string produced by convFileToBin
        file = self.archive.openFile(self.fileName + '.binario', 'r')
        self.str = file.readline()
        self.archive.close(file)

        self.getEthernet()
        self.getIP()

        if self.protocol == 'TCP':
            self.getTCP()
        else:
            self.getUDP()

    def convFileToBin(self):
        """Convert the whole hex-dump file to a file of '0'/'1' characters."""
        # Create a new file to hold the binary digits
        fileOutput = self.archive.openFile(self.fileName + '.binario', 'w')

        # Keep the whole input file in self.line
        self.line = self.file.read()

        for i in range(0, len(self.line)):  # iterate over every character
            num = 0

            # Separators (spaces and newlines) are not hex digits: skip them
            if self.line[i] == ' ' or self.line[i] == '\n':
                continue

            # 'a'..'f' map to 10..15; everything else is assumed '0'..'9'
            if self.line[i] in string.ascii_lowercase:
                num = 10 + string.ascii_lowercase.index(self.line[i])
            else:
                num = int(self.line[i])

            # Write the 4 bits of this hex digit to the output file
            for bi in self.convToBinary(num):
                fileOutput.write(str(bi))

        self.archive.close(fileOutput)
        self.archive.close(self.file)

    def whatsLetter(self, letter):
        """
        Return the decimal value (10..15) of a hex letter 'a'..'f' / 'A'..'F'.

        Bug fix: ``str.index`` raises ``ValueError`` when the letter is not
        found -- it never returns ``None`` -- so the original
        ``if num == None`` fallback was dead code and an upper-case letter
        crashed.  The upper-case lookup is now reached via ``try``/``except``.
        """
        try:
            num = string.ascii_lowercase.index(letter)
        except ValueError:
            # Upper-case letter: look it up in the upper-case alphabet
            num = string.ascii_uppercase.index(letter)

        return 10 + num  # 'a'/'A' -> 10 ... 'f'/'F' -> 15

    def convToBinary(self, num):
        """
        Convert a non-negative decimal to a list of bits, MSB first, padded
        to at least 4 bits: ``9 -> [1, 0, 0, 1]``, ``0 -> [0, 0, 0, 0]``.
        """
        # format(num, 'b') gives the bits without a '0b' prefix; zfill pads
        # to the 4 bits of one hex digit.  Wider numbers keep all their bits,
        # matching the original hand-rolled implementation.
        return [int(bit) for bit in format(num, 'b').zfill(4)]

    def convToDecimal(self, num):
        """Convert a string of binary digits (MSB first) to an int."""
        width = len(num)
        result = 0
        for i in range(width):
            if num[i] == '1':
                result += 1 << (width - 1 - i)
        return result

    def getIp(self):
        """
        Consume the next 32 bits of ``self.str`` and return them formatted as
        a dotted-quad IP address string.
        """
        ipStr = ''
        for i in range(0, 4):  # one octet at a time
            # Convert the octet to decimal and append a dot
            ipStr += str(self.convToDecimal(self.str[i * 8:(i + 1) * 8])) + '.'

        # Drop the 32 bits that were just consumed
        self.str = self.str[32:]
        return ipStr[:-1]  # strip the trailing dot

    def getNextByte(self, qtd):
        """Consume and return the next *qtd* bytes (as a bit string)."""
        result = ''
        for i in range(0, qtd):
            result += self.str[i * 8:(i + 1) * 8]

        self.str = self.str[qtd * 8:]
        return result

    def getMac(self):
        """
        Consume the next 6 bytes of ``self.str`` and return them formatted as
        a colon-separated MAC address.
        """
        hexDigits = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 'a', 'b', 'c', 'd', 'e', 'f']
        result = ''

        for i in range(0, 12):  # 12 hex digits = 6 bytes
            nibble = self.str[i * 4:(i + 1) * 4]
            dec = self.convToDecimal(nibble)
            result += str(hexDigits[dec])

            if i % 2 != 0:  # a ':' after every full byte (pair of digits)
                result += ':'

        self.str = self.str[48:]
        return result[:-1]  # strip the trailing ':'

    def getEthernet(self):
        """Decode and print the Ethernet header fields from ``self.str``."""
        print("***************  ETHERNET  *****************")
        #print("Preambulo: ", self.convToDecimal(self.getNextByte(8)))
        print("MAC1: ", self.getMac())
        print("MAC2: ", self.getMac())
        print("EtherType: ", self.convToDecimal(self.getNextByte(2)))
        print()

    def getIP(self):
        """
        Decode and print the IPv4 header and record the transport protocol
        name ('TCP'/'UDP', or its number if unknown) in ``self.protocol``.
        """
        print("***************  IP  *****************")
        print("Version: ", self.convToDecimal(self.str[:4]))
        print("IHL: ", self.convToDecimal(self.str[4:8]))
        print("TOS: ", self.convToDecimal(self.str[8:16]))
        print("TOTAL LENGTH: ", self.convToDecimal(self.str[16:32]))
        self.str = self.str[32:]
        print("IDENTIFICATION: ", self.convToDecimal(self.str[:16]))
        # NOTE(review): [17:19] prints only the DF/MF bits, skipping the
        # reserved bit at [16] (RFC 791 puts the 3 flag bits at [16:19]) --
        # confirm this is intentional.
        print("FLAGS: ", self.str[17:19])
        print("FRAGMENT OFFSET: ", self.convToDecimal(self.str[19:32]))
        self.str = self.str[32:]
        print("TTL: ", self.convToDecimal(self.str[:8]))

        # The protocol field decides whether TCP or UDP follows
        self.protocol = self.convToDecimal(self.str[8:16])
        if self.protocol == 6:
            self.protocol = 'TCP'
        elif self.protocol == 17:
            self.protocol = 'UDP'

        print("PROTOCOL: ", self.protocol)
        print("CHECKSUM: ", self.convToDecimal(self.str[16:32]))
        self.str = self.str[32:]
        print("IP SOURCE: ", self.getIp())
        print("IP DESTINATION: ", self.getIp())
        print()

    def getTCP(self):
        """Decode and print the TCP header fields from ``self.str``."""
        print("***************  TCP  *****************")
        print("Source Port: ", self.convToDecimal(self.str[:16]))
        print("Destination Port: ", self.convToDecimal(self.str[16:32]))
        self.str = self.str[32:]
        print("Sequence Number: ", self.convToDecimal(self.str[:32]))
        self.str = self.str[32:]
        print("Acknowledgement Number: ", self.convToDecimal(self.str[:32]))
        self.str = self.str[32:]
        print("Header Length: ", self.convToDecimal(self.str[:4]))
        print("Reserved: ", self.convToDecimal(self.str[4:10]))
        print("Bits: ", self.str[10:16])
        print("Window Size: ", self.convToDecimal(self.str[16:32]))
        self.str = self.str[32:]
        print("Checksum: ", self.convToDecimal(self.str[0:16]))
        print("Urgent Pointer: ", self.convToDecimal(self.str[16:32]))

    def getUDP(self):
        """Decode and print the UDP header fields from ``self.str``."""
        print("***************  UDP  *****************")
        print("Source Port: ", self.convToDecimal(self.str[:16]))
        print("Destination Port: ", self.convToDecimal(self.str[16:32]))
        self.str = self.str[32:]
        print("Length: ", self.convToDecimal(self.str[:16]))
        print("Checksum: ", self.convToDecimal(self.str[16:32]))
Example #8
0
def archive(name):
    """Lazily import :mod:`Archive` and return an ``Archive`` built for *name*."""
    from Archive import Archive as archive_cls
    return archive_cls(name)
Example #9
0
# NOTE(review): this fragment continues a longer script -- `env`, `act_space`,
# `FFIndiv`, `ES`, `Archive`, `Samplers` and `np` are defined/imported earlier,
# and the training loop body continues beyond this excerpt.
obs_space = env.get_obs_space()

# Run sizes: training generations, test evaluations, CMA-ES population size.
n_train = 1000
n_test = 10
pop_size = 100
curr_gen = 0

print("Act space:", act_space)
print("Obs space:", obs_space)

# Optimization: CMA-ES over the flattened parameter vector of the policy net.
nn = FFIndiv(obs_space, act_space, hidden_size=16)
optimizer = ES.CMAES(nn.get_params().shape[0], pop_size=pop_size)

# Archives, sized for one full run: n_train generations x pop_size evaluations.
sample_archive = Archive(max_size=n_train * pop_size)
thetas_archive = Archive(max_size=n_train)

# Sampler fed from both archives.
sampler = Samplers.BasicSampler(sample_archive, thetas_archive)

# Training loop: ask the optimizer for a population and evaluate each candidate.
for i in range(n_train):

    batch = optimizer.ask(pop_size)
    scores = np.zeros(pop_size)
    # newly drawn samples
    for j in range(pop_size):

        nn.set_params(batch[j])
        score = env.eval(nn, render=False)
        # (loop body continues past the end of this excerpt)
Example #10
0
class Processing():
    
    """
    Main processing. This class launches the other system classes. It takes
    into account the CarHab MOBA classification method.
    
    The workflow is broken down into 4 parts :
        - Image Processing (Search, download and processing)
        - Vector Processing (Optimal threshold, Sample processing)
        - Classification 
        - Validation 
    
    **Main parameters**
    
    :param captor_project: Satellite captor name
    :type captor_project: str
    :param classif_year: Classification year
    :type classif_year: str
    :param nb_avalaible_images: Number download available images
    :type nb_avalaible_images: int
    :param path_folder_dpt: Main folder path
    :type path_folder_dpt: str
    :param folder_archive: Archive downloaded folder path
    :type folder_archive: str
    :param folder_processing: Processing folder name. By default : 'Traitement'
    :type folder_processing: str
    :param path_area: Study area shapefile
    :type path_area: str
    :param path_ortho: VHRS image path
    :type path_ortho: str
    :param path_mnt: MNT image path
    :type path_mnt: str
    :param path_segm: Segmentation shapefile
    :type path_segm: str
    
    **Id information to download on theia platform**
    
    :param user: Connexion Username
    :type user: str
    :param password: Connexion Password
    :type password: str
    
    **Output parameters**
    
    :param output_name_moba: Output classification shapefile 
    :type output_name_moba: str
    :param out_fieldname_carto: Output shapefile field name
    :type out_fieldname_carto: list of str
    :param out_fieldtype_carto: Output shapefile field type
    :type out_fieldtype_carto: list of str (eval ogr pointer)
    
    **Sample parameters**
    
    :param fieldname_args: Sample field names 2 by 2
    :type fieldname_args: list of str
    :param class_args: Sample class names 2 by 2
    :type class_args: list of str
    :param sample_name: List of sample name (path)
    :type sample_name: list of str
    :param list_nb_sample: Number of polygons for every sample
    :type list_nb_sample: list of int
    
    **Multi-processing parameters**
    
    :param mp: Boolean variable -> 0 or 1.
    
            - 0 means, not multi-processing
            - 1 means, launch process with multi-processing
    :type mp: int
    """
    
    def __init__(self):
        
        """
        Initialise every processing parameter to an empty or default value.
        The attributes are filled in by the interface methods (i_download,
        i_images_processing, i_sample, ...) before the chain is launched.
        """
        
        # Used variables
        self.captor_project = ''
        self.classif_year = ''
        self.path_folder_dpt = ''
        self.folder_archive = ''
        self.folder_processing = 'Traitement'
        self.path_area = ''
        self.path_ortho = ''
        self.path_mnt = ''
        self.path_segm = ''
        self.output_name_moba = ''
        
        # Credentials used to download from the Theia platform
        self.user = ''
        self.password = ''

        # List of output raster path (band indexes kept in parallel below)
        self.raster_path = []
        self.list_band_outraster = []
        
        # Class name
        
        # TODO : Change index of the classes -> Herbacees 6 / Ligneux 7 by Agriculture 4 / Eboulis 5
        
        self.in_class_name = ['Non Vegetation semi-naturelle', 'Vegetation semi-naturelle',\
                         'Herbacees', 'Ligneux', \
                         'Ligneux mixtes', 'Ligneux denses',\
                         'Agriculture', 'Eboulis', \
                         'Forte phytomasse', 'Moyenne phytomasse', 'Faible phytomasse']
        # Sample field names 2 by 2 (examples kept below for reference)
        self.fieldname_args = []
#                                'CODE_GROUP', 'CODE_GROUP',\
#                           'echant', 'echant',\
#                           'echant', 'echant']
        # Sample class names 2 by 2 (examples kept below for reference)
        self.class_args = []
#                            '1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 14, 15, 20, 21, 22, 23, 24, 26, 28', '18',\
#                       'H','LF, LO',\
#                       'LF', 'LO']
        
        # Decision tree combination (indexes refer to in_class_name above)
        self.tree_direction = [[0],\
                          [0],\
                          [1, 3, 4],\
                          [1, 3, 5],\
                          [1, 2, 8],\
                          [1, 2, 9],\
                          [1, 2, 10]] # [['Cultures'],['Semi-naturelles', 'Herbacees', 'Forte phytomasse'], ...
                                    # ..., ['Semi-naturelles', 'Ligneux', 'Ligneux denses']]
        # Slope threshold in degrees (used to separate agriculture from scree)
        self.slope_degree = 30
        
        # Output shapefile field name
        self.out_fieldname_carto = ['ID', 'AREA'] #, 'NIVEAU_1', 'NIVEAU_2', 'NIVEAU_3', 'POURC']
        # Output shapefile field type
        self.out_fieldtype_carto = [ogr.OFTString, ogr.OFTReal] #, ogr.OFTString, ogr.OFTString, ogr.OFTString, ogr.OFTReal]
        
        # List of sample name path
        self.sample_name = []
        # List of number sample
        self.list_nb_sample = []
        # Number download available images
        self.nb_avalaible_images = 0
        # Multi-processing variable (1 = run satellite/VHRS processing in parallel)
        self.mp = 1
        
        # Function followed
        self.check_download = ''
        self.decis = {}
        # Images after processing images
        self.out_ndvistats_folder_tab = defaultdict(list)
        
        # Validation shapefiles information
        self.valid_shp = []
        
    def i_tree_direction(self):
        """
        Restrict the decision tree to one or two levels, depending on how
        many output fields were requested in ``out_fieldname_carto``.
        """
        nb_fields = len(self.out_fieldname_carto)

        if nb_fields == 3:
            self.tree_direction = [[0], [1]]

        if nb_fields == 4:
            self.tree_direction = [[0], [0], [1, 2], [1, 3]]
        
    def i_download(self, dd):
        """
        Download archives from the Theia Land website into **folder_archive**.

        An :class:`Archive` is built for the project; its listing gives the
        number of downloadable images (stored in ``nb_avalaible_images``).
        When *dd* is 1 the archives are downloaded with the stored
        credentials and unzipped.

        :param dd: Boolean variable to launch download images -> 0 or 1.

            - 0 means, not downloading
            - 1 means, launch downloading
        :type dd: int
        """
        self.folder_archive = self.captor_project + '_PoleTheia'
        theia_archive = Archive(self.captor_project, self.classif_year, self.path_area,
                                self.path_folder_dpt, self.folder_archive)
        self.check_download = theia_archive

        theia_archive.listing()
        self.nb_avalaible_images = len(theia_archive.list_archive)

        if dd == 1:
            theia_archive.download_auto(self.user, self.password)
            theia_archive.decompress()
    
    def i_glob(self):
        """
        Load archives already present on disk instead of downloading: build
        the project :class:`Archive` and unzip what sits in
        **folder_archive**.
        """
        self.folder_archive = self.captor_project + '_PoleTheia'
        existing = Archive(self.captor_project, self.classif_year, self.path_area,
                           self.path_folder_dpt, self.folder_archive)
        self.check_download = existing
        existing.decompress()
     
    def i_img_sat(self):
        
        """
        Interface function to process the satellite images:
        
            1. Clip archive images and modify Archive class to integrate clip image path.
            With :func:`Toolbox.clip_raster` in ``Toolbox`` module.
        
            2. Search cloud's percentage :func:`RasterSat_by_date.RasterSat_by_date.pourc_cloud`, select 
            image and compute ndvi index :func:`RasterSat_by_date.RasterSat_by_date.calcul_ndvi`. A date is
            kept only when ``pourc_cloud`` returns more than 0.60 (NOTE(review): the
            original docstring said "greater than 40%" -- confirm which way the ratio goes).
        
            3. Compute temporal stats on ndvi index [min, max, std, min-max]. With :func:`Toolbox.calc_serie_stats` 
            in ``Toolbox`` module.
            
            4. Create stats ndvi raster and stats cloud raster.
            
            >>> import RasterSat_by_date
            >>> stats_test = RasterSat_by_date(class_archive, big_folder, one_date)
            >>> stats_test.complete_raster(stats_test.create_raster(in_raster, stats_data, in_ds), stats_data)
        """

        # Clip archive images and modify Archive class to integrate clip image path.
        # list_img rows: index 3 = multispectral image path, index 4 = cloud
        # image path (per the inline comments; TODO confirm full row layout).
        for clip in self.check_download.list_img:
            clip_index = self.check_download.list_img.index(clip)
            self.check_download.list_img[clip_index][3] = clip_raster(clip[3], self.path_area) # Multispectral images
            self.check_download.list_img[clip_index][4] = clip_raster(clip[4], self.path_area) # Cloud images
           
        # Images pre-processing: mosaic every image of a given date, then filter by clouds
        spectral_out = []
        for date in self.check_download.single_date:
               
            check_L8 = RasterSat_by_date(self.check_download, self.folder_processing, date)
            check_L8.mosaic_by_date()
             
            # Keep the date and compute its NDVI only when pourc_cloud exceeds 0.60
            cl = check_L8.pourc_cloud(check_L8._one_date[3], check_L8._one_date[4])
            if cl > 0.60:
                check_L8.calcul_ndvi(check_L8._one_date[3])
                spectral_out.append(check_L8._one_date)
        
        # Compute temporal stats on ndvi index [min, max, std, min-max]
        spectral_trans = np.transpose(np.array(spectral_out, dtype=object))
        stats_name = ['Min', 'Max', 'Std', 'MaxMin']
        stats_ndvi, stats_cloud = calc_serie_stats(spectral_trans)

        # Create stats ndvi raster and stats cloud raster
        stats_L8 = RasterSat_by_date(self.check_download, self.folder_processing, [int(self.classif_year)])
        # Stats cloud raster
        out_cloud_folder = stats_L8._class_archive._folder + '/' + stats_L8._big_folder + '/' + self.classif_year + \
                           '/Cloud_number_' + self.classif_year + '.TIF'
        stats_L8.complete_raster(stats_L8.create_raster(out_cloud_folder, stats_cloud, \
                                                         stats_L8.raster_data(self.check_download.list_img[0][4])[1]), \
                                 stats_cloud)
        
        # Stats ndvi rasters: one output file per stat, recorded by index in
        # out_ndvistats_folder_tab for the later classification steps
        for stats_index in range(len(stats_ndvi)):
            out_ndvistats_folder = stats_L8._class_archive._folder + '/' + stats_L8._big_folder + '/' + self.classif_year + \
                           '/' + stats_name[stats_index] + '_' + self.classif_year + '.TIF'
            self.out_ndvistats_folder_tab[stats_index] = out_ndvistats_folder
            stats_L8.complete_raster(stats_L8.create_raster(out_ndvistats_folder, stats_ndvi[stats_index], \
                                                            stats_L8.raster_data(self.check_download.list_img[0][4])[1]), \
                                     stats_ndvi[stats_index])
        
    def i_slope(self):
        """
        Compute a slope raster in degrees from the MNT via
        :func:`Slope.Slope` (a GDAL command line), after clipping the MNT to
        the study area. ``path_mnt`` is updated to point at the result.
        """
        clipped_mnt = clip_raster(self.path_mnt, self.path_area)
        study_slope = Slope(clipped_mnt)
        # Launch the slope computation
        study_slope.extract_slope()
        self.path_mnt = study_slope.out_mnt
    
    def i_vhrs(self):
        """
        Process the VHRS image: clip the orthophoto to the study area, then
        build the two OTB texture images with :func:`Vhrs.Vhrs` (SFS and
        Haralick), recorded in ``out_ndvistats_folder_tab``.
        """
        clipped_ortho = clip_raster(self.path_ortho, self.path_area)
        textures = Vhrs(clipped_ortho, self.mp)
        self.out_ndvistats_folder_tab['sfs'] = textures.out_sfs
        self.out_ndvistats_folder_tab['haralick'] = textures.out_haralick
        
    def i_images_processing(self, vs): 
        
        """
        Interface function to launch processing VHRS images :func:`i_vhrs` and satellite images :func:`i_img_sat` in multi-processing.
        
        :param vs: Boolean variable to launch processing because of interface checkbox -> 0 or 1.
    
            - 0 means, not texture processing
            - 1 means, launch texture processing
        :type vs: int
        """
        
        # Share out_ndvistats_folder_tab across processes through a manager,
        # so the i_img_sat / i_vhrs children can fill it
        mgr = BaseManager()
        mgr.register('defaultdict', defaultdict, DictProxy)
        mgr.start()
        self.out_ndvistats_folder_tab = mgr.defaultdict(list)
        
        p_img_sat = Process(target=self.i_img_sat)
        p_img_sat.start()
        # mp == 0: no parallelism -> wait for satellite processing right away
        if self.mp == 0:
            p_img_sat.join()
        
        if vs == 1:
            p_vhrs = Process(target=self.i_vhrs)#, args=(vs, ))
            p_vhrs.start()
            p_vhrs.join()
        
        # mp == 1: satellite and VHRS processing ran in parallel; only join
        # the satellite process now
        if self.mp == 1:
            p_img_sat.join()
        
        # List of output raster path (index 0 = Min NDVI stat raster)
        self.raster_path.append(self.out_ndvistats_folder_tab[0])
        # List of output raster band
        self.list_band_outraster.append(1)
        
        if vs == 1:
            self.raster_path.append(self.out_ndvistats_folder_tab['sfs'])
            self.list_band_outraster.append(4)
            self.raster_path.append(self.out_ndvistats_folder_tab['haralick'])
            self.list_band_outraster.append(2)
        
        # To slope, to extract scree
        if self.path_mnt != '':
            self.raster_path.append(self.path_mnt)
            self.list_band_outraster.append(1)
            
        self.raster_path.append(self.out_ndvistats_folder_tab[1])
        # example raster path tab :
        #                [path_folder_dpt + '/' + folder_processing + '/' + classif_year + '/Min_2014.TIF',\
        #                os.path.dirname(path_ortho) + '/Clip_buffer_surface_dep_18_IRCOrtho65_2m_sfs.TIF',\
        #                os.path.dirname(path_ortho) + '/Clip_buffer_surface_dep_18_IRCOrtho65_2m_haralick.TIF',\
        #                path_folder_dpt + '/' + folder_processing + '/' + classif_year + '/Max_2014.TIF']
        
        # List of output raster band
        self.list_band_outraster.append(1) #[1, 4, 2, 1]
        
        print("End of images processing !")


    def i_rpg(self, path_rpg): 
        """
        Extract mono-crop RPG polygons.

        :param path_rpg: Input RPG shapefile.
        :type path_rpg: str

        :returns: str -- variable **Rpg.vector_used**, output no duplicated crops shapefile (path).
        """
        # Extract mono rpg crops and write the de-duplicated shapefile
        rpg = Rpg(path_rpg, self.path_area)
        rpg.mono_rpg()
        rpg.create_new_rpg_files()
        print('End of RPG processing')
        return rpg.vector_used
         
        
    def i_sample(self):
        """
        Interface function to compute threshold with various sample. It also extract a validation layer (shapefile) to compute
        the precision of the next classification :func:`i_validate`. 
        
        It create samples 2 by 2 with kwargs field names and class :func:`Sample.Sample.create_sample`. 
        Then, it compute zonal statistics by polygons :func:`Vector.Sample.zonal_stats`.
        
        With zonal statistics computed, a optimal threshold is determined :func:`Seath.Seath.separability_and_threshold` that
        will print in a text file .lg in the main folder.
        
        .. warning:: :func:`Seath.Seath.separability_and_threshold` does not always allow to discriminate optimal threshold. 
                    Then, this function will be launch at least ten time until it reaches a optimal threshold.
        """
        
        # Compute threshold with various sample
        i_s = 0
        while i_s < 10:
            try :
                self.valid_shp = []
                sample_rd = {}
                for sple in range(len(self.sample_name) * 2):
                    kwargs = {}
                    kwargs['fieldname'] = self.fieldname_args[sple]
                    kwargs['class'] = self.class_args[sple]
                    sample_rd[sple] = Sample(self.sample_name[sple/2], self.path_area, self.list_nb_sample[sple/2])
                    sample_rd[sple].create_sample(**kwargs)
                    sample_rd[sple].zonal_stats((self.raster_path[sple/2], self.list_band_outraster[sple/2]))
                    
                    # Add the validation shapefile
                    self.valid_shp.append([sample_rd[sple].vector_val, kwargs['fieldname'], kwargs['class']])

                # Search the optimal threshold by class 
                # Open a text file to print stats of Seath method
                file_J = self.path_folder_dpt + '/log_J.lg'
                f = open(file_J, "wb")
                for th_seath in range(len(self.sample_name)):
                    self.decis[th_seath] = Seath()
                    self.decis[th_seath].value_1 = sample_rd[th_seath*2].stats_dict
                    self.decis[th_seath].value_2 = sample_rd[th_seath*2 + 1].stats_dict
                    self.decis[th_seath].separability_and_threshold()
                    
                    # Print the J value in the text file .lg
                    f.write('For ' + str(self.sample_name[th_seath]) + ' :\n')
                    f.write('J = ' + str(self.decis[th_seath].J[0]) +'\n')
                    f.write('The class 1 ' + str(self.decis[th_seath].threshold[0]) +'\n')
                    
                f.close()    
                i_s = 20
            except:
                i_s = i_s + 1
        # Method to stop the processus if there is not found a valid threshold
        if i_s != 20:
            print 'Problem in the sample processing !!!'
            sys.exit(1)
    
    def i_sample_rf(self):
        """
        Build every sample and compute its zonal statistics (variant of
        :func:`i_sample` without the Seath threshold search).

        Samples go 2 by 2: for pair ``p`` the sample file, polygon count,
        raster and band are all indexed with ``p = sple // 2``.
        """
        
        sample_rd = {}
        for sple in range(len(self.sample_name) * 2):
            kwargs = {}
            kwargs['fieldname'] = self.fieldname_args[sple]
            kwargs['class'] = self.class_args[sple]
            # // keeps integer indexing on Python 3 (the original "sple/2"
            # produces a float there and breaks list indexing)
            sample_rd[sple] = Sample(self.sample_name[sple // 2], self.path_area, self.list_nb_sample[sple // 2])
            sample_rd[sple].create_sample(**kwargs)
            sample_rd[sple].zonal_stats((self.raster_path[sple // 2], self.list_band_outraster[sple // 2]))

    def i_classifier(self): 
        """
        Interface function to launch decision tree classification with a input segmentation :func:`Segmentation.Segmentation`.
        
        This function store optimal threshold by class **Segmentation.out_threshold**. Then compute zonal statistics by polygons
        for every images in multi-processing (if **mp** = 1).
        """ 
        
        # Multiprocessing manager: shares the statistics dictionary
        # (out_carto.stats_dict) between the worker processes.
        mgr = BaseManager()
        mgr.register('defaultdict', defaultdict, DictProxy)
        mgr.start()
        multi_process_var = [] # Multi processing variable
          
        # Extract final cartography
        out_carto = Segmentation(self.path_segm, self.path_area) 
        out_carto.output_file = self.output_name_moba
        out_carto.out_class_name = self.in_class_name
        out_carto.out_threshold = []
        # NOTE(review): assumes self.sample_name is non-empty -- ind_th is
        # reused after this loop to index the extra slope / NDVI rasters.
        for ind_th in range(len(self.sample_name)):
            out_carto.out_threshold.append(self.decis[ind_th].threshold[0])
            # Also store the complementary threshold ('>' <-> '<=').
            if '>' in self.decis[ind_th].threshold[0]:
                out_carto.out_threshold.append(self.decis[ind_th].threshold[0].replace('>', '<='))
            elif '<' in self.decis[ind_th].threshold[0]:
                out_carto.out_threshold.append(self.decis[ind_th].threshold[0].replace('<', '>='))
        #     out_carto.zonal_stats((raster_path[ind_th], list_band_outraster[ind_th]))
            multi_process_var.append([self.raster_path[ind_th], self.list_band_outraster[ind_th]])
         
        # Compute zonal stats on slope raster
        multi_process_var.append([self.raster_path[ind_th+1], self.list_band_outraster[ind_th+1]])
        out_carto.out_threshold.append('<'+str(self.slope_degree)) # To agriculture
        out_carto.out_threshold.append('>='+str(self.slope_degree)) # To scree
        if self.path_mnt != '':
            # Add class indexes
            self.tree_direction[0].append(6)
            self.tree_direction[0].append(7)
            
        # Compute zonal stats on Max NDVI raster  
        try:  
            # out_carto.zonal_stats((raster_path[ind_th+1], list_band_outraster[ind_th+1]))
            multi_process_var.append([self.raster_path[ind_th+2], self.list_band_outraster[ind_th+2]])
            # Compute stats twice, because there is 3 classes and not 2
            # out_carto.zonal_stats((raster_path[ind_th+1], list_band_outraster[ind_th+1]))
            multi_process_var.append([self.raster_path[ind_th+2], self.list_band_outraster[ind_th+2]])
        except (IndexError, KeyError):
            # No MNT raster available: there is no entry at ind_th+2, so fall
            # back on the previous raster. A bare except here would also have
            # swallowed KeyboardInterrupt/SystemExit and real bugs.
            print('Not MNT on the 3rd step')
            multi_process_var.append([self.raster_path[ind_th+1], self.list_band_outraster[ind_th+1]])
            multi_process_var.append([self.raster_path[ind_th+1], self.list_band_outraster[ind_th+1]])

        # Compute zonal stats with multi processing
        out_carto.stats_dict = mgr.defaultdict(list)
        p = []
        kwargs = {}
        for i in range(len(multi_process_var)):
            kwargs['rank'] = i
            kwargs['nb_img'] = len(multi_process_var)
            p.append(Process(target=out_carto.zonal_stats, args=(multi_process_var[i], ), kwargs=kwargs))
            p[i].start()
            
            # mp == 0: sequential mode, wait for each process immediately.
            if self.mp == 0:
                p[i].join()
        
        # mp == 1: parallel mode, wait for all workers once started.
        if self.mp == 1:       
            for i in range(len(multi_process_var)):
                p[i].join()

        # If there is more one fieldnames line edit fulled in classification tab
        if len(self.sample_name) > 2:
            # Compute the biomass and density distribution
            out_carto.compute_biomass_density()
            
        out_carto.class_tab_final = defaultdict(list)
        self.i_tree_direction()
        out_carto.decision_tree(self.tree_direction)
        
        # If there is more one fieldnames line edit fulled in classification tab
        if len(self.sample_name) > 2:     
            # Compute biomass and density scale
            out_carto.append_scale(self.in_class_name[2], 'self.stats_dict[ind_stats][3]/self.max_bio')
            out_carto.append_scale(self.in_class_name[3], 'self.stats_dict[ind_stats][2]/self.max_wood_idm')
          
        # Final cartography
        out_carto.create_cartography(self.out_fieldname_carto, self.out_fieldtype_carto)
       
    def i_validate(self):
        """
        Interface to validate a classification. It going to rasterize the validation shapefile and the 
        classification shapefile with :func:`layer_rasterization`. Next, to compare pixel by pixel, the classification
        quality to built a confusion matrix in a csv file.
        
        """
        # Merged validation shapefile, built next to the first input shapefile.
        complete_validate_shp = os.path.dirname(self.valid_shp[0][0]) + '/validate.shp'
        
        # TODO: Set this method in the Precision_moba class
        
        # Processing to rasterize the validate shapefile. 1) Merge shapefiles 2) Rasterization
        # class_validate is the integer class code of the current entry.
        # enumerate() replaces the former `self.valid_shp.index(val) + 1`
        # bookkeeping, which was O(n^2) and returned the FIRST matching
        # entry -- wrong when two identical entries exist in valid_shp.
        for class_validate, val in enumerate(self.valid_shp):
            if class_validate != 2: 
                # Grassland to 1
                if (class_validate !=3 and len(self.out_fieldname_carto) != 4+2) or len(self.out_fieldname_carto) == 4+2:
                    # To the level 3 with woodeen to 4 and 5
                    #
                    # Self.valid_shp is a list of list. In this variable there is :
                    # [Shapefile path, fieldname classes, classnames]
                    opt = {}
                    opt['Remove'] = 1 # To overwrite 
        
                    # Create a raster to valide the classification
                    # First time, create a new shapefile with a new field integer
                    sample_val = Sample(val[0], self.path_area, 1, **opt)
                    opt['add_fieldname'] = 1 
                    opt['fieldname'] = 'CLASS_CODE'
                    opt['class'] = str(class_validate) # Add integer classes
                    # Set the new shapefile
                    val[0] = val[0][:-4] + '_.shp'
                    val[1] = opt['fieldname']
                    val[2] = opt['class']
                    # Complete the new shapefile
                    sample_val.fill_sample(val[0], 0, **opt)
                    # Second time, merge the validate shapefile
                    if class_validate == 0:
                        process_tocall_merge =  ['ogr2ogr', '-overwrite', complete_validate_shp, val[0]]
                    elif class_validate > 0:
                        process_tocall_merge =  ['ogr2ogr', '-update', '-append', complete_validate_shp, \
                                                 val[0], '-nln', os.path.basename(complete_validate_shp[:-4])]
                    subprocess.call(process_tocall_merge)
        
        # Compute precision of the classification
        valid = Precision_moba(self.path_area, self.path_folder_dpt)     
        valid.complete_validation_shp = complete_validate_shp
        valid.ex_raster = self.raster_path[0]
        
        # TODO: Call the RasterSat_by_Date class here instead of the Precision_moba class
        
        # NOTE(review): `val` below is the last entry of the loop above --
        # assumes self.valid_shp is non-empty.
        valid.preprocess_to_raster_precision(self.output_name_moba, 'FBPHY_SUB') # To the classification's data
        valid.preprocess_to_raster_precision(complete_validate_shp, val[1]) # To the validation's data
        
        # Compute precision on the output classification
        valid.confus_matrix(valid.complete_img[0].raster_data(valid.img_pr[0])[0], \
                            valid.complete_img[1].raster_data(valid.img_pr[1])[0])
Example #11
0
# Should make base class from which all modules inherit

#
# ###GENOME HELPER TEST
# from GenomeHelper import GenomeHelper
# # print GenomeHelper.readGenomeFromFasta('./TestData/testSeqs.fasta')
# seq, q = GenomeHelper.readGenomeFromFastq('./TestData/testSeq.fastq')
# print seq, q
#
#
# ##GET ARCHIVE TEST
from Archive import Archive
# from ComparativeGenomics import GetGentree

# NOTE(review): the import above binds the Archive *class*, yet the calls
# below use Archive.GetArchive(), which reads like an attribute of the
# Archive *module* -- confirm whether `import Archive` was intended, or
# whether GetArchive is really an attribute of the Archive class.

# Fetch one record by Ensembl gene id and print the JSON payload.
result = Archive.GetArchive().getArchiveById(id='ENSG00000157764')
print result.json()

# Same lookup, but requesting the XML representation.
rs = Archive.GetArchive().getArchiveById('ENSG00000157764', type='xml')

print rs.text

##POST ARCHIVE TEST
# result = Archive.GetArchive().postArchiveById(type='json', data = ["ENSG00000157764", "ENSG00000248378"])
# result2 = Archive.GetArchive().postArchiveById(type='xml', data = ["ENSG00000157764", "ENSG00000248378"])
# print result.json()
# print result2.text
#
# ###GET GENTREE TEST
# from ComparativeGenomics import GetGeneTree
# tstId = 'ENSGT00390000003602'
# r = GetGeneTree.GetGenTree().getGeneTreeById(tstId, type='json')
# rz = GetGeneTree.GetGeneTree().getGeneTreeById(tstId, type='xml')
Example #12
0
    curr_gen = 0
    n_ind = 0

    env = Env("CartPole-v0")
    act_space = env.get_action_space()
    obs_space = env.get_obs_space()

    # optimization stuff
    nn = FFIndiv(obs_space, act_space, hidden_size=2)
    best_params = nn.get_params()
    best_score = env.eval(nn, render=False)
    optimizer = ES.CMAES(nn.get_params().shape[0], pop_size=pop_size)

    # archives
    sample_archive = Archive(max_size=n_train * pop_size)
    thetas_archive = Archive(max_size=n_train)
    mu, cov = optimizer.get_distrib_params()
    thetas_archive.add_sample(Theta(mu, cov, []))

    # sampler
    sampler = Samplers.ClosestSampler(sample_archive,
                                      thetas_archive,
                                      accept_ratio=accept_ratio)

    print("Problem dimension:", mu.shape[0])
    df = pd.DataFrame(columns=[
        "n_reused", "best_score", "average_score", "sample_time",
        "evaluation_time"
    ])