Example 1
    def file_properties(self):
        share_name = self._create_share()
        directory_name = self._create_directory(share_name)
        file_name = self._get_file_reference()

        metadata = {'val1': 'foo', 'val2': 'blah'}
        self.service.create_file(share_name, directory_name, file_name, 512, metadata=metadata)

        settings = ContentSettings(content_type='html', content_language='fr')

        # Basic
        self.service.set_file_properties(share_name, directory_name, file_name, content_settings=settings)
        file = self.service.get_file_properties(share_name, directory_name, file_name)
        content_language = file.properties.content_settings.content_language  # fr
        content_type = file.properties.content_settings.content_type  # html
        content_length = file.properties.content_length  # 512

        # Metadata
        # set_file_properties can't change metadata, but get_file_properties returns any metadata already on the file
        file = self.service.get_file_properties(share_name, directory_name, file_name)
        metadata = file.metadata  # metadata={'val1': 'foo', 'val2': 'blah'}

        # Replaces values, does not merge
        settings = ContentSettings(content_encoding='utf-8')
        self.service.set_file_properties(share_name, directory_name, file_name, content_settings=settings)
        file = self.service.get_file_properties(share_name, directory_name, file_name)
        content_encoding = file.properties.content_settings.content_encoding  # utf-8
        content_language = file.properties.content_settings.content_language  # None

        self.service.delete_share(share_name)
Example 2
 def create(self, file_name, img_file):
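     # ContentSettings stores the Content-Type header with the blob so it is
     # served as a PNG; create_container returns False instead of raising if the container already exists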
     block_blob_service.create_container(MY_CONTAINER)
     block_blob_service.create_blob_from_bytes(
         MY_CONTAINER,
         file_name,
         img_file.read(),
         content_settings=ContentSettings(content_type='image/png'))
Example 3
    def create_file(self):
        share_name = self._create_share()
        directory_name = self._create_directory(share_name)

        # Basic
        file_name1 = self._get_file_reference()
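        # create_file allocates an empty 512-byte file (the size is in bytes); content is written separately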
        self.service.create_file(share_name, directory_name, file_name1, 512)

        # Properties
        settings = ContentSettings(content_type='html', content_language='fr')
        file_name2 = self._get_file_reference()
        self.service.create_file(share_name,
                                 directory_name,
                                 file_name2,
                                 512,
                                 content_settings=settings)

        # Metadata
        metadata = {'val1': 'foo', 'val2': 'blah'}
        file_name3 = self._get_file_reference()
        self.service.create_file(share_name,
                                 directory_name,
                                 file_name3,
                                 512,
                                 metadata=metadata)

        self.service.delete_share(share_name)
Example 4
def send_to_azure(fileService, fileName, filePath):
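    # fileService is an azure.storage.file.FileService instance; 'security' is the target share name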
    fileService.create_file_from_path(
        'security',
        'securityPhotos',  # Create the file in the 'securityPhotos' directory of the share
        fileName,
        filePath,
        content_settings=ContentSettings(content_type='image/jpeg'))  # 'image/jpeg' is the registered MIME type ('image/jpg' is not)
    print("File sent to Azure!")
Example 5
 def update(self, old_file_name, new_file_name, img_file):
     block_blob_service.create_blob_from_bytes(
         MY_CONTAINER,
         new_file_name,
         img_file.read(),
         content_settings=ContentSettings(content_type='image/png'))
     if old_file_name != new_file_name:
         self.delete(old_file_name)
Example 6
def saveFile(fileName, fileText, dirName):
    fileService.create_file_from_text(filesDir,
                                      dirName,
                                      fileName,
                                      fileText,
                                      content_settings=ContentSettings())
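    # Note: an empty ContentSettings() leaves content_type unset, so the file
    # falls back to the service default (application/octet-stream)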


#print(getFiles('test'))
Example 7
def upload_image(local_filename, storage_filename):
    file_service.create_file_from_path(
        'images',
        'original',
        storage_filename,
        local_filename,
        content_settings=ContentSettings(content_type='image/png')
    )

    image = {'name': storage_filename, 'likes': 0}
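    # db is assumed to be a pymongo database handle with an images_info collection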
    db.images_info.insert_one(image)
Example 8
def UploadFile(target_directory,
               file_name,
               source_path,
               content_type_str='application/json'):
    #Default file type is JSON. For pickle files, use application/octet-stream
    _content_settings = ContentSettings(content_type=content_type_str)

    file_service.create_file_from_path(fileshare,
                                       target_directory,
                                       file_name,
                                       source_path,
                                       content_settings=_content_settings)
Example 9
    def file_with_bytes(self):
        share_name = self._create_share()
        directory_name = self._create_directory(share_name)

        # Basic
        data = self._get_random_bytes(15)
        file_name = self._get_file_reference()
        self.service.create_file_from_bytes(share_name, directory_name, file_name, data)
        file = self.service.get_file_to_bytes(share_name, directory_name, file_name)
        content = file.content  # data

        # Download range
        file = self.service.get_file_to_bytes(share_name, directory_name, file_name,
                                              start_range=3, end_range=10)
        content = file.content  # data from 3-10

        # Upload from index in byte array
        file_name = self._get_file_reference()
        self.service.create_file_from_bytes(share_name, directory_name, file_name, data, index=3)

        # Content settings, metadata
        settings = ContentSettings(content_type='html', content_language='fr')
        metadata = {'val1': 'foo', 'val2': 'blah'}
        file_name = self._get_file_reference()
        self.service.create_file_from_bytes(share_name, directory_name, file_name, data,
                                            content_settings=settings,
                                            metadata=metadata)
        file = self.service.get_file_to_bytes(share_name, directory_name, file_name)
        metadata = file.metadata  # metadata={'val1': 'foo', 'val2': 'blah'}
        content_language = file.properties.content_settings.content_language  # fr
        content_type = file.properties.content_settings.content_type  # html

        # Progress
        # Use slightly larger data so the chunking is more visible
        data = self._get_random_bytes(8 * 1024 * 1024)

        def upload_callback(current, total):
            print('({}, {})'.format(current, total))

        def download_callback(current, total):
            print('({}, {}) '.format(current, total))

        file_name = self._get_file_reference()

        print('upload: ')
        self.service.create_file_from_bytes(share_name, directory_name, file_name, data,
                                            progress_callback=upload_callback)

        print('download: ')
        file = self.service.get_file_to_bytes(share_name, directory_name, file_name,
                                              progress_callback=download_callback)

        self.service.delete_share(share_name)
Example 10
def upload_to_azure(url, filename):
    print(f'Uploading {filename}')

    with urlopen(url + filename) as fp:
        byte = fp.read()

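    # Write the downloaded bytes to the 'data' directory of the 'covid-share' file share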
    file_service.create_file_from_bytes(
        'covid-share',
        'data',
        filename,
        byte,
        content_settings=ContentSettings(content_type='image/jpeg'))
Example 11
    def test_set_file_properties(self):
        # Arrange
        file_client = self._create_file()

        # Act
        content_settings = ContentSettings(
            content_language='spanish',
            content_disposition='inline')
        resp = file_client.set_http_headers(content_settings=content_settings)
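        # set_http_headers is the v12 SDK counterpart of the older set_file_properties call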

        # Assert
        properties = file_client.get_file_properties()
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)
        self.assertEqual(properties.content_settings.content_disposition, content_settings.content_disposition)
Example 12
def num3():
    file = request.form['filename']
    file += ".jpg"
    file_service = FileService(
        account_name='mystorge',
        account_key='<storage-account-key>'  # placeholder; never commit real account keys to source
    )
    file_service.create_file_from_path(
        'image1',
        None,
        file,
        file,
        content_settings=ContentSettings(content_type='image/png'))
    return "<h1> File Uploaded sucessfully</h1>"
Example 13
def UploadFile(target_directory,
               target_file_name,
               source_file,
               content_type_str='application/json'):
    #Default file type is JSON. For pickle files, use application/octet-stream
    #Could add parameters for types of file or overloaded functions; let's see what we need
    #This will work fine for basic upload/download for now
    #Source file needs to be a fully qualified path including the file name, like C:\\Users\\Jeffj\\Myfile.json

    _content_settings = ContentSettings(content_type=content_type_str)

    file_service.create_file_from_path(fileshare,
                                       target_directory,
                                       target_file_name,
                                       source_file,
                                       content_settings=_content_settings)
Example 14
    def test_set_file_properties(self):
        # Arrange
        file_name = self._create_file()

        # Act
        content_settings = ContentSettings(content_language='spanish',
                                           content_disposition='inline')
        resp = self.fs.set_file_properties(self.share_name,
                                           None,
                                           file_name,
                                           content_settings=content_settings)

        # Assert
        properties = self.fs.get_file_properties(self.share_name, None,
                                                 file_name).properties
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)
        self.assertEqual(properties.content_settings.content_disposition,
                         content_settings.content_disposition)
Example 15
 def Upload(self, loc_directory, fileshare_directory_name):
     local_path = os.path.expanduser("~/" + loc_directory)
     self.generator = self.file_service.list_directories_and_files(
         self.fileshare_name + "/" + fileshare_directory_name)
     print("\nUploading the following files to " + fileshare_directory_name)
     entries = os.listdir(local_path)
     # for entry in entries:
     #     print(entry)
     for entry in entries:
         self.file_service.create_file_from_path(
             self.fileshare_name,  #Fileshare name
             fileshare_directory_name,  # target directory inside the share
             entry,  #name of the file that is created
             local_path + entry,  #file that needs to be uploaded
             content_settings=ContentSettings(
                 content_type='application/vnd.ms-excel'))
     print("The followig files have been uploaded")
     #listing the files in the fileshare
     self.List_directory(self.fileshare_name + "/" +
                         fileshare_directory_name)
Example 16
def onboard(config, folder_name):
    blob_storage = get_azure_storage_client(config)
    uri = 'https://' + config.get(
        "storage_account") + '.blob.core.windows.net/' + config.get(
            "storage_container") + '/'
    functions_url = config.get('url') + '/api/onboarding'
    user_name = config.get("tagging_user")
    images = []
    for image in os.listdir(folder_name):
        if image.lower().endswith(('.png', '.jpg', '.jpeg', '.gif')):
            local_path = os.path.join(folder_name, image)
            print('Uploading image ' + image)

            # Upload the created file, use image name for the blob name
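            # Note: content_type is hard-coded to image/png even though jpg/jpeg/gif files are accepted above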
            blob_storage.create_blob_from_path(
                config.get("storage_container"),
                image,
                local_path,
                content_settings=ContentSettings(content_type='image/png'))
            images.append(uri + image)

    # Post this data to the server to add them to database and kick off active learning
    data = {}
    data['imageUrls'] = images
    headers = {'content-type': 'application/json'}
    query = {"code": config.get('key'), "userName": user_name}

    #TODO: Ensure we don't get 4xx or 5xx return codes
    response = requests.post(functions_url,
                             data=json.dumps(data),
                             headers=headers,
                             params=query)
    json_resp = response.json()
    count = len(json_resp['imageUrls'])
    print("Successfully uploaded " + str(count) + " images.")
    for url in json_resp['imageUrls']:
        print(url)
Example 17
def write_to_db(data, path, date_format="%Y-%m-%d", dbhost="", dbuser="", dbpass="", dbname="", azure_account="", azure_key="", pdf_path=""):
    """Insert extracted fields to mysql

    Parameters
    ----------
    data : dict
        Dictionary of extracted fields
    path : str
        path of the original pdf file
    date_format : str
        Date format used in generated file

    Notes
    ----
    Do give file name to the function parameter path.

    Examples
    --------
        re = output_module.write_to_db(res, f.name, args.output_date_format, 
                args.dbpass, args.azure_account, args.azure_key)

    """
    try:
        conn = MySQLdb.connect(host= dbhost,
                user=dbuser,
                passwd=dbpass,
                db=dbname)

        x = conn.cursor()

        if data['issuer'].replace("\'","\\\'") == 'Watercare Services Limited':
            connWatercare = MySQLdb.connect(host= dbhost,
                user=dbuser,
                passwd=dbpass,
                db='watercare')
            watercareCur = connWatercare.cursor()

    except Exception as e:
        logger.error("Connecting mysql error " + str(e))
        return 'link db failed'

    if data is None:
        return

    try:
        sqlstr = ''
        description = ''
        if 'description' in data:
            description += data['description'] + '; '
        if 'this_month_reading' in data:
            description += data['this_month_reading'] + '; '
        if 'last_month_reading' in data:
            description += data['last_month_reading'] + '; '
        if 'water_usage_0_water_1_wastewater' in data:
            description += 'water usage: ' + data['water_usage_0_water_1_wastewater'] + '; '
        if 'fixed_charges' in data:
            description += 'fixed charges: ' + data['fixed_charges'] + '; '

        #Norma requested @23.09.2020, Watercare description: Water charges 18/04/2019-22/05/2019
        if data['issuer'].replace("\'","\\\'") == 'Watercare Services Limited':
            if 'this_month_reading' in data and data['this_month_reading'] and 'last_month_reading' in data and data['last_month_reading']:
                last_month = re.findall("(\d{1,2}\-[a-zA-Z]{3}\-\d\d)", data['last_month_reading'])
                this_month = re.findall("(\d{1,2}\-[a-zA-Z]{3}\-\d\d)", data['this_month_reading'])
                if last_month and this_month:
                    description = 'Water charges ' + last_month[0] + ' - ' + this_month[0]
        gst = 0
        try:
            gst = float(data['gst'])
        except:
            pass
        gross = 0
        try:
            gross = float(data['amount'])
        except:
            pass
        net = gross - gst

        if data['issuer'].replace("\'","\\\'") == 'Watercare Services Limited':
            getbccodesql= 'select accountnumber, bccode from useraccount where accountnumber=\'' + data['invoice_number'].replace(u'\u2212', '-').decode('utf-8','ignore').encode("utf-8").replace("\'","\\\'") + '\''
            try:
                watercareCur.execute(getbccodesql)
            except Exception as e:
                logger.error("db operation error: " + str(e))
                print(getbccodesql)
            accountNumberRows = watercareCur.fetchall()
            if accountNumberRows:
                for row in accountNumberRows:
                    data['bc_number'] = row[1]
                    break

            checksql='select `BC number`, `Invoice Date`, `Invoice Total` from edms where `BC number` = ' + "'"+data['bc_number'].replace("\'","\\\'")+"'" + ' and `Invoice Date`= ' + ("'"+data['date'].strftime('%Y-%m-%d')+"'") + ' and `Invoice Total`= ' + str(gross)
            try:
                x.execute(checksql)
            except Exception as e:
                logger.error("db operation error: " + str(e))
                print(checksql)
            checkRows = x.fetchall()
            if checkRows:
                for row in checkRows:
                    return 'exists'



        onlinefilename = str(uuid.uuid4()) + '.pdf'
        if azure_account == 'nextcloud' and azure_key == 'nextcloud':
            uploadfolder = os.path.join(os.path.abspath(os.path.join(pdf_path, os.pardir)),'upload')
            if not os.path.exists(uploadfolder):
                os.makedirs(uploadfolder)
            copyfile(path, os.path.join(uploadfolder,onlinefilename))
        else:
            file_service = FileService(protocol = 'https', endpoint_suffix = 'core.windows.net', 
            account_name = azure_account, 
            account_key = azure_key)
            file_service.create_file_from_path(
                'cinvoice',
                None, # None directory_name targets the share root
                onlinefilename,
                path,
                content_settings=ContentSettings(content_type='application/pdf'))
            # file_service.get_file_to_path('cinvoice', None, onlinefilename, 'out-from-file-service.pdf')

        sqlstr = """
INSERT INTO edms set
`Document type` = 'invoice2data',
`Supplier` = '%s',
`Invoice Number` = '%s',
`BC number` = '%s',
`Invoice Date` = %s,
`Due Date` = %s,
`Net Total` = %s,
`Tax Total` = %s,
`Invoice Total` = %s,
`GST Number` = '%s',
`defaultcoa` = 0,
`defaultcoastring` = null,
`description` = '%s',
GUID = '%s',
flag = 0,
addTime = NOW(),
creditor_id = null,
creditor_name = null
""" % (
            data['issuer'].replace("\'","\\\'") if data['issuer'] is not None else '',
            (data['invoice_number'].replace(u'\u2212', '-').decode('utf-8','ignore').encode("utf-8").replace("\'","\\\'") + "/" + data['date'].strftime('%d%m%Y')) if data['invoice_number'] is not None else '',
            data['bc_number'].replace("\'","\\\'") if data['bc_number'] is not None else '',
            ("'"+data['date'].strftime('%Y-%m-%d')+"'") \
            if data['date'] is not None 
            and (type(data['date']) is datetime.date or type(data['date']) is datetime.datetime) \
            else 'null',
            ("'"+data['due_date'].strftime('%Y-%m-%d')+"'") \
            if 'due_date' in data and data['due_date'] is not None 
            and (type(data['due_date']) is datetime.date or type(data['due_date']) is datetime.datetime) \
            else 'null',
            net,
            gst,
            gross,
            data['gst_number'].replace(u'\u2212', '-').decode('utf-8','ignore').encode("utf-8").replace("\'","\\\'").replace(' ', '') if 'gst_number' in data and data['gst_number'] is not None else '',
            description.replace("\'",""),
            onlinefilename.replace("\'","\\\'")
        )
        x.execute(sqlstr)
        conn.commit()
        return 'succeed'
    except Exception as e:
        logger.error("db operation error: " + str(e))
        if sqlstr:
            print(str(sqlstr))
        conn.rollback()
    try:
        conn.close()
    except:
        pass
Example 18
    def upload_to_file_storage():
        #init file manager
        fnm = FilenameManager()

        # get a list of pdf files in dir_pdfs
        template = dir_upload + "**"
        if operating_system == 'mac' or operating_system == 'linux':
            template += '/*.pdf'
        elif operating_system == 'windows':
            template += '\\*.pdf'
        lpdfs = glob.glob(template, recursive=True)
        lpdfs.sort()
        #os.chdir(dir_pdfs) # needed because ftp.storbinary('STOR ...') works with filenames, not paths
        # connect to Azure file storage and upload files
        try:
            file_storage_url = dparameters['fs_server'].strip()
            file_storage_user = dparameters['fs_username'].strip()
            file_storage_pwd = dparameters['fs_password'].strip()
            file_storage_share = dparameters['fs_share'].strip()
            file_storage_dir = dparameters['fs_directory_prefix'].strip()
            file_service = FileService(account_name=file_storage_user,
                                       account_key=file_storage_pwd)
            try:
                if file_service.exists(file_storage_share):
                    print(
                        'Connection to Azure file storage successfully established...'
                    )
                    if len(file_storage_dir) > 0 and not file_service.exists(
                            file_storage_share,
                            directory_name=file_storage_dir):
                        file_service.create_directory(file_storage_share,
                                                      file_storage_dir)
                        print('Created directory:' + file_storage_dir)
                else:
                    print(
                        'Failed to connect to Azure file storage, share does not exist: '
                        + file_storage_share)
            except Exception as ex:
                print('Error connecting to Azure file storage: ', ex)

            for pdffile in lpdfs:
                file_details = db.readFileStatus(file_original_name=pdffile,
                                                 file_status='Uploaded')
                if file_details is None:
                    file_id = None
                    file_details = db.readFileStatus(
                        file_original_name=pdffile, file_status='Classified')
                    if file_details is not None:
                        file_id = file_details["id"]
                    dir, rpdffile = ntpath.split(pdffile)

                    destinationdir = ''
                    year = ''

                    if (dir + '\\') == dir_upload or (dir + '/') == dir_upload:
                        destinationdir = 'Unclassified'
                    else:
                        dir, year = ntpath.split(dir)
                        dir, destinationdir = ntpath.split(dir)

                    retries = 0
                    while retries < 3:
                        try:
                            path = pdffile
                            print('Uploading {}'.format(path))
                            filename = pdffile
                            remote_filename = fnm.azure_validate_filename(
                                rpdffile)
                            if not remote_filename:
                                return
                            if len(file_storage_dir) > 0:
                                directory = file_storage_dir + '/' + destinationdir
                            else:
                                directory = destinationdir
                            if not file_service.exists(
                                    file_storage_share,
                                    directory_name=directory):
                                file_service.create_directory(
                                    file_storage_share, directory)
                            if year:
                                directory += '/' + year
                            if not file_service.exists(
                                    file_storage_share,
                                    directory_name=directory):
                                file_service.create_directory(
                                    file_storage_share, directory)
                            print('Checking if {}/{} already exists'.format(
                                directory, remote_filename))
                            if file_service.exists(file_storage_share,
                                                   directory_name=directory,
                                                   file_name=remote_filename):
                                print('{}/{} already exists'.format(
                                    directory, remote_filename))
                                if file_id is None:
                                    db.saveFileStatus(
                                        script_name=script_name,
                                        file_original_name=pdffile,
                                        file_upload_path=directory,
                                        file_upload_name=remote_filename,
                                        file_status='Uploaded')
                                else:
                                    db.saveFileStatus(
                                        id=file_details["id"],
                                        file_upload_path=directory,
                                        file_upload_name=remote_filename,
                                        file_status='Uploaded')
                                os.remove(pdffile)
                                break
                            file_service.create_file_from_path(
                                file_storage_share,
                                directory,
                                remote_filename,
                                path,
                                content_settings=ContentSettings(
                                    content_type='application/pdf'))
                            if file_id is None:
                                db.saveFileStatus(
                                    script_name=script_name,
                                    file_original_name=pdffile,
                                    file_upload_path=directory,
                                    file_upload_name=remote_filename,
                                    file_status='Uploaded')
                            else:
                                db.saveFileStatus(
                                    id=file_details["id"],
                                    file_upload_path=directory,
                                    file_upload_name=remote_filename,
                                    file_status='Uploaded')
                            print('{}/{} uploaded'.format(
                                directory, remote_filename))
                            retries = 3
                            os.remove(pdffile)
                        except Exception as e:
                            print('Error uploading to Azure file storage,',
                                  str(e))
                            retries += 1
                else:
                    print('File {} was uploaded before'.format(
                        file_details["file_original_name"]))
                    os.remove(pdffile)
        except Exception as e:
            print(str(e))
            logging.critical(str(e))
Example 19
myshare = "privateuploadedblobs"

#Create a FileShare Object to manipulate in this script
file_service = FileService(account_name=FSAccountName,
                           account_key=FSAccountKey)

#Download and Delete the Blobs Uploaded
block_blob_service = BlockBlobService(account_name=BAccountName,
                                      account_key=BAccountKey)
generator = block_blob_service.list_blobs(mycontainer)
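# For each blob: download it locally, delete it from the container, then upload
# the local copy to the file share under a timestamped name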
for blob in generator:
    try:
        block_blob_service.get_blob_to_path(
            mycontainer, blob.name,
            str("%s\%s" % (directorytocreate, blob.name)))
        block_blob_service.delete_blob(mycontainer, blob.name)
        file_service.create_file_from_path(
            myshare,
            None,
            str("%s_%s" % (timestamp, blob.name)),
            str("%s\%s" % (directorytocreate, blob.name)),
            content_settings=ContentSettings(content_type='image'))
    except:
        continue

#Delete the directory where the blobs were copied
try:
    shutil.rmtree(directorytocreate)
except:
    pass
Example 20
def main(path, debug, remote_directory, typhoonname):
    initialize.setup_cartopy()
    start_time = datetime.now()
    print(
        '---------------------AUTOMATION SCRIPT STARTED---------------------------------'
    )
    print(str(start_time))
    #%% check for active typhoons
    print(
        '---------------------check for active typhoons---------------------------------'
    )
    print(str(start_time))
    remote_dir = remote_directory
    if debug:
        typhoonname = 'SURIGAE'
        remote_dir = '20210421120000'
        logger.info(f"DEBUGGING piepline for typhoon{typhoonname}")
        Activetyphoon = [typhoonname]
    else:
        # If passed typhoon name is None or empty string
        if not typhoonname:
            Activetyphoon = Check_for_active_typhoon.check_active_typhoon()
            if not Activetyphoon:
                logger.info("No active typhoon in PAR stop pipeline")
                sys.exit()
            logger.info(f"Running on active Typhoon(s) {Activetyphoon}")
        else:
            Activetyphoon = [typhoonname]
            remote_dir = remote_directory
            logger.info(f"Running on custom Typhoon {Activetyphoon}")

    Alternative_data_point = (start_time -
                              timedelta(hours=24)).strftime("%Y%m%d")

    date_dir = start_time.strftime("%Y%m%d%H")
    Input_folder = os.path.join(path, f'forecast/Input/{date_dir}/Input/')
    Output_folder = os.path.join(path, f'forecast/Output/{date_dir}/Output/')

    if not os.path.exists(Input_folder):
        os.makedirs(Input_folder)
    if not os.path.exists(Output_folder):
        os.makedirs(Output_folder)
    #download NOAA rainfall
    try:
        #Rainfall_data_window.download_rainfall_nomads(Input_folder,path,Alternative_data_point)
        Rainfall_data.download_rainfall_nomads(Input_folder, path,
                                               Alternative_data_point)
        rainfall_error = False
    except:
        traceback.print_exc()
        #logger.warning(f'Rainfall download failed, performing download in R script')
        logger.info(
            'Rainfall download failed, performing download in R script')
        rainfall_error = True
    ###### download UCL data

    try:
        ucl_data.create_ucl_metadata(path, os.environ['UCL_USERNAME'],
                                     os.environ['UCL_PASSWORD'])
        ucl_data.process_ucl_data(path, Input_folder,
                                  os.environ['UCL_USERNAME'],
                                  os.environ['UCL_PASSWORD'])
    except:
        logger.info('UCL download failed')
    #%%
    ##Create grid points to calculate Winfield
    cent = Centroids()
    cent.set_raster_from_pnt_bounds((118, 6, 127, 19), res=0.05)
    #this option is added to make the script scaleable globally To Do
    #cent.set_raster_from_pnt_bounds((LonMin,LatMin,LonMax,LatMax), res=0.05)
    cent.check()
    cent.plot()
    ####
    admin = gpd.read_file(
        os.path.join(path, "./data-raw/phl_admin3_simpl2.geojson"))
    df = pd.DataFrame(data=cent.coord)
    df["centroid_id"] = "id" + (df.index).astype(str)
    centroid_idx = df["centroid_id"].values
    ncents = cent.size
    df = df.rename(columns={0: "lat", 1: "lon"})
    df = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.lon, df.lat))
    #df.to_crs({'init': 'epsg:4326'})
    df.crs = {'init': 'epsg:4326'}
    df_admin = sjoin(df, admin, how="left").dropna()

    # Sometimes the ECMWF ftp server complains about too many requests
    # This code allows several retries with some sleep time in between
    n_tries = 0
    while True:
        try:
            logger.info("Downloading ECMWF typhoon tracks")
            bufr_files = TCForecast.fetch_bufr_ftp(remote_dir=remote_dir)
            fcast = TCForecast()
            fcast.fetch_ecmwf(files=bufr_files)
        except ftplib.all_errors as e:
            n_tries += 1
            if n_tries >= ECMWF_MAX_TRIES:
                logger.error(
                    f' Data downloading from ECMWF failed: {e}, '
                    f'reached limit of {ECMWF_MAX_TRIES} tries, exiting')
                sys.exit()
            logger.error(
                f' Data downloading from ECMWF failed: {e}, retrying after {ECMWF_SLEEP} s'
            )
            time.sleep(ECMWF_SLEEP)
            continue
        break

    #%% filter data downloaded in the above step for active typhoons  in PAR
    # filter tracks with name of current typhoons and drop tracks with only one timestep
    fcast.data = [
        track_data_clean.track_data_clean(tr) for tr in fcast.data
        if (tr.time.size > 1 and tr.name in Activetyphoon)
    ]

    # fcast.data = [tr for tr in fcast.data if tr.name in Activetyphoon]
    # fcast.data = [tr for tr in fcast.data if tr.time.size>1]
    for typhoons in Activetyphoon:
        #typhoons=Activetyphoon[0]
        logger.info(f'Processing data {typhoons}')
        fname = open(
            os.path.join(path, 'forecast/Input/',
                         "typhoon_info_for_model.csv"), 'w')
        fname.write('source,filename,event,time' + '\n')
        if not rainfall_error:
            line_ = 'Rainfall,' + '%srainfall' % Input_folder + ',' + typhoons + ',' + date_dir  #StormName #
            fname.write(line_ + '\n')

        line_ = 'Output_folder,' + '%s' % Output_folder + ',' + typhoons + ',' + date_dir  #StormName #
        #line_='Rainfall,'+'%sRainfall/' % Input_folder +','+ typhoons + ',' + date_dir #StormName #
        fname.write(line_ + '\n')

        #typhoons='SURIGAE'  # to run it manually for any typhoon
        # select windspeed for HRS model

        fcast.data = [tr for tr in fcast.data if tr.name == typhoons]
        tr_HRS = [tr for tr in fcast.data if (tr.is_ensemble == 'False')]

        if tr_HRS != []:
            # 0.84 converts ECMWF 10-minute average winds to 1-minute average
            HRS_SPEED = (tr_HRS[0].max_sustained_wind.values / 0.84).tolist()
            dfff = tr_HRS[0].to_dataframe()
            dfff[['VMAX', 'LAT',
                  'LON']] = dfff[['max_sustained_wind', 'lat', 'lon']]
            dfff['YYYYMMDDHH'] = dfff.index.values
            dfff['YYYYMMDDHH'] = dfff['YYYYMMDDHH'].apply(
                lambda x: x.strftime("%Y%m%d%H%M"))
            dfff['STORMNAME'] = typhoons
            dfff[['YYYYMMDDHH', 'VMAX', 'LAT', 'LON',
                  'STORMNAME']].to_csv(os.path.join(Input_folder,
                                                    'ecmwf_hrs_track.csv'),
                                       index=False)
            line_ = 'ecmwf,' + '%secmwf_hrs_track.csv' % Input_folder + ',' + typhoons + ',' + date_dir  #StormName #
            #line_='Rainfall,'+'%sRainfall/' % Input_folder +','+ typhoons + ',' + date_dir #StormName #
            fname.write(line_ + '\n')
            # Adjust track time step
            data_forced = [
                tr.where(tr.time <= max(tr_HRS[0].time.values), drop=True)
                for tr in fcast.data
            ]
            # data_forced = [track_data_clean.track_data_force_HRS(tr,HRS_SPEED) for tr in data_forced] # forced with HRS windspeed

            #data_forced= [track_data_clean.track_data_clean(tr) for tr in fcast.data] # taking speed of ENS
            # interpolate to 3h steps from the original 6h
            #fcast.equal_timestep(3)
        else:
            len_ar = np.min([len(var.lat.values) for var in fcast.data])
            lat_ = np.ma.mean([var.lat.values[:len_ar] for var in fcast.data],
                              axis=0)
            lon_ = np.ma.mean([var.lon.values[:len_ar] for var in fcast.data],
                              axis=0)
            YYYYMMDDHH = pd.date_range(fcast.data[0].time.values[0],
                                       periods=len_ar,
                                       freq="H")
            vmax_ = np.ma.mean(
                [var.max_sustained_wind.values[:len_ar] for var in fcast.data],
                axis=0)
            d = {
                'YYYYMMDDHH': YYYYMMDDHH,
                "VMAX": vmax_,
                "LAT": lat_,
                "LON": lon_
            }
            dfff = pd.DataFrame(d)
            dfff['STORMNAME'] = typhoons
            dfff['YYYYMMDDHH'] = dfff['YYYYMMDDHH'].apply(
                lambda x: x.strftime("%Y%m%d%H%M"))
            dfff[['YYYYMMDDHH', 'VMAX', 'LAT', 'LON',
                  'STORMNAME']].to_csv(os.path.join(Input_folder,
                                                    'ecmwf_hrs_track.csv'),
                                       index=False)
            line_ = 'ecmwf,' + '%secmwf_hrs_track.csv' % Input_folder + ',' + typhoons + ',' + date_dir  #StormName #
            #line_='Rainfall,'+'%sRainfall/' % Input_folder +','+ typhoons + ',' + date_dir #StormName #
            fname.write(line_ + '\n')
            data_forced = fcast.data

        # calculate windfields for each ensemble
        threshold = 0  #(threshold to filter dataframe /reduce data )
        df = pd.DataFrame(data=cent.coord)
        df["centroid_id"] = "id" + (df.index).astype(str)
        centroid_idx = df["centroid_id"].values
        ncents = cent.size
        df = df.rename(columns={0: "lat", 1: "lon"})

        #calculate wind field for each ensemble member
        list_intensity = []
        distan_track = []
        for tr in data_forced:
            logger.info(
                f"Running on ensemble # {tr.ensemble_number} for typhoon {tr.name}"
            )
            track = TCTracks()
            typhoon = TropCyclone()
            track.data = [tr]
            #track.equal_timestep(3)
            tr = track.data[0]
            typhoon.set_from_tracks(track, cent, store_windfields=True)
            # Make intensity plot using the high resolution member
            if tr.is_ensemble == 'False':
                logger.info("High res member: creating intensity plot")
                plot_intensity.plot_inensity(typhoon=typhoon,
                                             event=tr.sid,
                                             output_dir=Output_folder,
                                             date_dir=date_dir,
                                             typhoon_name=tr.name)
            windfield = typhoon.windfields
            nsteps = windfield[0].shape[0]
            centroid_id = np.tile(centroid_idx, nsteps)
            intensity_3d = windfield[0].toarray().reshape(nsteps, ncents, 2)
            intensity = np.linalg.norm(intensity_3d, axis=-1).ravel()
            timesteps = np.repeat(track.data[0].time.values, ncents)
            #timesteps = np.repeat(tr.time.values, ncents)
            timesteps = timesteps.reshape((nsteps, ncents)).ravel()
            inten_tr = pd.DataFrame({
                'centroid_id': centroid_id,
                'value': intensity,
                'timestamp': timesteps,
            })
            inten_tr = inten_tr[inten_tr.value > threshold]
            inten_tr['storm_id'] = tr.sid
            inten_tr['ens_id'] = tr.sid + '_' + str(tr.ensemble_number)
            inten_tr['name'] = tr.name
            inten_tr = (pd.merge(inten_tr,
                                 df_admin,
                                 how='outer',
                                 on='centroid_id').dropna().groupby(
                                     ['adm3_pcode', 'ens_id'],
                                     as_index=False).agg(
                                         {"value": ['count', 'max']}))
            inten_tr.columns = ['adm3_pcode', 'storm_id', 'value_count', 'v_max']
            list_intensity.append(inten_tr)
            distan_track1 = []
            for index, row in df.iterrows():
                dist = np.min(
                    np.sqrt(
                        np.square(tr.lat.values - row['lat']) +
                        np.square(tr.lon.values - row['lon'])))
                distan_track1.append(dist * 111)
            dist_tr = pd.DataFrame({
                'centroid_id': centroid_idx,
                'value': distan_track1
            })
            dist_tr['storm_id'] = tr.sid
            dist_tr['name'] = tr.name
            dist_tr['ens_id'] = tr.sid + '_' + str(tr.ensemble_number)
            dist_tr = (pd.merge(dist_tr,
                                df_admin,
                                how='outer',
                                on='centroid_id').dropna().groupby(
                                    ['adm3_pcode', 'name', 'ens_id'],
                                    as_index=False).agg({'value': 'min'}))
            dist_tr.columns = ['adm3_pcode', 'name', 'storm_id', 'dis_track_min']
            distan_track.append(dist_tr)
        df_intensity_ = pd.concat(list_intensity)
        distan_track1 = pd.concat(distan_track)

        typhhon_df = pd.merge(df_intensity_,
                              distan_track1,
                              how='left',
                              on=['adm3_pcode', 'storm_id'])

        typhhon_df.to_csv(os.path.join(Input_folder, 'windfield.csv'),
                          index=False)

        line_ = 'windfield,' + '%swindfield.csv' % Input_folder + ',' + typhoons + ',' + date_dir  #StormName #
        #line_='Rainfall,'+'%sRainfall/' % Input_folder +','+ typhoons + ',' + date_dir #StormName #
        fname.write(line_ + '\n')
        fname.close()

        #############################################################
        #### Run IBF model
        #############################################################
        os.chdir(path)

        if platform == "linux" or platform == "linux2":  #check if running on linux or windows os
            # linux
            try:
                p = subprocess.check_call(
                    ["Rscript", "run_model_V2.R",
                     str(rainfall_error)])
            except subprocess.CalledProcessError as e:
                logger.error('failed to execute R script')
                raise ValueError(str(e))
        elif platform == "win32":  #if OS is windows edit the path for Rscript
            try:
                p = subprocess.check_call([
                    "C:/Program Files/R/R-4.1.0/bin/Rscript", "run_model_V2.R",
                    str(rainfall_error)
                ])
            except subprocess.CalledProcessError as e:
                logger.error('failed to execute R script')
                raise ValueError(str(e))

        #############################################################
        # send email in case of landfall-typhoon
        #############################################################

        image_filenames = list(Path(Output_folder).glob('*.png'))
        data_filenames = list(Path(Output_folder).glob('*.csv'))

        if image_filenames or data_filenames:
            message_html = """\
            <html>
            <body>
            <h1>IBF model run result </h1>
            <p>Please find attached a map and data with updated model run</p>
            <img src="cid:Impact_Data">
            </body>
            </html>
            """
            Sendemail.sendemail(
                smtp_server=os.environ["SMTP_SERVER"],
                smtp_port=int(os.environ["SMTP_PORT"]),
                email_username=os.environ["EMAIL_LOGIN"],
                email_password=os.environ["EMAIL_PASSWORD"],
                email_subject='Updated impact map for a new Typhoon in PAR',
                from_address=os.environ["EMAIL_FROM"],
                to_address_list=os.environ["EMAIL_TO_LIST"].split(','),
                cc_address_list=os.environ["EMAIL_CC_LIST"].split(','),
                message_html=message_html,
                filename_list=image_filenames + data_filenames)
        else:
            raise FileNotFoundError(
                f'No .png or .csv found in {Output_folder}')
        ##################### upload model output to 510 datalake ##############

        file_service = FileService(
            account_name=os.environ["AZURE_STORAGE_ACCOUNT"],
            protocol='https',
            connection_string=os.environ["AZURE_CONNECTING_STRING"])
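        # With fail_on_exist=False (the default), create_share and create_directory
        # return False rather than raising if the target already exists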
        file_service.create_share('forecast')
        OutPutFolder = date_dir
        file_service.create_directory('forecast', OutPutFolder)

        for img_file in image_filenames:
            file_service.create_file_from_path(
                'forecast',
                OutPutFolder,
                os.fspath(img_file.parts[-1]),
                img_file,
                content_settings=ContentSettings(content_type='image/png'))

        for data_file in data_filenames:
            file_service.create_file_from_path(
                'forecast',
                OutPutFolder,
                os.fspath(data_file.parts[-1]),
                data_file,
                content_settings=ContentSettings(content_type='text/csv'))

        ##################### upload model input (rainfall + wind intensity) to 510 datalake ##############
        # To DO

    print(
        '---------------------AUTOMATION SCRIPT FINISHED---------------------------------'
    )
    print(str(datetime.now()))
Example 21
def upload_to_file_storage():
    # get a list of pdf files in dir_pdfs
    lpdfs = glob.glob(dir_pdfs + "*.pdf")
    lpdfs.sort()
    #os.chdir(dir_pdfs) # needed for ftp.storbinary('STOR command work not with paths but with filenames
    # connect to FTP server and upload files
    try:
        file_storage_url = dparameters['fs_server'].strip()
        file_storage_user = dparameters['fs_username'].strip()
        file_storage_pwd = dparameters['fs_password'].strip()
        file_storage_share = dparameters['fs_share'].strip()
        file_storage_dir = dparameters['fs_directory_prefix'].strip()
        file_service = FileService(account_name=file_storage_user,
                                   account_key=file_storage_pwd)
        try:
            if file_service.exists(file_storage_share):
                print(
                    'Connection to Azure file storage successfully established...'
                )
                if len(file_storage_dir) > 0 and not file_service.exists(
                        file_storage_share,
                        directory_name=file_storage_dir):
                    file_service.create_directory(file_storage_share,
                                                  file_storage_dir)
                    print('Created directory:' + file_storage_dir)
            else:
                print(
                    'Failed to connect to Azure file storage, share does not exist: '
                    + file_storage_share)
        except Exception as ex:
            print('Error connecting to Azure file storage: ', ex)

        for pdffile in lpdfs:
            rpdffile = ntpath.basename(pdffile)

            try:
                destinationdir = ddestdir[ddestdiropp[rpdffile]]
            except:
                destinationdir = 'Unclassified'

            retries = 0
            while retries < 3:
                try:
                    path = os.path.join(dir_pdfs, rpdffile)
                    print('Uploading {}'.format(path))
                    remote_filename = rpdffile
                    if not remote_filename:
                        return
                    if len(file_storage_dir) > 0:
                        directory = file_storage_dir + '/' + destinationdir
                    else:
                        directory = destinationdir
                    if not file_service.exists(file_storage_share,
                                               directory_name=directory):
                        file_service.create_directory(
                            file_storage_share, directory)
                    if not config.getboolean(section,
                                             'overwrite_remote_files',
                                             fallback=False):
                        print('Checking if {}/{} already exists'.format(
                            directory, remote_filename))
                        if file_service.exists(file_storage_share,
                                               directory_name=directory,
                                               file_name=remote_filename):
                            print('{}/{} already exists'.format(
                                directory, remote_filename))
                            return
                    file_service.create_file_from_path(
                        file_storage_share,
                        directory,
                        remote_filename,
                        path,
                        content_settings=ContentSettings(
                            content_type='application/pdf'))
                    print('{} uploaded'.format(path))
                    retries = 3
                except Exception as e:
                    print('Error uploading to Azure file storage,', str(e))
                    retries += 1
    except Exception as e:
        print(str(e))
        logging.critical(str(e))


######
Example 22
from azure.storage.file import FileService

file_service = FileService(account_name="account_name",
                           account_key="account_key")

# List the files and directories in the share
generator = file_service.list_directories_and_files('my-file')
for file_or_dir in generator:
    print(file_or_dir.name)

# Upload the local test.txt to Azure Storage
from azure.storage.file import ContentSettings
file_service.create_file_from_path(
    'my-file',
    None,
    'test.txt',
    'test.txt',
    content_settings=ContentSettings(content_type='text/plain'))
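
# For the reverse direction, get_file_to_path downloads the file back from the
# share; 'downloaded-test.txt' is an assumed local target name for this sketch.
file_service.get_file_to_path('my-file', None, 'test.txt', 'downloaded-test.txt')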
Example 23
def uploadfile(src, dest):
    file_service.create_file_from_path('kaggle',
                                       None, # None directory_name targets the root of the 'kaggle' share
                                       dest,
                                       src,
                                       content_settings=ContentSettings(content_type='text/plain'))
Example 24
from azure.storage.file import FileService, ContentSettings
import yaml

config_file = 'config.yml'

with open(config_file, 'r') as stream:
    try:
        f = yaml.safe_load(stream)
    except yaml.YAMLError as exc:
        print(exc)

file_service = FileService(account_name=f['account_name'],
                           account_key=f['account_key'])

file_service.create_share('housing_prices')  # share name must match the upload target below

file_service.create_file_from_path(
    'housing_prices',
    None,  # None directory_name targets the share root
    'myfile',
    'test.txt',
    content_settings=ContentSettings(content_type='text/plain'))  # 'txt' is not a MIME type
Example 25
def upload_to_file_storage(filename):
    global script_name
    old_filename = filename
    downloads_path = dir_pdfs
    fnm = FilenameManager()
    retries = 0
    while retries < 3:
        try:
            path = os.path.join(downloads_path, old_filename)
            file_details = db.readFileStatus(file_original_name=old_filename,
                                             file_status='Uploaded')
            if file_details is not None:
                print(
                    'File {} was already uploaded before'.format(old_filename))
                retries = 3
                break
            file_details = db.readFileStatus(file_original_name=old_filename,
                                             file_status='Downloaded')
            print('Uploading {}'.format(path))
            remote_filename = _get_remote_filename(old_filename)
            directory = None
            if not remote_filename:
                return
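            # _get_remote_filename may return (directory, filename, year) or just (directory, filename)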
            try:
                directory, filename, year = remote_filename
            except:
                directory, filename = remote_filename
                year = None
            filename = fnm.azure_validate_filename(filename)
            if len(file_storage_dir) > 0:
                directory = file_storage_dir + '/' + directory
            if not file_service.exists(file_storage_share,
                                       directory_name=directory):
                file_service.create_directory(file_storage_share, directory)
            if year:
                directory += '/' + year
                if not file_service.exists(file_storage_share,
                                           directory_name=directory):
                    file_service.create_directory(file_storage_share,
                                                  directory)
            if not overwrite_remote_files:
                print('Checking if {}/{} already exists'.format(
                    directory, filename))
                if file_service.exists(file_storage_share,
                                       directory_name=directory,
                                       file_name=filename):
                    print('{}/{} already exists'.format(directory, filename))
                    if file_details is None:
                        db.saveFileStatus(script_name=script_name,
                                          file_original_name=old_filename,
                                          file_upload_path=directory,
                                          file_upload_name=filename,
                                          file_status='Uploaded')
                    else:
                        db.saveFileStatus(id=file_details['id'],
                                          script_name=script_name,
                                          file_upload_path=directory,
                                          file_upload_name=filename,
                                          file_status='Uploaded')
                    return
            file_service.create_file_from_path(
                file_storage_share,
                directory,
                filename,
                path,
                content_settings=ContentSettings(
                    content_type='application/pdf'))
            if file_details is None:
                db.saveFileStatus(script_name=script_name,
                                  file_original_name=old_filename,
                                  file_upload_path=directory,
                                  file_upload_name=filename,
                                  file_status='Uploaded')
            else:
                db.saveFileStatus(id=file_details['id'],
                                  script_name=script_name,
                                  file_upload_path=directory,
                                  file_upload_name=filename,
                                  file_status='Uploaded')
            print('{} uploaded'.format(path))
            retries = 3
        except Exception as e:
            print('Error uploading to Azure file storage,', str(e))
            filename = old_filename
            retries += 1

Example 26
#Initialize an Azure Storage Account File Service Instance
omnipresence_storage_account = FileService(account_name=omnipresence_storage_account_name, account_key=omnipresence_storage_account_key)

#test if your storage file share exists on Azure or not, if not, create it
if (not omnipresence_storage_account.exists(omnipresence_storage_file_share)):
    omnipresence_storage_account.create_share(omnipresence_storage_file_share, quota='10')

#walk through the current directory, create directories under the Azure file share, and upload local files, skipping hidden files and directories
for base_dir, dirs, file_names in os.walk(".", topdown=True):
    file_names = [f for f in file_names if not f[0] == '.'] #filter out files whose names begin with a dot
    dirs[:] = [d for d in dirs if not d[0] == '.'] #filter out directories whose names begin with a dot
    for local_file_name in file_names:
        remote_file_name = os.path.join(base_dir, local_file_name)[2:]
        local_file_name = remote_file_name
        if (omnipresence_storage_account.exists(omnipresence_storage_file_share)):
            omnipresence_storage_account.create_file_from_path(
                omnipresence_storage_file_share,
                None, # directory_name is None because remote_file_name already embeds the relative path
                remote_file_name,
                local_file_name,
                content_settings=ContentSettings(content_type='application/octet-stream'))  # 'file' is not a MIME type; use a generic default
        print('Uploaded the file -', local_file_name, '\n')

    for directory in dirs:
        remote_dir_path = os.path.join(base_dir, directory)[2:]
        if (not omnipresence_storage_account.exists(omnipresence_storage_file_share, directory_name=remote_dir_path)):
            omnipresence_storage_account.create_directory(omnipresence_storage_file_share, remote_dir_path, metadata=None, fail_on_exist=False, timeout=None)
        print('Created the remote folder -', os.path.join(base_dir,directory)[2:])