Example 1: mux audio and video blobs into an MKV with ffmpeg
def main(req: func.HttpRequest, audioBlob: func.InputStream,
         videoBlob: func.InputStream,
         outputBlob: func.Out[func.InputStream]) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    #name = req.get_json()['Audio']['FileName']

    outFile = tempfile.NamedTemporaryFile(suffix='.mkv')

    audioFile = tempfile.NamedTemporaryFile(suffix='.ogg')
    audioData = audioBlob.read(-1)
    audioFile.write(audioData)
    audioFile.flush()  # ensure the bytes hit disk before ffmpeg opens the file by name
    audioLen = len(audioData)

    videoFile = tempfile.NamedTemporaryFile(suffix='.webm')
    videoData = videoBlob.read(-1)
    videoFile.write(videoData)
    videoFile.flush()
    videoLen = len(videoData)

    # outFile already exists, so pass -y so ffmpeg may overwrite the zero-length
    # file; ffmpeg itself must be available on the function host (e.g. via a
    # custom container image)
    result = subprocess.run([
        'ffmpeg', '-y', '-i', audioFile.name, '-i', videoFile.name,
        '-acodec', 'copy', '-vcodec', 'copy', outFile.name
    ], capture_output=True)
    logging.info(result.stderr)

    # ffmpeg wrote to the file by path, so rewind our handle before reading it back
    outFile.seek(0)
    data = outFile.read()
    outputBlob.set(data)
    dataLen = len(data)

    return func.HttpResponse(f"All done {audioLen} {dataLen}", status_code=200)
def main(req: func.HttpRequest, audioBlob: func.InputStream,
         videoBlob: func.InputStream, transcriptBlob: func.InputStream,
         outputBlob: func.Out[func.InputStream]) -> func.HttpResponse:

    outFile = tempfile.NamedTemporaryFile(suffix='.mkv')

    audioFile = tempfile.NamedTemporaryFile(suffix='.ogg')
    audioFile.write(audioBlob.read(-1))
    audioFile.flush()  # flush before ffmpeg opens the file by name

    videoFile = tempfile.NamedTemporaryFile(suffix='.webm')
    videoFile.write(videoBlob.read(-1))
    videoFile.flush()

    transcriptFile = tempfile.NamedTemporaryFile(suffix='.srt')
    transcriptFile.write(transcriptBlob.read(-1))
    transcriptFile.flush()

    # Stream-copy audio and video, and mux the SRT transcript in as a subtitle
    # track; outFile already exists, so pass -y so ffmpeg may overwrite the
    # zero-length file
    result = subprocess.run([
        'ffmpeg', '-y', '-i', audioFile.name, '-i', videoFile.name,
        '-f', 'srt', '-i', transcriptFile.name,
        '-acodec', 'copy', '-vcodec', 'copy', '-c:s', 'srt', outFile.name
    ], capture_output=True)
    logging.info(result.stderr)

    # rewind before reading the file ffmpeg wrote by path
    outFile.seek(0)
    outputBlob.set(outFile.read())

    return func.HttpResponse("All done", status_code=200)
Example 3: read public and private PGP key blobs from an HTTP trigger
def main(req: func.HttpRequest, privatekeyblob: func.InputStream,
         publickeyblob: func.InputStream) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    publicKey = publickeyblob.read().decode("utf-8")
    logging.info(publicKey)
    privatekey = privatekeyblob.read().decode("utf-8")  # read but unused in this sample

    return func.HttpResponse(f"Hello {publicKey}!")
def main(inputblob: func.InputStream, outputblob: func.Out[bytes], erroroutputblob: func.Out[bytes]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {inputblob.name}\n"
                 f"Blob Size: {inputblob.length} bytes")
    # read the stream once; a second read() would start at end-of-stream
    # and hand the error output an empty payload
    raw = inputblob.read()
    try:
        body = json.loads(raw)
        body_enriched = get_luis(body)
        outputblob.set(json.dumps(body_enriched))
    except Exception as e:
        logging.error(f"Encountered error processing {inputblob.name}:")
        logging.error(e)
        erroroutputblob.set(raw)
Example 5: stage model blobs into local files for loading
def download_model(tempdir,
                   itos_blob: func.InputStream,
                   weight_blob: func.InputStream,
                   enc_weight_blob: func.InputStream):
    itos_path = os.path.join(tempdir, "itos.pkl")
    weight_path = os.path.join(tempdir, "lm_5_ep_lr2-3_5_stlr.pth")
    enc_weight_path = os.path.join(tempdir, "lm_5_ep_lr2-3_5_stlr_enc.pth")

    with open(itos_path, "wb") as f:
        f.write(itos_blob.read())
    with open(weight_path, "wb") as f:
        f.write(weight_blob.read())
    with open(enc_weight_path, "wb") as f:
        f.write(enc_weight_blob.read())
    return itos_path, weight_path, enc_weight_path
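# A minimal usage sketch of download_model (our addition, not part of the
# original sample): the three blob bindings would be declared in
# function.json, and the actual model-loading step is elided.
def main(req: func.HttpRequest, itos_blob: func.InputStream,
         weight_blob: func.InputStream,
         enc_weight_blob: func.InputStream) -> func.HttpResponse:
    with tempfile.TemporaryDirectory() as tempdir:
        itos_path, weight_path, enc_weight_path = download_model(
            tempdir, itos_blob, weight_blob, enc_weight_blob)
        # ... load the language model from the staged paths here ...
    return func.HttpResponse("Model staged", status_code=200)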
Example 6: serve cached per-country COVID data as CSV or a plot
def main(req: func.HttpRequest,
         covblob: func.InputStream) -> func.HttpResponse:

    country = req.params.get('country') or 'Russia'
    output = req.params.get('output') or 'csv'

    logging.info(
        'covidata function triggered with country={} and blob with len={}'.
        format(country, covblob.length))

    binary = covblob.read()
    # logging.info("binary is {}, len={}".format(binary[:15],len(binary)))
    binary = base64.decodebytes(binary)
    data = pickle.loads(binary)

    pop, df = data[country]

    if output == "plot":
        plt.figure()
        CountryData.plot(pop, df)
        buf = io.BytesIO()
        plt.savefig(buf, format='jpg')
        buf.seek(0)
        return func.HttpResponse(body=buf.read(), mimetype='image/jpeg')
    else:
        res = df.to_csv()
        return func.HttpResponse(res)
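# For reference, a blob in the format consumed above could be produced like
# this (a sketch; the upstream pipeline and CountryData are not shown):
#   blob_bytes = base64.encodebytes(pickle.dumps({'Russia': (pop, df)}))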
Example 7: upsert Strava activities from a JSON blob into a database
def main(myblob: func.InputStream):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")
    acts = json.loads(myblob.read().decode('utf-8'))
    logging.info(acts)

    session = sessionmaker(bind=engine)()

    for act in acts:
        average_heartrate = act.get('average_heartrate')
        average_speed = act.get('average_speed')
        distance = act.get('distance')
        elapsed_time = act.get('elapsed_time')
        moving_time = act.get('moving_time')
        name = act.get('name')
        start_date = act.get('start_date')
        wid = act.get('id')

        print(
            'hr:%s, sp:%s, dist:%s, etime:%s, mtime:%s, name:%s, sdate:%s, wid:%s'
            % (average_heartrate, average_speed, distance, elapsed_time,
               moving_time, name, start_date, wid))

        # reuse the values extracted above instead of re-reading the dict
        sact = StA(average_heartrate=average_heartrate,
                   average_speed=average_speed,
                   distance=distance,
                   elapsed_time=elapsed_time,
                   moving_time=moving_time,
                   name=name,
                   start_date=start_date,
                   wid=wid)
        session.merge(sact)

    # commit once after the loop rather than per record
    session.commit()
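Example 8: queue-triggered thumbnail generation with PIL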
def main(msg: func.QueueMessage, inputblob: func.InputStream,
         outputblob: func.Out[func.InputStream]) -> None:

    blob_source_raw_name = msg.get_body().decode('utf-8')
    logging.info('Python queue trigger function processed a queue item: %s',
                 blob_source_raw_name)

    # thumbnail filename (the [:-4] slice assumes a three-letter extension)
    local_file_name_thumb = blob_source_raw_name[:-4] + "_thumb.jpg"

    #####
    # Download the blob to a local file (note: on Azure Functions only the
    # temp directory is reliably writable)
    #####
    with open(blob_source_raw_name, "w+b") as local_blob:
        local_blob.write(inputblob.read())

    #####
    # Use PIL to create a thumbnail
    #####
    new_size = 200, 200
    im = Image.open(local_blob.name)
    im.thumbnail(new_size)
    im.save(local_file_name_thumb, quality=95)

    # write the thumbnail to the output binding, closing the handle when done
    with open(local_file_name_thumb, "rb") as new_thumbfile:
        outputblob.set(new_thumbfile.read())
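Example 9: batch-load weather station records into Table storage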
async def main(stationData: func.InputStream):
    """ Azure function body """
    logging.info('Python blob trigger function processed blob (%s) - %s bytes',
                 stationData.name, stationData.length)

    table_service = TableService(
        connection_string=os.environ['TableBindingConnection'])

    table_name = 'WeatherStations'
    table_service.create_table(table_name, fail_on_exist=False)

    batch_manager = BatchManager(table_service, table_name)

    bytes_data = stationData.read()

    # wrap the decoded bytes in a text stream; rebinding the stationData
    # parameter (as the original did) obscures the code
    station_stream = StringIO(str(bytes_data, 'ascii'), newline="\n")

    station_list = parse_station_list(station_stream)

    logging.info('Processing %i records', len(station_list))

    for record in station_list:
        entity = create_entity(record)
        batch_manager.add_entity(entity)

    batch_manager.process()

    logging.info('Updated %s - %i records', table_name, len(station_list))
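# BatchManager is not shown in this listing. A minimal sketch of what it
# might look like, assuming the legacy azure-cosmosdb-table SDK used above
# (TableService) and its batch limits: at most 100 operations per batch,
# all sharing a single PartitionKey.
from collections import defaultdict
from azure.cosmosdb.table import TableBatch  # assumption: legacy table SDK

class BatchManager:
    def __init__(self, table_service, table_name, batch_size=100):
        self.table_service = table_service
        self.table_name = table_name
        self.batch_size = batch_size
        self.entities = []

    def add_entity(self, entity):
        self.entities.append(entity)

    def process(self):
        # group by PartitionKey, then commit in chunks of batch_size
        groups = defaultdict(list)
        for entity in self.entities:
            groups[entity['PartitionKey']].append(entity)
        for items in groups.values():
            for i in range(0, len(items), self.batch_size):
                batch = TableBatch()
                for entity in items[i:i + self.batch_size]:
                    batch.insert_or_replace_entity(entity)
                self.table_service.commit_batch(self.table_name, batch)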
Example 10: convert a PDF blob into a stream of JPEG pages
def main(myBlob: func.InputStream, myOutputBlob: func.Out[func.InputStream]):
    """ Convert a PDF file into JPEG images.

    Args:
        myBlob (func.InputStream): A PDF file dropped into a storage account.
        myOutputBlob (func.Out[func.InputStream]): A binary sink to write new data into a storage account
    """
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myBlob.name}\n"
                 f"Blob Size: {myBlob.length} bytes")

    if not myBlob.name.endswith(".pdf"):
        logging.info(f"{myBlob.name} isn't a PDF file, aborting.")
        return

    # Convert the PDF blob into a single byte stream of multiple readable
    # JPEG images; the JPEG headers will later serve to tell the pages apart
    img_list = convert_from_bytes(
        myBlob.read(), fmt="jpg", thread_count=cpu_count())
    bIO = BytesIO()
    for img in img_list:
        img.save(bIO, format="jpeg")

    # Save the binary stream to a new Blob
    myOutputBlob.set(bIO.getvalue())
    logging.info(
        f"Processing complete for file {myBlob.name} ({len(img_list)} pages)")
Example 11: split telemetry into per-conversation blobs
def main(myblob: func.InputStream):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    connection_string = os.environ['conversationalkm_STORAGE']
    container = os.environ['telemetry_processed']

    blob_service_client = BlobServiceClient.from_connection_string(
        connection_string)

    # Read the blob and split it into lines
    input_data = myblob.read().decode("utf-8").split('\n')

    # Processing
    messages = []
    for line in input_data:
        try:
            messages.append(process_line(json.loads(line)))
        except ValueError as e:
            logging.error(e)
    messages = list(filter(None, messages))

    # Extract all ConversationId
    conversation_ids = set(map(lambda x: x['ConversationId'], messages))
    for conv in conversation_ids:
        save_conversation(conv,
                          messages=list(
                              filter(lambda x: x['ConversationId'] == conv,
                                     messages)),
                          blob_service_client=blob_service_client,
                          container=container)
Example 12: score a CSV blob and upload predictions to ADLS
def main(myblob: func.InputStream):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    text = myblob.read().decode('utf-8')
    df = pd.read_csv(StringIO(text), sep=';')

    prediction = score_model(df)
    df['prediction'] = prediction

    json_result = df.to_json(orient="records")

    logging.info(json_result)

    storage_account = os.environ.get("ADLS_STORAGE_ACCOUNT")
    client_id = os.environ.get("ADLS_CLIENT_ID")
    client_secret = os.environ.get("ADLS_CLIENT_SECRET")
    tenant_id = os.environ.get("TENANT_ID")

    utc_timestamp = datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S")

    service_client = initialize_adls(storage_account, client_id, client_secret,
                                     tenant_id)
    upload_file_to_directory(service_client, "diabetes", "diabetes-prediction",
                             json_result, f"prediction_{utc_timestamp}.json")
Example 13: HTTP-triggered scoring with a pickled model blob
def main(req: func.HttpRequest, inputBlob: func.InputStream,
         outputQueue: func.Out[func.QueueMessage]) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    url = req.params.get('url')
    if not url:
        try:
            req_body = req.get_json()
        except ValueError:
            pass
        else:
            url = req_body.get('url')

    if url:
        names = [
            'preg', 'plas', 'pres', 'skin', 'test', 'mass', 'pedi', 'age',
            'class'
        ]
        dataframe = pd.read_csv(url, names=names)
        xscaled = apply_model(dataframe)
        model = pickle.loads(inputBlob.read())
        y_pred = model.predict(xscaled)
        y_pred_str = str(y_pred)
        # enqueue before returning; in the original this call came after the
        # return statement and was never reached
        outputQueue.set(url)
        return func.HttpResponse(y_pred_str)
    else:
        return func.HttpResponse(
            "Please pass a url on the query string or in the request body",
            status_code=400)
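Example 14: Monte Carlo expected value for a stock-forward portfolio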
def main(myblob: func.InputStream, EEOutputBlob: func.Out[func.InputStream]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    msg = myblob.read()
    try:
        data = literal_eval(msg.decode('utf8'))
    except (ValueError, SyntaxError):
        # this is a blob trigger, so log and bail out rather than build an
        # HTTP response that nothing will receive
        logging.error('Unable to read input blob')
        return

    try:
        portfolio_df = stock_forwards_mc.convert_dict_to_dataframe(data)
    except ValueError as e:
        # without this early return, portfolio_df would be unbound below
        logging.error(str(e))
        return

    mc = stock_forwards_mc.StockForwardMC(portfolio_df)
    expected_values = mc.get_expected_value()
    ev_as_string = np.array2string(expected_values,
                                   precision=2,
                                   separator=', ',
                                   max_line_width=1000000)
    EEOutputBlob.set('"Expected Value":' + ev_as_string)
Example 15: tag images with the Computer Vision API into Cosmos DB
def main(myblob: func.InputStream, doc: func.Out[func.Document]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    img_data = myblob.read()
    try:
        api_url = "{0}vision/v1.0/analyze?{1}".format(api_endpoint, params)
        logging.info("API URL:{}".format(api_url))

        r = requests.post(api_url,
                    headers=headers,
                    data=img_data)

        parsed = r.json()
        logging.info("Response:")
        logging.info(json.dumps(parsed, sort_keys=True, indent=2))

        # Set output data
        outdata = {}
        outdata['name'] = myblob.name
        taglist = parsed['description']['tags']
        outdata['text'] =  ' '.join(taglist)
        logging.info(json.dumps(outdata, sort_keys=True, indent=2))

        ## Store output data using Cosmos DB output binding
        doc.set(func.Document.from_json(json.dumps(outdata)))
    except Exception as e:
        logging.error('Error:')
        logging.error(e)
Example 16: MNIST digit prediction logged to Table storage
def main(myblob: func.InputStream, outputblob: func.Out[func.InputStream]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")
    # Load the MNIST model weights and architecture
    weights = pickle.load(open(os.environ['ModelWeightsPath'], 'rb'))
    model_json = pickle.load(open(os.environ['ModelJSONPath'], 'rb'))  # don't shadow the json module
    model = model_from_json(model_json)
    model.set_weights(weights)
    # Read the image stream
    imgStream = myblob.read(-1)
    # Convert to an image object (np.frombuffer replaces the deprecated np.fromstring)
    nparr = np.frombuffer(imgStream, np.uint8)
    img = cv2.imdecode(nparr, cv2.IMREAD_GRAYSCALE)
    img = img.reshape(1, 28, 28, 1)
    logging.info(img.shape)
    #Predicting on image
    prediction = model.predict(img)
    #Maximum probability prediction
    logging.info(np.argmax(prediction))
    #Connection to table storage
    table_service = TableService(
        connection_string=os.environ['AzureWebJobsStorage'])
    #Storing in table storage (adding 1 hour to convert servertime to my timezone)
    values = [
        os.path.basename(myblob.name),
        str(datetime.datetime.now() + datetime.timedelta(hours=1)),
        str(np.argmax(prediction))
    ]
    names = ["PartitionKey", "RowKey", "Prediction"]
    dictionary = dict(zip(names, values))
    table_service.insert_entity('imagedata', dictionary)

    #Saving to output container - Shows how to save a possible processed image
    outputblob.set(imgStream)
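Example 17: parse SLO result files and fan out to queues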
def main(msg: func.QueueMessage, contentStream: func.InputStream, gsheetQueue: func.Out[str], statsQueue: func.Out[str]) -> None:

    conn_string = os.environ["DBConnectionString"]

    # Only process SLO files
    if not msg.get_body().decode("utf-8").endswith("-slo.txt"):
        logging.error("Unrecognized file name. Skipping.")
        return

    try:
        content = contentStream.read()
        data = slo_file_parser.parse_slo_file(content)
        logging.info("Parsed SLO file.")

    except Exception as e:
        logging.error("Failed to parse SLO file.")
        logging.error(e)
        raise

    save_db(data, conn_string)

    statsQueue.set("Database Updated")
    logging.info("Added entry to stats queue.")

    gsheetQueue.set(json.dumps(data))
    logging.info("Added entry to GSheet queue.")
Example 18: convert an Excel blob to tab-separated text with pandas
def main(inblob: func.InputStream, outblob: func.Out[bytes]):

    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {inblob.name}\n"
                 f"Blob Size: {inblob.length} bytes"
                 f"Uri: {inblob.uri}")

    filename = inblob.name
    basefilename, ext = os.path.splitext(filename)

    blob_bytes = inblob.read()

    # Load file into a Pandas dataframe
    exceldf = pd.read_excel(blob_bytes, 'Sheet1', index_col=None)

    # Replace all columns having spaces with underscores
    exceldf.columns = [c.replace(' ', '_') for c in exceldf.columns]

    # Replace all fields having line breaks with space
    df = exceldf.replace('\n', ' ', regex=True)

    #Write dataframe into csv
    result = df.to_csv(sep='\t',
                       encoding='utf-8',
                       index=False,
                       quotechar='#',
                       line_terminator='\r\n')

    outblob.set(result)
Example 19: queue-triggered blob copy
def main(msg: func.QueueMessage, inputblob: func.InputStream, outputblob: func.Out[func.InputStream]) -> None:

    blob_source_raw_name = msg.get_body().decode('utf-8')
    logging.info('Python queue trigger function processed a queue item: %s', blob_source_raw_name)  

    content = inputblob.read()
    logging.info('Read %d bytes from blob: %s', len(content), blob_source_raw_name)
    outputblob.set(content)
Example 20: copy a blob after logging its contents
def main(msg: func.QueueMessage, inputblob: func.InputStream,
         outputblob: func.Out[func.InputStream]) -> None:
    logging.info('Queue item id:%s, body:%s, expiration_time:%s', msg.id,
                 msg.get_body().decode('utf-8'), msg.expiration_time)
    #https://github.com/Azure/azure-functions-python-worker/issues/576
    # logging.info(f'Python Queue trigger function processed : {inputblob.name}')
    clear_text = inputblob.read()
    logging.info(f'Clear text: {clear_text}')
    # write the bytes already read; passing the InputStream itself (as the
    # original did) produces an empty blob because the stream is at EOF
    # (see the issue linked above)
    outputblob.set(clear_text)
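Example 21: report the byte count of an audio blob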
def main(req: func.HttpRequest, audioBlob: func.InputStream,
         videoBlob: func.InputStream) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    name = req.get_json()['Audio']['FileName']
    data_bytes = audioBlob.read(-1)
    nbytes = len(data_bytes)
    return func.HttpResponse(f"Hello {nbytes} read from {name}",
                             status_code=200)
Example 22: convert an insights JSON blob to VTT captions
def main(req: func.HttpRequest, jsonBlob: func.InputStream,
         outputBlob: func.Out[func.InputStream]) -> func.HttpResponse:

    insightsData = json.loads(jsonBlob.read(-1))
    transcript = []
    ExtractTranscript.insights_to_vtt(insightsData, transcript)
    outputBlob.set(''.join(transcript))

    return func.HttpResponse(f"Done", status_code=200)
Example 23: copy a blob and log its size
def main(inputblob: func.InputStream, outputblob: func.Out[bytes]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Blob Name: {inputblob.name}\n"
                 f"Blob Size: {inputblob.length} bytes\n"
                 f"Blob URI: {inputblob.uri}")
    readbytes = inputblob.read()
    logging.info(f'Trigger function processed {len(readbytes)} bytes')
    # Copy blob to output
    outputblob.set(readbytes)
    logging.info(f"Copied")
Example 24: persist a vote JSON blob as a Cosmos DB document
def main(myblob: func.InputStream, doc: func.Out[func.Document]):

    vote_data = json.loads(myblob.read().decode('utf-8'))

    voter_id = vote_data.get('voter_id')
    vote = vote_data.get('vote')

    logging.info(f'Processing vote for {vote} by {voter_id}')

    doc.set(func.Document.from_json(json.dumps(vote_data)))
Example 25: run Forms Recognizer and write the JSON to two outputs
def main(inputblob: func.InputStream,
         outputblob: func.Out[func.InputStream],
         csvoutputblob: func.Out[func.InputStream]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {inputblob.name}\n"
                 f"Blob Size: {inputblob.length} bytes\n")

    output_json = FormsRecognizer.call_forms_recognizer(
        form_data=inputblob.read())
    outputblob.set(json.dumps(output_json))
    # note: in this sample the csv-named output also receives the raw JSON
    csvoutputblob.set(json.dumps(output_json))
Example 26: PGP-encrypt a blob with a public key
def main(inputblob: func.InputStream,
         privatekeyblob: func.InputStream,
         publickeyblob: func.InputStream,
         outputblob: func.Out[func.InputStream]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {inputblob.name}\n"
                 f"Blob Size: {inputblob.length} bytes")
  
    publicKey = publickeyblob.read().decode("utf-8")
    logging.info(publicKey)
    privatekey = privatekeyblob.read().decode("utf-8")

    # Get the PGP object
    pgpyObj = PGPy("AmericanAirlines")
    # Import the public key
    key = pgpyObj.import_keys(publicKey)
    # Load the source content into memory
    srcContent = inputblob.read().decode("utf-8")
    # Encrypt the content and write it to the output binding
    encryptedContent = pgpyObj.encrypt_msg(srcContent)
    outputblob.set(encryptedContent)
Example 27: read a whitespace-delimited table and emit CSV
def main(myblob: func.InputStream, outputblob: func.Out[func.InputStream]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    blob_bytes = myblob.read()
    blob_to_read = BytesIO(blob_bytes)
    # a multi-character separator requires the python parser engine
    df = pd.read_table(blob_to_read, sep='   ', engine='python')
    print("Length of table: " + str(len(df.index)))

    outputs = df.to_csv(index=False)
    outputblob.set(outputs)
Example 28: blur an image blob with OpenCV
def main(myblob: func.InputStream, outputblob: func.Out[func.InputStream]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    img_buffer = myblob.read()
    img_data = np.frombuffer(img_buffer, dtype=np.uint8)
    image_result = blur_function(img_data)

    # imencode returns (success, buffer); the buffer is already an ndarray
    result = cv2.imencode(".jpg", image_result)[1]
    outputblob.set(result.tobytes())
Example 29: enrich game-week player JSON into Cosmos DB documents
def main(myblob: func.InputStream, outputDoc: func.Out[func.DocumentList]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    outdata = json.loads(myblob.read().decode('utf-8'))
    download_time = outdata["download_time"]
    gameweek = transformations.get_game_week(outdata["events"])
    transformations.add_gw_and_download_time(outdata["elements"],
                                             download_time, gameweek)
    transformations.add_unique_id(outdata["elements"])
    outputDoc.set(func.DocumentList(outdata["elements"]))
Example 30: normalize commented JSON with JsonComment
def main(triggerblob: func.InputStream, outputblob: func.Out[str]):

    # JsonComment tolerates comments and trailing commas in the source JSON
    parser = JsonComment(json)

    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {triggerblob.name}\n"
                 f"Blob Size: {triggerblob.length} bytes")

    raw_text = triggerblob.read().decode("utf-8")
    py_obj = parser.loads(raw_text)
    str_parsed = json.dumps(py_obj)
    outputblob.set(str_parsed)