Example #1
def run_first_parsing_pipeline(darksky_data):
    try:
        block_blob_service = BlockBlobService(
            account_name='soilhumiditydata293s',
            account_key=
            '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
        )
        table_service = TableService(
            account_name='soilhumiditydata293s',
            account_key=
            '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
        )

        container_name = "soilmoisturemessages"
        table_name = "trainingDataPastSoilMoistureMessagesV2"

        # First, collect data from the old soil moisture hub blobs.
        sensor_blob_list = block_blob_service.list_blobs(
            container_name, prefix='soil-moisture-hub-free-293s')
        sensor_blob_list = list(sensor_blob_list)

        ending_blob_name = 'soil-moisture-hub-free-293s/00/2019/02/28/04/11'

        # Find the ending bound (index of the last blob) to be processed.
        ending_index = 0
        for blob in sensor_blob_list:
            if blob.name == ending_blob_name:
                ending_index = sensor_blob_list.index(blob)
                break

        entities_created = []
        interpolated_data = {}
        data_point_counter = 0

        # Loop through blobs - run interpolation and upload every time we are in a new hour of the day.
        for blob in sensor_blob_list[:ending_index + 1]:
            print("Currently processing blob: " + blob.name)
            blob_minute = blob.name[-2:]
            blob_content = block_blob_service.get_blob_to_text(
                container_name, blob.name, encoding='latin-1').content

            next_temp = 0
            while next_temp != -1:
                blob_content = blob_content[next_temp:]
                new_entity = Entity()
                new_entity.PartitionKey = datetime.datetime.strftime(
                    blob.properties.creation_time, "%Y%m%d")
                next_temp = integrate_localsensor_data(new_entity,
                                                       blob_content)
                new_entity.RowKey = datetime.datetime.strftime(blob.properties.creation_time, "%H%M") \
                                    + new_entity.get('messageIdLocal').zfill(2)
                entities_created.append(new_entity)
                data_point_counter += 1

            # If our blob is in 58 or 59 minute, run interpolation and upload values for the past hour.
            if blob_minute == '58' or blob_minute == '59' or blob.name == ending_blob_name:
                print("Reached end of hour: " + blob.name)
                blob_hour = int(blob.name[-5:-3])
                blob_date = int(blob.name[-8:-6])

                current_hour = blob_hour + 1 if blob_hour != 23 else 0
                current_date = blob_date if blob_hour != 23 else blob_date + 1
                current_dark_sky_data = darksky_data[datetime.datetime(
                    2019, 2, current_date, current_hour)]
                past_dark_sky_data = darksky_data[datetime.datetime(
                    2019, 2, blob_date, blob_hour)]

                print("Interpolating for current hour batch...")
                for data in past_dark_sky_data:
                    if str(data) != "time":
                        starting_value = past_dark_sky_data[data]
                        ending_value = current_dark_sky_data[data]
                        delta = (ending_value -
                                 starting_value) / data_point_counter
                        interpolated_data[data] = []
                        for i in range(data_point_counter):
                            interpolated_data[data].append(starting_value +
                                                           (i * delta))

                # Upload entities.
                print("Uploading for current hour batch...")
                for x in range(data_point_counter):
                    e = entities_created[x]
                    for data in interpolated_data:
                        e[data] = interpolated_data.get(data)[x]
                    table_service.insert_entity(table_name, e)

                print("Entities inserted: {}".format(len(entities_created)))

                # Reset counters.
                data_point_counter = 0
                entities_created = []
                interpolated_data = {}

    except Exception as e:
        print(e)
        time.sleep(delaytime)

        ser.write(bytearray([0x30]))
        time.sleep(delaytime)

        ser.write(bytearray([0x31]))
        time.sleep(delaytime)


if __name__ == '__main__':
    # Time marker
    memoryTime = time.time()
    openDoor = False

    # Instantiate the Azure Table service
    table_service = TableService(account_name='acc_name',
                                 account_key='acc_key')
    print(
        table_service.get_entity('testAPPs',
                                 'ComputerVision',
                                 'instruct',
                                 select='action').action)

    # Infinite loop
    while True:
        # If the table's 'action' field is 'open'
        if table_service.get_entity('testAPPs',
                                    'ComputerVision',
                                    'instruct',
                                    select='action').action == 'open':
            # Open the door
            if not openDoor:
Example #3
import logging
import json
import uuid

import azure.functions as func
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity

table_name = "people"
partition_key = "1"

conn_string = "DefaultEndpointsProtocol=https;AccountName=hackgt19;AccountKey=24wGa1RHd0BnemSDBbqRzvvTAB7Qy4IAN28E9de6OLR98wxnFljJXnKaBtzqJd2F53SmtNZP2NnZCPZkeL6wlQ==;EndpointSuffix=core.windows.net"
table = TableService(connection_string=conn_string)
logging.info(table)


def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')
    # logging.info(table)
    new_name = req.params.get('name')
    new_school = req.params.get('school')

    new_person = Entity()
    new_person.PartitionKey = "1"
    new_person.RowKey = str(uuid.uuid4())
    new_person.name = new_name
    new_person.school = new_school

    etag = table.insert_entity(table_name, new_person)

    return func.HttpResponse(str(new_person.RowKey), status_code=200)
Example #4
def main(data):
    table_service = TableService(account_name=data['storage'], account_key=data['skey'])
    #create_table(table_service)
    #add_order(table_service)
    query_set(table_service)
Example #5
}).payload.data.decode()
twitter_client_id = secrets.access_secret_version(
    request={
        "name": "projects/952416783871/secrets/twitter_client_id/versions/1"
    }).payload.data.decode()
twitter_client_secret = secrets.access_secret_version(request={
    "name":
    "projects/952416783871/secrets/twitter_client_secret/versions/1"
}).payload.data.decode()

# connect to resources
es = Elasticsearch(elastic_host,
                   http_auth=(elastic_username_data, elastic_password_data),
                   scheme="https",
                   port=443)
table_service = TableService(account_name=azure_account_name_twitter,
                             account_key=azure_account_key_twitter)
db = firestore.Client()

# authenticate Twitter API
auth = tweepy.OAuthHandler(twitter_client_id, twitter_client_secret)
api = tweepy.API(auth, retry_count=1)


# get hydrated user information of a list of user ids and update Firestore and Table Storage
def twitter_ingest_secondary_get_users(message, context):

    # get the chunk of ids from the Pub/Sub message
    chunk1 = json.loads(message['attributes']['chunk1'])
    chunk2 = json.loads(message['attributes']['chunk2'])
    chunk = chunk1 + chunk2
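
The rest of the handler is cut off here. A hypothetical sketch of the remaining steps (hydrating the ids with Tweepy and mirroring them into Firestore and Table Storage; the 'twitter_users' collection and 'users' table names are assumptions, not from the original):

    # Continuation sketch -- collection and table names are assumptions.
    for i in range(0, len(chunk), 100):   # lookup_users accepts at most 100 ids per call
        try:
            users = api.lookup_users(user_ids=chunk[i:i + 100])
        except tweepy.TweepError as err:
            print('lookup_users failed: {}'.format(err))
            continue

        for user in users:
            record = {
                'id': user.id_str,
                'screen_name': user.screen_name,
                'followers_count': user.followers_count
            }
            db.collection('twitter_users').document(user.id_str).set(record)
            record['PartitionKey'] = 'user'
            record['RowKey'] = user.id_str
            table_service.insert_or_replace_entity('users', record)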
Example #6
import os
import json
from datetime import datetime
from azure.cosmosdb.table.tableservice import TableService
import access
import logging
import sys
sys.path.insert(1, '/home/pi/Lab3AgritechCron/')
import logHandler
logHandler.run("azure_ml_sensors")

account_name = access.get_name('1')
account_key = access.get_key('1')
table_service = TableService(account_name, account_key)
table_name = 'Sensors'


def run():
    for file in os.listdir("/home/pi/crate-4.0.6/data/az"):
        if file.startswith("sensors"):
            filename = os.path.join("/home/pi/crate-4.0.6/data/az", file)
            with open(filename, 'r') as f:
                for line in f:
                    se = json.loads(line)
                    exist_flag = 0
                    se['PartitionKey'] = table_name
                    se['RowKey'] = str(se["macadd"])
                    try:
                        entities = table_service.query_entities(
                            table_name,
                            filter="PartitionKey eq 'Sensors'",
Example #7
                    action='store')
parser.add_argument('--accountKey',
                    dest='accountKey',
                    required=True,
                    action='store')
parser.add_argument('--type',
                    dest='type',
                    required=True,
                    action='store',
                    choices=['option1', 'option2', 'option3'])
parser.add_argument('--timestamp',
                    dest='timestamp',
                    required=True,
                    action='store')
args = parser.parse_args()

table_service = TableService(args.accountName, args.accountKey)

filterString = "RowKey eq '" + args.type + "' and Timestamp lt datetime'" + args.timestamp + "'"
tasks = table_service.query_entities('<table>', filter=filterString)

if len(tasks.items) == 0:
    print('No entities found.')
    quit()

for task in tasks:
    print(task.PartitionKey)
    task.field1 = False
    if not args.dryRun:
        table_service.update_entity('<table>', task)
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
import os

account_key = "#ACCOUNT_KEY_GOES_HERE"

table_service = TableService(account_name='ACCOUNT_NAME_HERE',
                             account_key="ACCOUNT KEY GOES HERE")


def add_new_camera(cameraID, latitude, longitude):
    new_camera = {
        'PartitionKey': cameraID,
        'RowKey': cameraID,
        'lat': latitude,
        'long': longitude
    }
    table_service.insert_entity('cameras', new_camera)


def get_all_cameras():
    #returns list of all cameras on the map
    camera_list = []  #(cameraID, lat, long)
    cameras = table_service.query_entities('cameras')
    for camera in cameras:
        print(
            f"Camera ID : {camera.PartitionKey} has position ({camera.lat},{camera.long})"
        )
        camera_list.append([camera.PartitionKey, camera.lat, camera.long])
    return camera_list
Example #9
from flask import Flask, request
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
from datetime import datetime
from flask_cors import CORS, cross_origin

app = Flask(__name__)
table_service = TableService(
    account_name='covidcountstorage',
    account_key=
    'NE7xyUWKIFiRnUowiB3VPwlT6O5bJn1qxhQtSnUvHtubqnd8MbOFdnMLY385DXAPkc32SkxwWf+Kx++D9mjykQ=='
)
CORS(app)


@app.route('/')
def homepage():
    return "Welcome to the COVID Capacity Counter!"


@app.route('/store', methods=['GET', 'POST'])
def store():
    if request.method == 'POST':
        return ''


# parameters: storename (required)
# sample usage:
# /createStore?storename=Walmart
# /createStore?storename=CVS
# /createStore?storename=Chilis
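
The /createStore handler itself is missing from the excerpt. A hypothetical sketch of what such a route could look like, assuming a 'stores' table and a simple capacity counter (both assumptions, not taken from the original):

# Hypothetical sketch -- not the original implementation.
@app.route('/createStore', methods=['GET'])
def create_store():
    storename = request.args.get('storename')
    if not storename:
        return 'storename is required', 400
    store = {
        'PartitionKey': 'stores',          # assumed partition scheme
        'RowKey': storename,
        'count': 0,                        # assumed capacity counter
        'created': str(datetime.now())
    }
    table_service.insert_or_replace_entity('stores', store)   # assumed table name
    return 'Created store ' + storename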
Example #10
def create_file_record(url, unique_id, partition_key, short_id, name, extension, relative_path, exifString, xmpString, url_list, md5, sha256, instance_name, account_key):
    start = time.time()

    utcnow = datetime.datetime.utcnow().isoformat()

    # Each ingested (and successfully processed) file has a unique record containing
    # its metadata, a list of preview URLs, and checksums.
    file_record = {
        'PartitionKey': partition_key,      # using tree structure for partition key a good idea? #possiblybadidea #possiblygoodidea
        'RowKey': short_id,                 # using unique file name for key a good idea? #badidea #mustbeuniqueinpartition
        'uid': unique_id,                   # globally uniqueId
        'url': url,                         # master blob url
        'name': name,                       # filename
        'ext' : extension,                  # file extension
        'path' : relative_path,             # path / folder file lives in
        'it': utcnow,                       # ingestion_time
        'pvs' : json.dumps(url_list),       # json list of preview urls
        'md5' : md5,                        # md5 checksum of total file binary data at ingestion time
        'sha256' : sha256,                  # sha256 checksum of total file binary data at ingestion time
        'exif' : exifString,                # exif dumped as json by imagemagick
        'xmp' : xmpString,                  # if exif identified APP1 data, xmp dump in xml by imagemagick
        'created_time' : utcnow,            # file creation time, using now, TODO: pick up file metadata if provided in upload
        'modified_time' : utcnow            # file mod time, using now, TODO: pick up file metadata if provided in upload
    }

    table_service = TableService(account_name=instance_name, account_key=account_key)
    table_service.insert_or_replace_entity('files', file_record)

    print "file_record inserted in {} sec".format(time.time()-start)

    # Change record to folder facing
    # TODO: Strip large metadata blocks and keep info needed for UIs

    file_record["PartitionKey"] = relative_path.replace("/", "%2F")
    file_record['item_type'] = 'file'
    table_service.insert_or_replace_entity('folders', file_record)

    # Ensure we have folder records for the entire path
    # TODO: Optimization; Check if the final folder exists, if so, skip step (we know all higher level paths have been created too)
    
    folder_struct = relative_path.split("/")

    # partition keys cannot have / in them, this is the best I can come up with atm
    folder_struct[0] = "%2F" # path starts with slash, will have empty slot first, replace with /
    last_folder = folder_struct[0] # weird exception case, root refers to itself as parent, but easy to check for later

    for folder in folder_struct:
        if len(folder) == 0: # ignore empty paths, tolerate e.g. folder1//folder2/folder3/
            continue
        
        folder_record = {
            'PartitionKey': last_folder,
            'RowKey': folder,
            'created_time': utcnow,
            'modified_time': utcnow,
            'nf_flag': True,
            'nf_time': utcnow,
            'item_type': 'folder'
        }
        
        if len(folder) > 3: # special handling of root
            last_folder = last_folder + "%2F" + folder

        # if folder already exist, we will fail, remove the creation properties and
        # try a merge operation (that should work unless service is down)
        try:
            table_service.insert_entity('folders', folder_record)
        except:
            folder_record.pop('created_time')
            table_service.insert_or_merge_entity('folders', folder_record)
Example #11
 def __init__(self):
     self.table_service = TableService(account_name='toonestorage01', account_key='sIo3TKwG40eH2a9MpjZdGgWwetkGDV3NcgFhwZN2sFerhrj3kWLTiKQO3wGO7bd9sjmGoBnPl2CbqrIJcsnG8g==')
Example #12
###
print('\nLet\'s create an Azure Storage Table to store some data.')
input('Press Enter to continue...')

# Each storage account has a primary and secondary access key.
# These keys are used by aplications to access data in your storage account, such as Tables.
# Obtain the primary storage access key for use with the rest of the demo

response = azurerm.get_storage_account_keys(auth_token, subscription_id,
                                            resourcegroup_name,
                                            storageaccount_name)
storageaccount_keys = json.loads(response.text)
storageaccount_primarykey = storageaccount_keys['keys'][0]['value']

# Create the Table with the Azure Storage SDK and the access key obtained in the previous step
table_service = TableService(account_name=storageaccount_name,
                             account_key=storageaccount_primarykey)
response = table_service.create_table('pizzatable')
if response:
    print('Storage Table: pizzatable created successfully.\n')
else:
    print('Error creating Storage Table.\n')

time.sleep(1)

###
# Use the Azure Storage SDK for Python to create some entries in the Table
###
print(
    'Now let\'s add some entries to our Table.\nRemember, Azure Storage Tables is a NoSQL datastore, so this is similar to adding records to a database.'
)
input('Press Enter to continue...')
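
The example is cut off before the entries are actually added. Based on the other snippets in this listing, the truncated continuation presumably inserts dictionary entities with table_service.insert_entity; a minimal sketch (the entity fields shown here are illustrative assumptions, not the original data):

# Sketch of the truncated continuation -- entity fields are illustrative only.
pizza_order = {
    'PartitionKey': 'pizzaOrders',
    'RowKey': '001',
    'topping': 'pepperoni',
    'size': 'large'
}
table_service.insert_entity('pizzatable', pizza_order)
print('Entity inserted into pizzatable.\n')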
Example #13
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity

table_service = TableService(
    account_name='recorder',
    account_key=
    '76hLvt3Ij8A0zwEEguzCXxGG1BQcpYgqkQ+e8f21Nmg47ldLUCWRoBnuJgPlELE8GIcpjxRq1oR2wBKCOePwiQ=='
)

table_service.create_table('ccrecorder')

task = Entity()
task.PartitionKey = 'tasksSeattle'
task.RowKey = '002'
task.description = 'Wash the car'
task.priority = 100
table_service.insert_entity('ccrecorder', task)
Example #14
logging.getLogger("TableService").setLevel(logging.WARNING)

logger = logging.getLogger(__name__)


MODEL_CLASSES = {
    'gpt2': (GPT2Config, GPT2ForLatentConnector, GPT2Tokenizer),
    'openai-gpt': (OpenAIGPTConfig, OpenAIGPTLMHeadModel, OpenAIGPTTokenizer),
    'bert': (BertConfig, BertForLatentConnector, BertTokenizer),
    'roberta': (RobertaConfig, RobertaForMaskedLM, RobertaTokenizer)
}

    
storage_name="textae"
key=r"6yBCXlblof8DVFJ4BD3eNFTrGQCej6cKfCf5z308cKnevyHaG+yl/m+ITVErB9yt0kvN3ToqxLIh0knJEfFmPA=="
ts = TableService(account_name=storage_name, account_key=key)


class TextDataset(Dataset):
    def __init__(self, tokenizer, file_path='train', block_size=512):
        assert os.path.isfile(file_path)
        directory, filename = os.path.split(file_path)
        cached_features_file = os.path.join(directory, f'cached_lm_{block_size}_{filename}')

        if os.path.exists(cached_features_file):
            logger.info("Loading features from cached file %s", cached_features_file)
            with open(cached_features_file, 'rb') as handle:
                self.examples = pickle.load(handle)
        else:
            logger.info("Creating features from dataset file at %s", directory)
Example #15
# Databricks notebook source
# See https://docs.microsoft.com/en-us/azure/cosmos-db/table-storage-how-to-use-python
# Make sure that azure-cosmosdb-table is installed on the cluster. This can be done by adding azure-cosmosdb-table  as PyPi library on the cluster
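
On Databricks Runtime 7.1 and later the package can also be installed from a notebook cell rather than as a cluster library; a hedged alternative (this cell is not part of the original notebook):

# MAGIC %pip install azure-cosmosdb-table

# COMMAND ----------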

AccountName="<<storage account>>"
AccountKey="<<access key>>"
table_name="<<your table name>>"

# COMMAND ----------

from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity

table_service = TableService(account_name=AccountName, account_key=AccountKey)
table_service.create_table(table_name)

# COMMAND ----------

entity_example = {
   'PartitionKey': 'Car',
   'RowKey': 'Tesla',
   'text': 'Musk',
   'color': 'Purple',
   'price': '5'
}
table_service.insert_entity(table_name, entity_example)
entity_example = {
   'PartitionKey': 'Car',
   'RowKey': 'Audi',
   'text': 'Germany',
   'color': 'Green',
Example #16
def get_table_service():
    if 'table_service' not in g:
        g.table_service = TableService(
            connection_string=os.environ['AZURE_STORAGE_CONNECTION_STRING'])

    return g.table_service
Example #17
def upload_picture():
    now = datetime.datetime.now()
    row_key = get_reversed_unix_time()

    stream = get_picture_stream()
    image, rgb_array = get_image_from_stream_as_resize(stream)

    df = get_dataframe_from_rgb_array(rgb_array)
    min_rgb, max_rgb = analysis_sky.analysis_rgb(df)

    with open('/home/pi/projects/sunset-list/api_key.json', 'r') as f:
        app_key = json.loads(f.read())
        table_service = TableService(
            connection_string=app_key['azure_storage_connection'])

        task = {
            'PartitionKey': 'nyuknyuk',
            'RowKey': '{0}'.format(row_key),
            'FilePath': None,
            'SkyStatus': 'none',
            'Temperature': None,
            'RainType': None,
            'WindSpeed': None,
            'MinVectorRed': int(min_rgb.red),
            'MinVectorGreen': int(min_rgb.green),
            'MinVectorBlue': int(min_rgb.blue),
            'MaxVectorRed': int(max_rgb.red),
            'MaxVectorGreen': int(max_rgb.green),
            'MaxVectorBlue': int(max_rgb.blue)
        }

        # Weather data
        if now.minute % 5 == 0:
            try:
                sky_status, temp, rain_type, wind_speed = weather.load_weather_data(
                )
                task['SkyStatus'] = sky_status
                task['Temperature'] = temp
                task['RainType'] = rain_type
                task['WindSpeed'] = wind_speed
                with open('/home/pi/projects/sunset-list/task.json', 'w') as f:
                    json.dump(task, f)
            except:
                pass
        else:
            if (os.path.isfile('/home/pi/projects/sunset-list/task.json')):
                with open('/home/pi/projects/sunset-list/task.json', 'r') as f:
                    dump_task = json.loads(f.read())
                    task['SkyStatus'] = dump_task['SkyStatus']
                    task['Temperature'] = dump_task['Temperature']
                    task['RainType'] = dump_task['RainType']
                    task['WindSpeed'] = dump_task['WindSpeed']

        # Distinguish between night and day
        if is_night(df):
            table_service.insert_entity('nuknuk', task)
        else:
            # stream image upload
            imagefile = io.BytesIO()
            image.save(imagefile, format='BMP')
            imagefile.seek(0)

            file_name = '{0}.bmp'.format(row_key)
            block_blob_service = BlockBlobService(
                connection_string=app_key['azure_storage_connection'])
            blob = block_blob_service.create_blob_from_stream(
                'nuknuk', file_name, imagefile)
            imagefile.close()

            file_url = 'https://urbanlist.blob.core.windows.net/nuknuk/{0}'.format(
                file_name)
            task['FilePath'] = file_url
            table_service.insert_entity('nuknuk', task)

    stream.close()
STORAGE_ACCOUNT = os.environ["STORAGE_ACCOUNT"]
STORAGE_KEY = os.environ["STORAGE_KEY"]
TABLE_NAME = os.environ["TABLE_NAME"]

PYBRICKS_PATH = os.environ.get("PYBRICKS_PATH", ".")

parser = argparse.ArgumentParser()
parser.add_argument("hub", metavar="<hub>")
parser.add_argument("start_commit", metavar="<start commit>")
parser.add_argument("end_commit", metavar="<end commit>")
args = parser.parse_args()

pybricks = git.Repo(PYBRICKS_PATH)
assert not pybricks.bare, "Repository not found"

service = TableService(STORAGE_ACCOUNT, STORAGE_KEY)

# build each commit starting with the oldest
for commit in reversed(
        list(
            pybricks.iter_commits(f"{args.start_commit}..{args.end_commit}"))):
    print("Checking out", commit.hexsha)
    pybricks.git.checkout(commit.hexsha)

    # update only required submodules
    pybricks.git.submodule("update", "micropython")
    pybricks.git.submodule("update", "lib/libfixmath")
    if args.hub in ["cityhub", "movehub", "technichub", "primehub"]:
        pybricks.submodule("micropython").module().git.submodule(
            "update", "lib/stm32lib")
    if args.hub == "nxt":
Example #19
        'Ocp-Apim-Subscription-Key': TRANS_ACCESS_KEY,
        'Content-type': 'application/json',
        'X-ClientTraceId': str(uuid.uuid4())
    }
    conn = http.client.HTTPSConnection(HOST)
    conn.request("POST", PATH + PARAM, content, headers)
    response = conn.getresponse()
    return response.read()


# Output the result in a readable format
#output = json.dumps(json.loads(result), indent=4, ensure_ascii=False)
#print (output)

table_service = TableService(
    connection_string=
    'DefaultEndpointsProtocol=https;AccountName=k1cosmos;AccountKey=rvw8KdHLNumJp6q6JpcDSbeI1oaBNrwM7iJ1r9fVD0WeJHkL1soQAubah1J35zlV6UCgGUArvm13U3VTsFQfWQ==;TableEndpoint=https://k1cosmos.table.cosmosdb.azure.com:443/'
)


@route("/")
def index():
    todo_list = get_todo_list()
    #return template("Trans_test/index", todo_list=todo_list)
    return template("Trans_test/top", todo_list=todo_list)


# Specify POST as the method and implement the add function
@route("/add", method="POST")
def add():
    todo = request.forms.getunicode("todo_list")
    requestBody = [{
Example #20
 def apply_migrations(self) -> None:
     self.results["deploy"]["func-storage"]["value"]
     name = self.results["deploy"]["func-name"]["value"]
     key = self.results["deploy"]["func-key"]["value"]
     table_service = TableService(account_name=name, account_key=key)
     migrate(table_service, self.migrations)
from trainer import train_model
import os, uuid
from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
import subprocess

# Azure Account Keys
connect_str = "DefaultEndpointsProtocol=https;AccountName=btpstorage;AccountKey=SLMZB4+BfyT5V06NHKnJfUI/fgM1Och2u5sC2U0Lgwt7LPwvUKZ16V1OrvHIvcoBh3Lv/MqjPRPEsGAQ+ER1HQ==;EndpointSuffix=core.windows.net"
blob_service_client = BlobServiceClient.from_connection_string(connect_str)
model_container_name = "models"
dataset_table_name = "datasets"
dataset_table_service = TableService(account_name='btpstorage', account_key='SLMZB4+BfyT5V06NHKnJfUI/fgM1Och2u5sC2U0Lgwt7LPwvUKZ16V1OrvHIvcoBh3Lv/MqjPRPEsGAQ+ER1HQ==')
model_blob_client = blob_service_client.get_blob_client(container=model_container_name, blob="models")


def download_data():
    dataset = dataset_table_service.query_entities(dataset_table_name)
    for data in dataset:
        print(data.timestamp)
        print(data.usage)

    dataset_table_service.delete_table(dataset_table_name)


def download_pre_trained_model():
    pass


def build_container_and_upload():
    pass
Example #22
def list_all_entities(account_name, account_key, table_name):
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)
    entities = table_service.query_entities(table_name)
    for entity in entities:
        print(entity)
import time
import RPi.GPIO as GPIO
import MFRC522
import signal

from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity

GPIO.setmode(GPIO.BOARD)

# Create the TableService client
table_service = TableService(account_name="aiiotedgeclassteama", account_key="E+fjw/Vw2OsaYYzSGbriFucx2z9Dx9zEBVL+Uc6UWHnUVuYWunA65O/nU5Qa/PVgARzq1QX2ogrp98o0ERIv3A==")

continue_reading = True

# Capture SIGINT for cleanup when the script is aborted
def end_read(signal,frame):
    global continue_reading
    print "\nCtrl+C captured, ending read."
    continue_reading = False
    GPIO.cleanup()

# Hook the SIGINT
signal.signal(signal.SIGINT, end_read)

# Create an object of the class MFRC522
MIFAREReader = MFRC522.MFRC522()

# Welcome message
print "Welcome to the MFRC522 data read example"
print "Press Ctrl-C to stop."
Example #24
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
import random
import urllib
from datetime import datetime
import Adafruit_DHT as dht

#Id of Bin
Bin_ID = 'Bin_101'

# type of sensor that we're using
SENSOR = dht.DHT22

# pin which reads the temperature and humidity from sensor
PIN = 4

# Connect to Azure Table service
table_service = TableService(account_name='urbanfarmingbsf',
                             account_key='KEYREMOVED')

# Connect to Azure Cosmos DB
cosmoDB_service = TableService(
    connection_string=
    'DefaultEndpointsProtocol=https;AccountName=urbanfamingbsf;AccountKey=;TableEndpoint=https://urbanfamingbsf.table.cosmosdb.azure.com:443/;'
)

# create table
#new_table = table_service.create_table('NewUrbanFarmTable')

while True:
    rowkey = random.random()
    # read and print out humidity and temperature from sensor
    humidity, temp = dht.read_retry(SENSOR, PIN)
    temp = temp * 9 / 5.0 + 32
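
The loop body is truncated before the reading is uploaded. A sketch of how the measurement might be written to the Cosmos DB table (the table name and field names here are illustrative assumptions):

    # Continuation sketch -- table and field names are illustrative assumptions.
    print('Temp={0:0.1f}F  Humidity={1:0.1f}%'.format(temp, humidity))
    reading = {
        'PartitionKey': Bin_ID,
        'RowKey': str(rowkey),
        'temperature': temp,
        'humidity': humidity,
        'readAt': str(datetime.now())
    }
    cosmoDB_service.insert_entity('NewUrbanFarmTable', reading)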
Example #25
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity

table_service = TableService(
    account_name='az532rg1diag572',
    account_key=
    'coixsNqJU6lbm2w9yCWh+fODv+NqAFjgV+jHRy4zkZX8ywkrJ+nfawSPNCF0tgzOh8FstVAG4tUu/pOeDwfLEQ=='
)

table_service.create_table('tasktable')
task = {
    'PartitionKey': 'tasksSeattle',
    'RowKey': '001',
    'description': 'Take out the trash',
    'priority': 200
}
table_service.insert_entity('tasktable', task)

task = Entity()
task.PartitionKey = 'tasksSeattle'
task.RowKey = '002'
task.description = 'Wash the car'
task.priority = 100
table_service.insert_entity('tasktable', task)
Example #26
    #else:
    #    return delta1.days+1
    if delta2.days < 364:
        return delta2.days
    else:
        return delta1.days


# Generate/format txt and html pretty tables
pt_bdays = prettytable.PrettyTable(
    ["DaysTillNextBirthDay", "Name", "CurrentAge", "BirthDate", "DeathDate"])
pt_annivs = prettytable.PrettyTable(
    ["DaysTillNextAnniv", "Spouse1", "Spouse2", "YearsMarried", "AnnivDate"])

# Open Azure Table
table_service = TableService(account_name=azure_storage_account_name,
                             account_key=azure_storage_account_key)

# mail_receiver = ['*****@*****.**'] # dev override
mail_sender = '*****@*****.**'
mail_receiver = ['*****@*****.**']

# Process birthdays
birthday_entities = table_service.query_entities(
    'birthdays', filter="PartitionKey eq 'Birthdays'")
bdaylist = []

for birthday_ent in birthday_entities:
    birthdayDate = datetime.datetime.strptime(birthday_ent.BirthDate,
                                              '%m/%d/%Y').date()
    name = birthday_ent.Name
    currentAge = calculate_age(birthdayDate)
Example #27
        yield StockTrend(symbol, info)


def crawl_and_store(table_service, symbol):
    batch = TableBatch()

    batch_size = 0
    for each_trend in fetch(symbol):
        batch.insert_or_replace_entity(each_trend)
        batch_size = batch_size + 1

        if batch_size >= 75:
            table_service.commit_batch('stocktrend', batch)
            batch = TableBatch()
            print("stored a batch, size:", batch_size)
            batch_size = 0

    if batch_size > 0:
        table_service.commit_batch('stocktrend', batch)
        print("stored a batch, size:", batch_size)


if __name__ == "__main__":
    table_service = TableService(account_name='heaventextb06a',
                                 account_key='** fill in your own key**')

    symbols = table_service.query_entities('heavenstock',
                                           filter="PartitionKey ge 'S'")
    for stock in symbols:
        print(stock.PartitionKey)
        crawl_and_store(table_service, stock.PartitionKey)
Example #28
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    try:
        req_body = req.get_json()
    except ValueError:
        return func.HttpResponse("Request body must be valid JSON.",
                                 status_code=400)

    subscription_id = req_body['PartitionKey']
    default_owner = req_body['TechnicalOwners']
    compliances_added = []
    compliances_remind = []
    non_compliant_ids_to_delete = []

    if req_body['RecommendationCheckEnabled']:
        policies_by_id = get_policies_for_subscription(subscription_id)

        non_compliant_resources_map = get_non_compliant_resources_by_id(
            subscription_id, policies_by_id, default_owner)

        table_service = TableService(account_name=TABLE_STORAGE_ACCOUNT_NAME,
                                     account_key=TABLE_STORAGE_ACCOUNT_KEY)

        recommendations_whitelist = get_whitelisted_recommendations_from_table(
            subscription_id, table_service)

        filtered_noncompliant_resources = filter_resources_based_on_whitelist(
            non_compliant_resources_map, recommendations_whitelist)

        update_owners_for_resource_if_available(
            subscription_id, filtered_noncompliant_resources)

        all_task_ids_from_table = get_tasks_from_table_storage(
            subscription_id, table_service)

        non_compliant_resources = [
            item for item in filtered_noncompliant_resources.values()
        ]
        non_compliant_resource_ids = [
            item['RowKey'] for item in non_compliant_resources
        ]

        non_compliant_ids_to_add = list(
            set(non_compliant_resource_ids) - set(all_task_ids_from_table))
        non_compliant_ids_to_add.sort()
        non_compliant_ids_to_delete = list(
            set(all_task_ids_from_table) - set(non_compliant_resource_ids))
        non_compliant_ids_to_delete.sort()
        non_compliant_ids_to_remind = list(
            set(all_task_ids_from_table) & set(non_compliant_resource_ids))
        non_compliant_ids_to_remind.sort()

        update_table_storage_based_on_new_data(subscription_id,
                                               non_compliant_resources,
                                               non_compliant_ids_to_add,
                                               non_compliant_ids_to_delete,
                                               table_service)

        compliances_added = [
            non_compliant_resources_map[key]
            for key in non_compliant_ids_to_add
        ]
        compliances_remind = [
            non_compliant_resources_map[key]
            for key in non_compliant_ids_to_remind
        ]

    return func.HttpResponse(body=json.dumps({
        'add': compliances_added,
        'delete_ids': non_compliant_ids_to_delete,
        'remind': compliances_remind
    }),
                             headers={"Content-Type": "application/json"},
                             status_code=200)
Example #29
def create_table_client(account_name: str, key: str) -> TableClient:
    tablesvc = TableService(account_name, key)
    return TableClient(tablesvc)
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')
    startDate = req.params.get('startDate')
    endDate = req.params.get('endDate')
    currency = req.params.get('currency')

    if not (startDate and endDate):
        try:
            req_body = req.get_json()
        except ValueError:
            return func.HttpResponse('error')
        else:
            startDate = req_body.get('startDate')
            endDate = req_body.get('endDate')
    if (startDate and endDate):
        positives = [
            'top', 'rise', 'stable', 'bullish', 'rally', 'spike', 'bull',
            'surge', 'surges'
        ]
        negatives = ['fall', 'drop', 'unstable', 'tank', 'panic', 'bearish']

        table_service = TableService(
            account_name='sauokgp',
            account_key=
            '113mdwUqIiqt4K2HonK80HakIOplxYZINmQME5KB1IZfP+v3JHZK64wpoTP5NBFaG0MaO/TVqA0nW4KuCINTow=='
        )
        #creates Reddit table if one doesn't already exist
        if not (table_service.exists('Reddit')):
            table_service.create_table('Reddit', fail_on_exist=False)
        reddit = praw.Reddit(client_id='sCanLl76vO0ExA',
                             client_secret='54qOmHpy2PBRLTVs8soyBhif42A',
                             user_agent='CryptoCollector')

        api = PushshiftAPI(reddit)

        startDate = datetime.strptime(startDate, "%Y-%m-%d")
        endDate = datetime.strptime(endDate, "%Y-%m-%d")

        posts = []

        while startDate < endDate:
            d1 = int(time.mktime(startDate.timetuple()))
            d2 = int(time.mktime((startDate + timedelta(days=1)).timetuple()))

            gen = api.search_submissions(before=d2,
                                         after=d1,
                                         subreddit=currency,
                                         limit=5,
                                         sort_type='score')
            results = list(gen)

            #creates Reddit table if one doesn't already exist
            if not (table_service.exists('Reddit')):
                table_service.create_table('Reddit', fail_on_exist=False)

            highest_polarity = -1
            lowest_polarity = 1
            total_polarity = 0
            total_subjectivity = 0
            count = 0

            for x in results:
                submission = reddit.submission(id=x.id)
                sentiment = TextBlob(submission.title)
                polarity = sentiment.sentiment.polarity
                subjectivity = sentiment.sentiment.subjectivity
                print(submission.title)
                positive = sum(submission.title.count(word) for word in positives)
                negative = sum(submission.title.count(word) for word in negatives)
                if positive > negative:
                    polarity = 1
                if negative > positive:
                    polarity = -1
                if polarity != 0:
                    total_polarity += polarity
                    total_subjectivity += subjectivity
                    count += 1
                print('=============================')
                print('Total Polarity: ' + str(total_polarity))
                print('Number of Posts: ' + str(count))
                print('=============================')

            # Average the polarity once all posts for the day have been processed.
            if count != 0:
                total_polarity = total_polarity / count
            print('Reddit Polarity Rating: ' + str(total_polarity))
            print('=============================')
            post = {
                'PartitionKey': currency,
                'RowKey': str(datetime.now()),
                'Polarity': total_polarity,
                'Subjectivity': total_subjectivity,
                'Date': str(startDate)
            }
            posts.append(post)
            table_service.insert_entity('Reddit', post)
            startDate = startDate + timedelta(days=1)
            #return func.HttpResponse(posts)
        return str(posts)
    else:
        return func.HttpResponse(
            "Please pass the correct parameters on the query string or in the request body",
            status_code=400)