#!/usr/bin/env python3
from google.cloud import datastore
from collections import namedtuple
import time
from XLIFFToXLSX import process_xliff_soup
from XLIFFReader import findXLIFFFiles, parse_xliff_file
from concurrent.futures import ThreadPoolExecutor
from ansicolor import black
from AutoTranslationIndexer import TextTagIndexer
from DatastoreUtils import DatastoreChunkClient

client = datastore.Client(project="watts-198422")
executor = ThreadPoolExecutor(512)
chunkClient = DatastoreChunkClient(client, executor)


def export_lang_to_db(lang, filt):
    count = 0
    indexer = TextTagIndexer(lang)
    for file in findXLIFFFiles("cache/{}".format(lang), filt=filt):
        # e.g. '1_high_priority_platform/about.donate.xliff'
        canonicalFilename = "/".join(file.split("/")[2:])
        print(black(file, bold=True))
        soup = parse_xliff_file(file)
        for entry in process_xliff_soup(soup, also_approved=True):
            indexer.add(entry.Source, entry.Translated,
                        file.rpartition("/")[2], entry.IsApproved)
            # Stats
            count += 1
            if count % 1000 == 0:
                print("Processed {} records".format(count))
Example #2
# Module-level singleton, created lazily on first use.
datastore_client = None


def get_datastore_client():
    global datastore_client
    if datastore_client is None:
        from google.cloud import datastore
        datastore_client = datastore.Client()
    return datastore_client
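A brief usage sketch of the lazy-initialization pattern above: the client is created on the first call and reused afterwards, so importing the module stays cheap.

# Both calls return the same cached client instance.
client_a = get_datastore_client()
client_b = get_datastore_client()
assert client_a is client_b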
Example #3
# This generates all base ratings for every unit.
# To use it you will need pymongo.
import re
import os
import json
from google.cloud import datastore

client = datastore.Client(project='monplan-api-dev')


def handleSETU(unitCode):
    # Load the SETU survey data and look for a matching unit code.
    with open("setuData.json", "r") as setuData:
        inputData = json.loads(setuData.read())
    for items in inputData:
        if items['UnitCode'] == unitCode:
            return items
    # If there is no data for this code, return a base rating of 3
    # and a response count of 0.
    return {
        'enjoyScore': 3,
        'enjoyResponse': 0,
        'learnScore': 3,
        'learnResponse': 0
    }
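A short usage sketch (the unit codes are hypothetical): a code present in setuData.json returns its record, anything else falls back to the neutral default above.

# Hypothetical unit codes, for illustration only.
known = handleSETU("FIT1008")    # record from setuData.json, if present
unknown = handleSETU("XYZ9999")  # falls back to the default rating
print(unknown['enjoyScore'])     # -> 3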


def generateLocationAndTimeObject(array):
    tempArray = []
    for i in range(0, len(array)):
        #array i: [['Peninsula'], ['First semester 2017 (Day)']]
        location = array[i][0][0]
        time = array[i][1]
Example #4
 def __init__(self, project, bucket_name, **kwargs):
     super(DataStoreClient, self).__init__(project, bucket_name, **kwargs)
     self.datastore = datastore.Client(self.project)
     self.name = "datastore"
     self.batch = DataStoreManager(client=self.datastore)
Example #5
def clone_client(client):
    return datastore.Client(project=client.project,
                            namespace=client.namespace,
                            credentials=client._credentials,
                            _http=client._http)
Example #6
def get_datastore_data():
    client = datastore.Client()
    query = client.query(kind='msft-compete')
    results = list(query.fetch())
    return results
Example #7
def DatastoreClient():
    return datastore.Client(project="sbt-endpoints")
Example #8
def upload_text():
    file_name = request.form["text"]

    client = storage.Client()
    bucket = client.get_bucket('gee-nlp')
    blob = bucket.get_blob(file_name)
    downloaded_blob = blob.download_as_string()
    downloaded_blob_1 = downloaded_blob.decode('utf-8', 'ignore')
    downloaded_blob_1 = downloaded_blob_1.replace("\r\n\r\n", " ")
    text = downloaded_blob_1
    # Analyse sentiment using Sentiment API call
    #sentiment = analyze_text_sentiment(text)[0].get('sentiment score')

    results = analyze_text_sentiment(text)
    print("results: ", results)
    sentiment = results["overallResults"].get('score')
    magnitude = results["overallResults"].get('magnitude')
    print("sentiment in upload_text(): ", sentiment)

    sentence_sentiment = results["sentence_sentiment"]
    classify_document = gcp_classify_text(downloaded_blob_1)
    print("classify_document: ", classify_document)

    df_sentiment = pd.DataFrame(sentence_sentiment)
    gcp_plot_sentiments(df_sentiment)

    # Assign a label based on the score
    if sentiment > 0:
        overall_sentiment = 'positive'
    elif sentiment < 0:
        overall_sentiment = 'negative'
    else:
        overall_sentiment = 'neutral'

    # Create a Cloud Datastore client.
    datastore_client = datastore.Client()

    # Fetch the current date / time.
    current_datetime = datetime.now()

    # The kind for the new entity. This is so all 'Sentences' can be queried.
    kind = "gee-nlp-demo"

    # Create the Cloud Datastore key for the new entity.
    key = datastore_client.key(kind, 'sample_task1')

    # Alternative to above, the following would store a history of all previous requests as no key
    # identifier is specified, only a 'kind'. Datastore automatically provisions numeric ids.
    # key = datastore_client.key(kind)

    # Construct the new entity using the key. Set dictionary values for entity
    entity = datastore.Entity(key)
    entity["file_name"] = file_name
    entity["timestamp"] = current_datetime
    entity["sentiment"] = overall_sentiment
    entity["classify_document"] = classify_document
    entity["magnitude"] = magnitude

    print("entity: ", entity)
    # Save the new entity to Datastore.
    datastore_client.put(entity)

    # Redirect to the home page.
    return redirect("/")
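The commented-out `key = datastore_client.key(kind)` above would keep a history instead of overwriting one entity; a minimal sketch of that variant, reusing the same kind:

# Sketch: a key with only a kind gets an auto-assigned numeric id,
# so every put() creates a new entity instead of overwriting one.
history_client = datastore.Client()
history_key = history_client.key("gee-nlp-demo")  # kind only, no name
history_entity = datastore.Entity(history_key)
history_entity["timestamp"] = datetime.now()
history_client.put(history_entity)
print(history_entity.key.id)  # id is populated after put()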
Example #9
def get_from_datastore(config_key, env):
    ds = datastore.Client()
    product_key = ds.key('Settings', env)
    entity = ds.get(product_key)

    return entity[config_key]
Example #10
def clear_listings_cache(request):
    datastore_client = datastore.Client()
    query = datastore_client.query(kind="Listing")
    results = list(query.fetch())
    for listing in results:
        datastore_client.delete(listing.key)
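For large kinds, the loop above issues one RPC per delete; a sketch of the same cleanup using a keys-only query and the batch API:

# Sketch: batched deletes, fetching keys instead of full entities.
def clear_listings_cache_batched(request):
    client = datastore.Client()
    query = client.query(kind="Listing")
    query.keys_only()  # no need to download entity payloads just to delete them
    keys = [entity.key for entity in query.fetch()]
    for i in range(0, len(keys), 500):  # Datastore caps a commit at 500 mutations
        client.delete_multi(keys[i:i + 500])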
Example #11
from google.cloud import datastore
from . import events
import os

PROJECT_NAME = os.getenv("PROJECT_NAME")

if PROJECT_NAME is None:
    raise Exception("env_var PROJECT_NAME was not found")

datastore_client = datastore.Client(PROJECT_NAME)


def exists(task_id):
    key = datastore_client.key("tasks", task_id)

    entity = datastore_client.get(key)

    return entity is not None


def create(task):

    with datastore_client.transaction():
        key = datastore_client.key("tasks", task["id"])

        # task is a dict, so item assignment is needed here
        # (the original used attribute assignment).
        task["managerState"] = "Active"
        entity = datastore.Entity(
            key=key, exclude_from_indexes=("id", "data", "attributes"))
        entity["data"] = task
        datastore_client.put(entity)
Example #12
def get_all_testers():
    # Instantiates a client
    client = datastore.Client()
    query = client.query(kind='Tester')
    query_iter = query.fetch()
    return query_iter
Example #13
    # We need to first prepare the DataStore for a new days data.
    # http://stackoverflow.com/questions/22445217/python-webbrowser-open-to-open-chrome-browser
    url = 'http://daily-stock-forecast.com/cleards'
    # MacOS
    #chrome_path = 'open -a /Applications/Google\ Chrome.app %s'
    # Windows
    # chrome_path = 'C:\Program Files (x86)\Google\Chrome\Application\chrome.exe %s'
    # Linux
    chrome_path = '/usr/bin/google-chrome %s'
    webbrowser.get(chrome_path).open(url)
    print('\nWaiting 30 sec to ensure DS Prep...')
    time.sleep(30)

    # Set the dataset from the command line parameters.
    #datastore.set_options(dataset="daily-stock-forecast")
    client = datastore.Client("daily-stock-forecast")

    top_5 = []
    #Save each symbol into the datastore
    for i in np.arange(len(data.symbols)):
        if rank[data.symbols[i]] <= 100000:
            #try:
            #req = datastore.CommitRequest()
            #req.mode = datastore.CommitRequest.NON_TRANSACTIONAL
            #entity = req.mutation.insert_auto_id.add()

            # Create a new entity key.
            key = client.key('Forecast')
            entity = datastore.Entity(key)

            # Set the entity key with only one `path_element`: no parent.
Example #14
import os

from flask import Flask

from google.cloud import datastore

APP_CREATED_FLASK_1PATH = "Created Flask application in folder: {}"


def create_app(config=None):
    # Avoid a mutable default argument.
    config = config or {}
    if path := config.get('INSTANCE_PATH'):
        app = Flask(__name__, instance_path=path)
    else:
        app = Flask(__name__, instance_relative_config=True)
    app.logger.info(APP_CREATED_FLASK_1PATH.format(app.instance_path))
    app.config.from_pyfile(os.path.join(app.instance_path, 'config.py'))
    app.config.update(config)

    if app.config.get("PROJECT") is None:
        exit("No project ID specified in kwargs or config.py")

    with app.app_context():
        app.db = datastore.Client(project=app.config["PROJECT"])
        from jigs.routes import jig_bp
        app.register_blueprint(jig_bp)
    return app
Example #15
        result += toAsciiMap[ch]
    return result


for word in words:
    translit = get_translit(word)
    word_translits[translit] = word


def get_label_ge(label_translit):
    return word_translits[label_translit]


from google.cloud import datastore

ds = datastore.Client("ge-open-speech-recording")


def label_dir(dir_name):
    files = os.listdir(dir_name)
    for file_name in files:
        key = os.path.splitext(file_name)[0]
        # The original referenced an undefined sound_key; the kind
        # name "Sound" used to build it here is an assumption.
        sound_key = ds.key("Sound", key)
        sound = ds.get(sound_key)
        if sound:
            print("Already classified:", key)
            continue
        try:
            wave_file = wave.open(dir_name + "/" + file_name, 'r')
        except (wave.Error, EOFError):
            print("Error:", file_name)
            continue
Example #16
def create_client(project_id):
    return datastore.Client(project_id)
Example #17
def initialize_datastore_client():
    global datastore_client
    if datastore_client is None:
        datastore_client = datastore.Client(PROJECT_ID)
Example #18
from telegram import Bot, ChatAction, Update, InlineKeyboardButton, InlineKeyboardMarkup, KeyboardButton, \
    ReplyKeyboardMarkup, ReplyKeyboardRemove

from google.cloud import datastore

from geopy import distance

# Enable logging
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                    level=logging.INFO)
logger = logging.getLogger(__name__)

# Define global vars and constants
bot = Bot(token=os.environ["TELEGRAM_TOKEN"])
db = datastore.Client()

# A dictionary of lists holding organizations as keys and members belonging to them as values (lists).
# A user can be in several orgs
organizations = defaultdict(list)

# A dictionary holding members as keys and their preferences as values.
# members['pytonist'] =
# {'selected_org': 'Pythonists', 'travel_radius': '3', 'location': {'longitude': 37.742, 'latitude': 55.45}}
members = {}


def db_query_by_kind(kind):
    """Query the datastore by entity kind (e.g. Organization, Member). The result is a list of Entities.
    To get an entity's id, use result[index].id. If the id is custom, use result[index].key.name.
    To get an entity's items, use result[index].items(); individual values can be read as result[index]['members'].
    """
    query = db.query(kind=kind)
    return list(query.fetch())
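A usage sketch of the access patterns the docstring describes, assuming an Organization kind exists in this project:

# Sketch only: kind and property names follow the docstring above.
orgs = db_query_by_kind('Organization')
if orgs:
    print(orgs[0].id)        # auto-generated numeric id (None if a name was used)
    print(orgs[0].key.name)  # custom string id, if one was set
    print(list(orgs[0].items()))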
Example #19
 def __init__(self, kind):
     self.client = datastore.Client()
     self.kind = kind
Example #20
def get_client():
    return datastore.Client(current_app.config['PROJECT_ID'])
Example #21
 def __init__(self):
     credentials = service_account.Credentials.from_service_account_file(
         './GoogleCloudStuff/MyoBasketball-6daa0c614684.json')
     # Instantiates a client
     self.datastore_client = datastore.Client(credentials=credentials)
Example #22
import logging
import re
import shutil
import sys
import tempfile
import traceback

from google.cloud import datastore, storage
import requests

import config
import gsutil
import wptreport


_log = logging.getLogger(__name__)
_datastore = datastore.Client()


def _get_uploader_password(username):
    """Gets the password for an uploader.

    Datastore exceptions may be raised.

    Args:
        username: A username (string).

    Returns:
        A string, the password for this user.
    """
    return _datastore.get(_datastore.key('Uploader', username))['Password']
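A hedged caller sketch (`_check_password` is not from the source): `get()` returns None when no Uploader entity exists, so the subscription raises TypeError, which is distinct from the Datastore exceptions the docstring mentions.

from google.cloud import exceptions


def _check_password(username, password):
    """Sketch: True if the password matches, False for unknown users."""
    try:
        return _get_uploader_password(username) == password
    except TypeError:
        return False  # no Uploader entity exists for this username
    except exceptions.GoogleCloudError:
        raise  # let real Datastore errors propagate, per the docstring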
Example #23
def generate_client():
    global client
    client = datastore.Client('dancedeets-hrd')
Example #24
    def setUp(self):
        EndToEndWithFakes.prometheus_port += 1

        # The directory of files on the server rsync directory.
        self.rsync_data_dir = '/tmp/iupui_ndt/'
        # rmtree removes the directory, so we need to use shell here.
        self.addCleanup(lambda: os.system('rm -Rf /tmp/iupui_ndt/*'))

        # The directory that serves as a fake Google Cloud Storage bucket.
        self.cloud_upload_dir = '/tmp/cloud_storage_bucket/'
        os.makedirs(self.cloud_upload_dir)
        self.addCleanup(lambda: shutil.rmtree('/tmp/cloud_storage_bucket'))

        # Patch credentials to do nothing. The local datastore emulator doesn't
        # even look at creds.
        creds_patcher = mock.patch.object(gce,
                                          'AppAssertionCredentials',
                                          return_value=EmulatorCreds())
        creds_patcher.start()
        self.addCleanup(creds_patcher.stop)
        local_datastore_client = datastore.Client(project='mlab-sandbox',
                                                  namespace='test',
                                                  credentials=EmulatorCreds(),
                                                  _http=requests.Session())
        # Make datastore clients connect to the local datastore emulator
        datastore_client_patcher = mock.patch.object(
            datastore, 'Client', return_value=local_datastore_client)
        datastore_client_patcher.start()
        self.addCleanup(datastore_client_patcher.stop)

        # Empty out the local datastore emulator.
        query = local_datastore_client.query(kind='dropboxrsyncaddress')
        entities = query.fetch()
        for entity in entities:
            local_datastore_client.delete(entity.key)

        # Make an entirely mocked storage service
        self.mock_storage = mock.MagicMock()
        discovery_build_patcher = mock.patch.object(
            apiclient.discovery, 'build', return_value=self.mock_storage)
        discovery_build_patcher.start()
        self.addCleanup(discovery_build_patcher.stop)

        class FakeProgress(object):
            def __init__(self, status):
                self.status = status

            def progress(self):
                return self.status

        surrounding_testcase = self

        class FakeRequest(object):
            """Handle upload requests by copying the file."""
            def __init__(self, **kwargs):
                self.index = -1
                # pylint: disable=protected-access
                shutil.copy(kwargs['media_body']._filename,
                            surrounding_testcase.cloud_upload_dir)
                # pylint: enable=protected-access
                self.return_values = [(FakeProgress(.5), None),
                                      (FakeProgress(1.0), mock.MagicMock())]

            def next_chunk(self):
                self.index += 1
                return self.return_values[self.index]

        inserter = mock.MagicMock()
        inserter.insert.side_effect = FakeRequest
        self.mock_storage.objects.return_value = inserter

        # Keep filenames unique
        self.file_index = 0
Example #25
    parser.error('--storing in datastore can only be done when --model_name is among the supported models: {}.'.format(model_choices))


model_name = args.model_name
project_id = os.getenv('PROJECT_ID')
bucket_name = os.getenv('BUCKET_NAME')
location = os.getenv('LOCATION')
key_path = os.getenv('SA_KEY_PATH')
dataset_name = os.getenv('BQ_DATASET_NAME')
table_name = os.getenv('BQ_TABLE_NAME')

credentials = service_account.Credentials.from_service_account_file(key_path)

storage_client = storage.Client(credentials=credentials)

datastore_client = datastore.Client(credentials=credentials)

bq_client = bigquery.Client(credentials=credentials)

if args.store_bigquery == 'True':
    start_time = time.time()
    populateBQ(bq_client=bq_client,storage_client=storage_client,
               bucket_name=bucket_name, dataset_name=dataset_name,
               table_name=table_name)
    total_time = time.time() - start_time
    logging.info(
        'The export to BigQuery was completed successfully and took {} seconds.'.format(round(total_time, 1)))
else:
    logging.info('The export to BigQuery was disabled.')

if args.store_datastore == 'True':
Example #26
from functools import partial
from copy import deepcopy
from typing import Union
from google.cloud import datastore, exceptions
from enum import Enum
from datetime import datetime, date, time
import binascii
"""
	Tiny wrapper around *google.appengine.api.datastore*.

	This just ensures that operations issued directly through the database-api
	doesn't interfere with ViURs internal caching. If you need skeletons anyway,
	query the database using skel.all(); its faster and is able to serve more
	requests from cache.
"""

__client__ = datastore.Client()

# Consts
KEY_SPECIAL_PROPERTY = "__key__"
DATASTORE_BASE_TYPES = Union[None, str, int, float, bool, datetime, date, time]


class SortOrder(Enum):
    Ascending = 1
    Descending = 2
    InvertedAscending = 3
    InvertedDescending = 4


# Proxied Functions / Classes
Entity = datastore.Entity
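A small sketch of what `KEY_SPECIAL_PROPERTY` is for: Datastore accepts `__key__` as a property name in filters and sort orders (the kind here is hypothetical).

# Sketch: order a query by key using the constant defined above.
query = __client__.query(kind="Skeleton")  # hypothetical kind
query.order = [KEY_SPECIAL_PROPERTY]       # ascending by __key__
first_page = list(query.fetch(limit=10))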
Example #27
def autocomplete(request):

    start_time = time.time()

    global DATASTORE_CLIENT
    global REDIS_CONN

    # Parse typed string from a client.
    typed_string = ''
    request_json = request.get_json()
    if request.args and 'term' in request.args:
        typed_string = request.args.get('term')
    elif request_json and 'term' in request_json:
        typed_string = request_json['term']
    else:
        response = jsonify([{}])
        response.headers.add('Access-Control-Allow-Origin', '*')
        return response

    if not REDIS_CONN:
        try:
            REDIS_CONN = redis.Redis(host=REDIS_CONFIG['host'],
                                     port=REDIS_CONFIG['port'])
        except redis.ConnectionError:
            error_msg = 'Error: Redis Server not available'
            response = jsonify([{
                'label': error_msg,
                'value': error_msg
            }])
            response.headers.add('Access-Control-Allow-Origin', '*')
            return response

    redis_key = hashlib.md5(typed_string.encode('utf-8')).hexdigest()
    cached_results = REDIS_CONN.get(redis_key)
    if cached_results:
        matches = json.loads(cached_results)
        response = jsonify(matches)
        response.headers.add('Access-Control-Allow-Origin', '*')

        elapsed_time = (time.time() -
                        start_time) * 1000  # Performance Measurement
        first_label = matches[0]['label'] if matches else ''
        print('Processing time: {0:.0f} ms (cached) [ {1} ] => [ {2} ]'.format(
            elapsed_time, typed_string, first_label))
        return response

    # Initialize Datastore Client
    if not DATASTORE_CLIENT:
        try:
            # DATASTORE_CLIENT = datastore.Client(project = PROJECT_ID)
            DATASTORE_CLIENT = datastore.Client()
        except Exception:
            import traceback
            traceback.print_exc()

    # Create query and get the results
    query = DATASTORE_CLIENT.query(kind=GCP_CLOUD_DATASTORE_KIND)
    query.add_filter('name', '>=', typed_string)
    query.order = ['name']
    results = list(query.fetch(limit=COUNT))

    # Create response data
    matches = []
    for result in results:
        matches.append({'label': result['name'], 'value': result['name']})
    response = jsonify(matches)
    REDIS_CONN.set(redis_key, json.dumps(matches))
    response.headers.add('Access-Control-Allow-Origin', '*')

    elapsed_time = (time.time() - start_time) * 1000  # Performance Measurement
    first_label = matches[0]['label'] if matches else ''
    print('Processing time: {0:.0f} ms [ {1} ] => [ {2} ]'.format(
        elapsed_time, typed_string, first_label))
    return response
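One hedged refinement to the caching above: entries never expire, so renamed or deleted records keep matching; redis-py's `set()` accepts an `ex` argument for a TTL in seconds.

# Sketch: cache autocomplete results for an hour instead of forever.
REDIS_CONN.set(redis_key, json.dumps(matches), ex=3600)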
Example #28
def DatastoreClient():
    return datastore.Client(project="wave16-joan")
Example #29
JINJA_OPTIONS = {
    'autoescape': True,
    'block_start_string': '<%',
    'block_end_string': '%>',
    'variable_start_string': '%%',
    'variable_end_string': '%%',
    'comment_start_string': '<#',
    'comment_end_string': '#>'
}
TEMPLATE = 'index.html'
# Flask setup
app = Flask(__name__)
app.jinja_options = JINJA_OPTIONS
firebase_app = firebase_admin.initialize_app()

# GCP setup
client = datastore.Client()
curated_partial_keys = None
log_client = glogging.Client()
log_client.get_default_handler()
log_client.setup_logging()


@app.route('/get/mod')
def get_mod():
    """Handler for single-module get requests."""
    global curated_partial_keys

    mute_id = request.args.get('mute')
    if mute_id:
        set_mute(mute_id)
Example #30
def upload_photo():
    photo = request.files["file"]

    # Create a Cloud Storage client.
    storage_client = storage.Client()

    # Get the bucket that the file will be uploaded to.
    bucket = storage_client.get_bucket(CLOUD_STORAGE_BUCKET)

    # Create a new blob and upload the file's content.
    blob = bucket.blob(photo.filename)
    blob.upload_from_string(photo.read(), content_type=photo.content_type)

    # Make the blob publicly viewable.
    blob.make_public()

    # Create a Cloud Vision client.
    vision_client = vision.ImageAnnotatorClient()

    # Use the Cloud Vision client to detect a face for our image.
    source_uri = "gs://{}/{}".format(CLOUD_STORAGE_BUCKET, blob.name)
    image = vision.Image(source=vision.ImageSource(gcs_image_uri=source_uri))
    faces = vision_client.face_detection(image=image).face_annotations

    # If a face is detected, save to Datastore the likelihood that the face
    # displays 'joy,' as determined by Google's Machine Learning algorithm.
    if len(faces) > 0:
        face = faces[0]

        # Convert the likelihood string.
        likelihoods = [
            "Unknown",
            "Very Unlikely",
            "Unlikely",
            "Possible",
            "Likely",
            "Very Likely",
        ]
        face_joy = likelihoods[face.joy_likelihood]
    else:
        face_joy = "Unknown"

    # Create a Cloud Datastore client.
    datastore_client = datastore.Client()

    # Fetch the current date / time.
    current_datetime = datetime.now()

    # The kind for the new entity.
    kind = "Faces"

    # The name/ID for the new entity.
    name = blob.name

    # Create the Cloud Datastore key for the new entity.
    key = datastore_client.key(kind, name)

    # Construct the new entity using the key. Set dictionary values for entity
    # keys blob_name, storage_public_url, timestamp, and joy.
    entity = datastore.Entity(key)
    entity["blob_name"] = blob.name
    entity["image_public_url"] = blob.public_url
    entity["timestamp"] = current_datetime
    entity["joy"] = face_joy

    # Save the new entity to Datastore.
    datastore_client.put(entity)

    # Redirect to the home page.
    return redirect("/")
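The hand-written `likelihoods` list above mirrors the Vision API's Likelihood enum; with recent google-cloud-vision releases the label can come straight from the enum (a sketch, not the sample's code):

# Sketch: derive the label from the enum instead of a parallel list.
face_joy = vision.Likelihood(face.joy_likelihood).name  # e.g. 'VERY_LIKELY'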