Example #1
	def _fetch_containers_for_front(self, retries=3):
		container_api_host = configuration.read('CONTAINER_API_HOST')
		try:
			url = "http://{host}/list/collections/by/front/{front_id}".format(
				host=container_api_host,
				front_id=self.front_id)

			if self.metadata:
				url = "{url}/and/by/metadata/{metadata}".format(
					url=url,
					metadata=self.metadata)
			
			result = urlfetch.fetch(url, deadline=9)
			
			if result.status_code == 200:
				data = json.loads(result.content)

				return data.get('data', [])
				
			return []
		
		except Exception as e:
			logging.warn('Container API call failed {0}'.format(e))
			logging.warn(traceback.format_exc())

			if retries > 0:
				return self._fetch_containers_for_front(retries=retries-1)

		return []
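Examples #1–#3, #7, and #15–#17 all repeat the same catch, log, and retry scaffolding around urlfetch. A minimal sketch of that pattern as a reusable decorator is shown below; the decorator is hypothetical and not part of any of the quoted code.

import functools
import logging
import traceback


def retry_on_exception(retries=3, fallback=None):
    # Retry the wrapped call up to `retries` extra times, logging each
    # failure the same way the examples do, then return `fallback`.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for _ in range(retries + 1):
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    logging.warn('Call to %s failed: %s', func.__name__, e)
                    logging.warn(traceback.format_exc())
            return fallback
        return wrapper
    return decorator

With such a helper, a fetch method could be decorated with @retry_on_exception(retries=3, fallback=[]) instead of carrying its own retry branch.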
Example #2
def read_container(container_id,
                   retries=3,
                   sort_function=None,
                   additional_capi_params=None):
    container_base_url = configuration.read('CONTAINER_API_BASE_URL')
    try:
        url = "{0}/{1}".format(container_base_url, container_id)

        result = urlfetch.fetch(url, deadline=9)
        if result.status_code == 200:
            data = json.loads(result.content)
            live_stories = data.get('collection', {}).get('live', [])
            live_story_ids = [item['id'] for item in live_stories]

            stories = [
                capi.read_item(item_id,
                               additional_params=additional_capi_params)
                for item_id in live_story_ids
            ]

            if sort_function:
                return sorted(stories, sort_function)

            return stories

        return []

    except Exception as e:
        logging.warn('Container API call failed {0}'.format(e))
        logging.warn(traceback.format_exc())

        if retries > 0:
            return read_container(container_id, retries=retries - 1)

    return []
Example #3
def read_container(container_id, retries=3, sort_function=None, additional_capi_params=None):
	container_base_url = configuration.read('CONTAINER_API_BASE_URL')
	try:
		url = "{0}/{1}".format(container_base_url, container_id)

		result = urlfetch.fetch(url, deadline=9)
		if result.status_code == 200:
			data = json.loads(result.content)
			live_stories = data.get('collection', {}).get('live', [])
			live_story_ids = [item['id'] for item in live_stories]
			
			stories = [capi.read_item(item_id, additional_params=additional_capi_params) for item_id in live_story_ids]

			if sort_function:
				return sorted(stories, sort_function)

			return stories

		return []

	except Exception as e:
		logging.warn('Container API call failed {0}'.format(e))
		logging.warn(traceback.format_exc())

		if retries > 0:
			return read_container(container_id, retries=retries-1)

	return []
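Because `sorted(stories, sort_function)` passes `sort_function` positionally, on Python 2 it is treated as a cmp-style comparator, so callers supply a two-argument function. A hypothetical usage sketch; the container id and the `webPublicationDate` field are assumptions, not taken from the examples above.

def newest_first(a, b):
    # cmp-style comparator: sort by publication date, newest first
    return cmp(b.get('webPublicationDate', ''), a.get('webPublicationDate', ''))


stories = read_container('uk-alpha/news/regular-stories',
                         sort_function=newest_first)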
Example #4
def read_item(internal_id, additional_params=None):
    capi_base_url = configuration.read('CAPI_BASE_URL')

    combined_params = default_params

    if additional_params:
        combined_params = default_params.using(**additional_params)

    item_url = "{0}/{1}?{2}".format(capi_base_url, internal_id,
                                    urllib.urlencode(combined_params))
    #logging.info(item_url)

    cached_response = memcache.get(item_url)

    if cached_response:
        return cached_response

    result = urlfetch.fetch(item_url, deadline=8)

    if result.status_code == 200:
        data = json.loads(result.content)
        item_data = data.get('response', {}).get('content', {})

        if len(result.content) < defaults.MAX_MEMCACHE_LENGTH:
            memcache.set(item_url, item_data, defaults.CACHE_TIME)
        return item_data

    return None
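read_item keys its memcache entry on the full request URL, so every distinct combination of item id and parameters is cached separately. A hypothetical call overriding the default fields; the item id and field list are illustrative only.

import logging

item = read_item('politics/2015/may/08/election-results-live',
                 additional_params={'show-fields': 'headline,trailText'})
if item is None:
    logging.warn('CAPI item missing or request failed')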
Example #5
def migrate():
    config = configuration.read()
    rtc = ImportHandler(config)
    rtcworkspace = WorkspaceHandler(config)
    git = Commiter
    initialize(config)
    streamuuid = config.streamuuid
    streamname = config.streamname
    branchname = streamname + "_branchpoint"

    componentbaselineentries = rtc.getcomponentbaselineentriesfromstream(streamuuid)
    rtcworkspace.setnewflowtargets(streamuuid)
    git.branch(branchname)

    history = rtc.readhistory(componentbaselineentries, streamname)
    changeentries = rtc.getchangeentriesofstreamcomponents(componentbaselineentries)

    rtc.acceptchangesintoworkspace(rtc.getchangeentriestoaccept(changeentries, history))
    shouter.shout("All changes until creation of stream '%s' accepted" % streamname)
    git.pushbranch(branchname)
    git.branch(streamname)

    rtcworkspace.setcomponentstobaseline(componentbaselineentries, streamuuid)
    rtcworkspace.load()

    changeentries = rtc.getchangeentriesofstream(streamuuid)
    rtc.acceptchangesintoworkspace(rtc.getchangeentriestoaccept(changeentries, history))
    git.pushbranch(streamname)
    shouter.shout("All changes of stream '%s' accepted - Migration of stream completed" % streamname)
Example #6
def read_item(internal_id, additional_params=None):
	capi_base_url = configuration.read('CAPI_BASE_URL')

	combined_params = default_params

	if additional_params:
		combined_params = default_params.using(**additional_params)

	item_url = "{0}/{1}?{2}".format(capi_base_url, internal_id, urllib.urlencode(combined_params))
	#logging.info(item_url)
	
	cached_response = memcache.get(item_url)

	if cached_response:
		return cached_response

	
	result = urlfetch.fetch(item_url, deadline=8)

	if result.status_code == 200:
		data = json.loads(result.content)
		item_data = data.get('response', {}).get('content', {})

		if len(result.content) < defaults.MAX_MEMCACHE_LENGTH:
			memcache.set(item_url, item_data, defaults.CACHE_TIME)
		return item_data

	return None
Example #7
    def _fetch_containers_for_front(self, retries=3):
        container_api_host = configuration.read('CONTAINER_API_HOST')
        try:
            url = "http://{host}/list/collections/by/front/{front_id}".format(
                host=container_api_host, front_id=self.front_id)

            if self.metadata:
                url = "{url}/and/by/metadata/{metadata}".format(
                    url=url, metadata=self.metadata)

            result = urlfetch.fetch(url, deadline=9)

            if result.status_code == 200:
                data = json.loads(result.content)

                return data.get('data', [])

            return []

        except Exception as e:
            logging.warn('Container API call failed {0}'.format(e))
            logging.warn(traceback.format_exc())

            if retries > 0:
                return self._fetch_containers_for_front(retries=retries - 1)

        return []
Example #8
def migrate():
    config = configuration.read()
    rtc = ImportHandler(config)
    rtcworkspace = WorkspaceHandler(config)
    git = Commiter

    initialize(config)
    streamuuids = config.streamuuids
    for streamuuid in streamuuids:
        componentbaselineentries = rtc.getcomponentbaselineentriesfromstream(streamuuid)
        streamname = config.streamnames[streamuuids.index(streamuuid)]
        rtcworkspace.setnewflowtargets(streamuuid)
        git.branch(streamname)

        history = rtc.readhistory(componentbaselineentries, streamname)
        changeentries = rtc.getchangeentriesofstreamcomponents(componentbaselineentries)

        rtc.acceptchangesintoworkspace(rtc.getchangeentriestoaccept(changeentries, history))
        shouter.shout("All changes of components of stream '%s' accepted" % streamname)
        git.pushbranch(streamname)

        rtcworkspace.setcomponentstobaseline(componentbaselineentries, streamuuid)
        rtcworkspace.load()

        changeentries = rtc.getchangeentriesofstream(streamuuid)
        rtc.acceptchangesintoworkspace(rtc.getchangeentriestoaccept(changeentries, history))
        git.pushbranch(streamname)
        shouter.shout("All changes of stream '%s' accepted - Migration of stream completed" % streamname)

        morestreamstomigrate = streamuuids.index(streamuuid) + 1 != len(streamuuids)
        if morestreamstomigrate:
            git.checkout("master")
            rtcworkspace.recreateoldestworkspace()
Example #9
    def fetch_credentials(self):
        """
        Method to fetch credentials from .env/personal_credentials

        :param: None
        :return: creds (dict)
        """
        logging.info('fetching user credentials locally...')
        config = configparser.RawConfigParser()
        main_path = os.getcwd()
        path = f"{main_path}/.env/personal_credentials"
        logging.info(f"fetching from path: {path}")
        config.read(path)
        username = config.get('default', 'username')
        password = config.get('default', 'password')
        creds = {'username': username, 'password': password}
        return creds
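fetch_credentials expects an INI file at .env/personal_credentials with a [default] section holding username and password. A small helper that writes a matching file for local testing could look like this; the helper and its placeholder values are hypothetical.

import configparser
import os


def write_sample_credentials(path='.env/personal_credentials'):
    # Write the [default] section layout that fetch_credentials reads.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    config = configparser.RawConfigParser()
    config.add_section('default')
    config.set('default', 'username', 'example-user')
    config.set('default', 'password', 'example-password')
    with open(path, 'w') as handle:
        config.write(handle)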
Example #10
def command_publish(args, options):
    source_dir = options.source_dir
    output_dir = options.output_dir
    try:
        config = configuration.read(os.path.join(source_dir or '.',
                                                 options.configuration_file))
    except IOError as error:
        logging.error('Error while loading configuration file')
        raise SystemExit(error)
Example #11
def command_publish(args, options):
    source_dir = options.source_dir
    output_dir = options.output_dir
    try:
        config = configuration.read(
            os.path.join(source_dir or '.', options.configuration_file))
    except IOError as error:
        logging.error('Error while loading configuration file')
        raise SystemExit(error)
Example #12
def main():
    parser = ArgumentParser(
        description="Network emulator for netSLS")
    parser.add_argument(
        '--config', dest='config_path', help='path to configuration file')
    args = parser.parse_args()

    config_path = args.config_path
    if not config_path:
        if os.path.isfile("network_emulator.cfg"):
            config_path = "network_emulator.cfg"
        elif os.path.isfile(os.path.expanduser("~/.network_emulator.cfg")):
            config_path = os.path.expanduser("~/.network_emulator.cfg")
    configuration.read(config_path)

    signal.signal(signal.SIGTERM, sigterm_handler)
    signal.signal(signal.SIGINT, sigint_handler)

    emulator = network_emulator.NetworkEmulator.get_instance()
    emulator.start()
Example #13
def main():
    parser = ArgumentParser(description="Network emulator for netSLS")
    parser.add_argument('--config',
                        dest='config_path',
                        help='path to configuration file')
    args = parser.parse_args()

    config_path = args.config_path
    if not config_path:
        if os.path.isfile("network_emulator.cfg"):
            config_path = "network_emulator.cfg"
        elif os.path.isfile(os.path.expanduser("~/.network_emulator.cfg")):
            config_path = os.path.expanduser("~/.network_emulator.cfg")
    configuration.read(config_path)

    signal.signal(signal.SIGTERM, sigterm_handler)
    signal.signal(signal.SIGINT, sigint_handler)

    emulator = network_emulator.NetworkEmulator.get_instance()
    emulator.start()
Example #14
def migrate():
    config = configuration.read()
    rtc = ImportHandler(config)
    rtcworkspace = WorkspaceHandler(config)
    git = Commiter

    initialize(config)
    streamuuids = config.streamuuids
    for streamuuid in streamuuids:
        componentbaselineentries = rtc.getcomponentbaselineentriesfromstream(
            streamuuid)
        streamname = config.streamnames[streamuuids.index(streamuuid)]
        rtcworkspace.setnewflowtargets(streamuuid)
        git.branch(streamname)

        history = rtc.readhistory(componentbaselineentries, streamname)
        changeentries = rtc.getchangeentriesofstreamcomponents(
            componentbaselineentries)

        rtc.acceptchangesintoworkspace(
            rtc.getchangeentriestoaccept(changeentries, history))
        shouter.shout("All changes of components of stream '%s' accepted" %
                      streamname)
        git.pushbranch(streamname)

        rtcworkspace.setcomponentstobaseline(componentbaselineentries,
                                             streamuuid)
        rtcworkspace.load()

        changeentries = rtc.getchangeentriesofstream(streamuuid)
        rtc.acceptchangesintoworkspace(
            rtc.getchangeentriestoaccept(changeentries, history))
        git.pushbranch(streamname)
        shouter.shout(
            "All changes of stream '%s' accepted - Migration of stream completed"
            % streamname)

        morestreamstomigrate = streamuuids.index(streamuuid) + 1 != len(
            streamuuids)
        if morestreamstomigrate:
            git.checkout("master")
            rtcworkspace.recreateoldestworkspace()
Example #15
def read_container_title(container_id, retries=3):
	container_base_url = configuration.read('CONTAINER_API_BASE_URL')
	try:
		url = "{0}/{1}".format(container_base_url, container_id)
		logging.info(url)
		result = urlfetch.fetch(url, deadline=9)
		if result.status_code == 200:
			data = json.loads(result.content)
			return data.get("config", {}).get("displayName")

		return None

	except Exception as e:
		logging.warn('Container API call failed {0}'.format(e))
		logging.warn(traceback.format_exc())

		if retries > 0:
			return read_container_title(container_id, retries=retries-1)

	return None
Example #16
def read_container_title(container_id, retries=3):
    container_base_url = configuration.read('CONTAINER_API_BASE_URL')
    try:
        url = "{0}/{1}".format(container_base_url, container_id)
        logging.info(url)
        result = urlfetch.fetch(url, deadline=9)
        if result.status_code == 200:
            data = json.loads(result.content)
            return data.get("config", {}).get("displayName")

        return None

    except Exception as e:
        logging.warn('Container API call failed {0}'.format(e))
        logging.warn(traceback.format_exc())

        if retries > 0:
            return read_container_title(container_id, retries=retries - 1)

    return None
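read_container_title returns None both on a non-200 response and after exhausting its retries, so callers typically supply their own fallback. A hypothetical usage; the container id and default title are illustrative.

title = read_container_title('uk-alpha/news/regular-stories') or 'Top stories'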
Example #17
	def fetch_data(self, retries=3):
		container_base_url = configuration.read('CONTAINER_API_BASE_URL')
		try:
			url = "{0}/{1}".format(container_base_url, self.container_id)
			
			result = urlfetch.fetch(url, deadline=9)
			if result.status_code == 200:
				data = json.loads(result.content)
				live_stories = data.get('collection', {}).get('live', [])
				live_story_ids = [item['id'] for item in live_stories]
				
				return [read_capi_item(item_id) for item_id in live_story_ids]

			return []

		except Exception as e:
			logging.warn('Container API call failed {0}'.format(e))
			logging.warn(traceback.format_exc())

			if retries > 0:
				return self.fetch_data(retries=retries-1)

		return []
Example #18
def migrate():
    config = configuration.read()
    rtc = ImportHandler(config)
    rtcworkspace = WorkspaceHandler(config)
    git = Commiter
    initialize(config)
    streamuuid = config.streamuuid
    streamname = config.streamname
    branchname = streamname + "_branchpoint"

    componentbaselineentries = rtc.getcomponentbaselineentriesfromstream(
        streamuuid)
    rtcworkspace.setnewflowtargets(streamuuid)
    git.branch(branchname)

    history = rtc.readhistory(componentbaselineentries, streamname)
    changeentries = rtc.getchangeentriesofstreamcomponents(
        componentbaselineentries)

    rtc.acceptchangesintoworkspace(
        rtc.getchangeentriestoaccept(changeentries, history))
    shouter.shout("All changes until creation of stream '%s' accepted" %
                  streamname)
    git.pushbranch(branchname)
    git.branch(streamname)

    rtcworkspace.setcomponentstobaseline(componentbaselineentries, streamuuid)
    rtcworkspace.load()

    changeentries = rtc.getchangeentriesofstream(streamuuid)
    rtc.acceptchangesintoworkspace(
        rtc.getchangeentriestoaccept(changeentries, history))
    git.pushbranch(streamname)
    shouter.shout(
        "All changes of stream '%s' accepted - Migration of stream completed" %
        streamname)
Example #19
import os
import logging

import webapp2

from guardianapi.apiClient import ApiClient

import configuration
import handlers

# TODO: Hide me away somewhere warm and secret.
api_key = configuration.read('CAPI_KEY')
ophan_key = configuration.read('OPHAN_API_KEY')
base_url = configuration.read('CAPI_BASE_URL', 'https://content.guardianapis.com/')
ophan_base_url = configuration.read('OPHAN_BASE_URL')
discussion_base_url = 'https://discussion.guardianapis.com/discussion-api'

client = ApiClient(base_url, api_key, edition="uk")
clientUS = ApiClient(base_url, api_key, edition='us')
clientAUS = ApiClient(base_url, api_key, edition='au')

# Super dirty
# import now after the common functionality of this module is defined
# The result of script execution flow

import email_definitions as emails

app = webapp2.WSGIApplication([('/daily-email/(.+)', emails.uk.DailyEmail),
                               ('/daily-email-us/(.+)', emails.us.DailyEmailUS),
                               ('/daily-email-aus/(.+)', emails.au.DailyEmailAUS),
                               ('/australian-politics/(.+)', emails.au.Politics),
Example #20
 def test_read_passedin_configfile(self):
     self._assertTestConfig(configuration.read(testhelper.getrelativefilename('resources/test_config.ini')))
Example #21
 def test_getSampleConfig_ExpectInitializedConfigWithDefaultValues(self):
     config = configuration.read(testhelper.getrelativefilename("../config.ini.sample"))
     self.assertEqual("lscm", config.scmcommand)
     self.assertEqual(config, configuration.get())
Example #22
 def test_read_minimumconfigfile_shouldrelyonfallbackvalues(self):
     configuration.setconfigfile(testhelper.getrelativefilename('resources/test_minimum_config.ini'))
     self._assertDefaultConfig(configuration.read())
Example #23
 def test_read_configfile_from_configuration(self):
     configuration.setconfigfile(testhelper.getrelativefilename('resources/test_config.ini'))
     self._assertTestConfig(configuration.read())
Example #24
import configuration

from guardianapi.apiClient import ApiClient

api_key = configuration.read('CAPI_KEY')
base_url = configuration.read('CAPI_BASE_URL')

client = ApiClient(base_url, api_key, edition="uk")
clientUS = ApiClient(base_url, api_key, edition='us')
clientAUS = ApiClient(base_url, api_key, edition='au')
Example #25
import logging
import json
import urllib
import traceback

from google.appengine.api import urlfetch
from google.appengine.api import memcache

import pysistence as immutable

import defaults
import configuration

default_params = immutable.make_dict({
	'show-fields': ",".join(defaults.content_item_fields),
	'api-key': configuration.read('CAPI_KEY')
	})

def for_id(container_id):
	return ContainerDataSource(container_id)

def read_capi_item(internal_id):
	capi_base_url = configuration.read('CAPI_BASE_URL')

	item_url = "{0}/{1}?{2}".format(capi_base_url, internal_id, urllib.urlencode(default_params))
	#logging.info(item_url)
	
	cached_response = memcache.get(item_url)

	if cached_response:
		return cached_response
Example #26
import configuration

from guardianapi.apiClient import ApiClient

api_key = configuration.read('CAPI_KEY')
base_url = configuration.read('CAPI_BASE_URL')

client = ApiClient(base_url, api_key, edition="uk")
clientUS = ApiClient(base_url, api_key, edition='us')
clientAUS = ApiClient(base_url, api_key, edition='au')
Example #27
 def test_read_passedin_configfile_expect_override_user_password(self):
     configuration.setUser('newUser')
     configuration.setPassword('newPassword')
     self._assertTestConfig(configuration.read(testhelper.getrelativefilename('resources/test_config.ini')),
                            user='******', password='******')
Example #28
 def test_read_configfile_from_configuration(self):
     configuration.setconfigfile(testhelper.getrelativefilename('resources/test_config.ini'))
     self._assertTestConfig(configuration.read())
Example #29
import urllib
import logging

from google.appengine.api import urlfetch
from google.appengine.api import memcache

import pysistence as immutable

import defaults
import configuration

default_params = immutable.make_dict({
    'show-fields':
    ",".join(defaults.content_item_fields),
    'api-key':
    configuration.read('CAPI_KEY')
})


def read_item(internal_id, additional_params=None):
    capi_base_url = configuration.read('CAPI_BASE_URL')

    combined_params = default_params

    if additional_params:
        combined_params = default_params.using(**additional_params)

    item_url = "{0}/{1}?{2}".format(capi_base_url, internal_id,
                                    urllib.urlencode(combined_params))
    #logging.info(item_url)
Example #30
 def test_read_minimumconfigfile_shouldrelyonfallbackvalues(self):
     configuration.setconfigfile(testhelper.getrelativefilename('resources/test_minimum_config.ini'))
     self._assertDefaultConfig(configuration.read())
Example #31
import os
import logging

import webapp2

from guardianapi.apiClient import ApiClient

import configuration
import handlers

# TODO: Hide me away somewhere warm and secret.
api_key = configuration.read("CAPI_KEY")
ophan_key = configuration.read("OPHAN_API_KEY")
base_url = configuration.read("CAPI_BASE_URL", "https://content.guardianapis.com/")
ophan_base_url = configuration.read("OPHAN_BASE_URL")
discussion_base_url = "https://discussion.guardianapis.com/discussion-api"

client = ApiClient(base_url, api_key, edition="uk")
clientUS = ApiClient(base_url, api_key, edition="us")
clientAUS = ApiClient(base_url, api_key, edition="au")

# Super dirty
# import now after the common functionality of this module is defined
# The result of script execution flow

import email_definitions as emails

app = webapp2.WSGIApplication(
    [
        ("/daily-email/(.+)", emails.uk.DailyEmail),
        ("/daily-email-us/(.+)", emails.us.DailyEmailUS),
Example #32
 def test_getSampleConfig_ExpectInitializedConfigWithDefaultValues(self):
     config = configuration.read(testhelper.getrelativefilename("../config.ini.sample"))
     self.assertEqual("lscm", config.scmcommand)
     self.assertEqual(config, configuration.get())
Example #33
 def test_read_passedin_configfile(self):
     self._assertTestConfig(configuration.read(testhelper.getrelativefilename('resources/test_config.ini')))
Example #34

from time import sleep
import serial
import io
import configuration
import dao

config = configuration.read()

conn = serial.Serial(config['serial']['device'], config['serial']['baud'], timeout=5, bytesize=config['serial']['bytesize'], parity=config['serial']['parity'], stopbits=config['serial']['stopbits'])

buf = io.TextIOWrapper(io.BufferedRWPair(conn, conn, 1))

while True:
    try:
        line = buf.readline()

        if line.startswith('='):
            values = line[1:].split(':')
            session_id = values.pop(0)
            millis = values.pop(0)

            for sensor_id, value in enumerate(values):
                reading = dao.Reading()
                reading.session_id = session_id
                reading.millis = millis
                reading.value = value
                reading.sensor_id = sensor_id + 1
                reading.save()
            
Example #35
 def test_read_passedin_configfile_expect_override_user_password(self):
     configuration.setUser('newUser')
     configuration.setPassword('newPassword')
     self._assertTestConfig(configuration.read(testhelper.getrelativefilename('resources/test_config.ini')),
                            user='******', password='******')
Example #36
def connection():
    base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    config = configuration.read()
    return sqlite3.connect(os.path.join(base_path, config['db']))
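connection() hands back a plain sqlite3 connection, so ordinary DB-API usage applies. A hypothetical query against it follows; the table and column names are assumptions, and only connection() above comes from the example.

conn = connection()
try:
    # Table and column names here are placeholders for the real schema.
    for row in conn.execute('SELECT id, value FROM readings LIMIT 10'):
        print(row)
finally:
    conn.close()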
Example #37
import matplotlib.pyplot as plt
import numpy as np

import os
import sys
import itertools

import configuration
import master

fmuDir = sys.argv[1]

fig, axs = plt.subplots(6, 2, sharex=True, sharey=True)

slaveNames = ['Step', 'Gain', 'Subtraction']
sequences = list(itertools.permutations(range(3)))

fmus, connections = configuration.read(fmuDir, 'StepSubtraction.xml')
dt = .25
t0, tEnd = 0., 2.

axs[0, 0].set_title('Gain, y2')
axs[0, 1].set_title('Subtraction, y3')
for idx in range(6):
    sequenceIdx = tuple(slaveNames[i] for i in sequences[idx])

    print "The master is producing data for the sequence {0} -> {1}".format(
        sequences[idx], sequenceIdx)
    data = master.run(fmus, connections, sequenceIdx, dt, t0, tEnd)

    ax = axs[idx, 0]
    t2s, y2s = data[('Gain', 'y')]
    ax.step(t2s, t2s > 1, 'r--')