示例#1
0
def start_run(config):
    """Create, configure and start a Sentinel watcher from the INI file *config*.

    Reads optional [settings:mqtt] and [settings:rules] sections, and picks
    the first configured output section found in ALLOWED_OUTPUTS.
    """
    # Map config section names to the output classes they configure.
    ALLOWED_OUTPUTS = {
        'output:mqtt': OutMQTT, 'output:pushbullet': Pushbullet}

    parser = configparser.ConfigParser()
    parser.read(config)
    cfg = dict(parser.items())

    mqtt_section = cfg.get('settings:mqtt')
    settings_mqtt = dict(mqtt_section) if mqtt_section else {}

    rules_section = cfg.get('settings:rules')
    settings_rules = dict(rules_section) if rules_section else {}

    # First matching (non-empty) output section wins; stays None otherwise.
    output = None
    for alias, output_cls in ALLOWED_OUTPUTS.items():
        section = cfg.get(alias)
        if section:
            output = output_cls(**dict(section))
            break

    sentinel = Sentinel(**settings_mqtt)
    sentinel.set_db(**settings_rules)
    sentinel.set_output(output)
    sentinel.start()
示例#2
0
def list_rule(config):
    """Echo every stored rule, reading DB settings from the INI file *config*."""
    parser = configparser.ConfigParser()
    parser.read(config)
    cfg = dict(parser.items())

    # Optional [settings:rules] section configures the rule database.
    rules_section = cfg.get('settings:rules')
    settings_rules = dict(rules_section) if rules_section else {}

    sentinel = Sentinel()
    sentinel.set_db(**settings_rules)
    for rule in sentinel.list_rules():
        click.echo(click.style(f'> {rule}', fg='green'))
示例#3
0
def add_rule(config, topic, operator, equated):
    """Persist a new Rule(topic, operator, equated), reading DB settings from *config*."""
    parser = configparser.ConfigParser()
    parser.read(config)
    cfg = dict(parser.items())

    # Optional [settings:rules] section configures the rule database.
    rules_section = cfg.get('settings:rules')
    settings_rules = dict(rules_section) if rules_section else {}

    sentinel = Sentinel()
    sentinel.set_db(**settings_rules)
    sentinel.add_rule(Rule(topic=topic, operator=operator, equated=equated))
示例#4
0
def from_url(url, db=None, master_for_args=None, **kwargs):
    """
    Returns an active Redis client generated from the given database URL.

    Will attempt to extract the database id from the path url fragment, if
    none is provided.

    When url is starting with sentinels://, a sentinel object will be returned
    if no query parameter service_name in url, otherwise the master redis
    client will be returned. master_for_args will be applied to
    Sentinel.master_for.
    >>> import redis
    >>> redis.from_url('sentinels://node1:17700,node2:17700')
    Sentinel<sentinels=[node1:17700,node2:17700]>

    >>> redis.from_url(
        'sentinels://node1:17700,node2:17700?service_name=mymaster', db=1)
    StrictRedis<SentinelConnectionPool<service=mymaster(master)>

    >>> redis.from_url(
       'sentinels://node1:17700,node2:17700?service_name=mymaster&db=3', db=1)
    StrictRedis<SentinelConnectionPool<service=mymaster(master)>

    >>> redis.from_url(
        'sentinels://node1:17700,node2:17700?service_name=mymaster',
        db=1,
        master_for_args={'redis_class':redis.Redis})
    Redis<SentinelConnectionPool<service=mymaster(master)>
    """
    # Fix: avoid the shared mutable default argument ({}); None means
    # "no extra master_for arguments". Backward compatible for all callers.
    if master_for_args is None:
        master_for_args = {}
    parse_result = urlparse(url)
    if parse_result.scheme == 'sentinels':
        from sentinel import Sentinel
        sentinel, db_from_url, service_name = Sentinel.from_url(url, **kwargs)

        if not service_name:
            # No service requested: hand back the Sentinel object itself.
            return sentinel
        # db precedence: value embedded in the URL, then the argument, then 0.
        return sentinel.master_for(service_name,
                                   db=(db_from_url or db or 0),
                                   **master_for_args)
    from redis.client import Redis
    return Redis.from_url(url, db, **kwargs)
示例#5
0
def start_irun():
    """Interactive setup wizard: configure a Sentinel via prompts, then start it."""
    custom_style = style_from_dict({
        Token.Separator: '#cc5454',
        Token.QuestionMark: '#673ab7 bold',
        Token.Selected: '#cc5454',  # default
        Token.Pointer: '#673ab7 bold',
        Token.Instruction: '',  # default
        Token.Answer: '#f44336 bold',
        Token.Question: '',
    })

    def ask_one(question_type, msg, choices=None, default=None):
        """Prompt a single question and return the entered/selected value."""
        spec = {
            'type': question_type,
            'name': 'question',
            'message': msg,
        }
        if choices:
            spec['choices'] = choices
        if default is not None:
            spec['default'] = default
        return prompt([spec], style=custom_style)['question']

    sentinel = Sentinel()

    # Database backend for the rules.
    db_answer = ask_one(
        question_type='list',
        msg='Choose a database for the rules',
        choices=['SQLite3 [Default]', ]
    )
    if db_answer == 'SQLite3 [Default]':
        sentinel.set_db('sqlite://sentinel.db')

    # Output service used when a rule triggers.
    output_answer = ask_one(
        question_type='list',
        msg='Choose an output service',
        choices=['MQTT', ]
    )
    if output_answer == 'MQTT':
        sentinel.set_output(OutMQTT())

    # Rule-entry loop: keeps going while the user wants to add rules.
    keep_adding = ask_one(
        question_type='confirm',
        msg='Do you want to add rules?',
        default=True
    )
    while keep_adding:
        # Ask for the topic and the operation type in one prompt pass.
        topic_and_op = prompt(
            [
                {
                    'type': 'input',
                    'name': 'topic',
                    'message': 'Topic',
                },
                {
                    'type': 'list',
                    'name': 'operation',
                    'message': 'Choose a operation:',
                    'choices': [
                        'Custom parameters',
                        'Data relay',
                    ]
                },
            ],
            style=custom_style)
        if topic_and_op['operation'] == 'Data relay':
            # Relay rules carry only the topic.
            sentinel.add_rule(Rule(topic_and_op['topic']))
        else:
            # Comparison operator for the custom rule.
            operator_answer = ask_one(
                question_type='list',
                msg='If the received value is:',
                choices=[
                    '==', '!=', '>=',
                    '<=', '<', '>',
                ]
            )

            # Value to compare incoming messages against.
            equated_answer = ask_one(
                question_type='input',
                msg=f'Value {str(operator_answer)}'
            )

            sentinel.add_rule(Rule(
                topic=topic_and_op['topic'],
                operator=operator_answer,
                equated=equated_answer
            ))

        keep_adding = ask_one(
            question_type='confirm',
            msg='Do you want to add more rules?',
            default=False
        )

    sentinel.start()
import json
from hashlib import md5
from subprocess import Popen, PIPE
from os import listdir, getenv
from os.path import isfile, join, exists
from pathlib import Path

# Your Auth key generated from app.sentinelengine.ai/authkeys
# (read from the SENTINEL_AUTH_KEY environment variable; None if unset).
AUTH_KEY = getenv("SENTINEL_AUTH_KEY")
# Your Device ID (read from SENTINEL_DEVICE_ID; None if unset).
DEVICE_ID = getenv("SENTINEL_DEVICE_ID")
# The local folder whose files are synced (read from SENTINEL_ENGINE_FILE_FOLDER).
SYNC_DIR = getenv("SENTINEL_ENGINE_FILE_FOLDER")

# Initialise a Sentinel API client.
# NOTE(review): `Sentinel` is not imported in this fragment — confirm the
# import exists elsewhere in the file.
SentinelClient = Sentinel(AUTH_KEY)


def loadFile(path):
    """Return the raw bytes of the file at *path*, resolved inside SYNC_DIR."""
    full_path = join(SYNC_DIR, path)
    with open(full_path, 'rb') as handle:
        return handle.read()


# Ensure the sync folder exists before any file operations.
# Fix: use `not exists(...)` instead of the `is False` identity comparison.
if not exists(SYNC_DIR):
    print("SYNC_DIR: %s missing, creating..." % SYNC_DIR)
    Path(SYNC_DIR).mkdir(parents=True, exist_ok=True)

while True:
    # 1. Generates md5 hashes for existing files
    md5s = [{
        "filename": fname,
def main (ifile = 'data/brdf_WW_1_A_1.kernelFiltered.dat', \
        confFile='config_files/sentinel_Def.conf', \
        solve = ['xlai','xkab','scen','xkw','xkm','xleafn','xs1']) :
  '''
  Solve for field (MODIS) data using a different config file.

  NOTE(review): the *ifile* parameter is immediately shadowed by a
  hard-coded assignment below, so the argument is ignored.  The *solve*
  default is a mutable list shared across calls — harmless here since it
  is never mutated, but worth confirming.
  '''
  
  
  # NOTE(review): shadows the ifile parameter with the same literal as its default.
  ifile = 'data/brdf_WW_1_A_1.kernelFiltered.dat'
  ofileSingle = 'output/brdf_WW_1_A_1.kernelFilteredSingle.dat'
  ofile = 'output/brdf_WW_1_A_1.kernelFiltered.dat'

  s = Sentinel(solve=solve,confFile=confFile)
  # As above, solve for an initial (single-shot) estimate, then plot it.
  s.solveSingle(ifile,ofileSingle)
  s.paramPlot(s.loadData(ofileSingle),s.loadData(ofileSingle),\
                 filename='plots/testFieldDataSingle.png')
  s.smooth(ofileSingle,ofile=ofileSingle+'_smooth')
  s.paramPlot(s.loadData('input/truth.dat'),\
              s.loadData(ofileSingle+'_smooth'),\
              filename='plots/%s.png'%(ofileSingle+'_smooth'))

  # NOTE(review): this median-based gamma is immediately overwritten by the
  # next assignment, so it is dead code — confirm which estimate is intended.
  # Also, np.sqrt(dict.values()) only works on Python 2; on Python 3 the
  # dict view must be converted to a list/array first.
  gamma = np.median(np.sqrt(s.gammaSolve.values()) * \
        np.array(s.wScale.values()))
  gamma = int((np.sqrt(s.gammaSolve[solve[0]]) * \
        np.array(s.wScale[solve[0]]))+0.5)
  # Regularised solve seeded with the smoothed single-shot estimate.
  s.solveRegular(ifile,ofile,modelOrder=2,gamma=gamma, \
        initial=ofileSingle+'_smooth')
  s.paramPlot(s.loadData(ofileSingle+'_smooth'),\
              s.loadData(ofile + '_result.dat'),\
              filename='plots/%s_Gamma%08d.png'%(ofile,gamma))
示例#8
0
import json
import os
import time
from subprocess import Popen, PIPE, TimeoutExpired
from sentinel import Sentinel

# Your Auth key generated from app.sentinelengine.ai/authkeys
# (read from the SENTINEL_AUTH_KEY environment variable; None if unset).
AUTH_KEY = os.getenv("SENTINEL_AUTH_KEY")
# Your Device ID (read from SENTINEL_DEVICE_ID; None if unset).
DEVICE_ID = os.getenv("SENTINEL_DEVICE_ID")

# Initialise a Sentinel API client
SentinelClient = Sentinel(AUTH_KEY)

# Poll-execute-upload loop: runs forever, one batch of commands per pass.
while True:
    # 1. Get latest commands.
    commands = SentinelClient.pollCommands(DEVICE_ID)

    # 2. Execute commands.
    for command in commands:
        print("Executing command: [" + command['command'] + "]")
        # NOTE(review): command['command'] is passed as a single string with
        # shell=False, so it must be a bare executable name/path; commands
        # with arguments would need a list or shell=True — confirm intent.
        process = Popen(command['command'], stdout=PIPE, stderr=PIPE)
        try:
            # Kill any process that runs longer than 10 seconds
            # (the comment previously said 60, but timeout=10 is what runs).
            stdout, stderr = process.communicate(timeout=10)
        except TimeoutExpired as e:
            process.kill()
            # Collect whatever output was produced before the kill.
            stdout, stderr = process.communicate()
        # 3. Upload command output.
        print("Uploading output...")
        SentinelClient.uploadOutput(command['upload'], stdout, stderr)
示例#9
0
class PR(object):
    """Snapshot of a tracked GitHub pull request and its CI build state.

    Wraps the source/target commits (FQSHA), the review state, a build
    state object, and the PR number/title.  Update methods never mutate in
    place; they return a (possibly new) PR built via ``copy``.
    """

    def __init__(self, source, target, review, build, number, title):
        assert isinstance(target, FQSHA), target
        assert isinstance(source, FQSHA), source
        assert number is None or isinstance(number, str)
        assert title is None or isinstance(title, str)
        assert review in ['pending', 'approved', 'changes_requested']
        self.source = source
        self.target = target
        self.review = review
        self.build = build
        self.number = number
        self.title = title

    # Sentinel default for copy(): distinguishes "argument not supplied"
    # from an explicit None.
    keep = Sentinel()

    def copy(self,
             source=keep,
             target=keep,
             review=keep,
             build=keep,
             number=keep,
             title=keep):
        """Return a new PR with the given fields replaced; others are kept."""
        return PR(source=self.source if source is PR.keep else source,
                  target=self.target if target is PR.keep else target,
                  review=self.review if review is PR.keep else review,
                  build=self.build if build is PR.keep else build,
                  number=self.number if number is PR.keep else number,
                  title=self.title if title is PR.keep else title)

    def _maybe_new_shas(self, new_source=None, new_target=None):
        """Return a PR updated for changed source and/or target SHAs.

        Unchanged SHAs are ignored; a changed target on an already-merged
        PR is deliberately ignored as well.
        """
        assert new_source is not None or new_target is not None
        assert new_source is None or isinstance(new_source, FQSHA)
        assert new_target is None or isinstance(new_target, FQSHA)
        if new_source and self.source != new_source:
            assert not self.is_merged()
            if new_target and self.target != new_target:
                log.info(
                    f'new source and target sha {new_target.short_str()} {new_source.short_str()} {self.short_str()}'
                )
                return self._new_target_and_source(new_target, new_source)
            else:
                log.info(
                    f'new source sha {new_source.short_str()} {self.short_str()}'
                )
                return self._new_source(new_source)
        else:
            if new_target and self.target != new_target:
                if self.is_merged():
                    log.info(
                        f'ignoring new target sha for merged PR {self.short_str()}'
                    )
                    return self
                else:
                    log.info(
                        f'new target sha {new_target.short_str()} {self.short_str()}'
                    )
                    return self._new_target(new_target)
            else:
                return self

    def _new_target_and_source(self, new_target, new_source):
        """Both SHAs changed: reset review to pending and kick off a build."""
        return self.copy(source=new_source,
                         target=new_target,
                         review='pending')._new_build(
                             # FIXME: if I already have an image, just use it
                             try_new_build(new_source, new_target))

    def _new_target(self, new_target):
        """Target changed: recheck buildability against the new target."""
        return self.copy(target=new_target)._new_build(
            determine_buildability(self.source, new_target))

    def _new_source(self, new_source):
        """Source changed: reset review to pending and kick off a build."""
        return self.copy(source=new_source, review='pending')._new_build(
            # FIXME: if I already have an image, just use it
            try_new_build(new_source, self.target))

    def _new_build(self, new_build):
        """Transition to new_build (notifying GitHub) if it differs."""
        if self.build != new_build:
            self.notify_github(new_build)
            return self.copy(build=self.build.transition(new_build))
        else:
            return self

    def build_it(self):
        """Kick off a fresh build for the current source/target pair."""
        # FIXME: if I already have an image, just use it
        return self._new_build(try_new_build(self.source, self.target))

    # FIXME: this should be a verb
    def merged(self):
        """Mark this PR's build state as merged at the current target SHA."""
        return self._new_build(Merged(self.target.sha))

    def notify_github(self, build):
        """Post *build* as a commit status on the source SHA via the GitHub API."""
        log.info(f'notifying github of {build} for {self.short_str()}')
        # NOTE: local name shadows the json module; the module itself is not
        # used inside this method.
        json = {
            'state': build.gh_state(),
            'description': str(build),
            'context': CONTEXT
        }
        if isinstance(build, Failure) or isinstance(build, Mergeable):
            json['target_url'] = \
                f'https://storage.googleapis.com/{GCS_BUCKET}/ci/{self.source.sha}/{self.target.sha}/index.html'
        try:
            post_repo(self.target.ref.repo.qname,
                      'statuses/' + self.source.sha,
                      json=json,
                      status_code=201)
        except BadStatus as e:
            # 422: GitHub's per-SHA status limit was hit; log loudly but do
            # not crash the service.
            if e.status_code == 422:
                log.exception(
                    f'Too many statuses applied to {self.source.sha}! This is a '
                    f'dangerous situation because I can no longer block merging '
                    f'of failing PRs.')
            else:
                raise e

    @staticmethod
    def fresh(source, target, number=None, title=None):
        """Alternate constructor: a brand-new PR with pending review and unknown build."""
        return PR(source, target, 'pending', Unknown(), number, title)

    def __str__(self):
        return json.dumps(self.to_json())

    def short_str(self):
        """Compact one-line summary used throughout the logs."""
        return (
            f'[PR {self.number}]{self.target.short_str()}..{self.source.short_str()};'
            f'{self.review};{self.build};')

    @staticmethod
    def from_json(d):
        """Rebuild a PR from the dict produced by to_json."""
        assert 'target' in d, d
        assert 'source' in d, d
        assert 'review' in d, d
        assert 'build' in d, d
        assert 'number' in d, d
        assert 'title' in d, d
        return PR(
            FQSHA.from_json(d['source']),
            FQSHA.from_json(d['target']),
            d['review'],
            build_state_from_json(d['build']),
            d['number'],
            d['title'],
        )

    def to_json(self):
        """Serialize to a JSON-compatible dict (inverse of from_json)."""
        return {
            'target': self.target.to_json(),
            'source': self.source.to_json(),
            'review': self.review,
            'build': self.build.to_json(),
            'number': self.number,
            'title': self.title
        }

    def is_mergeable(self):
        # Mergeable requires both a passing build and an approved review.
        return (isinstance(self.build, Mergeable)
                and self.review == 'approved')

    def is_approved(self):
        return self.review == 'approved'

    def is_running(self):
        return isinstance(self.build, Building)

    def is_pending_build(self):
        return isinstance(self.build, Buildable)

    def is_merged(self):
        return isinstance(self.build, Merged)

    def update_from_github_push(self, push):
        """Handle a push to our target branch (push is the new target FQSHA)."""
        assert isinstance(push, FQSHA)
        assert self.target.ref == push.ref, f'{push} {self.short_str()}'
        return self._maybe_new_shas(new_target=push)

    def update_from_github_pr(self, gh_pr):
        """Reconcile with the PR object reported by GitHub (SHAs, title, number)."""
        assert isinstance(gh_pr, GitHubPR)
        assert self.target.ref == gh_pr.target_ref
        assert self.source.ref == gh_pr.source.ref
        # this will build new PRs when the server restarts
        if gh_pr.target_sha:
            result = self._maybe_new_shas(new_source=gh_pr.source,
                                          new_target=FQSHA(
                                              gh_pr.target_ref,
                                              gh_pr.target_sha))
        else:
            result = self._maybe_new_shas(new_source=gh_pr.source)
        if self.title != gh_pr.title:
            log.info(
                f'found new title from github {gh_pr.title} {self.short_str()}'
            )
            result = result.copy(title=gh_pr.title)
        if self.number != gh_pr.number:
            # NOTE(review): this message interpolates gh_pr.title — probably
            # meant gh_pr.number. Log text only; behavior is unaffected.
            log.info(
                f'found new PR number from github {gh_pr.title} {self.short_str()}'
            )
            result = result.copy(number=gh_pr.number)
        return result

    def update_from_github_review_state(self, review):
        """Adopt the review state reported by GitHub if it changed."""
        if self.review != review:
            log.info(
                f'review state changing from {self.review} to {review} {self.short_str()}'
            )
            return self.copy(review=review)
        else:
            return self

    def update_from_github_status(self, build):
        """Recover build state from a GitHub status, but only when ours is Unknown."""
        if isinstance(self.build, Unknown):
            if self.target.sha == build.target_sha:
                log.info(
                    f'recovering from unknown build state via github. {build} {self.short_str()}'
                )
                return self.copy(build=build)
            else:
                log.info('ignoring github build state for wrong target. '
                         f'{build} {self.short_str()}')
                return self
        else:
            log.info(
                f'ignoring github build state. {build} {self.short_str()}')
            return self

    def refresh_from_batch_job(self, job):
        """Sync build state from a batch job: complete, cancelled, or still created."""
        state = job.cached_status()['state']
        if state == 'Complete':
            return self.update_from_completed_batch_job(job)
        elif state == 'Cancelled':
            # A cancelled job is unexpected; delete it and restart the build.
            log.error(
                f'a job for me was cancelled {short_str_build_job(job)} {self.short_str()}'
            )
            job.delete()
            return self._new_build(try_new_build(self.source, self.target))
        else:
            assert state == 'Created', f'{state} {job.id} {job.attributes} {self.short_str()}'
            assert 'target' in job.attributes, job.attributes
            assert 'image' in job.attributes, job.attributes
            target = FQSHA.from_json(json.loads(job.attributes['target']))
            image = job.attributes['image']
            if target == self.target:
                return self._new_build(Building(job, image, target.sha))
            else:
                # Stale job for an old target: delete it and keep our state.
                log.info(
                    f'found deploy job {job.id} for wrong target {target}, should be {self.target}'
                )
                job.delete()
                return self

    def update_from_completed_batch_job(self, job):
        """Fold a finished batch job into build state, ignoring stale jobs.

        Jobs for an outdated source or target SHA leave the PR unchanged;
        otherwise exit code 0 yields Mergeable and anything else Failure.
        The job is deleted in every case.
        """
        assert isinstance(job, Job)
        job_status = job.cached_status()
        exit_code = job_status['exit_code']
        job_source = FQSHA.from_json(json.loads(job.attributes['source']))
        job_target = FQSHA.from_json(json.loads(job.attributes['target']))
        assert job_source.ref == self.source.ref
        assert job_target.ref == self.target.ref

        if job_target.sha != self.target.sha:
            log.info(f'notified of job for old target {job.id}'
                     # too noisy: f' {job.attributes} {self.short_str()}'
                     )
            x = self
        elif job_source.sha != self.source.sha:
            log.info(f'notified of job for old source {job.id}'
                     # too noisy: f' {job.attributes} {self.short_str()}'
                     )
            x = self
        elif exit_code == 0:
            log.info(
                f'job finished success {short_str_build_job(job)} {self.short_str()}'
            )
            x = self._new_build(Mergeable(self.target.sha))
        else:
            log.info(
                f'job finished failure {short_str_build_job(job)} {self.short_str()}'
            )
            x = self._new_build(
                Failure(exit_code, job.attributes['image'], self.target.sha))
        job.delete()
        return x
示例#10
0
def main (ifile = 'data/brdf_WW_1_A_1.kernelFiltered.dat', \
        confFile='config_files/sentinel_Def.conf', \
        solve = ['xlai','xkab','scen','xkw','xkm','xleafn','xs1']) :
    '''
    Solve for field (MODIS) data using a different config file.

    NOTE(review): the *ifile* parameter is immediately shadowed by a
    hard-coded assignment below, so the argument is ignored.  The *solve*
    default is a mutable list shared across calls — harmless here since it
    is never mutated, but worth confirming.
    '''

    # NOTE(review): shadows the ifile parameter with the same literal as its default.
    ifile = 'data/brdf_WW_1_A_1.kernelFiltered.dat'
    ofileSingle = 'output/brdf_WW_1_A_1.kernelFilteredSingle.dat'
    ofile = 'output/brdf_WW_1_A_1.kernelFiltered.dat'

    s = Sentinel(solve=solve, confFile=confFile)
    # As above, solve for an initial (single-shot) estimate, then plot it.
    s.solveSingle(ifile, ofileSingle)
    s.paramPlot(s.loadData(ofileSingle),s.loadData(ofileSingle),\
                   filename='plots/testFieldDataSingle.png')
    s.smooth(ofileSingle, ofile=ofileSingle + '_smooth')
    s.paramPlot(s.loadData('input/truth.dat'),\
                s.loadData(ofileSingle+'_smooth'),\
                filename='plots/%s.png'%(ofileSingle+'_smooth'))

    # NOTE(review): this median-based gamma is immediately overwritten by the
    # next assignment, so it is dead code — confirm which estimate is intended.
    # Also, np.sqrt(dict.values()) only works on Python 2; on Python 3 the
    # dict view must be converted to a list/array first.
    gamma = np.median(np.sqrt(s.gammaSolve.values()) * \
          np.array(s.wScale.values()))
    gamma = int((np.sqrt(s.gammaSolve[solve[0]]) * \
          np.array(s.wScale[solve[0]]))+0.5)
    # Regularised solve seeded with the smoothed single-shot estimate.
    s.solveRegular(ifile,ofile,modelOrder=2,gamma=gamma, \
          initial=ofileSingle+'_smooth')
    s.paramPlot(s.loadData(ofileSingle+'_smooth'),\
                s.loadData(ofile + '_result.dat'),\
                filename='plots/%s_Gamma%08d.png'%(ofile,gamma))