Example #1
    def __init__(self,
                 from_address,
                 to_address,
                 subject,
                 host,
                 username,
                 password,
                 port=465,
                 use_ssl=1,
                 log_type="email",
                 max_interval=HOUR,
                 settings=None):
        """
        SEND WARNINGS AND ERRORS VIA EMAIL

        settings = {
            "log_type":"email",
            "from_address": "*****@*****.**",
            "to_address": "*****@*****.**",
            "subject": "Problem in Pulse Logger",
            "host": "mail.mozilla.com",
            "port": 465,
            "username": "******",
            "password": "******",
            "use_ssl": 1
        }

        """
        assert settings.log_type == "email", "Expecing settings to be of type 'email'"
        self.settings = settings
        self.accumulation = []
        self.next_send = Date.now() + MINUTE
        self.locker = Lock()
        self.settings.max_interval = Duration(settings.max_interval)
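
A usage sketch matching the documented settings block. The class name is not shown in the snippet, so Log_usingEmail is a stand-in, and it assumes the constructor is wrapped with the codebase's @use_settings decorator (as in Example #15) so that settings= alone fills the named parameters; the masked addresses are replaced with obvious placeholders:

    from pyLibrary.dot import wrap

    settings = wrap({
        "log_type": "email",
        "from_address": "[email protected]",   # placeholder, original is masked
        "to_address": "[email protected]",      # placeholder, original is masked
        "subject": "Problem in Pulse Logger",
        "host": "mail.example.com",             # placeholder host
        "port": 465,
        "username": "user",
        "password": "secret",
        "use_ssl": 1
    })
    log = Log_usingEmail(settings=settings)     # hypothetical class name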
Example #2
 def __init__(self,
              host,
              user,
              password,
              database=None,
              port=5439,
              settings=None):
     self.settings = settings
     self.locker = Lock()
     self.connection = None
Example #3
 def __init__(self, name, data, schema=None):
     #TODO: STORE THIS LIKE A CUBE FOR FASTER ACCESS AND TRANSFORMATION
     data = list(unwrap(data))
     Container.__init__(self, data, schema)
     if schema == None:
         self._schema = get_schema_from_list(data)
     else:
         self._schema = schema
     self.name = name
     self.data = data
     self.locker = Lock()  # JUST IN CASE YOU WANT TO DO MORE THAN ONE THING
Example #4
    def __init__(self,
                 hg,
                 rate_limit,
                 use_cache=True,
                 cache=None,
                 settings=None):
        self.settings = settings
        self.failure_classification = {
            c.id: c.name
            for c in http.get_json(FAILURE_CLASSIFICATION_URL)
        }
        self.repo = {c.id: c.name for c in http.get_json(REPO_URL)}
        self.hg = hg
        self.cache = elasticsearch.Cluster(cache).get_or_create_index(cache)
        self.locker = Lock()
        self.pending = {}

        self.rate_locker = Lock()
        self.request_times = [0] * rate_limit
        self.request_pointer = 0
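
The request_times ring buffer guarded by rate_locker suggests a sliding-window rate limiter: before each request, check the timestamp recorded rate_limit requests ago. A sketch of that idea as a method on this class, assuming a one-second window (the window length is not shown in the snippet):

    import time

    def _wait_for_rate_limit(self):
        # sketch only: block until the request made `rate_limit` calls ago
        # is at least one second old, then record this request's time
        with self.rate_locker:
            wait = 1.0 - (time.time() - self.request_times[self.request_pointer])
            if wait > 0:
                time.sleep(wait)
            self.request_times[self.request_pointer] = time.time()
            self.request_pointer = (self.request_pointer + 1) % len(self.request_times)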
Example #5
    def __init__(self, file):
        assert file

        from pyLibrary.env.files import File

        self.file = File(file)
        if self.file.exists:
            self.file.backup()
            self.file.delete()

        self.file_lock = Lock("file lock for logging")
Example #6
 def __init__(self,
              from_address,
              to_address,
              subject,
              region,
              aws_access_key_id=None,
              aws_secret_access_key=None,
              log_type="ses",
              settings=None):
     assert settings.log_type == "ses", "Expecing settings to be of type 'ses'"
     self.settings = settings
     self.accumulation = []
     self.next_send = Date.now() + MINUTE
     self.locker = Lock()
Example #7
    def __init__(self, name, params, cwd=None, env=None, debug=False):
        self.name = name
        self.service_stopped = Signal("stopped signal for " +
                                      convert.string2quote(name))
        self.stdin = Queue("stdin for process " + convert.string2quote(name),
                           silent=True)
        self.stdout = Queue("stdout for process " + convert.string2quote(name),
                            silent=True)
        self.stderr = Queue("stderr for process " + convert.string2quote(name),
                            silent=True)

        try:
            self.debug = debug or DEBUG
            self.service = service = subprocess.Popen(params,
                                                      stdin=subprocess.PIPE,
                                                      stdout=subprocess.PIPE,
                                                      stderr=subprocess.PIPE,
                                                      bufsize=-1,
                                                      cwd=cwd,
                                                      env=env)

            self.stopper = Signal()
            self.stopper.on_go(self._kill)
            self.thread_locker = Lock()
            self.children = [
                Thread.run(self.name + " waiter",
                           self._monitor,
                           parent_thread=self),
                Thread.run(self.name + " stdin",
                           self._writer,
                           service.stdin,
                           self.stdin,
                           please_stop=self.stopper,
                           parent_thread=self),
                Thread.run(self.name + " stdout",
                           self._reader,
                           service.stdout,
                           self.stdout,
                           please_stop=self.stopper,
                           parent_thread=self),
                Thread.run(self.name + " stderr",
                           self._reader,
                           service.stderr,
                           self.stderr,
                           please_stop=self.stopper,
                           parent_thread=self),
            ]
        except Exception as e:
            Log.error("Can not call", e)
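
A hypothetical usage sketch: the wrapper exposes the child's pipes as thread-safe queues, so callers never touch the Popen object directly (Queue.add and Queue.pop are the pyLibrary queue calls assumed here, and worker.py is a stand-in script):

    p = Process("worker", ["python", "-u", "worker.py"])
    p.stdin.add("hello")       # queued, then written to the child by the stdin thread
    line = p.stdout.pop()      # blocks until the stdout reader thread delivers a line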
Example #8
    def __init__(self, host, port=9200, explore_metadata=True, settings=None):
        """
        settings.explore_metadata == True - IF PROBING THE CLUSTER FOR METADATA IS ALLOWED
        settings.timeout == NUMBER OF SECONDS TO WAIT FOR RESPONSE, OR SECONDS TO WAIT FOR DOWNLOAD (PASSED TO requests)
        """
        if hasattr(self, "settings"):
            return

        self.settings = settings
        self.cluster_state = None
        self._metadata = None
        self.metadata_locker = Lock()
        self.debug = settings.debug
        self.version = None
        self.path = settings.host + ":" + unicode(settings.port)
        self.get_metadata()
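
A usage sketch, assuming pyLibrary's Dict for the dot-access settings object this constructor reads host, port, and debug from:

    from pyLibrary.dot import Dict

    settings = Dict(host="http://localhost", port=9200, debug=False)
    cluster = Cluster(host=settings.host, port=settings.port, settings=settings)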
Example #9
    def __init__(self, _file):
        """
        _file - USES FILE FOR PERSISTENCE
        """
        self.file = File.new_instance(_file)
        self.lock = Lock("lock for persistent queue using file " +
                         self.file.name)
        self.please_stop = Signal()
        self.db = Dict()
        self.pending = []

        if self.file.exists:
            for line in self.file:
                try:
                    delta = convert.json2value(line)
                    apply_delta(self.db, delta)
                except:
                    pass
            if self.db.status.start == None:  # HAPPENS WHEN ONLY ADDED TO QUEUE, THEN CRASH
                self.db.status.start = 0
            self.start = self.db.status.start

            # SCRUB LOST VALUES
            lost = 0
            for k in self.db.keys():
                try:
                    if k != "status" and int(k) < self.start:
                        self.db[k] = None
                        lost += 1
                except Exception:
                    pass  # HAPPENS FOR self.db.status, BUT MAYBE OTHER PROPERTIES TOO
            if lost:
                Log.warning("queue file had {{num}} items lost", num=lost)

            if DEBUG:
                Log.note("Persistent queue {{name}} found with {{num}} items",
                         name=self.file.abspath,
                         num=len(self))
        else:
            self.db.status = Dict(start=0, end=0)
            self.start = self.db.status.start
            if DEBUG:
                Log.note("New persistent queue {{name}}",
                         name=self.file.abspath)
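
The persistence format implied here is a log of JSON deltas, one per line, replayed through apply_delta on startup. A minimal stand-in for apply_delta (its real implementation is not shown) and the kind of lines it would consume:

    # assumed on-disk format, one JSON delta per line, e.g.
    #   {"status": {"start": 0, "end": 3}}    queue bounds moved
    #   {"2": "payload of item 2"}            item 2 written
    def apply_delta(db, delta):
        # sketch: merge each top-level key of the delta into the state
        for k, v in delta.items():
            db[k] = v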
Example #10
    def __init__(self, stream, length, _shared=None):
        """
        :param stream:  THE STREAM WE WILL GET THE BYTES FROM
        :param length:  THE MAX NUMBER OF BYTES WE ARE EXPECTING
        :param _shared: FOR INTERNAL USE TO SHARE THE BUFFER
        :return:
        """
        self.position = 0
        file_ = TemporaryFile()
        if not _shared:
            self.shared = Dict(length=length,
                               locker=Lock(),
                               stream=stream,
                               done_read=0,
                               file=file_,
                               buffer=mmap(file_.fileno(), length))
        else:
            self.shared = _shared

        self.shared.ref_count += 1
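
The _shared parameter and ref_count suggest cheap secondary views: a copy reuses the original mmap buffer instead of re-reading the stream. A hypothetical illustration (FileString is a stand-in for the class name, which the snippet does not show):

    whole = FileString(stream, length)                      # first view allocates the buffer
    view = FileString(None, length, _shared=whole.shared)   # second view shares it; ref_count becomes 2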
Example #11
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import unicode_literals
from __future__ import division

from pyLibrary import convert
from pyLibrary.aws.s3 import key_prefix
from pyLibrary.debugs.logs import Log
from pyLibrary.thread.threads import Lock

from testlog_etl import key2path

is_done_lock = Lock()
is_done = set()

def process_test_result(source_key, source, destination, please_stop=None):
    path = key2path(source_key)
    destination.delete({"and": [
        {"term": {"etl.source.id": path[1]}},
        {"term": {"etl.source.source.id": path[0]}}
    ]})

    lines = source.read_lines()

    keys = []
    data = []
    for l in lines:
        record = convert.json2value(l)
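
The module-level is_done_lock and is_done pair looks like a guarded dedupe set for already-processed keys; membership would presumably be checked like this (a sketch, not part of the truncated snippet):

    with is_done_lock:
        if source_key in is_done:
            return []              # already processed, nothing to do
        is_done.add(source_key)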
Example #12
 def __init__(self, duration=DAY, lock=False):
     self.timeout = duration
     if lock:
         self.locker = Lock()
     else:
         self.locker = _FakeLock()
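
_FakeLock is not shown in the snippet; since it is swapped in for Lock when locking is disabled, it is presumably a no-op with the same context-manager surface. A plausible sketch:

    class _FakeLock(object):
        # no-op stand-in: satisfies `with self.locker:` without real locking
        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            pass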
Example #13
 def __init__(self, hg):
     self.repo = hg
     self.locker = Lock()
     self.unknown_branches = set()
Example #14
 def __init__(self):
     self.data = {}  # MAP FROM TABLE NAME TO COLUMNS
     self.locker = Lock()
     self.count = 0
Example #15
from socket import timeout as socket_timeout

from kombu import Connection, Producer, Exchange
from pytz import timezone
from mozillapulse.utils import time_to_string

from pyLibrary.debugs import constants
from pyLibrary import jsons
from pyLibrary.debugs.exceptions import Except, suppress_exception
from pyLibrary.debugs.logs import Log
from pyLibrary.dot import wrap, coalesce, Dict, set_default
from pyLibrary.meta import use_settings
from pyLibrary.thread.threads import Thread, Lock
from mozillapulse.consumers import GenericConsumer

count_locker = Lock()
count = 0


class Consumer(Thread):
    @use_settings
    def __init__(
            self,
            exchange,  # name of the Pulse exchange
            topic,  # message name pattern to subscribe to  ('#' is wildcard)
            target=None,  # WILL BE CALLED WITH PULSE PAYLOADS AND ack() IF COMPLETED WITHOUT EXCEPTION
            target_queue=None,  # (aka self.queue) WILL BE FILLED WITH PULSE PAYLOADS
            host='pulse.mozilla.org',  # URL to connect
            port=5671,  # tcp port
            user=None,
            password=None,
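
The module-level count_locker and count pair is the usual guarded-counter pattern; a consumer callback would bump it roughly like this (a sketch, not from the source):

    def _count_message():
        global count
        with count_locker:
            count += 1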
Example #16
                try:
                    is_ok = self._dispatch_work(todo)
                    if is_ok:
                        self.work_queue.commit()
                    else:
                        self.work_queue.rollback()
                except Exception as e:
                    self.work_queue.rollback()
                    Log.warning(
                        "could not processs {{key}}.  Returned back to work queue.",
                        key=todo.key,
                        cause=e)


sinks_locker = Lock()
sinks = []  # LIST OF (settings, sink) PAIRS



def get_container(settings):
    if isinstance(settings, (MultiDayIndex, aws.s3.Bucket)):
        return settings

    if settings == None:
        return DummySink()
    elif settings.type == "redshift":
        for e in sinks:
            try:
                fuzzytestcase.assertAlmostEqual(e[0], settings)