예제 #1
0
import requests

# Suppress TLS certificate warnings: requests against internal endpoints
# are made with verification disabled, which would otherwise spam stderr.
requests.packages.urllib3.disable_warnings(
    requests.packages.urllib3.exceptions.InsecureRequestWarning
)

import cPickle as pickle  # Python 2 C-accelerated pickle
from datetime import datetime, timedelta
import hashlib
import ConfigParser  # Python 2 name (configparser in Python 3)

import logging
from commonHelpers.logger import logger

# Use a named child logger so records from this script are identifiable.
logger = logger.getChild("mephisto")

# Fix: InfluxDBClient was imported twice; the duplicate import is removed.
from influxdb import InfluxDBClient
import Client

import mysql.connector

# Database credentials are read from a local ini-style config file
# with a [credentials] section.
config = ConfigParser.ConfigParser()
config.read("config.cfg")

password = config.get("credentials", "password")
username = config.get("credentials", "username")
database = config.get("credentials", "database")

예제 #2
0
#!/usr/bin/env python

import ROOT
from yieldsTable import yieldsTable

import logging
from commonHelpers.logger import logger
from commonHelpers import process_names

# Named child logger so records from this script are identifiable.
logger = logger.getChild("yieldsTable")

import os
import pprint
import argparse
import traceback

# Command-line interface: one optional positional configfile plus two
# boolean switches.
parser = argparse.ArgumentParser(
    description="Print yieldstables using a configfile.")
parser.add_argument('configfile', help='python based configfile', nargs="?")
parser.add_argument('--debug',
                    action='store_true',
                    help='print debug messages')
parser.add_argument('--no-raw',
                    action='store_true',
                    help='do not print raw yields')
args = parser.parse_args()

# --debug lowers the threshold of the "yieldsTable" logger only, leaving
# other loggers at their configured levels.
if args.debug:
    logging.getLogger("yieldsTable").setLevel(logging.DEBUG)

if not args.configfile:
예제 #3
0
# NOTE(review): `requests` is used here but not imported anywhere in this
# visible chunk — presumably imported further up the file; confirm.
# Suppress TLS certificate warnings for requests made with verification
# disabled.
requests.packages.urllib3.disable_warnings(
    requests.packages.urllib3.exceptions.InsecureRequestWarning
)

import cPickle as pickle  # Python 2 C-accelerated pickle
from datetime import datetime, timedelta
import hashlib
import ConfigParser  # Python 2 name (configparser in Python 3)
import argparse

from commonHelpers import notifications

import logging
from commonHelpers.logger import logger

# Named child logger so records from this script are identifiable.
logger = logger.getChild("QMonit")

from influxdb import InfluxDBClient
import Client

# Command-line interface: an optional fixed timestamp plus three boolean
# switches controlling debug output and where (or whether) data is uploaded.
parser = argparse.ArgumentParser(description="Write job stats")
parser.add_argument(
    "-timestamp",
    default=None,
    help="Fixed timestamp to use instead of current one (UTC in YYYY-mm-dd hh:mm:ss format)",
)
parser.add_argument("--debug", action="store_true", help="print debug messages")
parser.add_argument(
    "--testDB", action="store_true", help="Upload to test DB instead of production DB"
)
parser.add_argument("--skipSubmit", action="store_true", help="do not upload to DB")
예제 #4
0
import os
import json

from . import JSONScraper

from commonHelpers.logger import logger

# Module-level child logger named after this module's import path.
logger = logger.getChild(__name__)


class AGIS(JSONScraper):
    def __init__(self, *args, **kwargs):
        """Pure pass-through constructor.

        AGIS adds no state of its own; all arguments are forwarded
        unchanged to JSONScraper's initialisation.
        """
        super(AGIS, self).__init__(*args, **kwargs)

    def convert(
        self,
        data,
        sort_field="panda_queue",
        should_be_sorted_by="panda_queue",
        *args,
        **kwargs
    ):
        """Convert the AGIS data to the desired format of being ordered by Panda queues

        :param data: data to be converted in the desired format"""

        json_data = {}

        if isinstance(data, dict):
            for key, d in data.items():
                if sort_field in d:
예제 #5
0
#!/usr/bin/env python
import ROOT
import os
import argparse
import pprint

import logging
from commonHelpers.logger import logger
# Named child logger so records from this script are identifiable.
logger = logger.getChild("machete")

# Command-line interface: a ROOT file path plus a boolean signal switch.
parser = argparse.ArgumentParser(description="""
Sanity check ROOT tree
""")
parser.add_argument("-file", help="ROOT file")
parser.add_argument('--signal', help='Use signal', action='store_true')

args = parser.parse_args()

# Accept either a path (string -> open the file read-only) or an already
# opened file object. `basestring` is Python 2 only (str in Python 3).
tf = ROOT.TFile.Open(args.file, "READ") if isinstance(
    args.file, basestring) else args.file

# Collect the names of all top-level keys (trees) stored in the file.
trees_list = [key.GetName() for key in tf.GetListOfKeys()]

# Bookkeeping containers filled further down in the script.
checklist = []
processes = []

logger.info("{} trees in {}".format(len(trees_list), args.file))

for treename in trees_list: