Example #1
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)

    username = args[Constant.WH_DB_USERNAME_KEY]
    password = args[Constant.WH_DB_PASSWORD_KEY]
    JDBC_DRIVER = args[Constant.WH_DB_DRIVER_KEY]
    JDBC_URL = args[Constant.WH_DB_URL_KEY]
    self.input_table_file = args[Constant.ORA_SCHEMA_OUTPUT_KEY]
    self.input_field_file = args[Constant.ORA_FIELD_OUTPUT_KEY]
    self.input_sample_file = args[Constant.ORA_SAMPLE_OUTPUT_KEY]

    self.db_id = args[Constant.DB_ID_KEY]
    self.wh_etl_exec_id = args[Constant.WH_EXEC_ID_KEY]
    self.conn_mysql = zxJDBC.connect(JDBC_URL, username, password, JDBC_DRIVER)
    self.conn_cursor = self.conn_mysql.cursor()

    if Constant.INNODB_LOCK_WAIT_TIMEOUT in args:
      lock_wait_time = args[Constant.INNODB_LOCK_WAIT_TIMEOUT]
      self.conn_cursor.execute("SET innodb_lock_wait_timeout = %s;" % lock_wait_time)

    self.logger.info("Load Oracle Metadata into {}, db_id {}, wh_exec_id {}"
                     .format(JDBC_URL, self.db_id, self.wh_etl_exec_id))

    self.dict_dataset_table = 'dict_dataset'
    self.field_comments_table = 'field_comments'
    self.dict_field_table = 'dict_field_detail'
    self.dict_field_comment_table = 'dict_dataset_field_comment'
    self.dict_dataset_sample_table = 'dict_dataset_sample'
Example #2
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)

    username = args[Constant.WH_DB_USERNAME_KEY]
    password = args[Constant.WH_DB_PASSWORD_KEY]
    JDBC_DRIVER = args[Constant.WH_DB_DRIVER_KEY]
    JDBC_URL = args[Constant.WH_DB_URL_KEY]

    self.db_id = args[Constant.JOB_REF_ID_KEY]
    self.wh_etl_exec_id = args[Constant.WH_EXEC_ID_KEY]
    self.conn_mysql = zxJDBC.connect(JDBC_URL, username, password, JDBC_DRIVER)
    self.conn_cursor = self.conn_mysql.cursor()

    if Constant.INNODB_LOCK_WAIT_TIMEOUT in args:
      lock_wait_time = args[Constant.INNODB_LOCK_WAIT_TIMEOUT]
      self.conn_cursor.execute("SET innodb_lock_wait_timeout = %s;" % lock_wait_time)

    temp_dir = FileUtil.etl_temp_dir(args, "ORACLE")
    self.input_table_file = os.path.join(temp_dir, args[Constant.ORA_SCHEMA_OUTPUT_KEY])
    self.input_field_file = os.path.join(temp_dir, args[Constant.ORA_FIELD_OUTPUT_KEY])
    self.input_sample_file = os.path.join(temp_dir, args[Constant.ORA_SAMPLE_OUTPUT_KEY])

    self.collect_sample = False
    if Constant.ORA_LOAD_SAMPLE in args:
      self.collect_sample = FileUtil.parse_bool(args[Constant.ORA_LOAD_SAMPLE], False)

    self.logger.info("Load Oracle Metadata into {}, db_id {}, wh_exec_id {}"
                     .format(JDBC_URL, self.db_id, self.wh_etl_exec_id))
Example #3
  def __init__(self, hdfs_uri, kerberos=False, kerberos_principal=None, keytab_file=None):
    """
    :param hdfs_uri: hdfs://hadoop-name-node:port
    :param kerberos: optional, if kerberos authentication is needed
    :param kerberos_principal: optional, e.g. principal@YOUR.REALM
    :param keytab_file: optional, absolute path to keytab file
    """
    self.logger = LoggerFactory.getLogger(self.__class__.__name__)

    self.logger.info("keytab_file: " + keytab_file)

    hdfs_conf = Configuration()
    if hdfs_uri.startswith('hdfs://'):
      hdfs_conf.set(Hdfs.FS_DEFAULT_NAME_KEY, hdfs_uri)
    elif hdfs_uri > "":
      self.logger.error("%s is an invalid uri for hdfs namenode ipc bind." % hdfs_uri)

    if kerberos:  #  init kerberos and keytab
      if not kerberos_principal or not keytab_file or kerberos_principal == '' or keytab_file == '':
        print "Kerberos Principal and Keytab File Name/Path are required!"

      hdfs_conf.set("hadoop.security.authentication", "kerberos")
      hdfs_conf.set("dfs.namenode.kerberos.principal.pattern", "*")
      UserGroupInformation.setConfiguration(hdfs_conf)
      UserGroupInformation.loginUserFromKeytab(kerberos_principal, keytab_file)

    self.fs = Hdfs.get(hdfs_conf)

    requests.packages.urllib3.disable_warnings()
    self.logger.info("Initiated SchemaUrlHelper")
Example #4
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
    self.app_id = int(args[Constant.JOB_REF_ID_KEY])
    self.wh_exec_id = long(args[Constant.WH_EXEC_ID_KEY])
    self.wh_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                 args[Constant.WH_DB_USERNAME_KEY],
                                 args[Constant.WH_DB_PASSWORD_KEY],
                                 args[Constant.WH_DB_DRIVER_KEY])
    self.wh_cursor = self.wh_con.cursor()
    self.aw_con = self.get_connection(args[Constant.AW_DB_URL_KEY],
                                      args[Constant.AW_DB_PORT_KEY],
                                      args[Constant.AW_DB_NAME_KEY],
                                      args[Constant.AW_DB_USERNAME_KEY],
                                      args[Constant.AW_DB_PASSWORD_KEY],
                                      args[Constant.AW_DB_DRIVER_KEY])
    self.aw_cursor = self.aw_con.cursor()
    self.lookback_period = args[Constant.AW_EXEC_ETL_LOOKBACK_KEY]
    self.app_folder = args[Constant.WH_APP_FOLDER_KEY]
    self.metadata_folder = self.app_folder + "/" + str(SchedulerType.APPWORX) + "/" + str(self.app_id)
    self.last_execution_unix_time = None
    self.get_last_execution_unix_time()

    if not os.path.exists(self.metadata_folder):
      try:
        os.makedirs(self.metadata_folder)
      except Exception as e:
        self.logger.error(e)
Example #5
from org.slf4j import Logger, LoggerFactory
from ch.qos.logback.classic import Level  # assumes a logback backend; the slf4j API alone has no setLevel or Level constants


def setLogLevel(level):
    """setLogLevel(level) changes the log level to level, where level is a string (ERROR, WARN, INFO, DEBUG, and TRACE) """

    logger = LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME)
    levels = {"ERROR": Level.ERROR, "WARN": Level.WARN, "INFO": Level.INFO, "DEBUG": Level.DEBUG, "TRACE": Level.TRACE}
    l = levels[level.upper()]
    logger.setLevel(l)
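
A minimal usage sketch for the helper above (hedged: it assumes the logback backend implied by the Level import; level names are case-insensitive thanks to level.upper()):

setLogLevel("debug")   # ROOT logger now emits DEBUG and above
setLogLevel("WARN")    # quiet it back down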
Example #6
 def __init__(self, args):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
   self.wh_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                args[Constant.WH_DB_USERNAME_KEY],
                                args[Constant.WH_DB_PASSWORD_KEY],
                                args[Constant.WH_DB_DRIVER_KEY])
   self.wh_cursor = self.wh_con.cursor()
   self.app_id = int(args[Constant.JOB_REF_ID_KEY])
Example #7
	def __init__(self, name='max',
				 labelList=('maxx','maxy','maxval', 'sum'),
				 keyxlabel='maxx', 
				 keyylabel='maxy', 
				 formatString='Maximum value found to be at %f,%f (maxx,maxy) was %f (maxval). Sum was %f (sum)'
				 ):
		self.logger = LoggerFactory.getLogger("SumMaxPositionAndValue:%s" % name)
		TwodDataSetProcessor.__init__(self, name, labelList, keyxlabel, keyylabel, formatString)
Example #8
 def __init__(self, args):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
   self.wh_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                args[Constant.WH_DB_USERNAME_KEY],
                                args[Constant.WH_DB_PASSWORD_KEY],
                                args[Constant.WH_DB_DRIVER_KEY])
   self.wh_cursor = self.wh_con.cursor()
   self.wh_exec_id = long(args[Constant.WH_EXEC_ID_KEY])
   self.app_folder = args[Constant.WH_APP_FOLDER_KEY]
Example #9
 def __init__(self, args):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
   self.elasticsearch_index_url = args[Constant.WH_ELASTICSEARCH_URL_KEY]
   self.elasticsearch_port = args[Constant.WH_ELASTICSEARCH_PORT_KEY]
   self.wh_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                args[Constant.WH_DB_USERNAME_KEY],
                                args[Constant.WH_DB_PASSWORD_KEY],
                                args[Constant.WH_DB_DRIVER_KEY])
   self.wh_cursor = self.wh_con.cursor(1)
Example #10
 def __init__(self):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
   username = args[Constant.HIVE_METASTORE_USERNAME]
   password = args[Constant.HIVE_METASTORE_PASSWORD]
   jdbc_driver = args[Constant.HIVE_METASTORE_JDBC_DRIVER]
   jdbc_url = args[Constant.HIVE_METASTORE_JDBC_URL]
   self.conn_hms = zxJDBC.connect(jdbc_url, username, password, jdbc_driver)
   self.curs = self.conn_hms.cursor()
   dependency_instance_file = args[Constant.HIVE_DEPENDENCY_CSV_FILE_KEY]
   self.instance_writer = FileWriter(dependency_instance_file)
Example #11
 def __init__(self, args, scheduler_type):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
   self.app_id = int(args[Constant.APP_ID_KEY])
   self.wh_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                args[Constant.WH_DB_USERNAME_KEY],
                                args[Constant.WH_DB_PASSWORD_KEY],
                                args[Constant.WH_DB_DRIVER_KEY])
   self.wh_cursor = self.wh_con.cursor()
   self.app_folder = args[Constant.WH_APP_FOLDER_KEY]
   self.metadata_folder = self.app_folder + "/" + str(scheduler_type) + "/" + str(self.app_id)
Example #12
	def __init__(self, path, iFileLoader=None, fileLoadTimout=None, printNfsTimes=False, wait_for_exposure_callable=None):
		self.logger = LoggerFactory.getLogger("LazyDataSetProvider:%s" % path)
		self.path = path
		self.iFileLoader = iFileLoader
		self.fileLoadTimout = fileLoadTimout
		self.printNfsTimes = printNfsTimes
		self.wait_for_exposure_callable = wait_for_exposure_callable
		
		self.configureLock = threading.Lock()
		self.dataset = None
Example #13
 def __init__(self, args):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
   self.wh_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                args[Constant.WH_DB_USERNAME_KEY],
                                args[Constant.WH_DB_PASSWORD_KEY],
                                args[Constant.WH_DB_DRIVER_KEY])
   self.wh_cursor = self.wh_con.cursor()
   self.app_id = int(args[Constant.JOB_REF_ID_KEY])
   self.group_app_id = int(args[Constant.LDAP_GROUP_APP_ID_KEY])
   self.app_folder = args[Constant.WH_APP_FOLDER_KEY]
   self.metadata_folder = self.app_folder + "/" + str(self.app_id)
   self.ceo_user_id = args[Constant.LDAP_CEO_USER_ID_KEY]
Example #14
 def __init__(self, args):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
   self.app_id = int(args[Constant.JOB_REF_ID_KEY])
   self.wh_exec_id = long(args[Constant.WH_EXEC_ID_KEY])
   self.wh_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                args[Constant.WH_DB_USERNAME_KEY],
                                args[Constant.WH_DB_PASSWORD_KEY],
                                args[Constant.WH_DB_DRIVER_KEY])
   self.wh_cursor = self.wh_con.cursor()
   self.look_back_days = args[Constant.AW_LINEAGE_ETL_LOOKBACK_KEY]
   self.last_execution_unix_time = None
   self.get_last_execution_unix_time()
Example #15
  def __init__(self):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
    requests.packages.urllib3.disable_warnings()
    self.app_id = int(args[Constant.APP_ID_KEY])
    self.wh_exec_id = long(args[Constant.WH_EXEC_ID_KEY])
    self.project_writer = FileWriter(args[Constant.GIT_PROJECT_OUTPUT_KEY])
    self.repo_writer = FileWriter(args[Constant.PRODUCT_REPO_OUTPUT_KEY])
    self.repo_owner_writer = FileWriter(args[Constant.PRODUCT_REPO_OWNER_OUTPUT_KEY])

    self.multiproduct = {}
    self.git_repo = {}
    self.product_repo = []
Example #16
  def __init__(self):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)

    # connection
    username = args[Constant.HIVE_METASTORE_USERNAME]
    password = args[Constant.HIVE_METASTORE_PASSWORD]
    jdbc_driver = args[Constant.HIVE_METASTORE_JDBC_DRIVER]
    jdbc_url = args[Constant.HIVE_METASTORE_JDBC_URL]
    self.conn_hms = zxJDBC.connect(jdbc_url, username, password, jdbc_driver)
    self.curs = self.conn_hms.cursor()

    # variable
    self.dataset_dict = {}
Example #17
def getLogLevel(loggerName='ROOT'):
    logger = LoggerFactory.getLogger(loggerName)
    if logger.getLevel():
        level = str(logger.getLevel())
    else:
        level = None
        
    if logger.getEffectiveLevel():
        effectiveLevel = str(logger.getEffectiveLevel())
    else:
        effectiveLevel = None
        
    return { 'level': level, 'effectiveLevel': effectiveLevel }
Example #18
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)

    # connection
    self.username = args[Constant.HIVE_METASTORE_USERNAME]
    self.password = args[Constant.HIVE_METASTORE_PASSWORD]
    self.jdbc_driver = args[Constant.HIVE_METASTORE_JDBC_DRIVER]
    self.jdbc_url = args[Constant.HIVE_METASTORE_JDBC_URL]
    self.connection_interval = int(args[Constant.HIVE_METASTORE_RECONNECT_TIME])
    self.logger.info("DB re-connection interval: %d" % self.connection_interval)

    self.db_whitelist = args[Constant.HIVE_DATABASE_WHITELIST_KEY] if Constant.HIVE_DATABASE_WHITELIST_KEY in args else "''"
    self.db_blacklist = args[Constant.HIVE_DATABASE_BLACKLIST_KEY] if Constant.HIVE_DATABASE_BLACKLIST_KEY in args else "''"
    self.logger.info("DB whitelist: " + self.db_whitelist)
    self.logger.info("DB blacklist: " + self.db_blacklist)

    self.conn_hms = None
    self.connect_time = None
    self.db_connect(True)

    hdfs_namenode_ipc_uri = args.get(Constant.HDFS_NAMENODE_IPC_URI_KEY, None)
    kerberos_principal = args.get(Constant.KERBEROS_PRINCIPAL_KEY, None)
    keytab_file = args.get(Constant.KERBEROS_KEYTAB_FILE_KEY, None)

    kerberos_auth = False
    if Constant.KERBEROS_AUTH_KEY in args:
      kerberos_auth = FileUtil.parse_bool(args[Constant.KERBEROS_AUTH_KEY], False)

    self.table_whitelist_enabled = False
    if Constant.HIVE_TABLE_WHITELIST_ENABLED in args:
      self.table_whitelist_enabled = FileUtil.parse_bool(args[Constant.HIVE_TABLE_WHITELIST_ENABLED], False)

    self.table_blacklist_enabled = False
    if Constant.HIVE_TABLE_BLACKLIST_ENABLED in args:
      self.table_blacklist_enabled = FileUtil.parse_bool(args[Constant.HIVE_TABLE_BLACKLIST_ENABLED], False)

    self.schema_url_helper = SchemaUrlHelper.SchemaUrlHelper(hdfs_namenode_ipc_uri, kerberos_auth, kerberos_principal, keytab_file)

    # global variables
    self.databases = None
    self.db_dict = {}  # name : index
    self.table_dict = {}  # fullname : index
    self.dataset_dict = {}  # name : index
    self.instance_dict = {}  # name : index
    self.serde_param_columns = []
    # counting statistics
    self.external_url = 0
    self.hdfs_count = 0
    self.schema_registry_count = 0
Example #19
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
    self.args = args
    self.app_id = int(args[Constant.APP_ID_KEY])
    self.group_app_id = int(args[Constant.LDAP_GROUP_APP_ID_KEY])
    self.wh_exec_id = long(args[Constant.WH_EXEC_ID_KEY])
    self.app_folder = args[Constant.WH_APP_FOLDER_KEY]
    self.metadata_folder = self.app_folder + "/" + str(self.app_id)
    if not os.path.exists(self.metadata_folder):
      try:
        os.makedirs(self.metadata_folder)
      except Exception as e:
        self.logger.error(e)

    self.ldap_user = set()
    self.group_map = dict()
    self.group_flatten_map = dict()
Example #20
    def __init__(self, name, rootNamespace=None, scannablesToRead=[], readFromNexus=False):
        """Create a MetadataCollector Scannable, for use with SRSDataWriters
        """
        self.name = name
        self.inputNames = []
        self.extraNames = []
        self.outputFormat = []

        self.logger = LoggerFactory.getLogger("metadata")
        if scannablesToRead and readFromNexus:
            self.logger.warn("%s: When readFromNexus=True the specified scannablesToRead are ignored!" % name)

        self.readFromNexus = readFromNexus
        self.rootNamespaceDict = rootNamespace
        self.scannables_to_read = scannablesToRead
        self.verbose = False
        self.quiet = False
        self.prepend_keys_with_scannable_names = True
Example #21
  def __init__(self, hdfs_uri, kerberos=False, kerberos_principal=None, keytab_file=None):
    """
    :param hdfs_uri: hdfs://hadoop-name-node:port
    :param kerberos: optional, if kerberos authentication is needed
    :param kerberos_principal: optional, e.g. principal@YOUR.REALM
    :param keytab_file: optional, user.keytab or ~/.kerberos/user.keytab
    """

    self.logger = LoggerFactory.getLogger(self.__class__.__name__)

    hdfs_conf = Configuration()
    if hdfs_uri.startswith('hdfs://'):
      hdfs_conf.set(Hdfs.FS_DEFAULT_NAME_KEY, hdfs_uri)
    elif hdfs_uri > "":
      self.logger.error("%s is an invalid uri for hdfs namenode ipc bind." % hdfs_uri)

    if kerberos == True:  #  init kerberos and keytab
      if not kerberos_principal or not keytab_file or kerberos_principal == '' or keytab_file == '':
        print "Kerberos Principal and Keytab File Name/Path are required!"

      keytab_path = keytab_file
      if keytab_file.startswith('/'):
        if os.path.exists(keytab_file):
          keytab_path = keytab_file
          print "Using keytab at %s" % keytab_path
      else:  # try relative path
        all_locations = [os.getcwd(), expanduser("~") + "/.ssh",
            expanduser("~") + "/.kerberos", expanduser("~") + "/.wherehows",
            os.getenv("APP_HOME"), os.getenv("WH_HOME")]
        for loc in all_locations:
          if os.path.exists(loc + '/' + keytab_file):
            keytab_path = loc + '/' + keytab_file
            print "Using keytab at %s" % keytab_path
            break

      hdfs_conf.set("hadoop.security.authentication", "kerberos")
      hdfs_conf.set("dfs.namenode.kerberos.principal.pattern", "*")
      UserGroupInformation.setConfiguration(hdfs_conf)
      UserGroupInformation.loginUserFromKeytab(kerberos_principal, keytab_path)

    self.fs = Hdfs.get(hdfs_conf)

    requests.packages.urllib3.disable_warnings()
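
A hedged construction sketch for the helper above (assuming the class is named SchemaUrlHelper, as Example #18 suggests; the URI and keytab names are illustrative). A bare keytab file name exercises the relative-path search over cwd, ~/.ssh, ~/.kerberos, ~/.wherehows, $APP_HOME and $WH_HOME shown in the code:

helper = SchemaUrlHelper('hdfs://namenode.example.com:8020')  # plain HDFS, no kerberos
helper = SchemaUrlHelper('hdfs://namenode.example.com:8020', kerberos=True,
                         kerberos_principal='etl@EXAMPLE.COM', keytab_file='etl.keytab')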
Example #22
  def __init__(self, file_content=None, log_file_name=None):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
    self.text = None

    if log_file_name is not None:
      self.log_file_name = log_file_name
      file_ext = os.path.splitext(self.log_file_name)[1][1:]
      self.logger.info(file_ext)
      self.logger.info(self.log_file_name)
      if file_ext == 'gz':
        try:
          self.text = gzip.GzipFile(mode='r', filename=self.log_file_name).read()
        except:
          self.logger.error('exception')
          self.logger.error(str(sys.exc_info()[0]))
      else:
        self.text = open(self.log_file_name,'r').read()
    else:
      self.text = file_content
Example #23
 def __init__(self, args):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
   self.app_id = int(args[Constant.JOB_REF_ID_KEY])
   self.wh_exec_id = long(args[Constant.WH_EXEC_ID_KEY])
   self.aw_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                args[Constant.WH_DB_USERNAME_KEY],
                                args[Constant.WH_DB_PASSWORD_KEY],
                                args[Constant.WH_DB_DRIVER_KEY])
   self.aw_cursor = self.aw_con.cursor()
   self.remote_hadoop_script_dir = args[Constant.AW_REMOTE_HADOOP_SCRIPT_DIR]
   self.local_script_path = args[Constant.AW_LOCAL_SCRIPT_PATH]
   self.remote_script_path = args[Constant.AW_REMOTE_SCRIPT_PATH]
   self.aw_archive_dir = args[Constant.AW_ARCHIVE_DIR]
   # self.aw_log_url = args[Constant.AW_LOG_URL]
   self.bteq_source_target_override = args[Constant.AW_BTEQ_SOURCE_TARGET_OVERRIDE]
   self.metric_override = args[Constant.AW_METRIC_OVERRIDE]
   self.skip_already_parsed = args[Constant.AW_SKIP_ALREADY_PARSED]
   self.look_back_days = args[Constant.AW_LINEAGE_ETL_LOOKBACK_KEY]
   self.last_execution_unix_time = None
   self.get_last_execution_unix_time()
Example #24
    def __init__(self, args):
        self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)

        username = args[Constant.WH_DB_USERNAME_KEY]
        password = args[Constant.WH_DB_PASSWORD_KEY]
        JDBC_DRIVER = args[Constant.WH_DB_DRIVER_KEY]
        JDBC_URL = args[Constant.WH_DB_URL_KEY]
        self.database_scm_repo_file = args[Constant.DATABASE_SCM_REPO_OUTPUT_KEY]

        self.app_id = args[Constant.APP_ID_KEY]
        self.wh_etl_exec_id = args[Constant.WH_EXEC_ID_KEY]
        self.conn_mysql = zxJDBC.connect(JDBC_URL, username, password, JDBC_DRIVER)
        self.conn_cursor = self.conn_mysql.cursor()

        if Constant.INNODB_LOCK_WAIT_TIMEOUT in args:
            lock_wait_time = args[Constant.INNODB_LOCK_WAIT_TIMEOUT]
            self.conn_cursor.execute("SET innodb_lock_wait_timeout = %s;" % lock_wait_time)

        self.logger.info("Load Code Search CSV into {}, app_id {}, wh_exec_id {}"
                         .format(JDBC_URL, self.app_id, self.wh_etl_exec_id))
Example #25
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
    self.app_id = int(args[Constant.JOB_REF_ID_KEY])
    self.wh_exec_id = long(args[Constant.WH_EXEC_ID_KEY])
    self.oz_con = zxJDBC.connect(args[Constant.OZ_DB_URL_KEY],
                                 args[Constant.OZ_DB_USERNAME_KEY],
                                 args[Constant.OZ_DB_PASSWORD_KEY],
                                 args[Constant.OZ_DB_DRIVER_KEY])
    self.oz_cursor = self.oz_con.cursor()
    self.lookback_period = args[Constant.OZ_EXEC_ETL_LOOKBACK_MINS_KEY]
    self.app_folder = args[Constant.WH_APP_FOLDER_KEY]
    self.metadata_folder = self.app_folder + "/" + str(SchedulerType.OOZIE) + "/" + str(self.app_id)
    self.oz_version = 4.0
    if not os.path.exists(self.metadata_folder):
      try:
        os.makedirs(self.metadata_folder)
      except Exception as e:
        self.logger.error(e)

    self.get_oozie_version()
Example #26
def getLogLevel(loggerName='ROOT'):
    """Return log level.
    
    Optional Args:
        loggerName: Name of a specific 'logger'. Default value is 'ROOT'.
        
    Returns:
        Dictionary containing both the 'level' and 'effectiveLevel' of the 
        given logger.
    """
    logger = LoggerFactory.getLogger(loggerName)
    if logger.getLevel():
        level = str(logger.getLevel())
    else:
        level = None
        
    if logger.getEffectiveLevel():
        effectiveLevel = str(logger.getEffectiveLevel())
    else:
        effectiveLevel = None
        
    return { 'level': level, 'effectiveLevel': effectiveLevel }
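
A short usage sketch (assuming a logback-style backend, where getLevel() returns None unless a level was set explicitly while getEffectiveLevel() is inherited from an ancestor):

root_info = getLogLevel()                    # e.g. {'level': 'INFO', 'effectiveLevel': 'INFO'}
child_info = getLogLevel('jython.Startup')   # 'level' may be None; 'effectiveLevel' never is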
Example #27
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)

    username = args[Constant.WH_DB_USERNAME_KEY]
    password = args[Constant.WH_DB_PASSWORD_KEY]
    JDBC_DRIVER = args[Constant.WH_DB_DRIVER_KEY]
    JDBC_URL = args[Constant.WH_DB_URL_KEY]
    self.mp_gitli_project_file = args[Constant.GIT_PROJECT_OUTPUT_KEY]
    self.product_repo_file = args[Constant.PRODUCT_REPO_OUTPUT_KEY]
    self.product_repo_owner_file = args[Constant.PRODUCT_REPO_OWNER_OUTPUT_KEY]

    self.app_id = args[Constant.APP_ID_KEY]
    self.wh_etl_exec_id = args[Constant.WH_EXEC_ID_KEY]
    self.conn_mysql = zxJDBC.connect(JDBC_URL, username, password, JDBC_DRIVER)
    self.conn_cursor = self.conn_mysql.cursor()

    if Constant.INNODB_LOCK_WAIT_TIMEOUT in args:
      lock_wait_time = args[Constant.INNODB_LOCK_WAIT_TIMEOUT]
      self.conn_cursor.execute("SET innodb_lock_wait_timeout = %s;" % lock_wait_time)

    self.logger.info("Load Multiproduct Metadata into {}, app_id {}, wh_exec_id {}"
                     .format(JDBC_URL, self.app_id, self.wh_etl_exec_id))
Example #28
def tryToLoadDataset(path, iFileLoader):
	# Return None if the dataset could not be loaded for any reason
	logger = LoggerFactory.getLogger("DatasetProvider.tryToLoadDataset")

	if not os.path.exists(path):
		logger.info('Path {} does not exist, returning None', path)
		return None

	try:
		dataset = loadImageIntoSFH(path, iFileLoader)[0] # a dataset
		if not len(dataset.shape) == 2:
			logger.error('Expected 2 dimensions but found %r when sanity checking image %r using loader %r' % (dataset.shape, path, iFileLoader))
			print "*" * 80
			print "DatasetProvider.tryToLoadDataset got a dataset with ", dataset.shape, " dimensions."
			print "The analysis code will try again to load the image, and unless it times out everything is *OKAY*"
			print "Please call DASC support to report this (tricky to track down) bug"
			print "*" * 80
			return None
		logger.debug('Returning dataset {} from path {}', dataset, path)
		return dataset
	except:
		logger.error('Error loading or sanity checking image %r using loader %r :\n %s' % (path, iFileLoader, ''.join(traceback.format_exception(*sys.exc_info()))))
		return None
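
A caller's-eye sketch of the contract above (the path is illustrative): the function returns None on any failure, so callers can simply poll until a 2-D dataset arrives:

dataset = tryToLoadDataset('/scratch/images/latest.tif', iFileLoader=None)
if dataset is None:
	pass  # retry later -- the image may still be mid-write over NFS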
Example #29
  def __init__(self, wh_etl_exec_id='0'):
    self.logger = LoggerFactory.getLogger("%s[%s]" % (self.__class__.__name__, wh_etl_exec_id))

    # set up connection
    username = args[Constant.WH_DB_USERNAME_KEY]
    password = args[Constant.WH_DB_PASSWORD_KEY]
    JDBC_DRIVER = args[Constant.WH_DB_DRIVER_KEY]
    JDBC_URL = args[Constant.WH_DB_URL_KEY]
    self.conn_mysql = zxJDBC.connect(JDBC_URL, username, password, JDBC_DRIVER)
    self.conn_cursor = self.conn_mysql.cursor()

    if Constant.INNODB_LOCK_WAIT_TIMEOUT in args:
      lock_wait_time = args[Constant.INNODB_LOCK_WAIT_TIMEOUT]
      self.conn_cursor.execute("SET innodb_lock_wait_timeout = %s;" % lock_wait_time)

    temp_dir = FileUtil.etl_temp_dir(args, "HIVE")
    self.input_schema_file = os.path.join(temp_dir, args[Constant.HIVE_SCHEMA_CSV_FILE_KEY])
    self.input_field_file = os.path.join(temp_dir, args[Constant.HIVE_FIELD_METADATA_KEY])
    self.input_instance_file = os.path.join(temp_dir, args[Constant.HIVE_INSTANCE_CSV_FILE_KEY])
    self.input_dependency_file = os.path.join(temp_dir, args[Constant.HIVE_DEPENDENCY_CSV_FILE_KEY])

    self.db_id = args[Constant.JOB_REF_ID_KEY]
    self.wh_etl_exec_id = args[Constant.WH_EXEC_ID_KEY]
Example #30
    def __init__(self, args):
        self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
        self.elasticsearch_server_url = args[Constant.ELASTICSEARCH_URL_KEY]
        self.elasticsearch_port = args[Constant.ELASTICSEARCH_PORT_KEY]

        if Constant.ELASTICSEARCH_INDEX_KEY not in args:
            self.elasticsearch_index = "wherehows"
        else:
            self.elasticsearch_index = args[Constant.ELASTICSEARCH_INDEX_KEY]

        self.index_mapping_file = args[Constant.WH_ELASTICSEARCH_INDEX_MAPPING_FILE]

        self.bulk_chunk_size = int(args[Constant.ELASTICSEARCH_BULK_INSERT_SIZE])  # bulk insert size for the Elasticsearch engine
        self.es_url_request_timeout = int(args[Constant.ELASTICSEARCH_URL_REQUEST_TIMEOUT])  # request timeout when posting data to the Elasticsearch engine
        self.max_retry_times = int(args[Constant.WH_DB_MAX_RETRY_TIMES])  # max DB re-connection attempts if the connection is lost while fetching source data


        self.base_url = self.elasticsearch_server_url + ':' + str(self.elasticsearch_port) + '/'
        self.logger.info(self.base_url)

        self.old_index = []
        self.new_index = []

        self.databaseConnect(args)
Example #31
from org.slf4j import LoggerFactory
LOG = LoggerFactory.getLogger("jython.Startup")

from java.lang import System
import sys, platform

LOG.warn("")
if hasattr(sys.version_info, "major"):
    LOG.warn("Jython version: {}.{}.{}.{}".format(sys.version_info.major, sys.version_info.minor, sys.version_info.micro, sys.version_info.releaselevel))
else:
    LOG.warn("Jython version: {}".format(sys.version))
LOG.warn("Operating system: {}".format(System.getProperty("os.name")))
LOG.warn("OS Version: {}".format(System.getProperty("os.version")))
LOG.warn("Architecture: {}".format(platform.uname()[5]))
LOG.warn("Java version: {}".format(sys.platform))
LOG.warn("sys.path: {}".format(sys.path))
LOG.warn("")
Example #32
'''

Process presence contact change events. A presence event uses the Open event to signal occupancy,
and the Closed event ends it. The area is considered occupied until the event ends. No other items
can change the occupancy state of the area.

Convert Open/Closed events to begin/end events for the area item.

'''

from org.slf4j import Logger, LoggerFactory

log = LoggerFactory.getLogger("org.eclipse.smarthome.model.script.Rules")

import personal.occupancy.areas.events.event_base
reload(personal.occupancy.areas.events.event_base)
from personal.occupancy.areas.events.event_base import Event_Base


class Event_Contact_Presence(Event_Base):
    def process_changed_event(self, event):

        Event_Base.process_changed_event(self, event)

        item_state = str(event.itemState)

        if item_state == "OPEN":  #begin event
            self.begin_event(event)

        elif item_state == "CLOSED":  # off, an end event, only change occupancy settings if specified
            self.end_event(event)
Example #33
 def __init__(self):
     self.logger = LoggerFactory.getLogger('jython script : ' +
                                           self.__class__.__name__)
     self.base_url = args[Constant.BASE_URL_KEY]
     self.code_search_committer_writer = FileWriter(
         args[Constant.DATABASE_SCM_REPO_OUTPUT_KEY])
Example #34
    def __init__(
            self,
            name,
            pgm_grat_pitch,
            pgm_mirr_pitch,
            pgmpvroot,
            id_energy,
            idpvroot,
            move_pgm=True,
            move_id=True):  # motors, maybe also detector to set the delay time
        self.logger = LoggerFactory.getLogger(
            "ContinuousPgmGratingIDGapEnergyMoveController:%s" % name)
        self.verbose = False
        self.setName(name)
        self._start_event = threading.Event()
        self._movelog_time = datetime.now()
        #PGM
        self._pgm_grat_pitch = pgm_grat_pitch
        self._pgm_mirr_pitch = pgm_mirr_pitch
        self._pgm_grat_pitch_speed_orig = None
        self._pgm_runupdown_time = None
        self._move_pgm = move_pgm
        self._move_id = move_id

        self.pvs = PvManager(
            {
                'grating_density': 'NLINES',
                'cff': 'CFF',
                'grating_offset': 'GRTOFFSET',
                'plane_mirror_offset': 'MIROFFSET',
                'pgm_energy': 'ENERGY',
                'grating_pitch': 'GRT:PITCH',
                'mirror_pitch': 'MIR:PITCH',
                'energy_calibration_gradient': 'MX',
                'energy_calibration_reference': 'REFERENCE'
            }, pgmpvroot)
        if installation.isLive():
            self.pvs.configure()
        #ID
        self._id_energy = id_energy
        self._id_gap = self._id_energy.id_gap
        self._id_gap_speed_orig = None
        self._id_runupdown_time = None
        self.idpvs = PvManager({
            'vel': 'BLGSETVEL',
            'acc': 'IDGSETACC'
        }, idpvroot)
        if installation.isLive():
            self.idpvs.configure()

        self.grating_pitch_positions = []
        self.mirror_pitch_positions = []
        self.pgm_energy_positions = []
        self._start_time = None
        self.idspeedfactor = 1.0
        self.pgmspeedfactor = 1.0
        self.idstartdelaytime = 0.0
        self._move_start = 0.0
        self._move_end = 1.0
        self._move_step = 0.1
        self._triggerPeriod = 0.0
        self._pgm_runupdown_time = 0.0
Example #35
 def __init__(self):
     self.logger = LoggerFactory.getLogger('jython script : ' +
                                           self.__class__.__name__)
Example #36
 def __init__(self):
     self.logger = LoggerFactory.getLogger("meetup.Client")
     self.logger.error("meetup.Client Created ==================")
     return
Example #37
#
# THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS
# FOR A PARTICULAR PURPOSE. THIS CODE AND INFORMATION ARE NOT SUPPORTED BY XEBIALABS.
#
import mimetypes
import random
import string
import sys
import json
from xlrelease.HttpRequest import HttpRequest
import org.slf4j.LoggerFactory as LoggerFactory

logger = LoggerFactory.getLogger("Blazemeter")

_BOUNDARY_CHARS = string.digits + string.ascii_letters

# Encode multipart form data to upload files via POST
def encode_multipart(fields, files, boundary=None):
    r"""Encode dict of form fields and dict of files as multipart/form-data.
    Return tuple of (body_string, headers_dict). Each value in files is a dict
    with required keys 'filename' and 'content', and optional 'mimetype' (if
    not specified, tries to guess mime type or uses 'application/octet-stream').

    >>> body, headers = encode_multipart({'FIELD': 'VALUE'},
    ...                                  {'FILE': {'filename': 'F.TXT', 'content': 'CONTENT'}},
    ...                                  boundary='BOUNDARY')
    >>> print('\n'.join(repr(l) for l in body.split('\r\n')))
    '--BOUNDARY'
    'Content-Disposition: form-data; name="FIELD"'
    ''
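
A hedged usage sketch of encode_multipart's documented interface (field and file names are made up; only the (body, headers) return contract from the docstring is assumed):

body, headers = encode_multipart({'name': 'test-run'},
                                 {'file': {'filename': 'results.jtl', 'content': 'CONTENT'}})
# body is ready to POST as-is; headers carries the multipart Content-Type with its boundary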
Example #38
 def __init__(self, server, username, password):
     self.logger = LoggerFactory.getLogger("com.xebialabs.bitbucket-plugin")
     creds = CredentialsFallback(server, username,
                                 password).getCredentials()
     self.http_request = HttpRequest(server, creds['username'],
                                     creds['password'])
Example #39
 def __init__(self, scannable_to_read):
     self.logger = LoggerFactory.getLogger("PositionInputStreamImplementer")
     self.scannable=scannable_to_read
     self.verbose=True
Example #40
def setLogLevel(loggerName="console", logLevel="DEBUG"):
    loggerMap = {}
    logLevel = logLevel.upper()
    loggerContext = LoggerFactory.getILoggerFactory()
    loggerList = loggerContext.getLoggerList()
    for loggerItem in loggerList:
        if (loggerItem.getName() == loggerName):
            myLogger.info("Setting %s to %s" % (loggerName, logLevel))
            loggerItem.setLevel(logLevels.toLevel(logLevel))
            myLogger.info("%s = %s" % (loggerName, logLevel))
        #myLogger.error("%s != %s" % (loggerItem.getName(), loggerName))
    return


myLogger = LoggerFactory.getLogger("logmanager")
verb = "GET"

if (request):
    if (request.query):
        if (request.query['verb']):
            verb = request.query['verb']

if (verb == "SET"):
    loggerName = request.query['logger']
    logLevel = request.query['level']
    myLogger.info("Setting %s to %s" % (loggerName, logLevel))
    setLogLevel(loggerName, logLevel)

loggerMap = getLogLevel()
#loggerMap = {}
Example #41
import requests
from requests.auth import HTTPBasicAuth
import json
import time
import datetime
#import com.xebialabs.xlrelease.api.v1
#import com.xebialabs.xlrelease.domain
from java.time import LocalDate, ZoneId
# better way to do this
import java.util.Date
import dateutil.parser
import org.slf4j.LoggerFactory as LoggerFactory
# import com.xebialabs.xlrelease.api.v1 as releaseApi
import com.xebialabs.xlrelease.api.XLReleaseServiceHolder as XLReleaseServiceHolder

logger = LoggerFactory.getLogger("Planner")
releaseApi = XLReleaseServiceHolder.getReleaseApi()
phaseApi = XLReleaseServiceHolder.getPhaseApi()
taskApi = XLReleaseServiceHolder.getTaskApi()

MANUAL_TASK_DURATION = 60 * 60 * 1000
AUTOMATED_TASK_DURATION = 1 * 60 * 1000

hardcodedVars = [
    "Release-Ready", "CodeFrozen-Flag", "PendSecurityScan", "Dependency-Flag",
    "NoOpenDefects-Flag", "BXImpact-Flag"
]


class Planner(object):
    def __init__(self):
Example #42
 def getLogger(cls):
     if grinder:
         logger = grinder.logger
         return logger
     return LoggerFactory.getLogger('root')
Example #43
 def __init__(self):  # motors, maybe also detector to set the delay time
     self.logger = LoggerFactory.getLogger("TrajectoryControllerHelper")
Example #44
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import sys
import time
import com.xhaus.jyson.JysonCodec as json
from xlrelease.HttpRequest import HttpRequest
from quickbuild.QuickBuild import QuickBuild
from org.slf4j import Logger
from org.slf4j import LoggerFactory

logger = LoggerFactory.getLogger("quickbuild")

logger.error("===============================================================")
qb = QuickBuild.create_client(qbServer)
logger.warn(">> configurationId       = %s " % configurationId)
logger.warn(">> respectBuildCondition = %s " % respectBuildCondition)
for prop in variables.keys():
    logger.warn(">> %s  =  %s " % (prop, variables[prop]))

#qb = QuickBuild.create_client( params )

logger.error("===============================================================")
if len(variables) > 0:
    buildVariables = "\n<variables>"
    for prop in variables.keys():
        buildVariables = "%s\n<entry>\n<string>%s</string>\n<string>%s</string>\n</entry>" % (
Example #45
 def __init__(self):
   self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
Example #46
# Origin of script: https://community.openhab.org/t/solved-jython-jsr223-not-properly-initialised-during-startup/46031/6
# Issue: https://github.com/eclipse/smarthome/issues/4324
# This script works around an init issue above that causes this error:
#   TypeError: can't set attributes of built-in/extension type 'NoneType' in <script> at line number 3

import time

from org.slf4j import Logger, LoggerFactory

log = LoggerFactory.getLogger("org.eclipse.smarthome.automation")

log.info("jsr223: checking for initialised context")

time.sleep(30)

while True:
    try:
        scriptExtension.importPreset("RuleSupport")
        if automationManager != None:
            break
    except:
        pass

    log.info(
        "jsr223: context not initialised yet. waiting 10 sec before checking again"
    )
    time.sleep(40)

log.info("jsr223: done")
Example #47
#from requests.auth import HTTPBasicAuth
import json
import sets
import datetime
import time
import dateutil.parser
import dateutil.parser as dp
from java.time import LocalDate, ZoneId
import com.xebialabs.xlrelease.api.XLReleaseServiceHolder as XLReleaseServiceHolder
import com.xebialabs.xlrelease.api.v1.forms.ReleasesFilters as ReleasesFilters
# import com.xebialabs.xlrelease.api.v1.FolderApi as folderApi
# import com.xebialabs.xlrelease.api.v1.FolderApi as folderApi

import org.slf4j.LoggerFactory as LoggerFactory

logger = LoggerFactory.getLogger("David Tile")

import planner.planner
reload(planner.planner)
from planner.planner import Planner

the_planner = Planner()

HTTP_SUCCESS = sets.Set([200, 201, 202, 203, 204, 205, 206, 207, 208])
HTTP_ERROR = sets.Set([
    400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 412, 413, 414, 415
])

filterReleaseTags = list(releaseTags)
filterReleaseFolders = list(folderFilter)
filterStatuses = ["FAILED", "COMPLETED", "SKIPPED", "ABORTED"]
Example #48
 def __init__(self, server, username, password):
     self.logger = LoggerFactory.getLogger("com.xebialabs.bitbucket-plugin")
     if username not in [None, ""] or password not in [None, ""]:
         self.http_request = HttpRequest(server, username, password)
     else:
         self.http_request = HttpRequest(server)
Example #49
#
# Copyright 2020 XEBIALABS
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

import json

import org.slf4j.LoggerFactory as LoggerFactory

logger = LoggerFactory.getLogger("Counter")

InfrastructureCount = 0
DeploymentCount = 0
infraSum = []
appSum = []


def get_deployables():
    master_list = []
    applications = repositoryService.query(None, "Applications", "", "", None,
                                           None, 0, 0)
    deployment_list = []
    deployables = []
    applications = get_directories(applications)
    master_list = convert(applications)
    for x in master_list:
        deployment_list = (repositoryService.query(None, x['id'], "", "", None,
                                                   None, 0, 0))
        deployment_list = remove_CompositePackages(deployment_list)
        x['deployment_list'] = convert(deployment_list)
Example #50
 def __init__(self):
     self.logger = LoggerFactory.getLogger("ScriptLogger")
Example #51
from gda.device.detector import DetectorBase
from gda.configuration.properties import LocalProperties
import gda.data as data
from gda.jython import InterfaceProvider
import rpiComms
import time
from org.slf4j import LoggerFactory

logger = LoggerFactory.getLogger(__name__ + '.py')


class rpiCameraScannable(DetectorBase):
    ## scan info - ScanInformation scanInfo = InterfaceProvider.getCurrentScanInformationHolder().getCurrentScanInformation();
    def __init__(self, name):
        logger.debug("Camera Setup")
        self.pin = -1
        self.device = name
        self.setName(name)
        self.currentPosition = 0
        self.lastPosition = 0  # required
        self.busyStatus = False
        beamlineName = LocalProperties.get("gda.beamline.name")
        self.numTracker = data.NumTracker(beamlineName)
        rpiComms.rpiCommunicator.scannables.append(self)

    def collectData(self):
        self.busyStatus = True
        self.lastPosition = self.currentPosition
        rpiComms.commController.outgoingQueue.put("-1,c" + self.device +
                                                  ",CAPTURE,None,0//")
Example #52
# Simulator for Dispatcher's AutoActiveTrains
#   while auto train(s) are "moving", repeatedly activate "next" allocated block, and deactivate "last" occupied block
#   waits for debounce time plus a bit, to allow signals, etc. to respond.
#   Runs as a background thread, ends itself when no trains are found in Dispatcher Active Trains list.

# NOTE: to enable logging, add "log4j.category.jmri.jmrit.jython.exec=DEBUG" to default.lcf

import jmri
import time
from org.slf4j import Logger
from org.slf4j import LoggerFactory

log = LoggerFactory.getLogger(
    "jmri.jmrit.jython.exec.AutoActiveTrains_Simulator")


# create a new class to run as thread
class AutoActiveTrains_Simulator(jmri.jmrit.automat.AbstractAutomaton):
    #   def init(self):

    def handle(self):
        DF = jmri.jmrit.dispatcher.DispatcherFrame.instance()
        trainsList = DF.getActiveTrainsList()  #loop thru all trains
        if (
                trainsList.size() == 0
        ):  # kill the thread if no trains found TODO: add something outside to restart
            log.info("AutoActiveTrains_Simulator thread ended")
            return False  # no trains, end
        totDelay = 0  # keep track of delay time to give CPU some time for other stuff
        for i in range(trainsList.size()):
            at = trainsList.get(i)  #: :type at: ActiveTrain
Example #53
# Add some static files to serve. These take priority over regex matches.
handler.static("/base.source",
               "templates/base.template",
               contentType="text/plain; charset=utf-8")
handler.static("/base.source.cache",
               "templates/base.template",
               contentType="text/plain; charset=utf-8",
               cache=True)


# Create a logger, (kind of hacky code)
class PythonCode(JavaObject):
    pass


log = LoggerFactory.getLogger(PythonCode)


@handler.path("/name/(?P<name>\\w+)")
@template("hello.template")
def handle_name_page(pyRequest):

    name = pyRequest.getStrGroup("name", "Admin")

    pyRequest.out.println(pyRequest.template.render({"name": name}))


@handler.path("/socketio")
@template("socketio.template")
def handle_socketIO_front(pyRequest):
    plugins = __bird__.getActiveMockingBirdREPL().getPlugins()
Example #54
 def __init__(self, wh_etl_exec_id='0'):
   self.logger = LoggerFactory.getLogger("%s[%s]" % (self.__class__.__name__, wh_etl_exec_id))
Example #55
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

from bitbucket.Bitbucket import BitbucketClient
import json
import time
from java.time import LocalDate, ZonedDateTime
import org.slf4j.Logger as Logger
import org.slf4j.LoggerFactory as LoggerFactory

logger = LoggerFactory.getLogger("com.xebialabs.bitbucket-plugin")


def convertRFC3339ToDate(timestamp):
    zonedDateTime = ZonedDateTime.parse(timestamp)
    return zonedDateTime.toLocalDate()
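
For instance (illustrative timestamp; java.time's ZonedDateTime.parse handles the ISO-8601 zoned strings Bitbucket returns):

# convertRFC3339ToDate("2020-03-17T14:52:10+00:00") -> LocalDate 2020-03-17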


bitbucket = BitbucketClient.get_client(server, username, password)
commits = bitbucket.bitbucket_querycommits(locals())

# Compile data for summary view
commitsByDay = {}
for commit in commits:
    logger.warn("commit date %s" % commit["date"])
    commitDate = convertRFC3339ToDate(commit["date"])
Example #56
 def __init__(self, exec_id):
   self.logger = LoggerFactory.getLogger(self.__class__.__name__ + ':' + str(exec_id))
Example #57
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
 
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
 
 
@RestController
@RequestMapping("/kafka")
public class CollectController {
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());
    @Autowired
    private KafkaTemplate kafkaTemplate;
 
    @RequestMapping(value = "/send")
    public String sendKafka(HttpServletRequest request, HttpServletResponse response,String message) {
        try {
            logger.info("kafka的消息={}", message);
            kafkaTemplate.send("test", "key", message);//test作为topic,key配置为key,message再页面进行发送,进行测试
            logger.info("发送kafka成功.");
            return "发送kafka成功";
        } catch (Exception e) {
            logger.error("发送kafka失败", e);
            return "发送kafka失败";
        }
    }
Example #58
class OmnikOpenhab(object):
    """
    Get data from Omniksol inverter and store the data in OpenHAB items.
    """
    global logger
    logger = LoggerFactory.getLogger(
        "org.eclipse.smarthome.model.script.rules")

    def __init__(self, config_file):
        #logger.info("omnikOPENHAB - Starting")
        # Load the settings
        path = "/etc/openhab2/automation/jsr223/"
        config_files = path + config_file
        self.config = ConfigParser.RawConfigParser()
        self.config.read(config_files)

    def getInverters(self):
        #Get number of inverters
        inverterCount = len(self.config.sections()) - 2
        #logger.info("omnikOPENHAB - Invertercount: {0}".format(inverterCount))
        #Reset totals to zero
        OmnikOpenhab.total_e_today = 0
        OmnikOpenhab.total_e_total = 0
        OmnikOpenhab.total_p_ac = 0
        #For each inverter, get data and add to total
        for i in range(1, inverterCount + 1):
            #logger.info("omnikOPENHAB - In the inverterloop, value of index: {0}".format(i))
            msg = self.run(i)
            #logger.info("omnikOPENHAB - Value of msg is {0}".format(msg))
            #Assume daytime (for processing data)
            day = 1
            # If retrieved data is not an InverterMsg object: No updates
            #if 'timed' in format(msg):
            if isinstance(msg, InverterMsg):
                #logger.info("omnikOPENHAB - Day time -- valid data")
                self.add(msg)
            else:
                day = 0
                #logger.info("omnikOPENHAB - Timed out - BREAKING")
                break

        #Only process during day time
        if day:
            etotal = self.config.get('openhab_items', 'etotal')
            etoday = self.config.get('openhab_items', 'etoday')
            epower = self.config.get('openhab_items', 'epower')
            logger.info(
                "omnikOPENHAB - Items updated: {0}: {1}, {2}: {3}, {4}: {5}, ".
                format(etotal, OmnikOpenhab.total_e_total, etoday,
                       OmnikOpenhab.total_e_today, epower,
                       OmnikOpenhab.total_p_ac))
            events.postUpdate(str(etotal), str(OmnikOpenhab.total_e_total))
            events.postUpdate(str(etoday), str(OmnikOpenhab.total_e_today))
            events.postUpdate(str(epower), str(OmnikOpenhab.total_p_ac))
        else:
            logger.info(
                "omnikOPENHAB - No data (after sunset?), not updating database"
            )

        #logger.info("omnikOPENHAB - End")

    def add(self, msg):
        #logger.info("omnikOPENHAB - Adding data")
        OmnikOpenhab.total_e_today += msg.e_today
        OmnikOpenhab.total_e_total += msg.e_total
        OmnikOpenhab.total_p_ac += msg.p_ac(1) + msg.p_ac(2) + msg.p_ac(3)

    def run(self, inverternr):
        """Get information from inverter and store is configured outputs."""
        # Connect to inverter
        msg = ''
        ip = self.config.get('inverter' + str(inverternr), 'ip')
        port = self.config.get('inverter' + str(inverternr), 'port')
        for res in socket.getaddrinfo(ip, port, socket.AF_INET,
                                      socket.SOCK_STREAM):
            family, socktype, proto, canonname, sockadress = res
            try:
                #logger.info("omnikOPENHAB - connecting to {0} port {1}".format(ip, port))
                inverter_socket = socket.socket(family, socktype, proto)
                inverter_socket.settimeout(10)
                inverter_socket.connect(sockadress)
                #logger.info("omnikOPENHAB - Retrieved data..")
            except socket.error as msg:
                return (msg)
                #logger.info("omnikOPENHAB - Could not connect to inverter.")
            wifi_serial = self.config.getint('inverter' + str(inverternr),
                                             'wifi_sn')
            inverter_socket.sendall(OmnikOpenhab.generate_string(wifi_serial))
            data = inverter_socket.recv(1024)
            inverter_socket.close()
            msg = InverterMsg(data)
        return (msg)

    def override_config(self, section, option, value):
        """Override config settings"""
        self.config.set(section, option, value)

    @staticmethod
    def generate_string(serial_no):
        """Create request string for inverter.

        The request string is built from several parts. The first part is a
        fixed 4 char string; the second part is the reversed hex notation of
        the s/n twice; then again a fixed string of two chars; a checksum of
        the double s/n with an offset; and finally a fixed ending char.

        Args:
            serial_no (int): Serial number of the inverter

        Returns:
            str: Information request string for inverter
        """
        response = '\x68\x02\x40\x30'
        double_hex = hex(serial_no)[2:] * 2
        hex_list = [
            double_hex[i:i + 2].decode('hex')
            for i in reversed(range(0, len(double_hex), 2))
        ]
        cs_count = 115 + sum([ord(c) for c in hex_list])
        checksum = hex(cs_count)[-2:].decode('hex')
        response += ''.join(hex_list) + '\x01\x00' + checksum + '\x16'
        return response
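
A usage sketch for the static helper above (the serial number is made up; Python 2 string semantics are assumed, matching the .decode('hex') calls):

request = OmnikOpenhab.generate_string(602126765)  # doubled reversed-hex s/n, framed with checksum
# run() sends exactly such a string: inverter_socket.sendall(request)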
Example #59
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
'''
Calls Snyk API to query for projects owned by OrgId
'''
v = vars()

import org.slf4j.Logger as Logger
import org.slf4j.LoggerFactory as LoggerFactory
import httplib
import ssl
from com.xhaus.jyson import JysonCodec as json

logger = LoggerFactory.getLogger('snyk-plugin')
logger.debug("Starting GetProjects.py")

base_url = str(valueProvider.server.url)
token = str(valueProvider.server.token)
orgId = str(valueProvider.server.orgId)
snyk_host = str(base_url.split("/")[2])
path = '/' + str(base_url.split("/")[3]) + '/' + str(
    base_url.split("/")[4]) + '/org/' + orgId + '/projects'
headers = {
    "Content-Type": "application/json",
    "Authorization": "token {}".format(token)
}

uv_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
connection = httplib.HTTPSConnection(snyk_host, context=uv_context)
Example #60
#
# Copyright 2020 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import stash.Stash
if classReload:
    reload(stash.Stash)
from stash.Stash import StashClient
import org.slf4j.LoggerFactory as LoggerFactory
import org.slf4j.Logger as Logger

logger = LoggerFactory.getLogger("Stash")
stash = StashClient.get_client(server, username, password)
method = str(task.getTaskType()).lower().replace('.', '_')
logger.error("Call Stash Method %s" % method)

call = getattr(stash, method)
response = call(locals())
for key, value in response.items():
    locals()[key] = value