Example #1
 queryInfo["architecture"]=opts.arch
 queryInfo["release_cycle"]=opts.release
 queryInfo["from"]=0
 if opts.page_size<1:
   info_request = True
   queryInfo["page_size"]=2
 else:
   queryInfo["page_size"]=opts.page_size
 
 if opts.update and opts.update_opts:
   queryInfo["update_opts"]=" AND ( "+opts.update_opts+" )"
 total_hits = 0
 if not opts.json: print queryInfo
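 # Page through the Elasticsearch results: "from" advances by the number of hits returned each round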
 while True:
   queryInfo["from"] = ent_from
   # NOTE: format() here is presumably the project's own template helper (e.g. s % kwds), not the builtin
   es_data = get_payload(query_url, format(query_datsets,**queryInfo))
   content = json.loads(es_data)
   content.pop("_shards", None)
   total_hits = content['hits']['total']
   if info_request:
     info_request = False
     queryInfo["page_size"]=total_hits
     continue
   hits = len(content['hits']['hits'])
   if hits==0: break
   ent_from = ent_from + hits
   if not transfer:
     json_out.append(content)
     if not opts.json: print "Found %s hits from %s to %s out of total %s" % (hits, ent_from-hits, ent_from, total_hits)
   else:
     print "Found %s hits from %s to %s out of total %s" % (hits, ent_from-hits, ent_from, total_hits)
Example #2
    queryInfo["end_time"] = int(time() * 1000)
    queryInfo["start_time"] = queryInfo["end_time"] - int(
        86400 * 1000 * opts.days)
    queryInfo["architecture"] = opts.arch
    queryInfo["release_cycle"] = opts.release
    queryInfo["from"] = 0
    if opts.page_size < 1:
        info_request = True
        queryInfo["page_size"] = 2
    else:
        queryInfo["page_size"] = opts.page_size

    total_hits = 0
    while True:
        queryInfo["from"] = ent_from
        es_data = get_payload(query_url, format(query_datsets, **queryInfo))
        content = json.loads(es_data)
        content.pop("_shards", None)
        total_hits = content['hits']['total']
        if info_request:
            info_request = False
            queryInfo["page_size"] = total_hits
            continue
        hits = len(content['hits']['hits'])
        if hits == 0: break
        ent_from = ent_from + hits
        json_out.append(content)
        if ent_from >= total_hits: break
    print json.dumps(json_out,
                     indent=2,
                     sort_keys=True)
Example #3
        tree = ET.parse(logFile)
        root = tree.getroot()
        payload['@timestamp'] = root.find('startTime').text
        payload['slave_node'] = root.find('builtOn').text
        build_result = root.find('result')
        if build_result is not None:
          payload['build_result'] = build_result.text
          payload['build_duration'] = int(int(root.find('duration').text)/1000)
          payload['job_status'] = 'Finished'
          os.system('touch "' + flagFile + '"')
        else:
          payload['job_status'] = 'Running'
          all_local.append(id)
        send_payload(index,document,id,json.dumps(payload))
      except Exception as e:
        print "Xml parsing error" , e
running_builds_elastic=list()
content = get_payload(query_url,query_running_builds)
if content == "":
  running_builds_elastic = []
else:
  content_hash = json.loads(content)
  for hit in content_hash["hits"]["hits"]:
    running_builds_elastic.append(str(hit["_id"]))
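# Builds still marked as running in Elasticsearch but no longer present locally are treated as failed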
for build in running_builds_elastic:
  if build not in all_local:
    send_payload(index,document,build,'{"job_status":"Failed"}')
    print "job status marked as Failed"
Example #4
                else:
                    payload['job_status'] = 'Running'
                    all_local.append(id)
                weekindex = "jenkins-jobs-" + str(
                    int((((int(jstime) / 1000) / 86400) + 4) / 7))
                send_payload(weekindex, document, id, json.dumps(payload),
                             passwd_file="/var/lib/jenkins/secrets/github_hook_secret_cmsbot")
            except Exception as e:
                print "Xml parsing error", logFile, e
running_builds_elastic = {}
content = get_payload('jenkins-*', query_running_builds)
if content == "":
    running_builds_elastic = []
else:
    content_hash = json.loads(content)
    if ('hits' not in content_hash) or ('hits' not in content_hash['hits']):
        print "ERROR: ", content
        sys.exit(1)
    for hit in content_hash['hits']['hits']:
        if hit["_index"].startswith("jenkins-jobs-") or hit[
                "_index"].startswith("cmssdt-jenkins-jobs-"):
            try:
                print "Running:", hit["_source"]['job_name'], hit["_source"][
                    'build_number'], hit["_index"], hit['_id']
            except:
                pass
Example #5
    def getWorkflowStatsFromES(self, release='*', arch='*', lastNdays=7, page_size=0):

        query_url = 'http://cmses-master02.cern.ch:9200/relvals_stats_*/_search'

        query_datsets = """
        {
          "query": {
            "filtered": {
              "query": {
                "bool": {
                  "should": [
                    {
                      "query_string": {
                        "query": "release:%(release_cycle)s AND architecture:%(architecture)s", 
                        "lowercase_expanded_terms": false
                      }
                    }
                  ]
                }
              },
              "filter": {
                "bool": {
                  "must": [
                    {
                      "range": {
                        "@timestamp": {
                          "from": %(start_time)s,
                          "to": %(end_time)s
                        }
                      }
                    }
                  ]
                }
              }
            }
          },
          "from": %(from)s,
          "size": %(page_size)s
        }
        """
        datasets = {}
        ent_from = 0
        json_out = []
        info_request = False
        queryInfo = {}

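        # Query window in epoch milliseconds: from lastNdays ago up to now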
        queryInfo["end_time"] = int(time() * 1000)
        queryInfo["start_time"] = queryInfo["end_time"] - int(86400 * 1000 * lastNdays)
        queryInfo["architecture"] = arch
        queryInfo["release_cycle"] = release
        queryInfo["from"] = 0

        if page_size < 1:
            info_request = True
            queryInfo["page_size"] = 2
        else:
            queryInfo["page_size"] = page_size

        total_hits = 0

        while True:
            queryInfo["from"] = ent_from
            es_data = get_payload(query_url, self._format(query_datsets, **queryInfo))  # substitute queryInfo values into the query template
            content = json.loads(es_data)
            content.pop("_shards", None)
            total_hits = content['hits']['total']
            if info_request:
                info_request = False
                queryInfo["page_size"] = total_hits
                continue
            hits = len(content['hits']['hits'])
            if hits == 0: break
            ent_from = ent_from + hits
            json_out.append(content)
            if ent_from >= total_hits:
                break

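        # Only the hits from the first response are returned; with the default page_size the probe
        # round has already raised page_size to total_hits, so that single page holds every match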
        return json_out[0]['hits']['hits']
Example #6
        payload['slave_node'] = root.find('builtOn').text
        build_result = root.find('result')
        if build_result is not None:
          payload['build_result'] = build_result.text
          payload['build_duration'] = int(int(root.find('duration').text)/1000)
          payload['job_status'] = 'Finished'
          os.system('touch "' + flagFile + '"')
        else:
          payload['job_status'] = 'Running'
          all_local.append(id)
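        # Route the record to a weekly index derived from the build start time (jstime, epoch milliseconds)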
        weekindex="jenkins-jobs-"+str(int((((int(jstime)/1000)/86400)+4)/7))
        send_payload(weekindex,document,id,json.dumps(payload), passwd_file="/var/lib/jenkins/secrets/github_hook_secret_cmsbot")
      except Exception as e:
        print "Xml parsing error" , e
running_builds_elastic={}
content = get_payload(query_url,query_running_builds)
if content == "":
  running_builds_elastic = []
else:
  content_hash = json.loads(content)
  for hit in content_hash['hits']['hits']:
    if hit["_index"]=="jenkins" or hit["_index"].startswith("jenkins-jobs-"):
      running_builds_elastic[hit['_id']]=hit
for build in running_builds_elastic:
  if build not in all_local:
    hit = running_builds_elastic[build]
    hit["job_status"]="Failed"
    resend_payload(hit,passwd_file="/var/lib/jenkins/secrets/github_hook_secret_cmsbot")
    print "job status marked as Failed"

Example #7
def getWorkflowStatsFromES(release='*', arch='*', lastNdays=7, page_size=0):

    query_url = 'http://cmses-master02.cern.ch:9200/relvals_stats_*/_search'

    query_datsets = """
        {
          "query": {
            "filtered": {
              "query": {
                "bool": {
                  "should": [
                    {
                      "query_string": {
                        "query": "release:%(release_cycle)s AND architecture:%(architecture)s", 
                        "lowercase_expanded_terms": false
                      }
                    }
                  ]
                }
              },
              "filter": {
                "bool": {
                  "must": [
                    {
                      "range": {
                        "@timestamp": {
                          "from": %(start_time)s,
                          "to": %(end_time)s
                        }
                      }
                    }
                  ]
                }
              }
            }
          },
          "from": %(from)s,
          "size": %(page_size)s
        }
        """
    datasets = {}
    ent_from = 0
    json_out = []
    info_request = False
    queryInfo = {}

    queryInfo["end_time"] = int(time() * 1000)
    queryInfo["start_time"] = queryInfo["end_time"] - int(
        86400 * 1000 * lastNdays)
    queryInfo["architecture"] = arch
    queryInfo["release_cycle"] = release
    queryInfo["from"] = 0

    if page_size < 1:
        info_request = True
        queryInfo["page_size"] = 2
    else:
        queryInfo["page_size"] = page_size

    total_hits = 0

    while True:
        queryInfo["from"] = ent_from
        es_data = get_payload(query_url, _format(query_datsets, **queryInfo))  # substitute queryInfo values into the query template
        content = json.loads(es_data)
        content.pop("_shards", None)
        total_hits = content['hits']['total']
        if info_request:
            info_request = False
            queryInfo["page_size"] = total_hits
            continue
        hits = len(content['hits']['hits'])
        if hits == 0: break
        ent_from = ent_from + hits
        json_out.append(content)
        if ent_from >= total_hits:
            break

    return json_out[0]['hits']['hits']
Example #8
        payload['slave_node'] = root.find('builtOn').text
        build_result = root.find('result')
        if build_result is not None:
          payload['build_result'] = build_result.text
          payload['build_duration'] = int(int(root.find('duration').text)/1000)
          payload['job_status'] = 'Finished'
          os.system('touch "' + flagFile + '"')
        else:
          payload['job_status'] = 'Running'
          all_local.append(id)
        weekindex="jenkins-jobs-"+str(int((((int(jstime)/1000)/86400)+4)/7))
        send_payload(weekindex,document,id,json.dumps(payload))
      except Exception as e:
        print("Xml parsing error",logFile , e)
running_builds_elastic={}
content = get_payload('jenkins-*',query_running_builds)
if content == "":
  running_builds_elastic = []
else:
  content_hash = json.loads(content)
  if ('hits' not in content_hash) or ('hits' not in content_hash['hits']):
    print("ERROR: ", content)
    sys.exit(1)
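  # Collect the builds that Elasticsearch still reports as running in the jenkins job indices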
  for hit in content_hash['hits']['hits']:
    if hit["_index"].startswith("jenkins-jobs-") or hit["_index"].startswith("cmssdt-jenkins-jobs-"):
      try:print("Running:",hit["_source"]['job_name'],hit["_source"]['build_number'],hit["_index"],hit['_id'])
      except: pass
      running_builds_elastic[hit['_id']]=hit
for build in running_builds_elastic:
  if build not in all_local:
    hit = running_builds_elastic[build]