Example #1
 def get_video_info_by_html(self):
     """
     Uses ytplayer.config to get video information including title, video_id, date and thumbnail.
     :rtype: dict
     """
     video_page = get(
         f'https://www.youtube.com/channel/{self.target_id}/live')
     try:
         ytplayer_config = json.loads(
             re.search(r'ytplayer\.config\s*=\s*([^\n]+?});',
                       video_page).group(1))
         player_response = json.loads(
             ytplayer_config['args']['player_response'])
         video_details = player_response['videoDetails']
         # assert to verify live status
         assert video_details['isLive']
         title = video_details['title']
         vid = video_details['videoId']
         target = f"https://www.youtube.com/watch?v={vid}"
         thumbnails = video_details['thumbnail']['thumbnails'][-1]['url']
         return {
             'Title': title,
             'Ref': vid,
             'Date': strftime("%Y-%m-%d", localtime(time())),
             'Target': target,
             'Thumbnails': thumbnails
         }
     except (KeyError, AttributeError, AssertionError):
         self.logger.exception('Get keys error')
         return False
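
Note: Examples 1, 2, 4, 7, 11 and 13 assume a module-level get() helper that fetches a URL and returns the page body as text (json, re and time.strftime/localtime/time must also be in scope). A minimal sketch of such a helper, assuming the requests library; the name, signature and error handling are inferred from usage, not taken from the original module:

import requests

def get(url, **kwargs):
    # Fetch a URL and return the body as text; return '' on any network
    # error so substring checks like '"label":"LIVE NOW"' in html stay safe.
    try:
        resp = requests.get(url, timeout=10, **kwargs)
        resp.raise_for_status()
        return resp.text
    except requests.RequestException:
        return ''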
Example #2
 def check(self):
     try:
         html = get(
             f'https://www.youtube.com/channel/{self.target_id}/featured')
         if '"label":"LIVE NOW"' in html:
             # vid = self.get_videoid_by_channel_id()
             # get_live_info = self.getlive_vid(vid)
             retry_num = 0
             while retry_num < 3:
                 video_dict = self.get_video_info_by_html()
                 if video_dict:
                     break
                 else:
                     sleep(0.5)
                     retry_num += 1
             else:
                 # while-else: runs only when every retry failed without break
                 video_dict = self.get_videoid_by_channel_id(self.target_id)
             video_dict['Provide'] = self.module
             # Make sure the video is not a duplicate
             if self.vid != video_dict['Ref']:
                 self.vid = video_dict['Ref']
                 process_video(video_dict)
             else:
                 self.logger.warning('Found A Repeated Video. Drop it')
                 sleep(1)
         else:
             if 'Upcoming live streams' in html:
                 self.logger.info(
                     f'{self.target_id}: Found A Live Upcoming')
             else:
                 self.logger.info(f'{self.target_id}: Not found Live')
     except Exception:
         self.logger.exception('Check Failed')
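
Note: the while/else in Example 2 is easy to misread: the else branch runs only when all three retries finish without a break. The same pattern as a generic helper (a sketch; the names and defaults are illustrative):

from time import sleep

def retry_with_fallback(fn, fallback, attempts=3, delay=0.5):
    # Call fn() up to `attempts` times, pausing between tries; only when
    # every attempt returns a falsy value, fall back to fallback().
    for _ in range(attempts):
        result = fn()
        if result:
            return result
        sleep(delay)
    return fallback()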
Example #3
 def is_live(self):
     html = get(f'https://www.openrec.tv/user/{self.target_id}')
     dom = etree.HTML(html)
     try:
         is_live = dom.xpath(
             '/html/body/div[1]/div[2]/div[18]/div[2]/div/div[3]/ul/li[1]/div/text()'
         )[0]
     except IndexError:
         return None
     if 'Live' in is_live:
         info = dom.xpath(
             '/html/body/div[1]/div[2]/div[18]/div[2]/div/div[3]/ul/li[1]/ul/li/a[2]'
         )[0]
         ref = info.xpath('@href')[0]
         title = info.xpath('text()')[0]
         target = ref
         date = time.strftime("%Y-%m-%d", time.localtime())
         live_dict = {
             'Title': title,
             'Ref': ref,
             'Target': target,
             'Date': date
         }
         return live_dict
     return None
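
Note: the absolute XPaths in Example 3 break as soon as the page layout shifts. A more defensive sketch using a relative, content-keyed query over the same lxml document; the exact expression is an assumption about the page structure, shown only to illustrate the approach:

from lxml import etree

def find_live_link(html):
    # Anchor on the 'Live' label instead of an absolute positional path.
    dom = etree.HTML(html)
    hrefs = dom.xpath("//li[div[contains(text(), 'Live')]]/ul/li/a[2]/@href")
    return hrefs[0] if hrefs else None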
Example #4
 def get_video_info_by_html(self):
     """
     Uses ytplayer.config to get video information including title, video_id, date and thumbnail.
     :rtype: dict
     """
     video_page = get(
         f'https://www.youtube.com/channel/{self.target_id}/live')
     try:
         ytplayer_config = json.loads(
             re.search(r'ytplayer\.config\s*=\s*([^\n]+?});',
                       video_page).group(1))
         player_response = json.loads(
             ytplayer_config['args']['player_response'])
         video_details = player_response['videoDetails']
         title = video_details['title']
         vid = video_details['videoId']
         target = f"https://www.youtube.com/watch?v={vid}"
         thumbnails = video_details['thumbnail']['thumbnails'][-1]['url']
         # date = player_response['playabilityStatus']['liveStreamability']['liveStreamabilityRenderer']['offlineSlate'] \
         #     ['liveStreamOfflineSlateRenderer']['scheduledStartTime']
         return {
             'Title': title,
             'Ref': vid,
             'Date': strftime("%Y-%m-%d", localtime(time())),
             'Target': target,
             'Thumbnails': thumbnails
         }
     except (KeyError, AttributeError):
         self.logger.exception('Get keys error')
         return None
Example #5
 def __init__(self, api_url, data_dict):
     fields = [
         'id',
         'code',
         'location_code',
         'start_date',
         'sample_rate',
         'sample_rate_unit',
         'azimuth',
         'azimuth_unit',
         'depth',
         'depth_unit',
         'chains',
         'equipments',
         'network',
     ]
     super(Channel, self).__init__(api_url, fields, data_dict)
     self.station = None
     self.parameters = []
     # Equipment preparation
     self.equipment_links = self.equipments
     self.equipments = []
     # Fetch equipment regarding chains
     for chain_link in self.chains:
         c_data = get(chain_link)
         c = Chain(api_url, c_data)
         if c.equipment:
             e_data = get(c.equipment)
             e = Equipment(api_url, e_data)
             e.chain_order = c.type
             e.channel = self
             self.equipments.append(e)
     # Fetch network
     self.network_link = self.network
     if self.network_link:
         n_data = get(self.network_link)
         if n_data:
             self.network = Network(api_url, n_data)
     # Fix missing degree unit
     if not self.azimuth_unit:
         self.azimuth_unit = default_degree_unit
     # Fix missing rate unit
     if not self.sample_rate_unit:
         self.sample_rate_unit = default_sample_unit
     # Fix missing meter unit
     if not self.depth_unit:
         self.depth_unit = default_elevation_unit
Example #6
def get_img():
    """
    Get images.
    """
    ua = request.headers.get('User-Agent')
    image = image_remote(ua)
    api_param = api_param_parse(request.args)
    count = get(api_param, 'count', '1')
    is_phone_ = is_phone(ua)
    key = 'image.phone' if is_phone_ else 'image.pc'
    local_image = get(dataset.data, key, [])
    result = []
    if len(local_image) <= int(count):
        result = img_list(is_phone_, image, count)
        dataset.insert_list(key, result)
    else:
        result = random.sample(dataset.get(key), int(count))
    return json.dumps(result)
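
Note: Examples 6, 17 and 26 call a three-argument get(data, key, default), and keys like 'image.phone' suggest a dotted-path lookup into nested dicts. A minimal sketch of such a helper (the dotted-path behavior is inferred from the key format, not confirmed by the source):

def get(data, key, default=None):
    # Walk a nested dict along a dotted path; return default on any miss.
    current = data
    for part in key.split('.'):
        if isinstance(current, dict) and part in current:
            current = current[part]
        else:
            return default
    return current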
Example #7
 def check(self):
     self.vinfo = self.get_temp_vid(self.vinfo)
     self.vid = self.vinfo['Vid']
     html = get(f"https://www.youtube.com/watch?v={self.vid}")
     if r'"isLive\":true' in html:
         video_dict = self.getlive_title(self.vid)
         process_video(video_dict)
         self.db.delete(self.vinfo)
     else:
         self.logger.info('Not found Live')
Example #8
 def _fetch_addresses(self, object_id):
     """
     Get addresses from the given equipment.
     """
     res = []
     ip_url = '%s%s/?format=json' % (self.api_url, api_ip_name)
     data = get('%s&equipment=%s' % (ip_url, object_id))
     if data:
         for element in data:
             address = Address(self.api_url, element)
             res.append(address)
     return res
Example #9
 def _fetch_services(self, object_id):
     """
     Get services from the given equipment.
     """
     res = []
     service_url = '%s%s/?format=json' % (self.api_url, api_service_name)
     data = get('%s&equipment=%s' % (service_url, object_id))
     if data:
         for element in data:
             service = Service(self.api_url, element)
             res.append(service)
     return res
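
Note: unlike the YouTube examples, Examples 5, 8, 9, 15 and 34 expect get() to return already-parsed JSON rather than raw text. A sketch of that variant, again assuming requests; returning None on failure matches the `if data:` guards above:

import requests

def get(url):
    # Fetch a JSON endpoint and return the decoded payload, or None on error.
    try:
        resp = requests.get(url, timeout=10)
        resp.raise_for_status()
        return resp.json()
    except (requests.RequestException, ValueError):
        return None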
Example #10
def img_list(is_phone_, image, count=1):
    """
    Get a random set of images according to count.
    """
    result = []
    while len(result) < int(count):
        ir = requests.get(image, allow_redirects=False)
        text = json.loads(ir.text)
        img_url = get(text, 'imgurl', '')
        min_size = 100 if is_phone_ else 200
        if img_remote_size(img_url) > min_size:
            result.append(img_url)
    return result
Example #11
 def check(self):
     html = get(
         f'https://www.youtube.com/channel/{self.target_id}/featured')
     if '"label":"LIVE NOW"' in html:
         # vid = self.get_videoid_by_channel_id()
         # get_live_info = self.getlive_vid(vid)
         video_dict = self.get_videoid_by_channel_id(self.target_id)
         video_dict['Provide'] = self.module
         process_video(video_dict)
     else:
         if 'Upcoming live streams' in html:
             self.logger.info(f'{self.target_id}: Found A Live Upcoming')
         else:
             self.logger.info(f'{self.target_id}: Not found Live')
Example #12
def policy_gradient(d, g):
    d['day'] = ut.get(d, 'date')
    t = g
    rate = 0.00002
    delta = 0.05
    res = pd.DataFrame(index=d.day.unique(),
                       columns=['pnl_naive', 'pnl_dynamic', 'q'])
    for day in d.day.unique():
        batch = d[d.day == day]
        pnl = backtest(batch, t, g)
        grad = 0.5 * (backtest(batch, t + delta, g) -
                      backtest(batch, t - delta, g)) / delta
        t = max(t + rate * grad, 0)
        res.loc[day] = [backtest(batch, g, g), pnl, t]
        #print('Day: ' + day.strftime(format='%Y-%m-%d') + ' PNL: ' + str(pnl) + ' t:'+str(t))
    return res
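
Note: the gradient in Example 12 is a central finite difference, grad ≈ (f(t + δ) − f(t − δ)) / (2δ), followed by a gradient-ascent step clamped at zero. The update in isolation (f stands in for the backtest; rate and delta mirror the values above):

def finite_diff_step(f, t, rate=0.00002, delta=0.05):
    # One gradient-ascent step using a central finite-difference estimate,
    # clamped so the threshold t never goes negative.
    grad = (f(t + delta) - f(t - delta)) / (2 * delta)
    return max(t + rate * grad, 0)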
Example #13
 def check(self):
     try:
         html = get(
             f'https://www.youtube.com/channel/{self.target_id}/featured')
         if '"label":"LIVE NOW"' in html:
             video_dict = self.get_video_info_by_html()
             if video_dict:
                 video_dict['Provide'] = self.module
                 process_video(video_dict)
         else:
             if 'Upcoming live streams' in html:
                 self.logger.info(
                     f'{self.target_id}: Found A Live Upcoming')
             else:
                 self.logger.info(f'{self.target_id}: Not found Live')
     except Exception:
         self.logger.exception('Check Failed')
Example #14
    def seek(self):
        make = {}
        table = tools.get(self.loop)  # compute all possible combinations and build keys from them

        for val in table:  # go through every possible combination here
            make[val] = {"time": 0, "money": 0}
            for n in range(len(val)):
                make[val]["time"] += self.data[n][self.iterTable[int(
                    val[n])]][0]
                make[val]["money"] += self.data[n][self.iterTable[int(
                    val[n])]][1]

        temp = []
        for key in make.keys():
            #print(n, "OVER" if make[n]['time'] > self.maxTime else make[n]['time'])
            if make[key]['time'] <= self.maxTime:
                temp.append([key, make[key]["money"]])

        return max(temp, key=lambda x: x[1])[1]
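
Note: in Example 14, tools.get(self.loop) evidently enumerates every combination of choices as string keys such as '0120', where each digit indexes iterTable. An equivalent enumeration with itertools.product (positions and choices are assumed parameters):

from itertools import product

def all_combinations(positions, choices):
    # One string key per way of picking a choice index for each position.
    return [''.join(map(str, combo))
            for combo in product(range(choices), repeat=positions)]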
Example #15
 def __init__(self, api_url, data_dict):
     fields = [
         'id',
         'code',
         'latitude',
         'latitude_unit',
         'longitude',
         'longitude_unit',
         'elevation',
         'elevation_unit',
         'name',
         'region',
         'county',
         'country',
         'town',
         'geology',
         'operator',
     ]
     super(Station, self).__init__(api_url, fields, data_dict)
     self.channels = []
     self.networks = []
     self.equipments = []
     # Fetch operator if needed
     self.operator_link = self.operator
     self.operator = ''
     if self.operator_link:
         data = get(self.operator_link)
         if data:
             self.operator = Organism(api_url, data)
     # Fix unit for longitude, latitude, azimuth
     for field in ['latitude', 'longitude']:
         field_unit = '_'.join([field, 'unit'])
         if not getattr(self, field_unit, None):
             setattr(self, field_unit, default_degree_unit)
     # Then elevation
     if not self.elevation_unit:
         self.elevation_unit = default_elevation_unit
     # Geology
     if not self.geology:
         self.geology = 'Unknown'
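
Note: Examples 5 and 15 fill in missing units field by field; the same pattern generalizes to a single loop over a name-to-default mapping. A sketch (apply_default_units is a hypothetical helper; the default constants come from the surrounding module):

def apply_default_units(obj, defaults):
    # defaults maps attribute name to fallback, e.g.
    # {'azimuth_unit': default_degree_unit, 'depth_unit': default_elevation_unit}
    for attr, fallback in defaults.items():
        if not getattr(obj, attr, None):
            setattr(obj, attr, fallback)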
Example #16
  def open_link(self, device="inst0", autoid=True):
    res = self._make_call(
      create_link,
      Create_LinkParms(
        clientId = self.get_unique_id(),
        lockDevice = 0,
        lock_timeout = VXI11_DEFAULT_TIMEOUT,
        device=device,
      ),
      self.p.pack_Create_LinkParms,
      self.un_p.unpack_Create_LinkResp,
      timeout=VXI11_DEFAULT_TIMEOUT,
    )

    # check for errors, this raises exceptions on errors
    VXI11Error.check(res.error)

    self.links[res.lid] = (device, res)
    link = Link(LinkResp=res, client=self)
    if autoid:
      import tools # delayed to allow complete loading
      return tools.get( link.query('*IDN?') )(link)
    return link
Example #17
 def get(self, key):
     result = get(self.data, key, None)
     return result
Example #18
in_list = []
in_list_JES_up = []
in_list_JES_down = []
in_list_PU_up = []
in_list_PU_down = []

for c in categories:
    for s in signals:
        in_list.append((filename, 'signal_histos/%s_%s' % (c, s)))
        in_list_JES_up.append((filename, 'signal_histos/%s_%s_JES_up' % (c, s)))
        in_list_JES_down.append((filename, 'signal_histos/%s_%s_JES_down' % (c, s)))
        in_list_PU_up.append((filename, 'signal_histos/%s_%s_PU_up' % (c, s)))
        in_list_PU_down.append((filename, 'signal_histos/%s_%s_PU_down' % (c, s)))

std_plots = tools.get(in_list)
JES_up_plots = tools.get(in_list_JES_up)
JES_down_plots = tools.get(in_list_JES_down)
PU_up_plots = tools.get(in_list_PU_up)
PU_down_plots = tools.get(in_list_PU_down)

print ""
print "=========================================="
print "JES Down/Up"
print "=========================================="

for i in range(0,len(std_plots)):
    if i%5 == 0: print ""
    JES_up_ratio = JES_up_plots[i].Integral()/std_plots[i].Integral()
    JES_down_ratio = JES_down_plots[i].Integral()/std_plots[i].Integral()
    print "JES %s: %5.3f/%5.3f" % (std_plots[i].GetName(), JES_down_ratio, JES_up_ratio)

Example #19
category = 'cAll'
vbf_samples = ['H2Mu_VBF', 'H2Mu_VBF_120', 'H2Mu_VBF_130']
gg_samples = ['H2Mu_gg', 'H2Mu_gg_120', 'H2Mu_gg_130']

filename = dirname+filename
gg_getlist  = []
vbf_getlist = []

for sample in gg_samples:
    gg_getlist.append((filename, 'signal_histos/%s_%s' % (category, sample)))

for sample in vbf_samples:
    vbf_getlist.append((filename, 'signal_histos/%s_%s' % (category, sample)))

print "\n =========== GET ================== \n" 
gg_hlist = tools.get(gg_getlist)
vbf_hlist = tools.get(vbf_getlist)

# normalize histograms
for h in gg_hlist:
    h.Scale(1/h.Integral())

for h in vbf_hlist:
    h.Scale(1/h.Integral())

# -- ggf -----------------------
gg_h125 = gg_hlist[0]
gg_h120 = gg_hlist[1]
gg_h130 = gg_hlist[2]

gg_h125.SetTitle("GGF_M125");

Example #20
##############################################
# make_bdt_score_overlay.py                  #
##############################################

import tools as tools
from categories_and_samples import *
from ROOT import *

#============================================
# code
#============================================

filename = '../../UFDimuAnalysis_v2/bin/rootfiles/validate_bdt_score_-1_1_categories2_36814_dyAMC-J_minpt20_b1_sig-xlumi1.root'
in_list = []
c = 'cAll'

for s in net_samples:
    if 'Data' in s or 'Diboson' in s or 'VH' in s:
        continue
    in_list.append( (filename, 'net_histos/%s_%s' % (c, s)) )

plot_list = tools.get(in_list)

i=2
for h in plot_list:
    h.Scale(1/h.Integral())
    i+=1

tools.overlay(plot_list, title="BDT Score", savename="bdt_score_overlay_%s" % c, 
              draw="fill-transparent", xtitle="BDT score", ytitle="", ldim=[0.5, 0.7, 0.88, 0.88], yrange =(0,0.15))
Example #21
import os
import sys

from skimage.color import rgb2gray
from skimage.transform import resize
from tools import get

#root directory
root_directory=sys.argv[1]
copy_root_directory="Pkl/"
#make Pkl directory
os.makedirs(copy_root_directory,exist_ok=True)
#get all the user directories in the root directory
users_directory=os.listdir(root_directory)


#image characteristics
image_size=int(get("image_size"))
num_characters=int(get("num_characters"))


def hotfixLabel(n):
	label=[0]*num_characters
	label[n]=1
	return label

total_captured=0
flag=True
for user in users_directory:
	#print("Processing Directory:"+str(user))
	# define images and labels
	images=[]
	labels=[]
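
Note: hotfixLabel in Example 21 builds a one-hot vector by hand; with numpy the same encoding is a single indexing operation. A sketch (one_hot is a hypothetical replacement):

import numpy as np

def one_hot(n, num_classes):
    # Row n of the identity matrix is the one-hot encoding of n.
    return np.eye(num_classes, dtype=int)[n]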
Example #22
          ]

directory = '/home/puno/h2mumu/UFDimuAnalysis_v2/bin/rootfiles/'
roch_filename = directory+'validate_UNBLINDED_dimu_mass_Roch_110_160_categories3_tree_categorization_final_36814_dyAMC_minpt10.root'
kamu_filename = directory+'validate_UNBLINDED_dimu_mass_KaMu_110_160_categories3_tree_categorization_final_36814_dyAMC_minpt10.root'

roch_list = []
kamu_list = []

for c in categories:
    for s in signals:
        roch_list.append((roch_filename, 'signal_histos/%s_%s' % (c, s)))
        kamu_list.append((kamu_filename, 'signal_histos/%s_%s' % (c, s)))

print "\n =========== GET ================== \n" 
roch_plots = tools.get(roch_list)
kamu_plots = tools.get(kamu_list)

#print "\n =========== FIT ================ \n" 
#
#ggf_ratios = [] 
#ggf_mean_roch = [] 
#ggf_mean_kamu = [] 
#ggf_sigma_roch = [] 
#ggf_sigma_kamu = [] 
#
#vbf_ratios = [] 
#vbf_mean_roch = [] 
#vbf_mean_kamu = [] 
#vbf_sigma_roch = [] 
#vbf_sigma_kamu = [] 
Example #23
              'c_01_Jet_Loose_OO',
              'c_01_Jet_Loose_OE',
              'c_01_Jet_Loose_EE'
              ]

category = 'c_01_Jet_Tight_BB'
samples = ['VH', 'H2Mu_VBF', 'H2Mu_gg', 'Diboson_plus', 'TTbar_Plus_SingleTop', 'Drell_Yan', 'Net_Data']

filename = dirname+filenames[2]
getlist = []

#for sample in samples:
#    getlist.append((filename, 'net_histos/%s_%s' % (category, sample)))

getlist.append( (filename, 'Net_DY_HT') )
getlist.append( (filename, 'ZJets_AMC') )

print "\n =========== GET ================== \n" 
hlist = tools.get(getlist)
print "\n =========== REBIN ================ \n" 
hmc = tools.add(hlist[0:-1])
hdata = hlist[-1]
newBins = tools.getRebinEdges(hdata, hmc, max_err=0.1)
print newBins
rebin_hlist = tools.rebin(hlist, newBins) #rebinned to var bin width
srebin_hlist = tools.scaleByBinWidth(rebin_hlist, newBins) # rebinned to var bin width and wide bins are scaled down
srebin_hlist[0].SetTitle('ZJets_MG');

print "\n =========== STACK AND RATIO ====== \n" 
tools.stackAndRatio(srebin_hlist, title='Drell_Yan_MC_AMC_vs_Madgraph', ytitleratio='AMC/MG', yrange=(1e3, 1e7))
Example #24
samples = [
    'VH', 'H2Mu_VBF', 'H2Mu_gg', 'Diboson_plus', 'TTbar_Plus_SingleTop',
    'Drell_Yan', 'Net_Data'
]

filename = dirname + filenames[2]
getlist = []

#for sample in samples:
#    getlist.append((filename, 'net_histos/%s_%s' % (category, sample)))

getlist.append((filename, 'Net_DY_HT'))
getlist.append((filename, 'ZJets_AMC'))

print "\n =========== GET ================== \n"
hlist = tools.get(getlist)
print "\n =========== REBIN ================ \n"
hmc = tools.add(hlist[0:-1])
hdata = hlist[-1]
newBins = tools.getRebinEdges(hdata, hmc, max_err=0.1)
print newBins
rebin_hlist = tools.rebin(hlist, newBins)  #rebinned to var bin width
srebin_hlist = tools.scaleByBinWidth(
    rebin_hlist,
    newBins)  # rebinned to var bin width and wide bins are scaled down
srebin_hlist[0].SetTitle('ZJets_MG')

print "\n =========== STACK AND RATIO ====== \n"
tools.stackAndRatio(srebin_hlist,
                    title='Drell_Yan_MC_AMC_vs_Madgraph',
                    ytitleratio='AMC/MG',
Example #25
smean_data = []
smean_mc = []
sres_data = []
sres_mc = []

calibs = ['PF', 'Roch', 'KaMu']

# Make the (file, histogram-path) lists for each calibration
for c in calibs:
    #print "   /// (%d) Looking at sample %s ..." % (dy_bug, s)
    smean_data.append( (filename, 'plots/mean_Net_Data_mass_%s_%s' % (c, varname)) )
    smean_mc.append(   (filename, 'plots/mean_ZJets_AMC_mass_%s_%s' % (c, varname)) )

    sres_data.append( (filename, 'plots/resolution_Net_Data_mass_%s_%s' % (c, varname)) )
    sres_mc.append(   (filename, 'plots/resolution_ZJets_AMC_mass_%s_%s' % (c, varname)) )

mean_data = tools.get(smean_data)
mean_mc = tools.get(smean_mc)
res_data = tools.get(sres_data)
res_mc = tools.get(sres_mc)
    
mean_all = []
mean_all.extend(mean_data)
mean_all.extend(mean_mc)

res_all = []
res_all.extend(res_data)
res_all.extend(res_mc)

mean_minmax = (999999,-999999)
res_minmax = (999999,-999999)
Example #26
 def insert_list(self, key, values):
     target = get(self.data, key, [])
     target.extend(values)
     self.insert(key, target)
Example #27
import numpy as np
import matplotlib.pyplot as plt
import pickle
import os
import sys
from collections import deque
import model
from tools import get
import Loader

#Specify which user is writing
user=int(sys.argv[1])

#image parameters
image_size=int(get("image_size"))
num_characters=int(get("num_characters"))

# prepare code to fetch datasets
users=deque(os.listdir("Pickles/Pkl"))
def load_next_batch():
    #return image
    file_c=users.popleft()
    print("Batch Name:",file_c)
    users.append(file_c)
    pickle_file="Pickles/Pkl/"+file_c
    pickle_file=open(pickle_file,"rb")
    save=pickle.load(pickle_file)
    image_r=save["images"].reshape([-1,image_size*image_size])
    label_r=save["labels"]
    if len(label_r) == 0:
        print("Error in loading... continuing to the next iteration")

Example #28
    for s in net_samples:
        if s == 'Drell_Yan' and dy_bug:
            s += '_'
        #print "   /// (%d) Looking at sample %s ..." % (dy_bug, s)
        # Histos to get for this category
        in_list.append((filename, 'net_histos/%s_%s' % (c, s)))

        # Up and Down uncertainty histograms to get for this category, e.g. JES_up, JES_down, ...
        if s == 'Net_Data':
            continue  # Data does not have up/down uncertainty histograms, only MC

        for key, value in in_unc_map.iteritems():
            value.append((filename, 'net_histos/%s_%s_%s' % (c, s, key)))

    # list of histograms to make the stack and ratio plots for this category
    hlist = tools.get(in_list)

    # map to lists of up/down uncertainty histograms for this category
    # 'JES_up' -> list of JES_up histos for this category
    for in_key, in_value in in_unc_map.iteritems():
        unc_hist_map[in_key] = tools.get(in_value)

    #hmc = tools.add(hlist[0:-1])
    #hdata = hlist[-1]
    #newBins = tools.getRebinEdges(hdata, hmc, max_err=0.05)
    #print newBins
    #rebin_hlist = tools.rebin(hlist, newBins) #rebinned to var bin width
    #srebin_hlist = tools.scaleByBinWidth(rebin_hlist, newBins) # rebinned to var bin width and wide bins are scaled down

    print "\n =========== STACK AND RATIO ====== \n"
Example #29
    for s in net_samples:
        if s == 'Drell_Yan' and dy_bug:
            s+='_'
        #print "   /// (%d) Looking at sample %s ..." % (dy_bug, s)
        # Histos to get for this category
        in_list.append( (filename, 'net_histos/%s_%s' % (c, s)) )

        # Up and Down uncertainty histograms to get for this category, e.g. JES_up, JES_down, ...
        if s == 'Net_Data': 
            continue       # Data does not have up/down uncertainty histograms, only MC

        for key,value in in_unc_map.iteritems():
            value.append( (filename, 'net_histos/%s_%s_%s' % (c, s, key)) ) 
    
    # list of histograms to make the stack and ratio plots for this category
    hlist = tools.get(in_list)

    # map to lists of up/down uncertainty histograms for this category
    # 'JES_up' -> list of JES_up histos for this category
    for in_key, in_value in in_unc_map.iteritems():
        unc_hist_map[in_key] = tools.get(in_value) 

    #hmc = tools.add(hlist[0:-1])
    #hdata = hlist[-1]
    #newBins = tools.getRebinEdges(hdata, hmc, max_err=0.05)
    #print newBins
    #rebin_hlist = tools.rebin(hlist, newBins) #rebinned to var bin width
    #srebin_hlist = tools.scaleByBinWidth(rebin_hlist, newBins) # rebinned to var bin width and wide bins are scaled down
    
    print "\n =========== STACK AND RATIO ====== \n" 
    
Example #30
category = 'root'
vbf_samples = ['H2Mu_VBF', 'H2Mu_VBF_120', 'H2Mu_VBF_130']
gg_samples = ['H2Mu_gg', 'H2Mu_gg_120', 'H2Mu_gg_130']

filename = dirname+filename
gg_getlist  = []
vbf_getlist = []

for sample in gg_samples:
    gg_getlist.append((filename, 'signal_histos/%s_%s' % (category, sample)))

for sample in vbf_samples:
    vbf_getlist.append((filename, 'signal_histos/%s_%s' % (category, sample)))

print "\n =========== GET ================== \n" 
gg_hlist = tools.get(gg_getlist)
vbf_hlist = tools.get(vbf_getlist)

# normalize histograms
for h in gg_hlist:
    h.Scale(1/h.Integral())

for h in vbf_hlist:
    h.Scale(1/h.Integral())

# -- ggf -----------------------
gg_h125 = gg_hlist[0]
gg_h120 = gg_hlist[1]
gg_h130 = gg_hlist[2]

gg_h125.SetTitle("GGF_M125");

Example #31
calibs = ['PF', 'Roch', 'KaMu']

# Make the (file, histogram-path) lists for each calibration
for c in calibs:
    #print "   /// (%d) Looking at sample %s ..." % (dy_bug, s)
    smean_data.append(
        (filename, 'plots/mean_Net_Data_mass_%s_%s' % (c, varname)))
    smean_mc.append(
        (filename, 'plots/mean_ZJets_AMC_mass_%s_%s' % (c, varname)))

    sres_data.append(
        (filename, 'plots/resolution_Net_Data_mass_%s_%s' % (c, varname)))
    sres_mc.append(
        (filename, 'plots/resolution_ZJets_AMC_mass_%s_%s' % (c, varname)))

mean_data = tools.get(smean_data)
mean_mc = tools.get(smean_mc)
res_data = tools.get(sres_data)
res_mc = tools.get(sres_mc)

mean_all = []
mean_all.extend(mean_data)
mean_all.extend(mean_mc)

res_all = []
res_all.extend(res_data)
res_all.extend(res_mc)

mean_minmax = (999999, -999999)
res_minmax = (999999, -999999)
Example #32
#============================================
# code
#============================================

print "\n /// Overlaying backgrounds for %s... \n" % filename

# List of histograms to make the overlay from -----------------
in_list = []
for c in bdt_categories:
    for s in net_samples_all:
        if s == 'Net_Data':
            in_list.append((filename, 'net_histos/%s_%s' % (c, s)))

print "\n =========== GET ================== \n"
hlist = tools.get(in_list)
edges = [x for x in range(110, 161) if x % 2 == 0]
for e in edges:
    print e
hlist = tools.rebin(hlist, edges)

inclusive = 0

# Normalize all of the backgrounds to the same scale
for i, h in enumerate(hlist):
    h.Scale(1 / h.Integral())
    #h.SetName(bdt_categories[i])
    h.SetTitle(bdt_categories[i])
    if 'cAll' in h.GetName():
        h.SetTitle("Inclusive")
        inclusive = h
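
Note: the h.Scale(1 / h.Integral()) idiom used throughout these PyROOT examples raises ZeroDivisionError on an empty histogram. A guarded sketch of the same normalization:

def normalize(hist):
    # Scale a ROOT histogram to unit area, skipping empty histograms.
    integral = hist.Integral()
    if integral > 0:
        hist.Scale(1.0 / integral)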
Example #33
]

h = 'H2Mu_gg'

i = 0
for c in categories:
    in_list = [
        ('../../bin/rootfiles/validate_M120_blinded_dimu_mass_PF_110_160_nolow_categories3_tree_categorization_final_36814_dyMG.root',
         'net_histos/%s_%s_120' % (c, h)),
        ('../../bin/rootfiles/validate_M130_blinded_dimu_mass_PF_110_160_nolow_categories3_tree_categorization_final_36814_dyMG.root',
         'net_histos/%s_%s_130' % (c, h)),
        ('../../bin/rootfiles/validate_blinded_dimu_mass_PF_110_160_nolow_categories3_tree_categorization_final_36814_dyMG.root',
         'net_histos/%s_%s' % (c, h))
    ]

    plot_list = tools.get(in_list)
    plot_list[0].SetName('M120')
    plot_list[0].SetTitle('M120')
    plot_list[1].SetName('M130')
    plot_list[1].SetTitle('M130')
    plot_list[2].SetName('M125')
    plot_list[2].SetTitle('M125')

    tools.overlay(plot_list,
                  title="c%d_120_125_130_%s" % (i, h),
                  savename="c%d_120_125_130_%s" % (i, h),
                  xtitle="dimu_mass",
                  ytitle="Events / 1 GeV",
                  ldim=[0.5, 0.7, 0.88, 0.88])
    i += 1
Example #34
searched_site_code = 'CHMF'

# Configuration
server = 'localhost'
port = '8002'
if port:
    server = ':'.join([server, port])
api_url = 'http://%s/api/v1/' % server
site_url = api_url + 'sites/?format=json'
channel_url = api_url + 'channels/?format=json'
equipment_url = api_url + 'equipments/?format=json'
params_url = api_url + 'channel_parameters/?format=json'

# Search Charmoille site
url = site_url + '&code=%s' % searched_site_code
stations = get(url)

# Then browse the data to fetch all the needed information
result = []
for station in stations:
    # Fetch its ID and some info
    s = Station(api_url, station)
    # Search linked channels
    url = channel_url + '&station=%s' % s.code
    channels = get(url)
    for channel in channels:
        c = Channel(api_url, channel)
        c.station = s  # Add a link from channel to station
        s.channels.append(c)
        if c.network and c.network.code not in s.networks:
            s.networks.append(c.network.code)
Example #35
    name = key.GetName()
    #print "(%d) Looping over %s ..." % (dy_bug, key)
    if 'Drell_Yan_' in name and name[-1] == '_':
        dy_bug = True
        break

# List of histograms to make the table from -----------------
in_list = []
for c in bdt_categories:
    for s in net_samples_all:
        if s == 'Drell_Yan' and dy_bug:
            s+='_'
        in_list.append((filename, 'net_histos/%s_%s' % (c, s)))

print "\n =========== GET ================== \n"
hlist = tools.get(in_list)

fwhm_widths = []
ffwhm_widths = []
fwhm_bounds = []
ffwhm_bounds = []
signal_net  = []
signal_fwhm = []
bkg_fwhm    = []

vbf_net  = []
vbf_fwhm = []
ggf_net  = []
ggf_fwhm = []
vh_net   = []
vh_fwhm  = []
Example #36
]

directory = '/home/puno/h2mumu/UFDimuAnalysis_v2/bin/rootfiles/'
roch_filename = directory + 'validate_UNBLINDED_dimu_mass_Roch_110_160_categories3_tree_categorization_final_36814_dyAMC_minpt10.root'
kamu_filename = directory + 'validate_UNBLINDED_dimu_mass_KaMu_110_160_categories3_tree_categorization_final_36814_dyAMC_minpt10.root'

roch_list = []
kamu_list = []

for c in categories:
    for s in signals:
        roch_list.append((roch_filename, 'signal_histos/%s_%s' % (c, s)))
        kamu_list.append((kamu_filename, 'signal_histos/%s_%s' % (c, s)))

print "\n =========== GET ================== \n"
roch_plots = tools.get(roch_list)
kamu_plots = tools.get(kamu_list)

#print "\n =========== FIT ================ \n"
#
#ggf_ratios = []
#ggf_mean_roch = []
#ggf_mean_kamu = []
#ggf_sigma_roch = []
#ggf_sigma_kamu = []
#
#vbf_ratios = []
#vbf_mean_roch = []
#vbf_mean_kamu = []
#vbf_sigma_roch = []
#vbf_sigma_kamu = []