Example #1
 def __init__(self, bot: Red) -> None:
     self.bot = bot
     self._session: aiohttp.ClientSession
     self.config = Config.get_conf(self,
                                   176070082584248320,
                                   force_registration=True)
     self.config.register_guild(
         blocklist_mode=False,
         file_extensions=[".txt", ".log"],
         listen_to_bots=False,
         listen_to_self=False,
     )
     # state:
     #  - `True` - allowed
     #  - `False` - blocked
     #  - `None` - not set (the default)
     self.config.register_channel(state=None)
     # gists:
     #  list of IDs of all uploaded gists for the files uploaded by user
     #  the only purpose of this are data deletion requests
     self.config.register_user(gists=[])
     # message_cache:
     #  {message_id: (user_id, gist_id, bot_message_id)}
     self._message_cache: MutableMapping[int, Tuple[int, str, int]] = (
         cachetools.Cache(maxsize=10_000))
     self._guild_cache: Dict[int, GuildData] = {}
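
Note: the message cache above uses the base cachetools.Cache class, which only
enforces a size bound; once maxsize entries are stored, inserting another one
evicts an arbitrary existing entry (LRUCache, TTLCache, etc. add specific
eviction policies). A minimal sketch of that behaviour, with illustrative
values rather than the cog's real data:

import cachetools

cache = cachetools.Cache(maxsize=2)
cache[1] = "a"
cache[2] = "b"
cache[3] = "c"          # evicts an arbitrary existing entry to make room

assert len(cache) <= 2  # the size bound always holds
assert cache[3] == "c"  # the newly inserted entry is always kept
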
Example #2
 def run():
     events = NanoEventsFactory.from_root(
         os.path.abspath("tests/samples/nano_dy.root"),
         persistent_cache=array_log,
     ).events()
     jets = events.Jet
     met = events.MET
     jets["pt_raw"] = (1 - jets["rawFactor"]) * jets["pt"]
     jets["mass_raw"] = (1 - jets["rawFactor"]) * jets["mass"]
     jets["pt_gen"] = ak.values_astype(
         ak.fill_none(jets.matched_gen.pt, 0.0), np.float32)
     jets["rho"] = ak.broadcast_arrays(events.fixedGridRhoFastjetAll,
                                       jets.pt)[0]
     jec_cache = cachetools.Cache(np.inf)
     weakref.finalize(jec_cache, jec_finalized.set)
     corrected_jets = jet_factory.build(jets, lazy_cache=jec_cache)
     corrected_met = met_factory.build(met,
                                       corrected_jets,
                                       lazy_cache=jec_cache)
     print(corrected_met.pt_orig)
     print(corrected_met.pt)
     for unc in jet_factory.uncertainties() + met_factory.uncertainties():
         print(unc, corrected_met[unc].up.pt)
         print(unc, corrected_met[unc].down.pt)
     for unc in jet_factory.uncertainties():
         print(unc, corrected_jets[unc].up.pt)
     print("Finalized:", array_log.finalized)
Example #3
    def process(self, events):
        isRealData = not hasattr(events, "genWeight")

        if isRealData:
            # Nominal JEC are already applied in data
            return self.process_shift(events, None)

        import cachetools
        jec_cache = cachetools.Cache(np.inf)
        nojer = "NOJER" if self._skipJER else ""
        fatjets = fatjet_factory[f"{self._year}mc{nojer}"].build(
            add_jec_variables(events.FatJet, events.fixedGridRhoFastjetAll),
            jec_cache)
        jets = jet_factory[f"{self._year}mc{nojer}"].build(
            add_jec_variables(events.Jet, events.fixedGridRhoFastjetAll),
            jec_cache)
        met = met_factory.build(events.MET, jets, {})

        shifts = [
            ({
                "Jet": jets,
                "FatJet": fatjets,
                "MET": met
            }, None),
        ]
        if self.systematics:
            shifts += [
                ({
                    "Jet": jets.JES_jes.up,
                    "FatJet": fatjets.JES_jes.up,
                    "MET": met.JES_jes.up
                }, "JESUp"),
                ({
                    "Jet": jets.JES_jes.down,
                    "FatJet": fatjets.JES_jes.down,
                    "MET": met.JES_jes.down
                }, "JESDown"),
                ({
                    "Jet": jets,
                    "FatJet": fatjets,
                    "MET": met.MET_UnclusteredEnergy.up
                }, "UESUp"),
                ({
                    "Jet": jets,
                    "FatJet": fatjets,
                    "MET": met.MET_UnclusteredEnergy.down
                }, "UESDown"),
            ]
            if not self._skipJER:
                shifts.extend([
                    ({
                        "Jet": jets.JER.up,
                        "FatJet": fatjets.JER.up,
                        "MET": met.JER.up
                    }, "JERUp"),
                    ({
                        "Jet": jets.JER.down,
                        "FatJet": fatjets.JER.down,
                        "MET": met.JER.down
                    }, "JERDown"),
                ])
        # HEM15/16 issue
        # if self._year == "2018":
        #     _runid = (events.run >= 319077)
        #     j_mask = ak.where((jets.phi > -1.57) & (jets.phi < -0.87) &
        #                       (jets.eta > -2.5) & (jets.eta < 1.3), 0.8, 1)
        #     fj_mask = ak.where((fatjets.phi > -1.57) & (fatjets.phi < -0.87) &
        #                        (fatjets.eta > -2.5) & (fatjets.eta < 1.3),
        #                        0.8, 1)
        #     shift_jets = copy.deepcopy(jets)
        #     shift_fatjets = copy.deepcopy(fatjets)
        #     for collection, mask in zip([shift_jets, shift_fatjets], [j_mask, fj_mask]):
        #         collection["pt"] = mask * collection.pt
        #         collection["mass"] = mask * collection.mass
        #     shifts.extend([
        #         ({"Jet": shift_jets, "FatJet": shift_fatjets, "MET": met}, "HEM18"),
        #     ])

        return processor.accumulate(
            self.process_shift(update(events, collections), name)
            for collections, name in shifts)
Example #4
import cachetools

from notion import client

import auth


@cachetools.cached(cachetools.Cache(maxsize=1))
def notion_client() -> client.NotionClient:
    return client.NotionClient(token_v2=auth.get_token())
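
Note: wrapping a zero-argument factory with cachetools.cached and a one-slot
cache memoizes it, so every caller shares the same client instance. If the
factory may be called from several threads, cached also accepts a lock. A
sketch with illustrative names, not the real Notion client:

import threading

import cachetools


@cachetools.cached(cachetools.Cache(maxsize=1), lock=threading.Lock())
def shared_client() -> dict:
    # Stand-in for an expensive client constructor; runs only once.
    return {"token": "example"}


assert shared_client() is shared_client()  # later calls return the cached object
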
Example #5
def test_corrected_jets_factory():
    import os
    from coffea.jetmet_tools import CorrectedJetsFactory, CorrectedMETFactory, JECStack

    events = None
    from coffea.nanoevents import NanoEventsFactory

    factory = NanoEventsFactory.from_root(
        os.path.abspath("tests/samples/nano_dy.root"))
    events = factory.events()

    jec_stack_names = [
        "Summer16_23Sep2016V3_MC_L1FastJet_AK4PFPuppi",
        "Summer16_23Sep2016V3_MC_L2Relative_AK4PFPuppi",
        "Summer16_23Sep2016V3_MC_L2L3Residual_AK4PFPuppi",
        "Summer16_23Sep2016V3_MC_L3Absolute_AK4PFPuppi",
        "Spring16_25nsV10_MC_PtResolution_AK4PFPuppi",
        "Spring16_25nsV10_MC_SF_AK4PFPuppi",
    ]
    for key in evaluator.keys():
        if "Summer16_23Sep2016V3_MC_UncertaintySources_AK4PFPuppi" in key:
            jec_stack_names.append(key)

    jec_inputs = {name: evaluator[name] for name in jec_stack_names}
    jec_stack = JECStack(jec_inputs)

    name_map = jec_stack.blank_name_map
    name_map["JetPt"] = "pt"
    name_map["JetMass"] = "mass"
    name_map["JetEta"] = "eta"
    name_map["JetA"] = "area"

    jets = events.Jet

    jets["pt_raw"] = (1 - jets["rawFactor"]) * jets["pt"]
    jets["mass_raw"] = (1 - jets["rawFactor"]) * jets["mass"]
    jets["pt_gen"] = ak.values_astype(ak.fill_none(jets.matched_gen.pt, 0),
                                      np.float32)
    jets["rho"] = ak.broadcast_arrays(events.fixedGridRhoFastjetAll,
                                      jets.pt)[0]
    name_map["ptGenJet"] = "pt_gen"
    name_map["ptRaw"] = "pt_raw"
    name_map["massRaw"] = "mass_raw"
    name_map["Rho"] = "rho"

    jec_cache = cachetools.Cache(np.inf)

    print(name_map)

    tic = time.time()
    jet_factory = CorrectedJetsFactory(name_map, jec_stack)
    toc = time.time()

    print("setup corrected jets time =", toc - tic)

    tic = time.time()
    prof = pyinstrument.Profiler()
    prof.start()
    corrected_jets = jet_factory.build(jets, lazy_cache=jec_cache)
    prof.stop()
    toc = time.time()

    print("corrected_jets build time =", toc - tic)

    print(prof.output_text(unicode=True, color=True, show_all=True))

    tic = time.time()
    print("Generated jet pt:", corrected_jets.pt_gen)
    print("Original jet pt:", corrected_jets.pt_orig)
    print("Raw jet pt:", jets.pt_raw)
    print("Corrected jet pt:", corrected_jets.pt)
    print("Original jet mass:", corrected_jets.mass_orig)
    print("Raw jet mass:", jets["mass_raw"])
    print("Corrected jet mass:", corrected_jets.mass)
    print("jet eta:", jets.eta)
    for unc in jet_factory.uncertainties():
        print(unc)
        print(corrected_jets[unc].up.pt)
        print(corrected_jets[unc].down.pt)
    toc = time.time()

    print("build all jet variations =", toc - tic)

    # Test that the corrections were applied correctly
    from coffea.jetmet_tools import (
        FactorizedJetCorrector,
        JetResolution,
        JetResolutionScaleFactor,
    )

    scalar_form = ak.without_parameters(jets["pt_raw"]).layout.form
    corrector = FactorizedJetCorrector(
        **{name: evaluator[name]
           for name in jec_stack_names[0:4]})
    corrs = corrector.getCorrection(JetEta=jets["eta"],
                                    Rho=jets["rho"],
                                    JetPt=jets["pt_raw"],
                                    JetA=jets["area"])
    reso = JetResolution(
        **{name: evaluator[name]
           for name in jec_stack_names[4:5]})
    jets["jet_energy_resolution"] = reso.getResolution(
        JetEta=jets["eta"],
        Rho=jets["rho"],
        JetPt=jets["pt_raw"],
        form=scalar_form,
        lazy_cache=jec_cache,
    )
    resosf = JetResolutionScaleFactor(
        **{name: evaluator[name]
           for name in jec_stack_names[5:6]})
    jets["jet_energy_resolution_scale_factor"] = resosf.getScaleFactor(
        JetEta=jets["eta"], lazy_cache=jec_cache)

    # Filter out the non-deterministic (no gen pt) jets
    def smear_factor(jetPt, pt_gen, jersf):
        return (ak.full_like(jetPt, 1.0) +
                (jersf[:, 0] - ak.full_like(jetPt, 1.0)) *
                (jetPt - pt_gen) / jetPt)

    test_gen_pt = ak.concatenate(
        [corrected_jets.pt_gen[0, :-2], corrected_jets.pt_gen[-1, :-1]])
    test_raw_pt = ak.concatenate([jets.pt_raw[0, :-2], jets.pt_raw[-1, :-1]])
    test_pt = ak.concatenate(
        [corrected_jets.pt[0, :-2], corrected_jets.pt[-1, :-1]])
    test_eta = ak.concatenate([jets.eta[0, :-2], jets.eta[-1, :-1]])
    test_jer = ak.concatenate([
        jets.jet_energy_resolution[0, :-2], jets.jet_energy_resolution[-1, :-1]
    ])
    test_jer_sf = ak.concatenate([
        jets.jet_energy_resolution_scale_factor[0, :-2],
        jets.jet_energy_resolution_scale_factor[-1, :-1],
    ])
    test_jec = ak.concatenate([corrs[0, :-2], corrs[-1, :-1]])
    test_corrected_pt = ak.concatenate(
        [corrected_jets.pt[0, :-2], corrected_jets.pt[-1, :-1]])
    test_corr_pt = test_raw_pt * test_jec
    test_pt_smear_corr = test_corr_pt * smear_factor(test_corr_pt, test_gen_pt,
                                                     test_jer_sf)

    # Print the results of the "by-hand" calculations and confirm that the values match the expected values
    print("\nConfirm the CorrectedJetsFactory values:")
    print("Jet pt (gen)", test_gen_pt.tolist())
    print("Jet pt (raw)", test_raw_pt.tolist())
    print("Jet pt (nano):", test_pt.tolist())
    print("Jet eta:", test_eta.tolist())
    print("Jet energy resolution:", test_jer.tolist())
    print("Jet energy resolution sf:", test_jer_sf.tolist())
    print("Jet energy correction:", test_jec.tolist())
    print("Corrected jet pt (ref)", test_corr_pt.tolist())
    print("Corrected & smeared jet pt (ref):", test_pt_smear_corr.tolist())
    print("Corrected & smeared jet pt:", test_corrected_pt.tolist(), "\n")
    assert ak.all(np.abs(test_pt_smear_corr - test_corrected_pt) < 1e-6)

    name_map["METpt"] = "pt"
    name_map["METphi"] = "phi"
    name_map["JetPhi"] = "phi"
    name_map["UnClusteredEnergyDeltaX"] = "MetUnclustEnUpDeltaX"
    name_map["UnClusteredEnergyDeltaY"] = "MetUnclustEnUpDeltaY"

    tic = time.time()
    met_factory = CorrectedMETFactory(name_map)
    toc = time.time()

    print("setup corrected MET time =", toc - tic)

    met = events.MET
    tic = time.time()
    # prof = pyinstrument.Profiler()
    # prof.start()
    corrected_met = met_factory.build(met,
                                      corrected_jets,
                                      lazy_cache=jec_cache)
    # prof.stop()
    toc = time.time()

    # print(prof.output_text(unicode=True, color=True, show_all=True))

    print("corrected_met build time =", toc - tic)

    tic = time.time()
    print(corrected_met.pt_orig)
    print(corrected_met.pt)
    prof = pyinstrument.Profiler()
    prof.start()
    for unc in jet_factory.uncertainties() + met_factory.uncertainties():
        print(unc)
        print(corrected_met[unc].up.pt)
        print(corrected_met[unc].down.pt)
    prof.stop()
    toc = time.time()

    print("build all met variations =", toc - tic)

    print(prof.output_text(unicode=True, color=True, show_all=True))
Example #6
 def cache(self, minsize):
     return cachetools.Cache(maxsize=minsize)
Example #7
import cachetools
import spotipy
import spotipy.oauth2

import auth

CACHE_PATH = '.cache'
REDIRECT_URI = 'https://localhost:5000/callback'
SCOPES = ' '.join([
    'user-library-modify',
    'user-library-read',
])


@cachetools.cached(cachetools.Cache(1))
def spotify_client() -> spotipy.Spotify:
    client_id = auth.get_client_id()
    client_secret = auth.get_client_secret(client_id)

    auth_manager = spotipy.oauth2.SpotifyOAuth(
        client_id=client_id,
        client_secret=client_secret,
        redirect_uri=REDIRECT_URI,
        scope=SCOPES,
        cache_path=CACHE_PATH,
        open_browser=False,
    )

    return spotipy.Spotify(auth_manager=auth_manager)
Example #8
 def __init__(self, mode, maxsize, ttl):
     self.mode = mode
     if mode == CACHE_DELETE_COMPLETE:
         self.cache = cachetools.Cache(maxsize=maxsize)
     else:
         self.cache = cachetools.TTLCache(maxsize=maxsize, ttl=ttl)
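
Note: depending on the mode, the wrapper above uses either a plain bounded
cache or a TTLCache; the latter additionally expires entries ttl seconds after
they were inserted. A small sketch of the difference, with illustrative sizes:

import time

import cachetools

plain = cachetools.Cache(maxsize=128)
expiring = cachetools.TTLCache(maxsize=128, ttl=0.1)

plain["job"] = "running"
expiring["job"] = "running"
time.sleep(0.2)

assert "job" in plain         # only evicted once maxsize is exceeded
assert "job" not in expiring  # dropped after the TTL elapsed
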
Example #9
class Page(models.Model):
    # Meta info
    upper = models.ForeignKey('self',
                              related_name='children',
                              on_delete=models.PROTECT,
                              null=True,
                              blank=True)

    created = models.DateTimeField(auto_now_add=True)
    created_by = models.ForeignKey(User,
                                   related_name='created_pages',
                                   on_delete=models.PROTECT,
                                   null=False)
    updated = models.DateTimeField(auto_now=True)
    updated_by = models.ForeignKey(User, on_delete=models.PROTECT, null=False)

    # Main data
    url = models.CharField(max_length=256, null=False, unique=True)
    public = models.BooleanField(default=True)
    template = models.CharField(default='',
                                max_length=256,
                                null=False,
                                blank=True)
    layout = models.CharField(default='',
                              max_length=256,
                              null=False,
                              blank=True)
    other_settings = models.TextField(default='{}')

    class Meta:
        unique_together = (
            'upper',
            'url',
        )

    def get_text(self, lang):
        texts = {t.language: t for t in self.texts.all()}
        if lang in texts:
            return texts[lang]
        elif LANG_NO in texts:
            return texts[LANG_NO]
        else:
            return None

    def as_dict(self):
        return {
            'pk': self.pk,
            'url': self.url,
            'public': self.public,
            'template_page': self.template,
            'template_layout': self.layout,
            'upper': self.upper.pk if self.upper else None,
            'other_settings': json.loads(self.other_settings),
            'langs': [],
            'lang_no': False,
            'titles': {},
        }

    @property
    @cachetools.cached(cache=cachetools.Cache(maxsize=32))
    def other_settings_(self):
        return json.loads(self.other_settings)

    @classmethod
    @cachetools.cached(cache=cachetools.TTLCache(maxsize=8, ttl=10))
    def get_all(cls):
        result = {p.pk: p.as_dict() for p in Page.objects.all()}
        for t in PageText.objects.all():
            if t.language == LANG_NO:
                result[t.page.pk]['lang_no'] = True
            else:
                result[t.page.pk]['langs'].append(t.language)
            result[t.page.pk]['titles'][t.language] = t.title
        return result

    def __str__(self):
        return repr(self)

    def __repr__(self):
        result = f'{self.pk}. Page "{self.url}" {self.updated.strftime(dater)}'\
            f' ({self.texts.count()}/)'\
            f' {"_" if not self.template else self.template }'\
            f' : {"_" if not self.layout else self.layout }'
        return result
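
Note: decorating a method with cachetools.cached, as other_settings_ does
above, uses one cache defined at class level and keys it on the instance.
cachetools also offers cachedmethod for keeping a cache on each instance; a
sketch with illustrative names rather than the Django model:

import json
import operator

import cachetools


class CachedSettings:
    def __init__(self, raw: str) -> None:
        self.raw = raw
        self._cache = cachetools.Cache(maxsize=32)  # one cache per instance

    @cachetools.cachedmethod(operator.attrgetter("_cache"))
    def parsed(self) -> dict:
        return json.loads(self.raw)


settings = CachedSettings('{"theme": "dark"}')
assert settings.parsed() is settings.parsed()  # second call served from _cache
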
Example #10
def testLRUCacheTools():
    cache_test(cachetools.Cache(1000))
Example #11
        # Note: Children are read-only
        nodes = list()

        for valid_action in self.valid_actions():
            env_copy = self.lazy_copy()
            env_copy.make_action(valid_action)
            nodes.append(env_copy)

        return nodes

    def lazy_copy(self):
        other = super(Node, self).lazy_copy()
        return Node(balls=other.balls, is_child_turn=other.is_child_turn)


@cachetools.cached(cache=cachetools.Cache(int(1e5)),
                   key=lambda n, d: (GameState.get_state_id(n), d))
def minimax(node, depth):
    score = node.score()
    isFinished = node.isFinished()
    maximizingPlayer = node.is_child_turn

    if depth == 0 or isFinished:
        return score

    children = node.children()

    if maximizingPlayer:
        value = -1
        for child in children:
            value = max(value, minimax(child, depth - 1))
        # NOTE: the source snippet is truncated after the line above; the
        # return and the minimizing branch that follow are an assumed,
        # standard minimax completion (the +1 bound mirrors the -1 used for
        # the maximizing player), not original code.
        return value
    value = 1
    for child in children:
        value = min(value, minimax(child, depth - 1))
    return value
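
Note: the key argument to cachetools.cached in the snippet above is what lets
unhashable game nodes be memoized, by mapping the call arguments to a hashable
state id. A self-contained sketch of the same idea, with illustrative names:

import cachetools


@cachetools.cached(cache=cachetools.Cache(maxsize=100_000),
                   key=lambda state, depth: (tuple(state), depth))
def count_positions(state, depth):
    # Stand-in for a real recursive search; state is a list and unhashable,
    # so the key function converts it to a tuple.
    return len(state) * depth


print(count_positions([1, 2, 3], 2))  # computed
print(count_positions([1, 2, 3], 2))  # returned from the cache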