Example #1
def setUp(self):
    super().setUp()
    with db.atomic() as txn:
        profile = Profile.create(username='******',
                                 hashed_password='******')
        self.token = AuthToken.create(profile=profile,
                                      token='thevalidtoken').token
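The `txn` handle in the example above is never used, but the object yielded by db.atomic() exposes rollback() and commit(), which undo or flush the work done so far and then start a fresh transaction for the rest of the block. A minimal, self-contained sketch of that behaviour (the database and Profile model here are hypothetical, not taken from the project above):

    from peewee import SqliteDatabase, Model, CharField

    db = SqliteDatabase(':memory:')

    class Profile(Model):
        username = CharField()

        class Meta:
            database = db

    db.create_tables([Profile])

    with db.atomic() as txn:
        Profile.create(username='alice')
        txn.rollback()                   # discard 'alice'; a new transaction begins
        Profile.create(username='bob')   # only this row is committed at block exit

    assert [p.username for p in Profile.select()] == ['bob']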
Example #2
def update_period_times(tt):
    with db.atomic(), pt_lock:
        period_times[:] = tt
        Period.delete().execute()  # delete() only builds the query; execute() runs it
        for a, b in chunks(tt, 2):
            Period.create(start_time=a, end_time=b)
    cr.update_ev(get_cron())
Example #3
def main():
    keywords = ['php', 'shmeo', 'files']

    for k in keywords:
        image_urls = get_google_images(k)
        parse_time = datetime.now()

        imgs = []

        for image in image_urls:
            img_data = {'keyword': k, 'image_url': image, 'date': parse_time}

            imgs.append(GoogleImage(**img_data))

            # GoogleImage.create(**img_data)

            # GoogleImage.create(
            #     keyword=k,
            #     image_url=image,
            #     date=parse_time
            # )

        with db.atomic():
            GoogleImage.bulk_create(imgs)

        print('parsed', k, len(imgs))
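A note on the bulk insert above: bulk_create() also accepts a batch_size keyword, and for very large loads the peewee documentation pairs db.atomic() with its chunked() helper and insert_many(), so that each INSERT statement stays within the backend's parameter limits. A sketch of that variant, reusing the GoogleImage model plus the k, image_urls and parse_time names from this example (so it assumes that context):

    from peewee import chunked

    rows = [{'keyword': k, 'image_url': u, 'date': parse_time} for u in image_urls]

    with db.atomic():
        for batch in chunked(rows, 100):             # 100 rows per INSERT
            GoogleImage.insert_many(batch).execute()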
Example #4
def load_period_times():
    pt = []
    with db.atomic():
        for i in Period.select():
            pt.append(i.start_time)
            pt.append(i.end_time)
    with pt_lock:
        period_times[:] = pt
    cr = cron.Cron(get_cron())
    cr.start()
Example #5
def setUp(self):
    super().setUp()
    with db.atomic() as txn:
        # don't need to create a game, migrations/v1.py gets run before every test
        # Coin.create(id=1, name='Bitcoin', symbol='BTC')
        Game.create(name='Game',
                    starting_cash=10000.00,
                    shareable_link='aaaabbbbccccdddd',
                    shareable_code='aaaa',
                    ends_at=(datetime.utcnow().replace(tzinfo=pytz.utc) +
                             timedelta(days=7)).isoformat())
        profile = Profile.create(username='******',
                                 hashed_password='******')
        self.token = AuthToken.create(profile=profile,
                                      token='thevalidtoken').token
Example #6
File: apod.py  Project: AllanDaemon/wpd
    def db_status_fill(cls):
        from db import db, ApodStatus

        db.drop_tables([ApodStatus])
        db.create_tables([ApodStatus])

        from datetime import datetime
        with db.atomic():
            print()
            for page_name, status in reversed(STATUS.items()):
                print(f'***Inserting into db page {page_name}', end='\r')
                d = cls.page_name2date(page_name)
                ApodStatus.create(date=d,
                                  f_name=page_name,
                                  status=status.name,
                                  status_int=status.value)
            print('Done')
        db.commit()
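Two notes on this example: db.atomic() commits automatically when the block exits without an exception, so the trailing db.commit() is redundant (though harmless), and atomic() can also be used as a decorator to wrap a whole function in one transaction. A small sketch of the decorator form, assuming the same db and ApodStatus model (the function name and its rows argument, a list of field dicts, are hypothetical):

    @db.atomic()
    def fill_status(rows):
        # Runs inside a single transaction; rolled back entirely if any create() raises.
        for row in rows:
            ApodStatus.create(**row)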
Example #7
File: dz.py  Project: vvscode/py--notes
    def parsing(self):
        with db.atomic():
            while len(self.PARSED_URLS) < 30:
                min_level = min(self.queue_urls.values())
                for url, level in self.queue_urls.items():
                    if level == min_level:
                        self.queue_urls.pop(url)
                        break
                print('Scan:', url, level)

                self.PARSED_URLS.add(url)

                links = self.olx_ad_parser(url)

                for link in links:
                    if not self.link_filter(link):
                        continue

                    if (link not in self.queue_urls) and ('/obyavlenie/'
                                                          in link):
                        self.queue_urls[link] = level + 1
Example #8
def setUp(self):
    super().setUp()
    with db.atomic() as txn:
        self.game = Game.create(
            name='Game',
            starting_cash=10000.00,
            shareable_link='aaaabbbbccccdddd',
            shareable_code='aaaa',
            ends_at=(datetime.utcnow().replace(tzinfo=pytz.utc) +
                     timedelta(days=7)).isoformat())
        profile = Profile.create(username='******',
                                 hashed_password='******')
        GameProfile.create(game=self.game, profile=profile, cash=-1.0)
        Message.create(
            game=self.game.id,
            profile=profile.id,
            content="first message",
            # use default value for created_on
        )
        self.token = AuthToken.create(profile=profile,
                                      token='thevalidtoken').token
Example #9
def main():
    workers = int(os.environ.get('WORKERS', "1"))
    pypi_fetcher_dir = os.environ.get('pypi_fetcher', '/tmp/pypi_fetcher')
    ensure_pypi_fetcher(pypi_fetcher_dir)
    init_db()
    build_base(store=os.environ.get('STORE', None))
    P = Package
    with Measure('Get processed pkgs from DB'):
        processed = set((p.name, p.version)
                        for p in P.select(P.name, P.version).distinct())
        print(f"DB contains {len(processed)} pkgs at this time")
    for bucket in LazyBucketDict.bucket_keys():
        with Measure("getting jobs"):
            jobs = get_jobs(pypi_fetcher_dir, bucket, processed, amount=1000)
            if not jobs:
                continue
        with Measure('batch'):
            if workers > 1:
                pool_results = utils.parallel(extract_requirements, (jobs, ),
                                              workers=workers,
                                              use_processes=False)
            else:
                pool_results = [extract_requirements(args) for args in jobs]
        results = []
        for i, res in enumerate(pool_results):
            if isinstance(res, Exception):
                print(f"Problem with {jobs[i].name}:{jobs[i].version}")
                if isinstance(res, sp.CalledProcessError):
                    print(res.stderr)
                traceback.print_exception(res, res, res.__traceback__)
            else:
                for r in res:
                    results.append(r)
        sleep(1)
        with db.atomic():
            with Measure('bulk insert'):
                Package.bulk_create([Package(**r) for r in results])
        if os.environ.get('CLEANUP', None):
            cleanup()
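A note on the per-bucket transaction above: because each bulk_create() runs in its own db.atomic() block, a failure rolls back only that bucket's rows, while buckets committed earlier stay in the database. A sketch of catching such a failure, reusing the Package model and results list from this example (treating IntegrityError as the likely error is an assumption):

    from peewee import IntegrityError

    try:
        with db.atomic():
            Package.bulk_create([Package(**r) for r in results])
    except IntegrityError as exc:
        # The whole bucket was rolled back; earlier buckets remain committed.
        print(f"skipping bucket: {exc}")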
Example #10
    async def parse_clan_raid_logs(self,
                                   clan_details,
                                   message_stream=sys.stdout):
        clan_name, clan_id, aliases = clan_details
        kol = self.bot.kol

        clan = Clan(kol, id=clan_id)
        await clan.join()

        try:
            current = await clan.get_raids()
        except ClanPermissionsError:
            message_stream.write(
                "Skipping {} due to lack of basement permissions".format(
                    clan_name))
            return

        try:
            previous = await clan.get_previous_raids()
        except Exception:
            # Fall back to an empty list if previous raid logs can't be fetched.
            previous = []

        tasks = []
        created_raids = []
        updated_raids = []

        for data in tqdm(
                current + previous,
                desc="Discovering previous raid logs in {}".format(clan_name),
                file=message_stream,
                unit="raid logs",
                leave=False):
            raid = Raid.get_or_none(id=data.id)

            raids_list = updated_raids

            if raid is None:
                raid = Raid(id=data.id,
                            name=data.name,
                            clan_id=clan_id,
                            clan_name=clan_name)
                raids_list = created_raids

            if data.events is None and raid.end is None:
                raid.start = data.start
                raid.end = data.end
                tasks += [asyncio.ensure_future(clan.get_raid_log(data.id))]

            if raid.is_dirty():
                raids_list.append(raid)

        Raid.bulk_create(created_raids, batch_size=50)
        Raid.bulk_update(updated_raids,
                         fields=[Raid.start, Raid.end],
                         batch_size=50)

        raids_data = current + [
            await t for t in tqdm(
                asyncio.as_completed(tasks),
                desc="Loading previous raid logs in {}".format(clan_name),
                unit="raid logs",
                total=len(tasks),
                leave=False,
                file=message_stream,
                ascii=False)
        ]

        with tqdm(raids_data,
                  desc="Parsing raid logs in {}".format(clan_name),
                  unit="raids",
                  file=message_stream,
                  ascii=False) as p:
            for data in p:
                raid = Raid.get_or_none(id=data.id)

                if raid is None:
                    p.write("Something went wrong with raid {}".format(
                        data.id))
                    continue

                logs = []

                for category, events in data.events:
                    category = category.rstrip(":")
                    for event in events:
                        turns = int(event.data.pop("turns", 0))

                        event_data = json.dumps(event.data, sort_keys=True)

                        log = Log.get_or_none(Log.raid == raid,
                                              Log.category == category,
                                              Log.action == event.action,
                                              Log.username == event.username,
                                              Log.user_id == event.user_id,
                                              Log.data == event_data)

                        if log is None:
                            log = Log(
                                raid=raid,
                                category=category,
                                action=event.action,
                                username=event.username,
                                user_id=event.user_id,
                                turns=turns,
                                data=event_data,
                            )
                        elif log.turns != turns:
                            log.turns = turns
                            log.last_updated = time()

                        logs.append(log)

                with db.atomic():
                    Log.delete().where(Log.raid == raid).execute()
                    Log.bulk_create(logs, batch_size=50)
                    raid.summary = json.dumps(data.summary)
                    raid.save()
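Finally, db.atomic() calls can be nested: the outermost block opens the transaction and inner blocks become savepoints, so a per-raid block like the one above could itself run inside a larger transaction without changing its behaviour. A minimal sketch reusing the Log and logs names from this example (the outer block is hypothetical):

    with db.atomic():                  # outermost call: BEGIN ... COMMIT
        with db.atomic():              # nested call: SAVEPOINT ... RELEASE
            Log.bulk_create(logs, batch_size=50)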