Example #1
    def init_tables(self):
        try:
            db.create_tables([Player, Match, Guild, GuildPlayer])
        except Exception as e:
            logging.exception(
                f"[DB] Couldn't create tables; they may already exist in the db: {e}"
            )
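If the broad except is only guarding against tables that already exist, peewee's create_tables can be told to skip them instead; a minimal sketch, assuming a peewee version where the safe option maps to CREATE TABLE IF NOT EXISTS:

    def init_tables(self):
        # safe=True emits CREATE TABLE IF NOT EXISTS, so re-runs are harmless
        db.create_tables([Player, Match, Guild, GuildPlayer], safe=True)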
Example #2
    def update_player_elo(self,
                          p1: Player,
                          p2: Player,
                          score_p1,
                          score_p2,
                          guild_id=None):
        k = ELO_CONSTANT
        try:
            # update general elo
            p1_elo = p1.elo
            p1.elo = p1.elo + k * (
                score_p1 - self.calculate_expected_score(p1_elo, p2.elo))
            p2.elo = p2.elo + k * (
                score_p2 - self.calculate_expected_score(p2.elo, p1_elo))
            # update guild elo
            if guild_id:
                p1g = self.get_guild_player_by_id(player_id=p1.id,
                                                  guild_id=guild_id)
                p2g = self.get_guild_player_by_id(player_id=p2.id,
                                                  guild_id=guild_id)
                p1g_elo = p1g.elo
                p1g.elo = p1g.elo + k * (
                    score_p1 - self.calculate_expected_score(p1g_elo, p2g.elo))
                p2g.elo = p2g.elo + k * (
                    score_p2 - self.calculate_expected_score(p2g.elo, p1g_elo))

                p1g.save(only=[GuildPlayer.elo])
                p2g.save(only=[GuildPlayer.elo])

            p1.save(only=[Player.elo])
            p2.save(only=[Player.elo])
            logging.info(f'[DB] Player elos:{p1.id}, {p2.id} have been updated')
        except Exception as e:
            logging.exception(
                f'[DB] Error while updating player elos:{p1.id}, {p2.id} {e}')
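calculate_expected_score is not shown in this snippet; a minimal sketch, assuming the standard Elo expected-score formula on the usual 400-point scale:

    def calculate_expected_score(self, rating_a, rating_b):
        # expected score (win probability) of the player rated rating_a
        return 1.0 / (1.0 + 10 ** ((rating_b - rating_a) / 400.0))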
Example #3
def main():
    try:
        retry_count = 0
        image_count = 0
        while retry_count < 4 and image_count < 87:
            r = requests.get(url, headers=headers).json()['value']
            image_count = len(r)
            logging.info('%s items found.' % image_count)
            if image_count < 87:
                logging.warning('Image count < 87')
                retry_count += 1
                sleep(5)

    except JSONDecodeError:
        logging.exception("JSONDecodeError")
    else:
        path_split = r[0]['ImageLink'].split('?')[0].split('/')
        request_datetime = ('%s%s' % (path_split[3], path_split[4])).replace(
            '-', '')
        dest_dir = os.path.join(images_dir, request_datetime)

        if os.path.isdir(dest_dir):
            logging.info('Deleting %s' % dest_dir)
            shutil.rmtree(dest_dir)

        os.makedirs(dest_dir)

        meta_df = []

        logging.info('Downloading images to %s' % dest_dir)
        for item in r:
            dest_path = os.path.join(
                dest_dir, item['ImageLink'].split('?')[0].split('/')[-1])

            download_image(item['ImageLink'], dest_path)

            path_split = item['ImageLink'].split('?')[0].split('/')
            meta_data = [
                item['CameraID'], item['Latitude'], item['Longitude'],
                path_split[3], path_split[-1].split('_')[1],
                path_split[-1].split('.')[0],
                Image.open(dest_path).size
            ]

            meta_df.append(meta_data)

        metadata_dir = os.path.join(data_dir, 'traffic-images-metadata')

        if not os.path.isdir(metadata_dir):
            logging.info('Creating %s' % metadata_dir)
            os.makedirs(metadata_dir)
        metadata_path = os.path.join(metadata_dir,
                                     '%s_images.csv' % request_datetime)

        pd.DataFrame(meta_df,
                     columns=[
                         'CameraID', 'Latitude', 'Longitude', 'Date', 'Time',
                         'Filename', 'Dimensions'
                     ]).to_csv(metadata_path, index=False, header=False)
        logging.info('Saved metadata to %s' % metadata_path)
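download_image is defined elsewhere in this script; a minimal sketch, assuming plain requests with a streamed write so large images are never held fully in memory:

def download_image(url, dest_path):
    with requests.get(url, stream=True, timeout=30) as response:
        response.raise_for_status()
        with open(dest_path, 'wb') as f:
            # write the image to disk in 8 KiB chunks
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)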
Example #4
  def get_videolinks(self, episode_ids: Dict[int, str]) -> Dict[int, str]:
    # get video source html page
    source_info_path = NineAnime.EPISODE_INFO
    url_decoder = VideoURLDecoder()
    logging.debug('headers:\n%s', self.request.headers)
    videolinks = {}
    for current_ep, episode_hash in episode_ids.items():
      if not episode_hash:
        logging.info(f'Hash not found for episode {current_ep}! Skipping.')
        continue

      logging.debug("Episode %s data-id=%s", current_ep, episode_hash)
      # sensitive code
      content = self.request.get(source_info_path, {
          'id': episode_hash})
      try:
        source_html_url = url_decoder.get(json.loads(content)['url'])
        logging.info(f'Got link for episode {current_ep}')
      except Exception:
        logging.exception(f'source_info_url response:\n{content}')
        return
      videolinks[current_ep] = source_html_url
      # to avoid being blocked by spamming
      duration = random.uniform(0.2, 1)
      time.sleep(duration)

    return videolinks
Example #5
def create_batch_archives_and_send_to_s3(s3_buckets):
    elasticsearch_docs = []
    with TemporaryDirectory() as temporary_directory:
        buckets_to_sync = set()

        # this step only creates the archives and deletes the original files;
        # archive creation and the s3 sync are decoupled to avoid race conditions on S3_SYNC_BUCKETS_FOLDER
        for bucket in s3_buckets:
            bucket_directory = os.path.join(S3_BATCH_BUCKETS_FOLDER, bucket)
            for directory, subdirectories, files in os.walk(bucket_directory):
                if files:
                    logging.debug(directory)
                    logging.debug(files)
                    bucket_elasticsearch_docs = create_tar_archives(bucket, directory, files, temporary_directory)
                    if bucket_elasticsearch_docs:
                        elasticsearch_docs += bucket_elasticsearch_docs
                        buckets_to_sync.add(bucket)

        # this step syncs buckets to s3 using s3_client.upload_file for performance
        for bucket in buckets_to_sync:
            try:
                sync_bucket_folder_and_delete_files(bucket, temporary_directory)
            except Exception as e:
                logging.exception(e)
    return elasticsearch_docs
Example #6
    def get_guild_stats(self, guild_id):
        try:
            players = self.get_guild_players_by_id(guild_id)
            return [model_to_dict(p) for p in players]
        except Exception as e:
            logging.exception(
                f'[DB] Error while getting guild stats: {guild_id} {e}')
Example #7
    def add_guild(self, guild_id):
        try:
            with db.atomic():
                guild = Guild.create(id=guild_id)
                # logging.info(f"new guild -> : {guild_id}")
                return guild
        except Exception as e:
            logging.exception(f'[DB] Error while adding guild : {e}')
Example #8
def log_error_and_upload_manifests_to_s3(error, elasticsearch_docs):
    logging.error("Exception caught while sending manifests to elasticsearch")
    logging.exception(error)
    logging.info("Uploading manifests to s3 fallback bucket")
    s3_client.put_object(Bucket=MANIFEST_FALLBACK_BUCKET,
                         Key=f"s3-batch/manifests/{datetime.utcnow().strftime('%Y-%m-%d')}.json.gz",
                         Body=gzip.compress(json.dumps(elasticsearch_docs).encode("utf-8")),
                         ACL="private")
Example #9
    def update_players(self, match: Match):
        try:
            white_pl = self.get_player_by_id(match.white_player_id)
            black_pl = self.get_player_by_id(match.black_player_id)

            white_pl.matches += 1
            black_pl.matches += 1
            white_pl.last_match_id = match.id
            black_pl.last_match_id = match.id
            white_pl.last_match_date = match.match_date
            black_pl.last_match_date = match.match_date
            res = match.result_code
            if res == "1/2-1/2":
                w_score = 0.5
                b_score = 0.5
                white_pl.draws += 1
                black_pl.draws += 1
            elif res == "1-0":
                w_score = 1
                b_score = 0
                white_pl.wins += 1
                black_pl.loses += 1
            else:
                w_score = 0
                b_score = 1
                white_pl.loses += 1
                black_pl.wins += 1

            white_pl.save(only=[
                Player.matches, Player.last_match_id, Player.last_match_date,
                Player.wins, Player.loses, Player.draws
            ])
            black_pl.save(only=[
                Player.matches, Player.last_match_id, Player.last_match_date,
                Player.wins, Player.loses, Player.draws
            ])

            self.update_player_elo(white_pl,
                                   black_pl,
                                   score_p1=w_score,
                                   score_p2=b_score,
                                   guild_id=match.guild_id)
            logging.info(
                f'[DB] Players:{white_pl.id}, {black_pl.id} have been updated')

            return {
                "white_player": model_to_dict(white_pl),
                "black_player": model_to_dict(black_pl),
                "match": match
            }
        except Exception as e:
            logging.exception(
                f'[DB] Error while updating players:{match.white_player_id}, {match.black_player_id} {e}'
            )
Example #10
    def add_guild_player(self, guild_id, player_id):
        try:
            with db.atomic():
                guild_pl = GuildPlayer.create(guild_id=guild_id,
                                              player_id=player_id,
                                              elo=1500.0)
                # logging.info(f"new guild_player -> : {player_id}")
                return guild_pl
        except Exception as e:
            logging.exception(f'[DB] Error while adding guild_player : {e}')
Example #11
def sync_bucket_folder_and_delete_files(bucket, buckets_directory):
    directory_to_sync = os.path.join(buckets_directory, bucket)
    for directory, subdirs, files in os.walk(directory_to_sync):
        for file in files:
            filename = os.path.join(directory, file)
            key = os.path.relpath(filename, directory_to_sync)
            try:
                s3_client.upload_file(filename, bucket, key)
                os.remove(filename)
            except Exception as e:
                logging.exception(e)
Example #12
    def start(self):
        self.is_scraping = True
        logging.info("[Worker] Starting worker")
        while self.is_scraping:
            work_start_time = time.time()
            try:
                self.reddit_scraper.scrape()
                self.database.update_tables()
            except Exception as e:
                logging.exception(f"[Worker] Exception while scraping : {e}")
            logging.info(f"[Worker] Work done in {time.time() - work_start_time:.3f}s")
            time.sleep(self.scrape_interval)
Example #13
def get_opened_files(log_errors=True):
    for pid in psutil.pids():
        try:
            yield (file[0] for file in psutil.Process(pid).open_files())
        except psutil.AccessDenied as e:
            if log_errors:
                logging.error("Access denied while getting process opened files")
                logging.exception(e)
        except psutil.NoSuchProcess:
            logging.debug("Process no longer exists")
        except Exception as e:
            if log_errors:
                logging.exception(e)
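Note that each yield is itself a generator of open-file paths for a single process, so a caller iterates two levels deep; a hypothetical usage sketch:

# print every file currently opened by any process we may inspect
for open_paths in get_opened_files(log_errors=False):
    for path in open_paths:
        print(path)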
Example #14
    def get_player_stats(self, player_id, guild_id=None):
        try:
            pl = self.get_player_by_id(player_id)
            stats = {}
            stats.update(player=model_to_dict(pl))
            if guild_id:
                pl_guild = self.get_guild_player_by_id(player_id=player_id,
                                                       guild_id=guild_id)
                stats.update(guild_player=model_to_dict(pl_guild))
            return stats
        except Exception as e:
            logging.exception(
                f'[DB] Error while getting player stats: {player_id} {e}')
Example #15
    def add_player(self, player_id, player_nick):
        try:
            with db.atomic():
                player = Player.create(id=player_id,
                                       nickname=player_nick,
                                       matches=0,
                                       wins=0,
                                       loses=0,
                                       draws=0,
                                       last_match_id="",
                                       last_match_date=time.time(),
                                       elo=1500.0)
                # logging.info(f"new player -> : {player_id}")
                return player
        except Exception as e:
            logging.exception(f'[DB] Error while adding player : {e}')
Example #16
    def add_match(self,
                  match_id,
                  guild_id,
                  white_id=-1,
                  black_id=-1,
                  match_ts=None,
                  result="unfinished"):
        try:
            # default arguments are evaluated once at definition time, so
            # time.time() must be called inside the function body
            if match_ts is None:
                match_ts = time.time()
            with db.atomic():
                match = Match.create(id=match_id,
                                     guild_id=guild_id,
                                     white_player_id=white_id,
                                     black_player_id=black_id,
                                     match_date=match_ts,
                                     result=result,
                                     result_code="?")
                # logging.info(f"new match -> : {match_id}")
                return match
        except Exception as e:
            logging.exception(f'[DB] Error while adding match : {e}')
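The original signature declared match_ts=time.time(); Python evaluates default arguments once, at definition time, so every match created without an explicit timestamp would have shared the module-load time. A tiny demonstration of the pitfall:

import time

def stale(ts=time.time()):        # default computed once, when def runs
    return ts

time.sleep(1)
print(stale() == stale())         # True: both calls reuse the old value
print(stale() < time.time())      # True: the default is already stale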
Example #17
    def update_match(self,
                     match_id,
                     result,
                     result_code=None,
                     white_id=None,
                     black_id=None):
        try:
            m = self.get_match_by_id(match_id)
            m.result = result
            if result_code is not None:
                m.result_code = result_code
            if white_id is not None and black_id is not None:
                m.white_player_id = white_id
                m.black_player_id = black_id
            m.save(only=[
                Match.result, Match.white_player_id, Match.black_player_id,
                Match.result_code
            ])
            logging.info(f'[DB] Match:{match_id} has been updated')
        except Exception as e:
            logging.exception(
                f'[DB] Error while updating match:{match_id} {e}')
Example #18
    def update_match_end(self, match_id, data):
        try:
            m = self.get_match_by_id(match_id)
            if m.result == "unfinished":
                status = data["status"]
                result_code = self.get_result_code(match_data=data)
                self.update_match(match_id=match_id,
                                  result=status,
                                  result_code=result_code)

                # if we know player ids update their stats
                if m.white_player_id != -1 and m.black_player_id != -1:
                    # send updated match obj
                    m = self.get_match_by_id(match_id)
                    updated_data = self.update_players(match=m)
                    return updated_data
            else:
                # TODO: raise an exception here instead of silently passing
                pass
        except Exception as e:
            logging.exception(
                f'[DB] Error while updating match(end):{match_id} {e}')
Example #19
def catch_exception(session, e):
    logging.error("DbConnectorRetrying error. Catch exception with traceback")
    logging.exception(e)
    session.rollback()
Example #20
    def connect(self):
        try:
            db.connect()
            self.connected = True
        except Exception as e:
            logging.exception(f"[DB] Couldn't connect to db : {e}")