# NOTE(review): this chunk begins mid-function — the enclosing def/loop is not
# visible here, so the absolute indentation below is inferred from the control
# flow; verify against the full file before relying on it.
            continue
        if data:
            if data['characters']:
                for character in data['characters']:
                    for pgcr in character['matches']:
                        # One queue entry per post-game report, tagged with the
                        # identifiers needed to resolve it later.
                        match = {
                            'membershipId': membership_id,
                            'membershipType': platform,
                            'characterId': character['characterId'],
                            'mode': mode,
                            'match': pgcr
                        }
                        try:
                            q_matches.put(json.dumps(match))
                        except Exception as e:
                            # Redis connection dropped mid-PUT: log, rebuild the
                            # queue handle, and skip this entry.
                            logger.warning(
                                f'{membership_id}:{platform}:{mode} Failed to connect to Redis while executing PUT command. Reconnecting. Reason: {e}'
                            )
                            q_matches = redis_queue.get_redis_queue(
                                queue_matches)
                            continue


if __name__ == "__main__":
    logger = log.get_logger(__name__)
    app = create_app()
    # Push an app context so code below (and the worker) can use Flask-style
    # context-bound resources.
    app.app_context().push()
    d2 = DestinyAPI()
    main()
import falcon
from app.utils import log
from app.api.base import BaseOptions
from app.utils.auth import hash_password, verify_password
from uuid import uuid4
from app.models import User, UserSchema, LoginSchema
from app.utils.admin import get_roles
from app.config.config import SECRET_KEY
import json
import jwt

LOG = log.get_logger()


class Collection(BaseOptions):
    """
    Handles /v1/users
    List users; Add new user.
    """

    def on_get(self, req, res):
        # The DB session is taken from the request context — presumably
        # injected by middleware; confirm against the app setup.
        session = req.context['session']
        users = session.query(User).all()
        schema = UserSchema()
        out = []
        for u in users:
            # marshmallow<3 style: dump(...) returns a result object whose
            # serialized payload lives in `.data`.
            out.append(schema.dump(u).data)
        res.status = falcon.HTTP_200
        res.body = self.to_json(out)

    def on_post(self, req, res):
        # NOTE(review): chunk ends here — the body of on_post is outside
        # this view.
# coding: utf-8 import uuid from datetime import datetime import requests from scrapy import Selector import re import base64 from app.model.meituan import UserInfoDoc from app.spider.eleme.base import ResponseObj from .base import encrypt, get_token from app.utils.proxies import get_proxies from app.utils.log import get_logger logger = get_logger(__name__) class MeiTuanSpider: def __init__(self, phone=None, request_id=None, *args, **kwargs): self.session = requests.session() self.headers = { "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36" } self.requestId = request_id or str(uuid.uuid4()) self.phone = phone self.hash = "" self.validate_token = "" self.user_id = "" self.model = UserInfoDoc()
from fastapi.responses import ORJSONResponse
from typing import List
from pydantic.error_wrappers import ValidationError

from app.server.database import boba_collection, user_collection
from app.utils.log import get_logger
from datetime import datetime
from bson.objectid import ObjectId
from app.server.schemas.user import RegisterInput
from app.server.utils.validate import validate_register
from argon2 import PasswordHasher
from argon2.exceptions import VerifyMismatchError
from humps import camelize

log = get_logger()

# GraphQL schema in SDL form.
# NOTE(review): `gql` is not imported in this chunk — presumably imported
# elsewhere in the file (e.g. from ariadne); confirm.
# NOTE(review): the chunk ends inside this triple-quoted string; the rest of
# the SDL (and the closing quotes) is outside this view.
type_defs = gql(
    """
    type Boba {
        _id: String!
        drinkName: String!
        iceLevel: String!
        sugarLevel: String!
        createdAt: DateTime!
        updatedAt: DateTime!
    }

    type User {
        _id: String!
        firstName: String!
import json
import uuid

from celery import Celery

from app.event_csv.utils import *
from app.ftp import FTPClient
from app.utils.index import build_index, in_index
from app.utils.log import get_logger, fatal
from models.models import VehicleEventSummary, CounterEvent, DoorEvent, database

# NOTE(review): the project logger is bound to the name `logging`, which
# shadows the stdlib `logging` module for the rest of this file — confirm
# this is intentional.
logging = get_logger(__name__)

celery_app = Celery('fetcherWorker', backend='rpc://',
                    broker='pyamqp://guest@localhost//')


@celery_app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # Schedule the FTP fetch every 43200 s (12 hours).
    sender.add_periodic_task(43200, store_data_from_ftp.s())


@celery_app.task
def store_data_from_ftp():
    # Distinct (filename, vehicle) pairs already in the DB, turned into an
    # index — presumably used to skip files that were imported before
    # (in_index is imported above); confirm in the truncated part below.
    qs = VehicleEventSummary.select(VehicleEventSummary.filename,
                                    VehicleEventSummary.vehicle).distinct().dicts()
    file_index = build_index(qs, 'filename', 'vehicle')
    with FTPClient() as ftp:
        # Top-level date directories named like YYYY_MM_DD.
        dirs = ftp.ls_by_pattern(
            'logging',
            pattern=r"([12]\d{3}_(0[1-9]|1[0-2])_(0[1-9]|[12]\d|3[01]))")
        for d in dirs:
            logging.info('{}'.format(d))
            # Vehicle subdirectories: names of 4 or more digits.
            vehicles = ftp.ls_by_pattern('logging', d, pattern=r"\d{4,}", )
            for v in vehicles:
                logging.info('- {}'.format(v))
                # CSV report files under <date>/<vehicle>/count.
                reports = ftp.ls_by_pattern('logging', d, v, 'count',
                                            pattern=r"[\w\s]*\.csv")
                # NOTE(review): chunk ends here — the rest of this loop body
                # is outside this view.