Exemple #1
0
def get_wrapper(target: str) -> Tuple[BaseWrapper, str]:
    """Return a lazily-built, cached model wrapper and its prediction key.

    :param target: task name ("segmentation", "detection", "keypoints");
        any other value falls back to the helmet detector.
    :return: (wrapper, target_name) where target_name is the key the
        wrapper's predictions are reported under ("mask", "boxes", ...).
    """
    # (cache_key, wrapper factory, prediction key) per task; replaces four
    # copies of the same cache-or-build branch.
    registry = {
        "segmentation": ("segmentation_wrapper", TorchvisionSegmentationWrapper, "mask"),
        "detection": ("detection_wrapper", YOLOWrapper, "boxes"),
        "keypoints": ("keypoints_wrapper", TorchvisionKeypointsWrapper, "keypoints"),
    }
    # Unknown targets map to the helmet detector, as in the original else-branch.
    cache_key, factory, target_name = registry.get(
        target, ("helmet_det", HelmetnetWrapper, "boxes"))

    wrapper = cache.get(cache_key)
    if wrapper is None:
        # Build and load the model once, then share it via the cache.
        wrapper = factory()
        wrapper.load()
        cache.set(cache_key, wrapper)
    return wrapper, target_name
    def check_api_key(*args, **kwargs):
        """Authorize a request: try the admin JWT role check first, then fall
        back to one-time-link / one-time-password headers.

        NOTE(review): `func` is taken from an enclosing decorator scope that
        is not visible in this chunk — confirm this wraps a view function.
        """
        FORBIDDEN = requests.codes.forbidden

        # enforce admin permission
        try:
            return jwt.has_one_of_roles([ADMIN])(func)(*args, **kwargs)
        except Exception as e:
            # Role check failed; log and fall through to the OTL/OTP checks.
            current_app.logger.error(str(e))

        # TODO this checks for debug purpose, will be reworked in final pr
        otl_guid = request.headers.get(ONE_TIME_LINK)
        otp_guid = request.headers.get(ONE_TIME_PASSWORD)
        if not otl_guid and not otp_guid:
            # Neither credential supplied: reject.
            current_app.logger.info("OTL and OTP is empty")
            abort(FORBIDDEN)
        elif otl_guid and otl_guid == cache.get(otl_guid):
            # NOTE(review): this compares the OTL guid to the value cached
            # under that same guid; elsewhere in this codebase the cache maps
            # otl_guid -> app_guid, so this branch may never match — confirm.
            current_app.logger.info("OTL IS PRESENT NEED TO GENERATE OTP")
            abort(FORBIDDEN)
        elif not otl_guid and otp_guid:
            current_app.logger.info("OTL is not present but OTP is")
            # An OTP is valid only for the application it was issued to.
            otp_app_guid = cache.get(otp_guid)
            header_app_guid = request.headers.get("app_guid")
            if otp_app_guid and otp_app_guid == header_app_guid:
                current_app.logger.info("OTP is correct")
                return func(*args, **kwargs)
            else:
                current_app.logger.info("OTP is linked to a different application")
                abort(FORBIDDEN)
        else:
            # OTL present but no longer cached (expired), or OTL+OTP combination.
            current_app.logger.info("OTP is expired")
            abort(FORBIDDEN)
Exemple #3
0
    def patch(self, document_guid=None):
        """Append a chunk to an in-progress TUS upload.

        Validates the request against the upload state held in the cache
        (path, offset, declared size), writes the chunk at the expected
        offset, then either finalizes the upload or stores the new offset.

        :param document_guid: GUID of the upload being continued.
        :return: (error payload, status) tuple on failure, or a 204 Flask
            response carrying the TUS headers on success.
        """
        if document_guid is None:
            return self.create_error_payload(
                400, 'Must specify document GUID in PATCH'), 400

        file_path = cache.get(FILE_UPLOAD_PATH(document_guid))
        if file_path is None or not os.path.lexists(file_path):
            return self.create_error_payload(
                404, 'PATCH sent for a upload that does not exist'), 404

        # The client must resume exactly where the server left off.
        request_offset = int(request.headers.get('Upload-Offset', 0))
        file_offset = cache.get(FILE_UPLOAD_OFFSET(document_guid))
        if request_offset != file_offset:
            # Fixed typo in the error message ("offest" -> "offset").
            return self.create_error_payload(
                409,
                "Offset in request does not match uploaded file's offset"), 409

        chunk_size = request.headers.get('Content-Length')
        if chunk_size is None:
            return self.create_error_payload(
                400, 'No Content-Length header in request'), 400
        chunk_size = int(chunk_size)

        new_offset = file_offset + chunk_size
        file_size = cache.get(FILE_UPLOAD_SIZE(document_guid))
        if new_offset > file_size:
            return self.create_error_payload(
                413,
                'The uploaded chunk would put the file above its declared file size.'
            ), 413

        try:
            # r+b writes in place without truncating previously uploaded data.
            with open(file_path, "r+b") as f:
                f.seek(file_offset)
                f.write(request.data)
        except IOError:
            return self.create_error_payload(500,
                                             'Unable to write to file'), 500

        if new_offset == file_size:
            # File transfer complete: stamp the document and drop upload state.
            doc = DocumentManager.find_by_document_manager_guid(document_guid)
            doc.upload_completed_date = datetime.now()
            doc.save()

            cache.delete(FILE_UPLOAD_SIZE(document_guid))
            cache.delete(FILE_UPLOAD_OFFSET(document_guid))
            cache.delete(FILE_UPLOAD_PATH(document_guid))
        else:
            # Upload still in progress: remember where the next chunk starts.
            cache.set(FILE_UPLOAD_OFFSET(document_guid), new_offset,
                      TIMEOUT_24_HOURS)

        response = make_response("", 204)
        response.headers['Tus-Resumable'] = TUS_API_VERSION
        response.headers['Tus-Version'] = TUS_API_SUPPORTED_VERSIONS
        response.headers['Upload-Offset'] = new_offset
        response.headers[
            'Access-Control-Expose-Headers'] = "Tus-Resumable,Tus-Version,Upload-Offset"
        return response
def get_currency_rates(config):
    """Return currency rates, serving from the cache when available.

    Reads the cache exactly once: the original probed `cache.get` and then
    read it again, which both doubled the cache round-trips and could return
    None if the entry expired between the two calls.
    """
    rates = cache.get("currency_rates")
    if not rates:
        rates = fetch_currency_rates(config)
        cache.set("currency_rates", rates)
    return rates
Exemple #5
0
def get_zone_estimator():
    """Return a cached ArucoZoneEstimator built from the aruco JSON config.

    Both the parsed config and the estimator itself are cached so the file
    read and estimator construction happen only once per process.
    """
    aruco_params = cache.get("aruco_params")
    if aruco_params is None:
        with open("robot_work_zone_estimation/aruco_config.json") as conf_file:
            aruco_params = json.load(conf_file)
        cache.set("aruco_params", aruco_params)

    marker_id = aruco_params["marker_idx"]
    wz_cx = aruco_params["wz_cx"]
    wz_cy = aruco_params["wz_cy"]
    wz_height = aruco_params["wz_height"]
    wz_width = aruco_params["wz_width"]
    marker_world_size = aruco_params["marker_world_size"]
    marker_size = aruco_params["marker_size"]
    camera_params = aruco_params["camera_params"]
    # Use the builtin float dtype: the `np.float` alias was deprecated in
    # NumPy 1.20 and removed in 1.24, so the original raises AttributeError
    # on current NumPy.
    camera_params = CameraParams(
        np.array(camera_params['camera_mtx'], dtype=float),
        np.array(camera_params['distortion_vec'], dtype=float),
        np.array(camera_params['rotation_vec'], dtype=float),
        np.array(camera_params['translation_vec'], dtype=float))

    zone = Workzone(wz_cx, wz_cy, wz_height, wz_width)
    estimator = cache.get("zone_estimator")
    if estimator is None:
        estimator = ArucoZoneEstimator(marker_world_size,
                                       ARUCO_MARKER_SIZE[marker_size],
                                       marker_id, camera_params, zone)
        cache.set("zone_estimator", estimator)
    return estimator
Exemple #6
0
    def getOGCdataframe(cls, cache_key, csv_url, process):
        """Return the deserialized dataframe for *cache_key*.

        On a cache miss the OGC refresh runs synchronously; on a hit the
        cached bytes are served immediately while a background daemon thread
        refreshes the data for subsequent calls.

        Fixes a latent NameError in the original: if the cache entry expired
        between the first and second `cache.get`, `df` was never assigned.
        """
        serializer = pa.default_serialization_context()
        data = cache.get(cache_key)

        app = current_app._get_current_object()

        if not data:
            # Empty dataset: refresh synchronously and block until done.
            df = refreshOGCdata(app, cache_key, csv_url, process)
            # Prefer the serialized copy the refresh stored, if any.
            data = cache.get(cache_key)
            if data:
                df = serializer.deserialize(data)
        else:
            # Serve the cached copy now; refresh in the background.
            df = serializer.deserialize(data)
            thread = Thread(target=refreshOGCdata,
                            args=(
                                app,
                                cache_key,
                                csv_url,
                                process,
                            ))
            thread.daemon = True
            thread.start()

        return df
Exemple #7
0
    def get(self):
        """Return the cached mine-map JSON, rebuilding the cache on a miss.

        The mine map response is cached in redis with a timeout because
        generating and jsonifying the map data takes 4-7 seconds with
        50,000 points.

        TODO: Use some custom representation of this data vs JSON. The
        json string is massive (with 50,000 points: 16mb uncompressed,
        2.5mb compressed). A quick test using delimited data brings this
        down to ~1mb compressed.
        """
        map_result = cache.get(MINE_MAP_CACHE)
        last_modified = cache.get(MINE_MAP_CACHE + '_LAST_MODIFIED')
        if not map_result:
            map_result = MineMapResource.rebuild_and_return_map_cache()
            # Re-read the timestamp: the value fetched above predates the
            # rebuild and is None on a cold cache, which previously left
            # the response without a meaningful Last-Modified date.
            last_modified = cache.get(MINE_MAP_CACHE + '_LAST_MODIFIED')

        # The cached value is already a JSON string, so return a plain flask
        # response to prevent flask_restplus from jsonifying it again, which
        # would mangle the json.
        response = make_response(map_result)
        response.headers['content-type'] = 'application/json'

        # Set a last modified date so flask can answer 304 Not Modified and
        # the client doesn't re-download the payload unless needed.
        response.last_modified = last_modified
        response.make_conditional(request)

        return response
Exemple #8
0
    def get(self, mine_no_or_guid=None):
        """Return a single mine, the map view, or a paginated mine list.

        With *mine_no_or_guid*: look up that mine (404 if missing).
        Without it: return the cached map JSON when ?map=true, otherwise a
        filtered, paginated list of mines.
        """
        if mine_no_or_guid:
            mine = Mine.find_by_mine_no_or_guid(mine_no_or_guid)
            if mine:
                return mine.json()
            return self.create_error_payload(404, 'Mine not found'), 404
        else:
            # Handle MapView request
            _map = request.args.get('map', None, type=str)
            if _map and _map.lower() == 'true':

                # Below caches the mine map response object in redis with a timeout.
                # Generating and jsonifying the map data takes 4-7 seconds with 50,000 points,
                # so caching seems justified.
                #
                # TODO: Use some custom representation of this data vs JSON. The
                # json string is massive (with 50,000 points: 16mb uncompressed, 2.5mb compressed).
                # A quick test using delimented data brings this down to ~1mb compressed.
                map_result = cache.get(MINE_MAP_CACHE)
                last_modified = cache.get(MINE_MAP_CACHE + '_LAST_MODIFIED')
                if not map_result:
                    # `!= None` is intentional: SQLAlchemy overloads != to
                    # produce an IS NOT NULL clause; `is not None` would break it.
                    records = MineMapViewLocation.query.filter(MineMapViewLocation.latitude != None)
                    last_modified = datetime.now()

                    # jsonify then store in cache
                    map_result = json.dumps(
                        {
                            'mines': list((map(lambda x: x.json_for_map(), records)))
                        },
                        separators=(',', ':'))
                    cache.set(MINE_MAP_CACHE, map_result, timeout=TIMEOUT_12_HOURS)
                    cache.set(
                        MINE_MAP_CACHE + '_LAST_MODIFIED', last_modified, timeout=TIMEOUT_12_HOURS)

                # It's more efficient to store the json to avoid re-initializing all of the objects
                # and jsonifying on every request, so a flask response is returned to prevent
                # flask_restplus from jsonifying the data again, which would mangle the json.
                response = make_response(map_result)
                response.headers['content-type'] = 'application/json'

                # While we're at it, let's set a last modified date and have flask return not modified
                # if it hasn't so the client doesn't download it again unless needed.
                response.last_modified = last_modified
                response.make_conditional(request)

                return response

            paginated_mine_query, pagination_details = self.apply_filter_and_search(request.args)
            mines = paginated_mine_query.all()
            return {
                'mines': list(map(lambda x: x.json_for_list(), mines)),
                'current_page': pagination_details.page_number,
                'total_pages': pagination_details.num_pages,
                'items_per_page': pagination_details.page_size,
                'total': pagination_details.total_results,
            }
Exemple #9
0
def recent_terms():
    """
    Returns most recent submissions table; caches query results for half a minute.
    """
    submissions_key = 'recent-submissions'
    # Read the cache once: the original probed it and then read it again,
    # which could race with the 30-second expiry and return None.
    data = cache.get(submissions_key)
    if not data:
        wiki_records = get_all_records()
        data = format_record(wiki_records['Items'])
        cache.set(submissions_key, data, timeout=30)

    return jsonify(data), 200
Exemple #10
0
    def patch(self, document_guid):
        """Append a chunk to an in-progress TUS upload.

        Validates the request against the upload state held in the cache
        (path, offset, declared size), writes the chunk at the expected
        offset, then either finalizes the upload or stores the new offset.

        :param document_guid: GUID of the upload being continued.
        :raises NotFound, Conflict, BadRequest, RequestEntityTooLarge,
            InternalServerError: on the corresponding validation failures.
        """
        file_path = cache.get(FILE_UPLOAD_PATH(document_guid))
        if file_path is None or not os.path.lexists(file_path):
            raise NotFound('PATCH sent for a upload that does not exist')

        # The client must resume exactly where the server left off.
        request_offset = int(request.headers.get('Upload-Offset', 0))
        file_offset = cache.get(FILE_UPLOAD_OFFSET(document_guid))
        if request_offset != file_offset:
            raise Conflict(
                "Offset in request does not match uploaded file's offset")

        chunk_size = request.headers.get('Content-Length')
        if chunk_size is None:
            raise BadRequest('No Content-Length header in request')
        chunk_size = int(chunk_size)

        new_offset = file_offset + chunk_size
        file_size = cache.get(FILE_UPLOAD_SIZE(document_guid))
        if new_offset > file_size:
            raise RequestEntityTooLarge(
                'The uploaded chunk would put the file above its declared file size.'
            )

        try:
            # r+b writes in place without truncating already-uploaded data.
            with open(file_path, "r+b") as f:
                f.seek(file_offset)
                f.write(request.data)
        except IOError as e:
            raise InternalServerError('Unable to write to file')

        if new_offset == file_size:
            # File transfer complete.
            doc = Document.find_by_document_guid(document_guid)
            doc.upload_completed_date = datetime.utcnow()
            doc.save()

            cache.delete(FILE_UPLOAD_SIZE(document_guid))
            cache.delete(FILE_UPLOAD_OFFSET(document_guid))
            cache.delete(FILE_UPLOAD_PATH(document_guid))
        else:
            # File upload still in progress
            cache.set(FILE_UPLOAD_OFFSET(document_guid), new_offset,
                      TIMEOUT_24_HOURS)

        response = make_response('', 204)
        response.headers['Tus-Resumable'] = TUS_API_VERSION
        response.headers['Tus-Version'] = TUS_API_SUPPORTED_VERSIONS
        response.headers['Upload-Offset'] = new_offset
        response.headers[
            'Access-Control-Expose-Headers'] = "Tus-Resumable,Tus-Version,Upload-Offset"
        return response
Exemple #11
0
def inbox():
    """Render the user's inbox from cached emails, or redirect appropriately.

    Requires an active subscription or trial and an existing mailbox; when
    the mailbox cache is cold, redirects to the refresh route.
    """
    if request.method == 'GET':
        if current_user.subscription or current_user.trial:
            if current_user.active_mailbox:
                # Single cache read instead of probe-then-read, which could
                # race with cache expiry between the two calls.
                emails = cache.get(current_user.mailbox_id)
                if emails:
                    return render_template('user/inbox.html',
                                           emails=emails,
                                           mailbox_id=current_user.mailbox_id,
                                           route="inbox")
                else:
                    return redirect(url_for('user.refresh'))
            else:
                flash('You don\'t have an inbox yet. Please create one below.',
                      'error')
        return redirect(url_for('user.settings'))
Exemple #12
0
    def get(self):
        """List paginated parties; serve the all-inspectors query from cache.

        The all-inspectors listing is requested frequently with identical
        parameters, so its marshalled result is cached for 12 hours.
        """
        # Compute the query-shape match once instead of rebuilding and
        # comparing dict(request.args) in two places.
        is_all_inspectors_query = dict(
            request.args) == ALL_INSPECTORS_QUERY_PARAMS

        if is_all_inspectors_query:
            result = cache.get(GET_ALL_INSPECTORS_KEY)
            if result:
                current_app.logger.debug(
                    f'CACHE HIT - {GET_ALL_INSPECTORS_KEY}')
                return result
            else:
                current_app.logger.debug(
                    f'CACHE MISS - {GET_ALL_INSPECTORS_KEY}')

        paginated_parties, pagination_details = self.apply_filter_and_search(
            request.args)
        if not paginated_parties:
            raise BadRequest('Unable to fetch parties')

        result = marshal(
            {
                'records': paginated_parties.all(),
                'current_page': pagination_details.page_number,
                'total_pages': pagination_details.num_pages,
                'items_per_page': pagination_details.page_size,
                'total': pagination_details.total_results,
            }, PAGINATED_PARTY_LIST)

        # Only cache non-empty results so a transient empty read doesn't stick.
        if is_all_inspectors_query and pagination_details.total_results > 0:
            current_app.logger.debug(f'SET CACHE - {GET_ALL_INSPECTORS_KEY}')
            cache.set(GET_ALL_INSPECTORS_KEY, result, timeout=TIMEOUT_12_HOURS)
        return result
Exemple #13
0
    def get(self):
        """Exchange a one-time download token for the document file.

        The token is consumed (deleted) on first use regardless of outcome.
        ?as_attachment=true/false overrides the default, which is to display
        PDFs inline and download everything else.
        """
        token_guid = request.args.get('token', '')
        as_attachment = request.args.get('as_attachment', None)
        # Tokens are single-use: consume before validating.
        document_guid = cache.get(DOWNLOAD_TOKEN(token_guid))
        cache.delete(DOWNLOAD_TOKEN(token_guid))

        if not document_guid:
            raise BadRequest('Valid token required for download')

        document = Document.query.filter_by(
            document_guid=document_guid).first()
        if not document:
            raise NotFound(
                'Could not find the document corresponding to the token')
        if as_attachment is not None:
            # Direct boolean expression instead of `True if ... else False`.
            as_attachment = as_attachment == 'true'
        else:
            as_attachment = '.pdf' not in document.file_display_name.lower()

        if document.object_store_path:
            return ObjectStoreStorageService().download_file(
                path=document.object_store_path,
                display_name=document.file_display_name,
                as_attachment=as_attachment)
        else:
            return send_file(filename_or_fp=document.full_storage_path,
                             attachment_filename=document.file_display_name,
                             as_attachment=as_attachment)
    def download(file_url):
        """Stream a file from NROS, authenticating with a cached OAuth token.

        Fetches (and caches for 60 minutes) a client-credentials token, reads
        the file's metadata for its filename, then proxies the file content
        back to the caller as an attachment.

        :param file_url: NROS file resource URL; content is read from
            ``{file_url}/content``.
        """
        _nros_token = cache.get(NROS_TOKEN)
        if _nros_token is None:
            # Cache miss: obtain a fresh token via HTTP basic auth.
            _nros_client_id = current_app.config['NROS_CLIENT_ID']
            _nros_client_secret = current_app.config['NROS_CLIENT_SECRET']
            _nros_token_url = current_app.config['NROS_TOKEN_URL']

            _nros_auth = HTTPBasicAuth(_nros_client_id, _nros_client_secret)
            _nros_resp = requests.get(_nros_token_url, auth=_nros_auth)
            _nros_resp_body = json.loads(_nros_resp.text)
            _nros_token = _nros_resp_body["access_token"]
            cache.set(NROS_TOKEN, _nros_token, timeout=TIMEOUT_60_MINUTES)

        # Metadata request: only used to recover the original filename.
        file_info_req = requests.get(
            file_url,
            stream=True,
            headers={"Authorization": f"Bearer {_nros_token}"})
        file_info_body = json.loads(file_info_req.text)

        file_download_req = requests.get(
            f'{file_url}/content',
            stream=True,
            headers={"Authorization": f"Bearer {_nros_token}"})

        # Stream the upstream body through to the client without buffering
        # the whole file in memory.
        file_download_resp = Response(
            stream_with_context(
                file_download_req.iter_content(chunk_size=2048)))

        file_download_resp.headers['Content-Type'] = file_download_req.headers[
            'Content-Type']
        file_download_resp.headers[
            'Content-Disposition'] = f'attachment; filename="{quote(file_info_body["filename"])}"'
        return file_download_resp
def _get_NRIS_token():
    """Return a cached NRIS access token, fetching a fresh one on cache miss.

    :raises TypeError: if the NRIS token URL is not configured.
    :raises requests.HTTPError: if the token endpoint returns an error status.
    """
    result = cache.get(NRIS_REMOTE_TOKEN)

    if result is None:
        _change_default_cipher()

        params = {
            'disableDeveloperFilter': 'true',
            'grant_type': 'client_credentials',
            'scope': 'NRISWS.*'
        }
        url = current_app.config['NRIS_REMOTE_TOKEN_URL']
        if url is None:
            raise TypeError('Could not load the NRIS URL.')

        resp = requests.get(
            url=url,
            params=params,
            auth=(current_app.config['NRIS_REMOTE_CLIENT_ID'],
                  current_app.config['NRIS_REMOTE_CLIENT_SECRET']))
        # Propagate HTTP errors directly; the original wrapped this in a
        # bare try/except that only re-raised, which added nothing.
        resp.raise_for_status()

        result = resp.json().get('access_token')
        cache.set(NRIS_REMOTE_TOKEN, result, timeout=TIMEOUT_60_MINUTES)

    return result
Exemple #16
0
    def head(self, document_guid):
        """Report the state of a TUS upload via headers on an empty 200.

        :param document_guid: GUID of the upload being queried.
        :raises BadRequest: if no GUID was supplied.
        :raises NotFound: if no upload file exists for the GUID.
        """
        if document_guid is None:
            raise BadRequest('Must specify document GUID in HEAD')

        file_path = cache.get(FILE_UPLOAD_PATH(document_guid))
        if file_path is None or not os.path.lexists(file_path):
            raise NotFound('File does not exist')

        response = make_response("", 200)
        # Current offset and declared total size come from the cached
        # upload state keyed by the document GUID.
        tus_headers = {
            'Tus-Resumable': TUS_API_VERSION,
            'Tus-Version': TUS_API_SUPPORTED_VERSIONS,
            'Upload-Offset': cache.get(FILE_UPLOAD_OFFSET(document_guid)),
            'Upload-Length': cache.get(FILE_UPLOAD_SIZE(document_guid)),
            'Cache-Control': 'no-store',
            'Access-Control-Expose-Headers':
                'Tus-Resumable,Tus-Version,Upload-Offset,Upload-Length,Cache-Control',
        }
        for header_name, header_value in tus_headers.items():
            response.headers[header_name] = header_value
        return response
Exemple #17
0
    def test_generate_document_returns_token(self, test_client, db_session,
                                             auth_headers):
        """Should return the a token for successful generation"""
        now_application = NOWApplicationFactory()
        now_application_identity = NOWApplicationIdentityFactory(
            now_application=now_application)

        # Send a mine_no that differs from the real one to check it is not
        # blindly copied into the stored template data.
        changed_mine_no = str(now_application_identity.mine.mine_no + '1')
        data = {
            'now_application_guid':
            now_application_identity.now_application_guid,
            'template_data': {
                'mine_no': changed_mine_no
            }
        }

        post_resp = test_client.post(
            f'/now-applications/application-document-types/RJL/generate',
            json=data,
            headers=auth_headers['full_auth_header'])

        assert post_resp.status_code == 200
        post_data = json.loads(post_resp.data.decode())
        # The returned token must resolve to cached template data.
        token_data = cache.get(NOW_DOCUMENT_DOWNLOAD_TOKEN(post_data['token']))
        assert token_data is not None
        # NOTE(review): asserts the server overrode the client-supplied
        # mine_no (server-side data wins) — confirm that is the intent.
        assert token_data['template_data']['mine_no'] != changed_mine_no
        assert post_data['token']
Exemple #18
0
    def get(self, mine_no):
        """Return NRIS compliance data for a mine, cached for 60 minutes.

        :param mine_no: mine number to look up.
        :raises NotFound: if the mine does not exist in CORE.
        :raises requests.exceptions.Timeout, HTTPError: re-raised after
            logging when the NRIS API call fails.
        """
        result = cache.get(NRIS_COMPLIANCE_DATA(mine_no))
        if result is None:
            mine = Mine.find_by_mine_no_or_guid(mine_no)
            if not mine:
                raise NotFound("No mine record in CORE.")

            try:
                # Forward the caller's Authorization header to NRIS.
                raw_data = NRIS_API_service._get_NRIS_data_by_mine(
                    request.headers.get('Authorization'), mine_no)
            except requests.exceptions.Timeout:
                current_app.logger.error(
                    f'NRIS_API Connection Timeout <mine_no={mine_no}>')
                raise
            except requests.exceptions.HTTPError as e:
                current_app.logger.error(
                    f'NRIS_API Connection HTTPError <mine_no={mine_no}>, {str(e)}'
                )
                raise

            result = NRIS_API_service._process_NRIS_data(raw_data)
            # Only cache non-empty results so mines with no orders are
            # re-checked on every request.
            if len(result['orders']) > 0:
                cache.set(NRIS_COMPLIANCE_DATA(mine_no),
                          result,
                          timeout=TIMEOUT_60_MINUTES)
        return result
Exemple #19
0
    def put(self):
        """Exchange a one-time link (OTL) for a one-time password (OTP).

        Consumes the OTL from the cache, issues a new OTP GUID mapped to the
        same application GUID with a limited lifetime, and returns the OTP
        with its issue time and timeout. Responds 401 if the OTL is missing
        or unknown.
        """
        otp_guid = None
        issued_time_utc = None
        timeout = AuthorizationResource.ONE_TIME_PASSWORD_TIMEOUT_SECONDS

        data = AuthorizationResource.parser.parse_args()
        otl_guid = data.get('otl_guid')
        # The cache maps otl_guid -> application guid.
        app_guid = cache.get(otl_guid)

        current_app.logger.info(f'this is app_guid: {app_guid}')

        if otl_guid and app_guid:
            # OTLs are single-use: delete before issuing the OTP.
            cache.delete(otl_guid)
            current_app.logger.info(f"OTL_GUID_VALUE: {cache.get(otl_guid)}")
            otp_guid = uuid.uuid4()
            issued_time_utc = datetime.now(timezone.utc)
            cache.set(str(otp_guid), app_guid, timeout=timeout)
        else:
            abort(401)

        return jsonify({
            "OTP":
            otp_guid,
            "issued_time_utc":
            issued_time_utc.strftime("%d %b %Y %H:%M:%S %z"),
            "timeout_seconds":
            AuthorizationResource.ONE_TIME_PASSWORD_TIMEOUT_SECONDS,
            "application_guid":
            app_guid
        })
Exemple #20
0
def get_face_detection_drawer() -> DrawFaceDetection:
    """Return the process-wide face-detection drawer, building it on first use."""
    cached_drawer = cache.get("face_det_drawer")
    if cached_drawer is not None:
        return cached_drawer
    model_config = {"model_type": "cnn", "number_of_times_to_upsample": 0}
    cached_drawer = DrawFaceDetection(FaceRecognitionLibWrapper(model_config))
    cache.set("face_det_drawer", cached_drawer)
    return cached_drawer
Exemple #21
0
def get_segmentation_drawer() -> DrawSegmentation:
    """Return the cached segmentation drawer, creating it on first use.

    Bug fix: the original built a *second* DrawSegmentation instance to put
    in the cache while returning the first, so the cached object was never
    the one handed to callers and a fresh pair was built on every cold call.
    """
    drawer = cache.get("segmentation_drawer")
    if drawer is None:
        drawer = DrawSegmentation(TorchvisionSegmentationWrapper())
        cache.set("segmentation_drawer", drawer)
    return drawer
Exemple #22
0
    def get(self):
        """Exchange a one-time download token for a payment document.

        The token is consumed (deleted) on first use regardless of outcome.
        ?as_attachment=true/false overrides the default, which is to display
        PDFs inline and download everything else.
        """
        token_guid = request.args.get('token', '')
        attachment = request.args.get('as_attachment', None)
        # Tokens are single-use: consume before validating.
        token_data = cache.get(DOWNLOAD_TOKEN(token_guid))
        cache.delete(DOWNLOAD_TOKEN(token_guid))
        current_app.logger.debug('redis_data' + str(token_data))

        if not token_data:
            raise BadRequest('Valid token required for download')

        document_guid = token_data['document_guid']
        payment_doc = PaymentDocument.query.filter_by(
            document_guid=document_guid).one_or_none()
        if not payment_doc:
            raise NotFound(
                'No document was found with the corresponding download token')
        if attachment is not None:
            # Direct boolean expression instead of `True if ... else False`.
            attach_style = attachment == 'true'
        else:
            attach_style = '.pdf' not in payment_doc.document_name.lower()

        file_resp = ObjectStoreStorageService().download_file(
            path=payment_doc.object_store_path,
            display_name=quote(payment_doc.document_name),
            as_attachment=attach_style)

        return file_resp
Exemple #23
0
def get_workzone_drawer() -> DrawArucoZone:
    """Return the cached work-zone drawer, building it from the aruco config on first use."""
    cached_drawer = cache.get("workzone_drawer")
    if cached_drawer is not None:
        return cached_drawer
    cached_drawer = DrawArucoZone("../robot_work_zone_estimation/aruco_config.json")
    cache.set("workzone_drawer", cached_drawer)
    return cached_drawer
Exemple #24
0
def _get_NRIS_token():
    """Return a cached NRIS access token, fetching a fresh one on cache miss.

    :raises TypeError: if the NRIS token URL is not configured.
    :raises requests.HTTPError: if the token endpoint returns an error status.
    """
    result = cache.get(NRIS_CACHE_PREFIX + 'token')

    if result is None:
        params = {
            'disableDeveloperFilter': 'true',
            'grant_type': 'client_credentials',
            'scope': 'NRISWS.*'
        }
        url = current_app.config['NRIS_TOKEN_URL']
        if url is None:
            raise TypeError('Could not load the NRIS URL.')

        resp = requests.get(url=url,
                            params=params,
                            auth=(current_app.config['NRIS_USER_NAME'],
                                  current_app.config['NRIS_PASS']))
        # Propagate HTTP errors directly; the original wrapped this in a
        # bare try/except that only re-raised, which added nothing.
        resp.raise_for_status()

        result = resp.json().get('access_token')
        cache.set(NRIS_CACHE_PREFIX + 'token',
                  result,
                  timeout=TIMEOUT_12_HOURS)

    return result
Exemple #25
0
    def stats(cls):
        """Return counts of all, active, and inactive users, caching each count.

        Uses explicit ``is None`` checks: a legitimate count of 0 is falsy,
        so the original truthiness test never cached zero and re-queried the
        database on every call for an empty category.
        """
        active_users = cache.get('active_users')
        if active_users is None:
            active_users = cls.query.filter_by(active=True).count()
            cache.set('active_users', active_users)

        inactive_users = cache.get('inactive_users')
        if inactive_users is None:
            inactive_users = cls.query.filter_by(active=False).count()
            cache.set('inactive_users', inactive_users)

        return {
            'all': active_users + inactive_users,
            'active': active_users,
            'inactive': inactive_users
        }
Exemple #26
0
    def stats(cls):
        """Return counts of all, active, and inactive users, caching each count.

        Uses explicit ``is None`` checks: a legitimate count of 0 is falsy,
        so the original truthiness test never cached zero and re-queried the
        database on every call for an empty category.
        """
        active_users = cache.get('active_users')
        if active_users is None:
            active_users = cls.query.filter_by(active=True).count()
            cache.set('active_users', active_users)

        inactive_users = cache.get('inactive_users')
        if inactive_users is None:
            inactive_users = cls.query.filter_by(active=False).count()
            cache.set('inactive_users', inactive_users)

        return {
            'all': active_users + inactive_users,
            'active': active_users,
            'inactive': inactive_users
        }
    def get(self, inspection_id, attachment_id):
        """Stream an NRIS inspection attachment after consuming a one-time token.

        :raises BadRequest: if the token is missing, expired, or already used.
        """
        token_guid = request.args.get('token', '')
        # Tokens are single-use: consume before validating.
        document_info = cache.get(DOWNLOAD_TOKEN(token_guid))
        cache.delete(DOWNLOAD_TOKEN(token_guid))
        if not document_info:
            # Fixed typo in the error message ("requred" -> "required").
            raise BadRequest('Valid token required for download')

        return NRISDownloadService.download(document_info["documenturl"], document_info["filename"])
Exemple #28
0
 def wrapper(*args, **kwargs):
     """Simple per-user rate limiter: deny repeat calls within 15 seconds."""
     key = 'evil_' + str(current_user._id)
     rv = cache.get(key)
     if not rv:
         # 15 seconds
         # NOTE(review): object() as the cached value may not survive
         # serializing cache backends (e.g. redis) — confirm the backend.
         cache.set(key, object(), timeout=15)
     else:
         # A marker already exists: the user called within the window.
         abort(403)
     return f(*args, **kwargs)
Exemple #29
0
    def get(self):
        """Generate a Notice of Work document from a one-time token.

        Consumes the token, renders the document from its template and
        template data, pushes the result to the Document Manager, records it
        against the Notice of Work, and streams the generated file back.

        NOTE(review): `query.unbound_unsafe()` is a project-specific query
        accessor not visible here — presumably it bypasses tenant/permission
        binding; confirm before reuse.
        """
        # Ensure that the token is valid
        token = request.args.get('token', '')
        token_data = cache.get(NOW_DOCUMENT_DOWNLOAD_TOKEN(token))
        # Tokens are single-use: consumed regardless of outcome.
        cache.delete(NOW_DOCUMENT_DOWNLOAD_TOKEN(token))
        if not token_data:
            raise BadRequest('Valid token required for download')

        # Get the template associated with the token
        doc_type = NOWApplicationDocumentType.query.unbound_unsafe().get(
            token_data['document_type_code'])
        template_path = os.path.join(current_app.root_path,
                                     doc_type.document_template.template_file_path)

        # Generate the document using the template and template data
        docgen_resp = DocumentGeneratorService.generate_document_and_stream_response(
            template_path, data=token_data['template_data'])

        # Push the document to the Document Manager
        filename = docgen_resp.headers['X-Report-Name']
        now_application_guid = token_data['now_application_guid']
        now_application_identity = NOWApplicationIdentity.query.unbound_unsafe().get(
            now_application_guid)
        document_manager_guid = DocumentManagerService.pushFileToDocumentManager(
            file_content=docgen_resp.content,
            filename=filename,
            mine=now_application_identity.mine,
            document_category='noticeofwork',
            authorization_header=token_data['authorization_header'])

        # Add the document to the Notice of Work's documents
        username = token_data['username']
        new_mine_doc = MineDocument(
            mine_guid=now_application_identity.now_application.mine_guid,
            document_manager_guid=document_manager_guid,
            document_name=filename,
            create_user=username,
            update_user=username)
        now_doc = NOWApplicationDocumentXref(
            mine_document=new_mine_doc,
            now_application_document_type=doc_type,
            now_application_id=now_application_identity.now_application_id,
            create_user=username,
            update_user=username)
        now_application_identity.now_application.documents.append(now_doc)
        now_application_identity.save()

        # Return the generated document, streamed with the upstream headers.
        file_gen_resp = Response(
            stream_with_context(docgen_resp.iter_content(chunk_size=2048)),
            headers=dict(docgen_resp.headers))
        return file_gen_resp
Exemple #30
0
def login():
    """Render the login form and authenticate the submitting user."""
    form = LoginForm(next=request.args.get('next'))

    if form.validate_on_submit():
        user = User.find_by_identity(request.form.get('identity'))

        if user and user.authenticated(password=request.form.get('password')):
            # Remember-me is deliberately always enabled for a simpler form.
            # To make it user-selectable: read request.form.get('remember',
            # False) here, re-enable the 'remember' field in
            # user/forms.py#LoginForm, and add a matching checkbox.
            if login_user(user, remember=True) and user.is_active():
                user.update_activity_tracking(request.remote_addr)

                # Honor an optional post-login redirect, but only to a
                # vetted (safe) URL.
                next_url = request.form.get('next')
                if next_url:
                    return redirect(safe_next_url(next_url))

                if current_user.role == 'admin':
                    return redirect(url_for('admin.dashboard'))

                if current_user.role == 'member':
                    # Kick off a background mailbox sync if nothing is
                    # cached for this member yet.
                    if not cache.get(current_user.mailbox_id):
                        from app.blueprints.user.tasks import get_emails, get_rules, set_cache

                        emails = get_emails.delay(current_user.mailbox_id)
                        set_cache.delay(current_user.mailbox_id, emails.id)

                    # Expire the 14-day trial once it has elapsed.
                    if current_user.trial:
                        elapsed_days = (
                            datetime.datetime.now() -
                            current_user.created_on.replace(tzinfo=None)).days
                        if 14 - elapsed_days < 0:
                            current_user.trial = False
                            current_user.save()

                return redirect(url_for('user.settings'))

            # Every successful-auth path above returns, so reaching here
            # means login_user/is_active rejected the account.
            flash('This account has been disabled.', 'error')
        else:
            flash('Your username/email or password is incorrect.', 'error')

    return render_template('user/login.html', form=form)
Exemple #31
0
def _cache_all_NRIS_major_mines_data():
    """Fetch and process NRIS EMPR data for each cached major mine.

    Runs inside the scheduler's Flask app context. Iterates the cached
    major-mine list; for every mine whose job flag is 'False' (not in
    progress), marks it 'True' for up to 60 minutes, pulls its EMPR data
    from NRIS, and processes any non-empty result.
    """
    with sched.app.app_context():
        major_mine_list = cache.get(NRIS_JOB_PREFIX + NRIS_MAJOR_MINE_LIST)
        if major_mine_list is None:
            # The mine list hasn't been cached yet; nothing to do.
            return

        for mine in major_mine_list:
            if cache.get(NRIS_JOB_PREFIX + mine) == 'False':
                # Flag this mine as in-progress so concurrent runs skip it.
                cache.set(NRIS_JOB_PREFIX + mine, 'True', timeout=TIMEOUT_60_MINUTES)

                # BUG FIX: 'data' was previously unbound when the fetch
                # raised, causing a NameError at the check below instead of
                # the intended best-effort skip.
                data = None
                try:
                    data = NRIS_service._get_EMPR_data_from_NRIS(mine)
                except requests.exceptions.Timeout:
                    # Best-effort: skip this mine on timeout.
                    # TODO(review): log the timeout for observability.
                    pass
                except requests.exceptions.HTTPError as errhttp:
                    # TODO(review): log errhttp before discarding.
                    pass
                except TypeError as e:
                    # TODO(review): log e before discarding.
                    pass

                if data is not None and len(data) > 0:
                    NRIS_service._process_NRIS_data(data, mine)
Exemple #32
0
        def wrapper(*args, **kwargs):
            """Memoize the wrapped function's result in the shared cache.

            When extra positional args are present (beyond the first, which
            is presumably `self` — TODO confirm against the decorator's
            callers), interpolate them into the key template so distinct
            arguments map to distinct cache entries.
            """
            if len(args) > 1:
                cache_key = key % args[1:]
            else:
                cache_key = key

            rv = cache.get(cache_key)
            if rv is not None:
                return rv
            rv = f(*args, **kwargs)
            # BUG FIX: the store condition was `if rv:` while the hit check
            # above uses `is not None`, so falsy-but-valid results (0, '',
            # [], False) were never cached and got recomputed on every call.
            # None is still never stored, preserving the "no result" case.
            if rv is not None:
                cache.set(cache_key, rv, timeout=timeout)
            return rv
    def get(self, application_guid, id):
        """Stream an application document identified by a one-time token.

        :param application_guid: part of the route signature; not used here.
        :param id: part of the route signature; not used here (name shadows
            the builtin `id` but is kept for interface compatibility).
        :raises BadRequest: if the token is missing, expired, or already used.
        :raises InternalServerError: if the document's originating system
            is not one of the supported download services.
        """
        token_guid = request.args.get('token', '')
        document_info = cache.get(DOWNLOAD_TOKEN(token_guid))
        # Tokens are single-use: consume the cache entry immediately.
        cache.delete(DOWNLOAD_TOKEN(token_guid))
        if not document_info:
            # BUG FIX: corrected typo in user-facing message
            # ("requred" -> "required").
            raise BadRequest('Valid token required for download')

        if document_info["originating_system"] == "VFCBC":
            return VFCBCDownloadService.download(document_info["documenturl"],
                                                 document_info["filename"])
        if document_info["originating_system"] == "NROS":
            return NROSDownloadService.download(document_info["documenturl"])

        raise InternalServerError('Unknown application document server')