Example #1
def query_slip(pin):
    """Function to query the Landgate SLIP service for a cadastral location, by PIN.
    """
    url = env('SLIP_WFS_URL', None)
    auth = (env('SLIP_USERNAME', None), env('SLIP_PASSWORD', None))
    type_name = env('SLIP_DATASET', '')
    params = {
        'service': 'WFS',
        'version': '1.0.0',
        'typeName': type_name,
        'request': 'getFeature',
        'outputFormat': 'json',
        'cql_filter': 'polygon_number={}'.format(pin)
    }
    resp = requests.get(url, auth=auth, params=params)
    return resp
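The function returns the raw requests.Response, leaving status checking and decoding to the caller. A minimal usage sketch (the PIN value is hypothetical, and the GeoJSON 'features' structure is an assumption based on the requested outputFormat=json):

resp = query_slip(pin=123456)  # Hypothetical PIN.
resp.raise_for_status()
# A WFS GetFeature request with outputFormat=json normally returns GeoJSON.
for feature in resp.json().get('features', []):
    print(feature.get('id'), feature.get('geometry', {}).get('type'))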
Example #2
def query_slip_esri(pin):
    """Function to query the Landgate SLIP service (Esri REST API) for a cadastral location, by PIN.
    Ref: https://catalogue.data.wa.gov.au/group/about/cadastre
    """
    url = env('SLIP_ESRI_FS_URL', None)
    url = url + '/query'  # Add query suffix to the URL.
    auth = (env('SLIP_USERNAME', None), env('SLIP_PASSWORD', None))
    params = {
        'f': 'json',
        'outSR': 4326,
        'outFields': '*',
        'returnGeometry': 'true',
        'where': 'polygon_number={}'.format(pin)
    }
    resp = requests.get(url, auth=auth, params=params)
    return resp
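As with the WFS variant, decoding is left to the caller. A usage sketch (the PIN and the field name are hypothetical; the 'attributes' wrapping is the usual shape of an Esri REST query response, assumed here rather than shown in the example):

resp = query_slip_esri(pin=123456)  # Hypothetical PIN.
resp.raise_for_status()
data = resp.json()
# Esri REST query responses wrap each feature's fields in an 'attributes' dict.
for feature in data.get('features', []):
    print(feature.get('attributes', {}).get('polygon_number'), feature.get('geometry'))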
Example #3
def signal_sciences_extract_feed(from_datetime=None, minutes=None):
    """Extract the Signal Sciences feed for ``minutes`` duration from the passed-in timestamp (UTC).
    Returns the feed JSON as a string.
    """
    if not from_datetime or not minutes:
        return False
    ss_email = env('SIGSCI_EMAIL', None)
    api_token = env('SIGSCI_API_TOKEN', None)
    if not ss_email or not api_token:  # Both credentials are required.
        return False

    api_host = env('SIGSCI_API_HOST', 'https://dashboard.signalsciences.net')
    corp_name = env('SIGSCI_CORP_NAME', 'dbca')
    site_name = env('SIGSCI_SITE_NAME', 'www.dbca.wa.gov.au')
    from_datetime = from_datetime.replace(
        second=0, microsecond=0)  # Ensure lowest precision is minutes.
    from_time = calendar.timegm(from_datetime.utctimetuple())
    until_datetime = from_datetime + timedelta(minutes=minutes)
    until_time = calendar.timegm(until_datetime.utctimetuple())
    headers = {
        'x-api-user': ss_email,
        'x-api-token': api_token,
        'Content-Type': 'application/json',
    }
    url = api_host + '/api/v0/corps/{}/sites/{}/feed/requests?from={}&until={}'.format(
        corp_name, site_name, from_time, until_time)
    first = True
    feed_str = '['

    while True:
        resp_raw = requests.get(url, headers=headers)
        response = json.loads(resp_raw.text)
        for request in response['data']:
            data = json.dumps(request)
            if first:
                first = False
            else:
                data = ',' + data
            feed_str += (data)
        next_url = response['next']['uri']
        if next_url == '':
            feed_str += ']'
            break
        url = api_host + next_url

    return feed_str
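A usage sketch for the extraction function above, returning the feed as a JSON string (assumes the SIGSCI_* environment variables are set; starting the window a little in the past reflects an assumption that the feed API lags real time):

from datetime import datetime, timedelta
import json

# Extract roughly the last hour of feed data (UTC), ending ten minutes ago.
from_dt = datetime.utcnow() - timedelta(minutes=70)
feed_str = signal_sciences_extract_feed(from_datetime=from_dt, minutes=60)
if feed_str:
    entries = json.loads(feed_str)
    print('Extracted {} requests from the feed'.format(len(entries)))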
Example #4
File: utils.py Project: ropable/prs
def borgcollector_harvest(request, publishes=["prs_locations"]):
    """Convenience function to manually run a Borg Collector harvest
    job for the PRS locations layer.

    Docs: https://github.com/parksandwildlife/borgcollector
    """
    api_url = env("BORGCOLLECTOR_API",
                  "https://borg.dpaw.wa.gov.au/api/") + "jobs/"
    # Send a POST request to the API endpoint, forwarding the calling user's
    # cookies (assumed here as the means of authenticating to the Borg Collector API).
    r = requests.post(url=api_url,
                      cookies=request.COOKIES,
                      data=json.dumps({"publishes": publishes}))
    return r
Example #5
def signal_sciences_upload_feed(from_datetime=None,
                                minutes=None,
                                compress=False,
                                upload=True,
                                csv=False):
    """For the given datetime and duration, download the Signal Sciences feed and upload the data
    to Azure blob storage (optionally compress the file using gzip).
    Optionally also upload a CSV summary of tagged requests to blob storage.
    """
    if not from_datetime or not minutes:
        return False
    feed_str = signal_sciences_extract_feed(from_datetime, minutes)
    if not feed_str:
        # Extraction failed (e.g. missing credentials); nothing to write or upload.
        return False
    corp_name = env('SIGSCI_CORP_NAME', 'dbca')

    if upload and csv:
        signal_sciences_feed_csv(feed_str, corp_name,
                                 from_datetime.isoformat())

    if compress:
        # Conditionally gzip the file.
        filename = 'sigsci_feed_{}_{}.json.gz'.format(
            corp_name, from_datetime.strftime('%Y-%m-%dT%H%M%S'))
        tf = gzip.open('/tmp/{}'.format(filename), 'wb')
        tf.write(feed_str.encode('utf-8'))
    else:
        filename = 'sigsci_feed_{}_{}.json'.format(
            corp_name, from_datetime.strftime('%Y-%m-%dT%H%M%S'))
        tf = open('/tmp/{}'.format(filename), 'w')
        tf.write(feed_str)
    tf.close()

    if upload:
        # Upload the returned feed data to blob storage.
        connect_string = env('AZURE_CONNECTION_STRING')
        store = AzureBlobStorage(connect_string, 'signalsciences')
        store.upload_file(filename, tf.name)

    return filename
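A sketch of how the upload wrapper above might be invoked from a periodic task (the hourly window is an assumption, not part of the example):

from datetime import datetime, timedelta

# Upload the previous clock hour's feed, gzip-compressed, plus the CSV summary.
window_start = datetime.utcnow().replace(minute=0, second=0, microsecond=0) - timedelta(hours=1)
blob_name = signal_sciences_upload_feed(from_datetime=window_start, minutes=60, compress=True, csv=True)
print('Uploaded {}'.format(blob_name))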
Example #6
def host_dependencies():
    # Download the list of Nginx host proxy targets.
    connect_string = env('AZURE_CONNECTION_STRING')
    store = AzureBlobStorage(connect_string, 'analytics')
    store.download('nginx_host_proxy_targets.json',
                   '/tmp/nginx_host_proxy_targets.json')
    f = open('/tmp/nginx_host_proxy_targets.json')
    targets = json.loads(f.read())
    host_ct = ContentType.objects.get(app_label='status', model='host')

    # Production / Production (legacy) systems only.
    for it in ITSystem.objects.filter(link__isnull=False,
                                      status__in=[0, 2]).exclude(link=''):
        # Remove any existing IT System Host dependencies.
        for dep in it.dependencies.filter(content_type=host_ct):
            it.dependencies.remove(dep)

        if it.extra_data is None:
            it.extra_data = {}
            it.save()
            continue

        if 'url_synonyms' not in it.extra_data or not it.extra_data[
                'url_synonyms']:
            # Skip this IT System (no known URL or synonyms).
            continue

        # Create/update Host dependencies for IT systems as 'proxy targets'.
        target = None
        for syn in it.extra_data['url_synonyms']:
            for t in targets:
                if syn == t['host']:
                    target = t
                    break
            if target:
                for p in target["proxy_pass"]:
                    u = urlparse(p)
                    host = u.netloc.split(':')[0]
                    if Host.objects.filter(name=host).exists():
                        h = Host.objects.filter(name=host).first()
                        host_dep, created = Dependency.objects.get_or_create(
                            content_type=host_ct,
                            object_id=h.pk,
                            category='Proxy target',
                        )
                        # Add the dependency to the IT System.
                        it.dependencies.add(host_dep)
Example #7
def signal_sciences_feed_csv(feed_str, corp_name, timestamp, upload=True):
    """For a given passed-in Signal Sciences feed string, summarise it to a CSV for analysis.
    Upload the CSV to Azure blob storage.
    """
    filename = 'sigsci_request_tags_{}_{}.csv'.format(corp_name, timestamp)
    tf = open('/tmp/{}'.format(filename), 'w')
    writer = csv.writer(tf)
    feed_json = json.loads(feed_str)

    for entry in feed_json:
        for tag in entry['tags']:
            writer.writerow(
                [entry['timestamp'], entry['serverName'], tag['type']])

    tf.close()

    if upload:
        connect_string = env('AZURE_CONNECTION_STRING')
        store = AzureBlobStorage(connect_string, 'http-requests-tagged')
        store.upload_file(filename, tf.name)

    return
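The CSV written above has three columns (timestamp, serverName, tag type) and no header row. A small analysis sketch over that layout, counting tag types per site (the filename is hypothetical, following the naming pattern used above):

import csv
from collections import Counter

counts = Counter()
with open('/tmp/sigsci_request_tags_dbca_2021-01-01T00:00:00.csv') as f:  # Hypothetical filename.
    for row in csv.reader(f):
        counts[(row[1], row[2])] += 1  # (serverName, tag type)
for (server_name, tag_type), n in counts.most_common(10):
    print(server_name, tag_type, n)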
Example #8
from dbca_utils.utils import env
import dj_database_url
import os
from pathlib import Path
import sys

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])
PROJECT_DIR = str(Path(__file__).resolve().parents[0])
# Add PROJECT_DIR to the system path.
sys.path.insert(0, PROJECT_DIR)

# Settings defined in environment variables.
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', '').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'ibms_project.urls'
WSGI_APPLICATION = 'ibms_project.wsgi.application'
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.sessions',
    'django.contrib.staticfiles',
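All of the settings modules in these examples lean on dbca_utils.utils.env, which is not shown here. A minimal stand-in that supports the call patterns used above (positional default, default= keyword, required=) could look like the following; this is an illustrative assumption about its behaviour, not the actual dbca_utils implementation:

import ast
import os


def env(key, default=None, required=False):
    """Illustrative stand-in only: read an environment variable, falling back
    to a default and coercing simple Python literals so that values such as
    'True' or '300' come back as bool/int rather than strings.
    """
    if key not in os.environ:
        if required:
            raise EnvironmentError('Required environment variable {} is not set'.format(key))
        return default
    value = os.environ[key]
    try:
        return ast.literal_eval(value)
    except (ValueError, SyntaxError):
        return value  # Plain string.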
Example #9
def import_fleetcare_to_staging_table(staging_table,
                                      from_datetime,
                                      to_datetime,
                                      buffer_hours=2):
    """
    Import fleetcare raw data from blob stroage to staging table
    """
    connection_string = env('FLEETCARE_CONNECTION_STRING')
    if not connection_string:
        raise Exception("Missing fleetcare blob stroage connection string'")

    container_name = env('FLEETCARE_CONTAINER')
    if not container_name:
        raise Exception("Missing fleetcare blob stroage container name")

    if staging_table.lower() == "logentry":
        raise Exception(
            "The staging table for the reharvester can't be 'logentry'")

    time_buff = timedelta(hours=buffer_hours)

    from_dt = parse_datetime(from_datetime)
    to_dt = parse_datetime(to_datetime)

    from_dt_source = from_dt - time_buff
    to_dt_source = to_dt + time_buff

    # Create the table in the staging database.
    staging_conn = connections['fcare']
    staging_schema = 'public'
    dbutils.create_table(
        staging_conn, staging_schema, staging_table,
        "CREATE TABLE {}.{} ( like logentry including all)".format(
            staging_schema, staging_table))

    print(
        "Importing logpoints created between {} and {} from blob storage into staging table {} (buffer = {} hours)"
        .format(format_datetime(from_dt_source), format_datetime(to_dt_source),
                staging_table, buffer_hours))

    # Clean out or check the staging table.
    rows = dbutils.count(
        staging_conn,
        sql="SELECT count(*) FROM {} WHERE created >= '{}' AND created < '{}'".
        format(staging_table, to_db_timestamp(from_dt_source),
               to_db_timestamp(to_dt_source)),
        log=True)
    if rows:
        print(
            "Found {} rows in staging table {} between {} and {}; deleting them"
            .format(rows, staging_table, format_datetime(from_dt_source),
                    format_datetime(to_dt_source)))
        deleted_rows = dbutils.execute(
            staging_conn,
            "DELETE FROM {} WHERE created >= '{}' AND created < '{}'".format(
                staging_table, to_db_timestamp(from_dt_source),
                to_db_timestamp(to_dt_source)),
            log=True)
        print(
            "Deleted {} rows from staging table {} between {} and {}".format(
                deleted_rows, staging_table, format_datetime(from_dt_source),
                format_datetime(to_dt_source)))

    # Import the data from blob storage into the staging database.
    storage = AzureBlobStorage(connection_string, container_name)

    oneday = timedelta(days=1)
    day = from_dt_source.date()
    imported_rows = 0
    start = timezone.now()
    while day <= to_dt_source.date():
        day_start = timezone.now()
        metadatas = storage.list_resources("{}/{}/{}/".format(
            day.year, day.month, day.day))
        metadatas.sort(key=lambda o: o['name'][-24:-5])
        day_rows = 0
        for metadata in metadatas:
            # The blob's creation_time metadata is not reliable; extract the timestamp from the file name.
            creation_time = get_fleetcare_creationtime(metadata['name'])
            if creation_time >= from_dt_source and creation_time < to_dt_source:
                content = storage.get_content(metadata['name']).decode()
                try:
                    day_rows += dbutils.execute(
                        staging_conn,
                        "INSERT INTO {} (name,created,text) values('{}','{}','{}')"
                        .format(staging_table, metadata['name'],
                                to_db_timestamp(creation_time), content))
                except Exception:
                    # Failed to insert the data into the staging table.
                    # Check whether the row already exists: if so, ignore the error; otherwise re-raise.
                    # The 'created' column of the original 'logentry' table is a timestamp without
                    # time zone, and its value is a UTC timestamp.
                    rows = dbutils.count(
                        staging_conn,
                        sql=
                        "SELECT count(*) FROM {}.{} WHERE name='{}' and created='{}' "
                        .format(staging_schema,
                                staging_table, metadata['name'],
                                to_db_timestamp(creation_time)))
                    if rows:
                        continue
                    else:
                        raise

        print(
            "{}: spent {} importing {} rows from blob storage into staging table {}"
            .format(day.strftime("%Y/%m/%d"), str(timezone.now() - day_start),
                    day_rows, staging_table))

        imported_rows += day_rows

        day += oneday

    print(
        "Spent {} importing {} rows from blob storage into staging table {} between {} and {}"
        .format(str(timezone.now() - start), imported_rows, staging_table,
                format_datetime(from_dt_source),
                format_datetime(to_dt_source)))
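A usage sketch for the import routine above (the staging table name and the datetime strings are hypothetical; from_datetime and to_datetime are passed as strings because the function parses them with parse_datetime):

# Re-import one day of raw Fleetcare log points into a throwaway staging table,
# keeping the default two-hour buffer either side of the window.
import_fleetcare_to_staging_table(
    staging_table='logentry_reharvest_20200101',  # Hypothetical table name.
    from_datetime='2020-01-01 00:00:00',
    to_datetime='2020-01-02 00:00:00',
)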
Example #10
from dbca_utils.utils import env
import dj_database_url
import os
import sys

# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_DIR = os.path.join(BASE_DIR, 'bfrs_project')
# Add PROJECT_DIR to the system path.
sys.path.insert(0, PROJECT_DIR)

# Application definition
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', required=True)
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', ['localhost'])
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'bfrs_project.urls'
WSGI_APPLICATION = 'bfrs_project.wsgi.application'
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
Example #12
from dbca_utils.utils import env
import dj_database_url
import os
from pathlib import Path

# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])

# Application definition
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', 'localhost').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'resource_tracking.urls'
WSGI_APPLICATION = 'resource_tracking.wsgi.application'
TRACPLUS_URL = env('TRACPLUS_URL', False)
KMI_VEHICLE_BASE_URL = env('KMI_VEHICLE_BASE_URL', '')
DFES_URL = env('DFES_URL', False)
DFES_USER = env('DFES_USER', False)
DFES_PASS = env('DFES_PASS', False)
DFES_OUT_OF_ORDER_BUFFER = int(env('DFES_OUT_OF_ORDER_BUFFER') or 300)
# Add scary warning on device edit page for prod
PROD_SCARY_WARNING = env('PROD_SCARY_WARNING', False)
DEVICE_HTTP_CACHE_TIMEOUT = env('DEVICE_HTTP_CACHE_TIMEOUT', 60)
HISTORY_HTTP_CACHE_TIMEOUT = env('HISTORY_HTTP_CACHE_TIMEOUT', 60)
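Each settings excerpt imports dj_database_url but is truncated before the database configuration. The usual pairing with env-driven settings is the pattern below, a sketch that assumes the connection details are supplied via a DATABASE_URL environment variable (dj_database_url's default):

import dj_database_url

# dj_database_url.config() reads DATABASE_URL and returns a Django DATABASES entry.
DATABASES = {'default': dj_database_url.config()}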
Example #13
from dbca_utils.utils import env
import dj_database_url
import os
from pathlib import Path
import sys

# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])
PROJECT_DIR = str(Path(__file__).resolve().parents[0])
# Add PROJECT_DIR to the system path.
sys.path.insert(0, PROJECT_DIR)

# Application definition
DEBUG = env('DEBUG', False)
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
SECURE_SSL_REDIRECT = env('SECURE_SSL_REDIRECT', False)
SECURE_REFERRER_POLICY = env('SECURE_REFERRER_POLICY', None)
SECURE_HSTS_SECONDS = env('SECURE_HSTS_SECONDS', 0)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', 'localhost').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'prs2.urls'
WSGI_APPLICATION = 'prs2.wsgi.application'
GEOSERVER_WMTS_URL = env('GEOSERVER_WMTS_URL', '')
GEOSERVER_WFS_URL = env('GEOSERVER_WFS_URL', '')
Example #14
from dbca_utils.utils import env
import dj_database_url
import os
from datetime import timedelta
from pathlib import Path

# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])

# Application definition
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', 'localhost').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'resource_tracking.urls'
WSGI_APPLICATION = 'resource_tracking.wsgi.application'
TRACPLUS_URL = env('TRACPLUS_URL', False)
KMI_VEHICLE_BASE_URL = env('KMI_VEHICLE_BASE_URL', '')
DFES_URL = env('DFES_URL', False)
DFES_USER = env('DFES_USER', False)
DFES_PASS = env('DFES_PASS', False)
DFES_OUT_OF_ORDER_BUFFER = int(env('DFES_OUT_OF_ORDER_BUFFER') or 300)
# Add scary warning on device edit page for prod
PROD_SCARY_WARNING = env('PROD_SCARY_WARNING', False)
DEVICE_HTTP_CACHE_TIMEOUT = env('DEVICE_HTTP_CACHE_TIMEOUT', 60)
Example #15
def GET_CLUSTER_MANAGEMENT_URL(clustername):
    if clustername not in CLUSTERS_MANAGEMENT_URL:
        CLUSTERS_MANAGEMENT_URL[clustername] = env(
            clustername.upper(),
            default=RANCHER_MANAGEMENT_URL.format(clustername))
    return CLUSTERS_MANAGEMENT_URL[clustername]
Example #16
from dbca_utils.utils import env
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'r22c+54z97+$2!$*bcv8kwc)9hcnje4&hrjlw!0#guocoq(+jf'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = env(
    'ALLOWED_HOSTS',
    ["eventhubconsole.dpaw.wa.gov.au", "eventhubconsole.dbca.wa.gov.au"])

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin', 'django.contrib.auth',
    'django.contrib.contenttypes', 'django.contrib.sessions',
    'django.contrib.messages', 'django.contrib.staticfiles', 'reversion',
    'smart_selects', 'console'
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
Example #17
from dbca_utils.utils import env
import dj_database_url
import os
import sys
from pathlib import Path
from datetime import timedelta

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])
PROJECT_DIR = str(Path(__file__).resolve().parents[0])
# Add PROJECT_DIR to the system path.
sys.path.insert(0, PROJECT_DIR)

# Settings defined in environment variables.
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', '').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'itassets.urls'
WSGI_APPLICATION = 'itassets.wsgi.application'

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
Example #18
from dbca_utils.utils import env
import dj_database_url
import os
from pathlib import Path

# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])

# Application definition
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', '').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'csw.urls'
WSGI_APPLICATION = 'csw.wsgi.application'
BASE_URL = env('BASE_URL', 'https://csw.dbca.wa.gov.au')
BORG_URL = env('BORG_URL', 'https://borg.dbca.wa.gov.au')
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.gis',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
Example #19
from dbca_utils.utils import env
import dj_database_url
import os
from pathlib import Path

# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])

# Application definition
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', 'localhost').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'resource_tracking.urls'
WSGI_APPLICATION = 'resource_tracking.wsgi.application'
CSW_URL = env('CSW_URL', '')
PRINTING_URL = env('PRINTING_URL', '')
TRACPLUS_URL = env('TRACPLUS_URL', False)
KMI_VEHICLE_BASE_URL = env('KMI_VEHICLE_BASE_URL', '')
JQUERY_SOURCE = env('JQUERY_SOURCE', '')
JQUERYUI_SOURCE = env('JQUERYUI_SOURCE', '')
DFES_URL = env('DFES_URL', False)
DFES_USER = env('DFES_USER', False)
DFES_PASS = env('DFES_PASS', False)
DFES_OUT_OF_ORDER_BUFFER = int(env('DFES_OUT_OF_ORDER_BUFFER') or 300)
Example #20
from dbca_utils.utils import env

from pathlib import Path
import os

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '477l7!ncr_%7%6=-h5ett44#jhc1cy1ni#sp%b(yh2@+wxlr6^'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env('DEBUG', False)

ALLOWED_HOSTS = ['*']

CORS_ALLOW_ALL_ORIGINS = True
CORS_ORIGIN_ALLOW_ALL = True
# Application definition

INSTALLED_APPS = [
    'django.contrib.admin', 'django.contrib.auth',
    'django.contrib.contenttypes', 'django.contrib.sessions',
    'django.contrib.messages', 'django.contrib.staticfiles', 'corsheaders',
    'django_site_queue'
]

MIDDLEWARE = [
Example #21
def itsystem_risks_traffic(it_systems=None):
    """Set automatic risk assessment for IT system web apps based on the mean of daily HTTP requests.
    """
    if not it_systems:
        it_systems = ITSystem.objects.all()
    # Download the report of HTTP requests.
    connect_string = env('AZURE_CONNECTION_STRING')
    store = AzureBlobStorage(connect_string, 'analytics')
    store.download('host_requests_7_day_count.csv',
                   '/tmp/host_requests_7_day_count.csv')
    counts = csv.reader(open('/tmp/host_requests_7_day_count.csv'))
    next(counts)  # Skip the header.
    report = {}
    for row in counts:
        try:
            report[row[0]] = int(row[1])
        except (ValueError, IndexError):
            # Sometimes the report contains junk rows; just ignore these.
            pass
    itsystem_ct = ContentType.objects.get(app_label='registers',
                                          model='itsystem')

    for it in it_systems:
        # First, check if an auto assessment has been created OR if no assessment exists.
        # If so, carry on. If not, skip automated assessment (assumes that a manual assessment exists,
        # which we don't want to overwrite).
        if (RiskAssessment.objects.filter(
                content_type=itsystem_ct,
                object_id=it.pk,
                category='Traffic',
                notes__contains='[AUTOMATED ASSESSMENT]').exists()
                or not RiskAssessment.objects.filter(
                    content_type=itsystem_ct,
                    object_id=it.pk,
                    category='Traffic').exists()):
            if not it.extra_data or 'url_synonyms' not in it.extra_data or not it.extra_data['url_synonyms']:
                # Skip this IT System (no known URL or synonyms).
                continue

            # Get/create a Traffic risk
            risk = RiskAssessment.objects.filter(content_type=itsystem_ct,
                                                 object_id=it.pk,
                                                 category='Traffic').first()
            if not risk:
                risk = RiskAssessment(content_type=itsystem_ct,
                                      object_id=it.pk,
                                      category='Traffic')

            # Total the count of HTTP requests to all URI synonyms for this system.
            request_count = 0
            for syn in it.extra_data['url_synonyms']:
                if syn in report and report[syn]:
                    request_count += report[syn]

            requests_mean = request_count / 7  # Daily average request count.
            if requests_mean:
                risk = RiskAssessment.objects.filter(
                    content_type=itsystem_ct,
                    object_id=it.pk,
                    category='Traffic').first()
                if not risk:
                    risk = RiskAssessment(content_type=itsystem_ct,
                                          object_id=it.pk,
                                          category='Traffic')
                if requests_mean >= 10000:
                    risk.rating = 3
                    risk.notes = '[AUTOMATED ASSESSMENT] High traffic of daily HTTP requests'
                elif requests_mean >= 1000:
                    risk.rating = 2
                    risk.notes = '[AUTOMATED ASSESSMENT] Moderate traffic of daily HTTP requests'
                elif requests_mean >= 100:
                    risk.rating = 1
                    risk.notes = '[AUTOMATED ASSESSMENT] Low traffic of daily HTTP requests'
                else:
                    risk.rating = 0
                    risk.notes = '[AUTOMATED ASSESSMENT] Minimal traffic of daily HTTP requests'
                risk.save()
            else:  # Volume of HTTP traffic is too small to assess.
                # If any Traffic risk exists, delete it.
                if RiskAssessment.objects.filter(content_type=itsystem_ct,
                                                 object_id=it.pk,
                                                 category='Traffic').exists():
                    risk = RiskAssessment.objects.filter(
                        content_type=itsystem_ct,
                        object_id=it.pk,
                        category='Traffic').first()
                    risk.delete()
Example #22
from dbca_utils.utils import env
import dj_database_url
import os
import sys
from pathlib import Path

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])
PROJECT_DIR = str(Path(__file__).resolve().parents[0])
# Add PROJECT_DIR to the system path.
sys.path.insert(0, PROJECT_DIR)

# Settings defined in environment variables.
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', '').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'itassets.urls'
WSGI_APPLICATION = 'itassets.wsgi.application'
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.sessions',
    'django.contrib.sites',
Example #23
from dbca_utils.utils import env
import dj_database_url
import os
from datetime import timedelta
from pathlib import Path
import sys

# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])

# Application definition
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', 'localhost').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'resource_tracking.urls'
WSGI_APPLICATION = 'resource_tracking.wsgi.application'
TRACPLUS_URL = env('TRACPLUS_URL', '')
KMI_VEHICLE_BASE_URL = env('KMI_VEHICLE_BASE_URL', '')
DFES_URL = env('DFES_URL', '')
DFES_USER = env('DFES_USER', '')
DFES_PASS = env('DFES_PASS', '')
DFES_OUT_OF_ORDER_BUFFER = int(env('DFES_OUT_OF_ORDER_BUFFER') or 300)
# Add scary warning on device edit page for prod
PROD_SCARY_WARNING = env('PROD_SCARY_WARNING', False)
Example #24
def itsystem_risks_access(it_systems=None):
    """Set automatic risk assessment for IT system web apps based on whether they require SSO on the root location.
    """
    if not it_systems:
        it_systems = ITSystem.objects.all()

    # Download the list of Nginx host proxy targets.
    connect_string = env('AZURE_CONNECTION_STRING')
    store = AzureBlobStorage(connect_string, 'analytics')
    store.download('nginx_host_proxy_targets.json',
                   '/tmp/nginx_host_proxy_targets.json')
    f = open('/tmp/nginx_host_proxy_targets.json')
    targets = json.loads(f.read())
    itsystem_ct = ContentType.objects.get(app_label='registers',
                                          model='itsystem')

    for it in it_systems:
        # First, check if an auto assessment has been created OR if no assessment exists.
        # If so, carry on. If not, skip automated assessment (assumes that a manual assessment exists,
        # which we don't want to overwrite).
        if (RiskAssessment.objects.filter(
                content_type=itsystem_ct,
                object_id=it.pk,
                category='Access',
                notes__contains='[AUTOMATED ASSESSMENT]').exists() or
                not RiskAssessment.objects.filter(content_type=itsystem_ct,
                                                  object_id=it.pk,
                                                  category='Access').exists()):
            if not it.extra_data or 'url_synonyms' not in it.extra_data or not it.extra_data['url_synonyms']:
                # Skip this IT System (no known URL or synonyms).
                continue

            target = None

            # Get/create an access risk
            risk = RiskAssessment.objects.filter(content_type=itsystem_ct,
                                                 object_id=it.pk,
                                                 category='Access').first()
            if not risk:
                risk = RiskAssessment(content_type=itsystem_ct,
                                      object_id=it.pk,
                                      category='Access')

            for syn in it.extra_data['url_synonyms']:
                for t in targets:
                    if syn == t['host']:
                        target = t
                        break
                if target:
                    if 'sso_locations' in target:
                        if '/' in target['sso_locations'] or '^~ /' in target[
                                'sso_locations'] or '= /' in target[
                                    'sso_locations']:
                            risk.rating = 0
                            risk.notes = '[AUTOMATED ASSESSMENT] Web application root location requires SSO'
                        else:
                            risk.rating = 1
                            risk.notes = '[AUTOMATED ASSESSMENT] Web application locations configured to require SSO'
                    else:
                        if 'custom/dpaw_subnets' in target['includes']:
                            risk.rating = 1
                            risk.notes = '[AUTOMATED ASSESSMENT] Web application root location does not require SSO, but is restricted to internal subnets'
                        else:
                            risk.rating = 2
                            risk.notes = '[AUTOMATED ASSESSMENT] Web application root location does not require SSO and is not restricted to internal subnets'
                    risk.save()
                else:
                    # If any access risk exists, delete it.
                    if RiskAssessment.objects.filter(
                            content_type=itsystem_ct,
                            object_id=it.pk,
                            category='Access').exists():
                        risk = RiskAssessment.objects.filter(
                            content_type=itsystem_ct,
                            object_id=it.pk,
                            category='Access').first()
                        risk.delete()
Example #25
from dbca_utils.utils import env
import dj_database_url
import os
from pathlib import Path
import sys

# Project paths
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).resolve().parents[1])
PROJECT_DIR = str(Path(__file__).resolve().parents[0])
# Add PROJECT_DIR to the system path.
sys.path.insert(0, PROJECT_DIR)

# Application definition
DEBUG = env('DEBUG', False)
SECRET_KEY = env('SECRET_KEY', 'PlaceholderSecretKey')
CSRF_COOKIE_SECURE = env('CSRF_COOKIE_SECURE', False)
SESSION_COOKIE_SECURE = env('SESSION_COOKIE_SECURE', False)
if not DEBUG:
    ALLOWED_HOSTS = env('ALLOWED_DOMAINS', 'localhost').split(',')
else:
    ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ['127.0.0.1', '::1']
ROOT_URLCONF = 'prs2.urls'
WSGI_APPLICATION = 'prs2.wsgi.application'
GEOSERVER_WMS_URL = env('GEOSERVER_WMS_URL', '')
GEOSERVER_WFS_URL = env('GEOSERVER_WFS_URL', '')
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
Example #27
from django import http, VERSION
from django.conf import settings
from django.contrib.auth import login, logout, get_user_model
from django.db.models import signals
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import SimpleLazyObject
from django.contrib.auth.middleware import AuthenticationMiddleware, get_user

from dbca_utils.utils import env

ENABLE_AUTH2_GROUPS = env("ENABLE_AUTH2_GROUPS", default=False)
LOCAL_USERGROUPS = env("LOCAL_USERGROUPS", default=[])


def sync_usergroups(user, groups):
    from django.contrib.auth.models import Group

    usergroups = ([
        Group.objects.get_or_create(name=name)[0] for name in groups.split(",")
    ] if groups else [])
    usergroups.sort(key=lambda o: o.id)
    existing_usergroups = list(
        user.groups.exclude(name__in=LOCAL_USERGROUPS).order_by("id"))
    index1 = 0
    index2 = 0
    len1 = len(usergroups)
    len2 = len(existing_usergroups)
    while True:
        group1 = usergroups[index1] if index1 < len1 else None
        group2 = existing_usergroups[index2] if index2 < len2 else None
        if not group1 and not group2: