Code Example #1
File: zmap.py  Project: aushack/ws-backend-community
 def create_dummy(cls):
     from lib import DatetimeHelper, WsFaker
     return ZmapResultModel(
         start_time=DatetimeHelper.minutes_ago(5),
         end_time=DatetimeHelper.now(),
         port=WsFaker.get_port(),
         discovered_endpoints=WsFaker.create_ip_addresses(),
         cmd_line=
         "zmap -B 10M -f \"saddr,daddr\" -w /tmp/white -o /tmp/zmap_output -p 80 -i en0",
         scanned_networks=WsFaker.create_class_c_networks(),
         org_uuid=WsFaker.create_uuid(),
     )
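Every example on this page calls into the project's `lib.DatetimeHelper` utility, which is not itself shown here. The following is a minimal sketch of the interface the examples appear to assume (`now`, `minutes_ago`, `seconds_from_now`); the method bodies are assumptions inferred from how the examples call them, not the project's actual implementation.

import datetime


class DatetimeHelper(object):
    """Hypothetical sketch of the datetime utility used throughout these examples."""

    @staticmethod
    def now():
        # Return the current UTC datetime (assumption: the project works in UTC).
        return datetime.datetime.utcnow()

    @classmethod
    def minutes_ago(cls, minutes):
        # Return a datetime the given number of minutes in the past.
        return cls.now() - datetime.timedelta(minutes=minutes)

    @classmethod
    def seconds_from_now(cls, seconds):
        # Return a datetime the given number of seconds in the future,
        # e.g. for use as a Celery apply_async(eta=...) value.
        return cls.now() + datetime.timedelta(seconds=seconds)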
Code Example #2
 def __check_for_close(self):
     """
     Check to see if this spider has been running for longer than the maximum amount
     of allowed time, and stop the spider if it has.
     :return: None
     """
     if self._start_time is None:
         self._start_time = DatetimeHelper.now()
     elapsed_time = (DatetimeHelper.now() - self._start_time).total_seconds()
     if elapsed_time > self.max_run_time:
         raise CloseSpider(
             "Spider run time exceeded maximum time of %s seconds. Closing."
             % (self.max_run_time, ))
Code Example #3
def initiate_network_scans_for_organization(self, org_uuid=None, requeue=True):
    """
    Kick off network scans for the given organization and queue up an additional network
    scanning task for the next interval.
    :param org_uuid: The UUID of the organization to scan.
    :param requeue: Whether or not to queue up another network scanning task for the
    configured interval.
    :return: None
    """
    logger.info(
        "Kicking off all network scans for Organization %s."
        % (org_uuid,)
    )
    zmap_scan_organization.si(org_uuid=org_uuid).apply_async()
    if requeue:
        scan_interval = get_network_scan_interval_for_organization(
            org_uuid=org_uuid,
            db_session=self.db_session,
        )
        next_time = DatetimeHelper.seconds_from_now(scan_interval)
        logger.info(
            "Queueing up an additional instance of %s in %s seconds (%s)."
            % (self.name, scan_interval, next_time)
        )
        initiate_network_scans_for_organization.si(org_uuid=org_uuid, requeue=requeue).apply_async(eta=next_time)
    else:
        logger.info("Requeueing not enabled, therefore not queueing up another network scanning task.")
Code Example #4
File: base.py  Project: aushack/ws-backend-community
 def update_field_to_now(self, key):
     """
     Add a script line to this query that updates the given field on this model to the current datetime.
     :param key: The key to update.
     :return: None
     """
     self.update_field(key=key, value=DatetimeHelper.now())
Code Example #5
def initiate_network_scans_for_order(self, order_uuid=None, requeue=False):
    """
    Initiate all of the network scans for the given order.
    :param order_uuid: The UUID of the order to initiate network scans for.
    :param requeue: Whether or not to requeue the network scans.
    :return: None
    """
    logger.info(
        "Now initiating all network scans for order %s. Requeue is %s." %
        (order_uuid, requeue))
    zmap_scan_order.si(order_uuid=order_uuid).apply_async()
    if requeue:
        org_uuid = get_org_uuid_from_order(order_uuid=order_uuid,
                                           db_session=self.db_session)
        scan_interval = get_network_scan_interval_for_organization(
            org_uuid=org_uuid,
            db_session=self.db_session,
        )
        next_time = DatetimeHelper.seconds_from_now(scan_interval)
        logger.info(
            "Queueing up an additional instance of %s in %s seconds (%s)." %
            (self.name, scan_interval, next_time))
        initiate_network_scans_for_order.si(
            org_uuid=org_uuid, requeue=requeue).apply_async(eta=next_time)
    else:
        logger.info(
            "Requeueing not enabled, therefore not queueing up another network scanning task."
        )
Code Example #6
File: dns.py  Project: aushack/ws-backend-community
def check_domain_name_scanning_status(db_session=None, domain_uuid=None, update_status=True):
    """
    Check to see whether the given domain name is currently being scanned. If it is not, then modify it to
    show that it is. Return a boolean indicating whether scanning should proceed for the given domain
    name.
    :param db_session: A SQLAlchemy session.
    :param domain_uuid: The UUID of the domain name to check.
    :param update_status: Whether or not to update the status of the domain name's current scanning state
    during the check.
    :return: True if scanning should be performed for the given domain name, False otherwise.
    """
    db_session.execute("begin;")
    current_scanning_status = get_domain_name_scanning_status(
        db_session=db_session,
        domain_uuid=domain_uuid,
        with_for_update=True,
    )
    if current_scanning_status:
        db_session.execute("end;")
        return False
    last_completed_scan = get_last_completed_domain_name_scan(db_session=db_session, domain_uuid=domain_uuid)
    if not last_completed_scan or not config.task_enforce_domain_name_scan_interval:
        do_scan = True
    else:
        now = DatetimeHelper.now().replace(tzinfo=last_completed_scan.ended_at.tzinfo)
        elapsed_seconds = (now - last_completed_scan.ended_at).total_seconds()
        if elapsed_seconds > config.task_minimum_domain_name_scan_interval:
            do_scan = True
        else:
            do_scan = False
    if do_scan and update_status:
        update_domain_name_scanning_status(db_session=db_session, domain_uuid=domain_uuid, scanning_status=True)
        db_session.commit()
    db_session.execute("end;")
    return do_scan
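The elapsed-time test in the example above can be reduced to the following standalone sketch. The interval constant and the last-scan timestamp below are hypothetical stand-ins for config.task_minimum_domain_name_scan_interval and the ended_at field of the last completed scan.

import datetime

# Hypothetical stand-ins for the project's configuration value and database record.
MINIMUM_SCAN_INTERVAL_SECONDS = 60 * 60 * 24
last_scan_ended_at = datetime.datetime.utcnow() - datetime.timedelta(hours=12)

# Put "now" in the same timezone as the stored timestamp, as the example above does,
# then only rescan if the configured interval has elapsed since the last scan ended.
now = datetime.datetime.utcnow().replace(tzinfo=last_scan_ended_at.tzinfo)
elapsed_seconds = (now - last_scan_ended_at).total_seconds()
do_scan = elapsed_seconds > MINIMUM_SCAN_INTERVAL_SECONDS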
Code Example #7
 def add_equals_now(self, key):
     """
     Add a scripting line to this script that sets the given key of the referenced model to
     the current datetime.
     :param key: The key to update.
     :return: None
     """
     self.add_equals(key=key, value=DatetimeHelper.now())
Code Example #8
def scan_ip_address_for_service_from_domain(
    self,
    org_uuid=None,
    ip_address_uuid=None,
    port=None,
    protocol=None,
    domain_uuid=None,
    domain_scan_uuid=None,
):
    """
    Check to see if the given network service port is currently open and listening at the given IP
    address as a result of resolving IP addresses for the given domain.
    :param org_uuid: The organization to scan the endpoint on behalf of.
    :param ip_address_uuid: The UUID of the IP address to scan.
    :param port: The port number to check.
    :param protocol: The networking protocol to use to connect to the endpoint.
    :param domain_uuid: The UUID of the domain name that resulted in this scan.
    :param domain_scan_uuid: The UUID of the domain name scan that this task is a part of.
    :return: None
    """
    ip_address = get_address_from_ip_address(ip_address_uuid=ip_address_uuid,
                                             db_session=self.db_session)
    logger.info(
        "Now checking to see if service at %s:%s (%s) is alive for domain %s, scan %s, organization %s."
        %
        (ip_address, port, protocol, domain_uuid, domain_scan_uuid, org_uuid))
    inspector = PortInspector(address=ip_address, port=port, protocol=protocol)
    is_open = inspector.check_if_open()
    logger.info("Service at %s:%s (%s) %s open!" %
                (ip_address, port, protocol, "is" if is_open else "is not"))
    liveness_model = DomainServiceLivenessModel.from_database_model_uuid(
        uuid=domain_scan_uuid,
        db_session=self.db_session,
        is_alive=is_open,
        checked_at=DatetimeHelper.now(),
    )
    liveness_model.save(org_uuid)
    if not is_open:
        logger.info(
            "Service at %s:%s (%s) was not open, therefore not continuing with inspection."
            % (ip_address, port, protocol))
        return
    logger.info(
        "Service at %s:%s (%s) was open! Setting up a single pass of network service monitoring."
        % (ip_address, port, protocol))
    network_service_model = get_or_create_network_service_from_org_ip(
        ip_uuid=ip_address_uuid,
        port=port,
        protocol=protocol,
        db_session=self.db_session,
        discovered_by="domain scan",
    )
    network_service_inspection_pass.si(
        service_uuid=network_service_model.uuid,
        org_uuid=org_uuid,
        schedule_again=False,
    ).apply_async()
Code Example #9
File: ip.py  Project: lavalamp-/ws-backend-community
def scan_ip_address_for_udp_network_services(
        self,
        org_uuid=None,
        ip_address_uuid=None,
        ip_address_scan_uuid=None,
        ports=None,
        order_uuid=None,
):
    """
    Scan the given IP address for UDP network services.
    :param org_uuid: The UUID of the organization to perform data retrieval on behalf of.
    :param ip_address_uuid: The UUID of the IP address to retrieve data about.
    :param ip_address_scan_uuid: The UUID of the IP address scan to associate retrieved data with.
    :param ports: A list of integers representing the ports to scan.
    :param order_uuid: The UUID of the order that this scan is associated with.
    :return: None
    """
    logger.info(
        "Now scanning IP address %s for UDP ports (%s total)."
        % (ip_address_uuid, len(ports))
    )
    start_time = DatetimeHelper.now()
    open_udp_ports = self.inspector.scan_for_open_udp_ports(ports=ports, db_session=self.db_session)
    end_time = DatetimeHelper.now()
    port_statuses = []
    for open_port in open_udp_ports:
        port_statuses.append({
            "port_number": open_port,
            "port_status": "open",
            "port_protocol": "udp",
        })
    scan_model = IpPortScanModel.from_database_model(
        self.ip_address_scan,
        port_results=port_statuses,
        port_scan_method="nmap",
        scan_start_time=start_time,
        scan_end_time=end_time,
    )
    scan_model.save(org_uuid)
    logger.info(
        "IP address %s scanned for %s UDP ports (%s were open)."
        % (ip_address_uuid, len(ports), len(open_udp_ports))
    )
Code Example #10
File: dns.py  Project: aushack/ws-backend-community
def update_domain_name_scan_completed(scan_uuid=None, db_session=None):
    """
    Update the given DomainNameScan to reflect that scanning has concluded.
    :param scan_uuid: The UUID of the domain name scan to update.
    :param db_session: A SQLAlchemy session.
    :return: None
    """
    update_dict = {
        "ended_at": DatetimeHelper.now(),
    }
    update_domain_name_scan(scan_uuid=scan_uuid, db_session=db_session, update_dict=update_dict)
Code Example #11
def create_ip_address_scan_for_ip(ip_address_uuid):
    """
    Create and return a new IP address scan associated with the given IP address.
    :param ip_address_uuid: The UUID of the IP address to associate the scan with.
    :return: A newly-created IP address scan associated with the given IP address.
    """
    ip_address_uuid = ConversionHelper.string_to_unicode(ip_address_uuid)
    return IpAddressScan.new(
        ip_address_id=ip_address_uuid,
        started_at=DatetimeHelper.now(),
    )
Code Example #12
 def __init__(self,
              is_alive=None,
              checked_at=None,
              liveness_cause=None,
              **kwargs):
     super(NetworkServiceLivenessModel, self).__init__(**kwargs)
     self.is_alive = is_alive
     if checked_at is None:
         checked_at = DatetimeHelper.now()
     self.checked_at = checked_at
     self.liveness_cause = liveness_cause
Code Example #13
File: web.py  Project: aushack/ws-backend-community
def update_web_service_scan_completed(scan_uuid=None, db_session=None):
    """
    Update the given WebServiceScan to reflect that scanning has concluded.
    :param scan_uuid: The UUID of the WebServiceScan to update.
    :param db_session: A SQLAlchemy session.
    :return: None
    """
    update_dict = {"ended_at": DatetimeHelper.now()}
    update_web_service_scan(scan_uuid=scan_uuid,
                            update_dict=update_dict,
                            db_session=db_session)
Code Example #14
File: dns.py  Project: aushack/ws-backend-community
def create_domain_scan_for_domain(domain_uuid):
    """
    Create and return a new DomainNameScan object associated with the given domain name.
    :param domain_uuid: The UUID of the DomainName to associate the scan with.
    :return: The newly-created DomainNameScan object.
    """
    domain_uuid = ConversionHelper.string_to_unicode(domain_uuid)
    return DomainNameScan.new(
        domain_name_id=domain_uuid,
        started_at=DatetimeHelper.now(),
    )
Code Example #15
def update_ip_address_scan_completed(scan_uuid=None, db_session=None):
    """
    Update the given IP address scan to indicate that the scan has completed.
    :param scan_uuid: The UUID of the IP address scan to update.
    :param db_session: A SQLAlchemy session.
    :return: None
    """
    update_dict = {
        "ended_at": DatetimeHelper.now(),
    }
    update_ip_address_scan(scan_uuid=scan_uuid,
                           db_session=db_session,
                           update_dict=update_dict)
Code Example #16
def create_network_scan_for_organization(db_session=None, org_uuid=None):
    """
    Create a new NetworkScan object, associate it with the given organization, and return it.
    :param db_session: A SQLAlchemy session.
    :param org_uuid: The UUID of the organization to associate the network scan with.
    :return: The newly-created NetworkScan object.
    """
    new_scan = OrganizationNetworkScan.new(
        started_at=DatetimeHelper.now(),
        organization_id=org_uuid,
    )
    db_session.add(new_scan)
    return new_scan
Code Example #17
File: zmap.py  Project: aushack/ws-backend-community
 def _populate_dummy(cls, to_populate):
     from lib import WsFaker, DatetimeHelper
     to_populate.start_time = WsFaker.get_time_in_past()
     to_populate.end_time = DatetimeHelper.now()
     to_populate.cmd_line = WsFaker.get_command_line()
     discovered_endpoints = WsFaker.create_ip_addresses()
     to_populate.discovered_endpoints = discovered_endpoints
     to_populate.live_service_count = len(discovered_endpoints)
     to_populate.port = WsFaker.get_port()
     to_populate.protocol = WsFaker.get_network_protocol()
     scanned_networks = WsFaker.get_networks()
     to_populate.scanned_networks = scanned_networks
     to_populate.scanned_networks_count = len(scanned_networks)
     return to_populate
Code Example #18
File: web.py  Project: aushack/ws-backend-community
def create_new_web_service_scan(web_service_uuid=None, db_session=None):
    """
    Create a new WebServiceScan, populate it, and return it.
    :param web_service_uuid: The UUID of the WebService to associate the scan with.
    :param db_session: A SQLAlchemy session.
    :return: The newly-created WebServiceScan.
    """
    web_service_uuid = ConversionHelper.string_to_unicode(web_service_uuid)
    new_scan = WebServiceScan.new(
        web_service_id=web_service_uuid,
        started_at=DatetimeHelper.now(),
    )
    db_session.add(new_scan)
    return new_scan
Code Example #19
def create_new_network_service_scan(network_service_uuid=None,
                                    db_session=None):
    """
    Create and return a new NetworkServiceScan object and ensure that it's related to the
    given NetworkService.
    :param network_service_uuid: The UUID of the NetworkService to associate the NetworkServiceScan with.
    :param db_session: A SQLAlchemy session.
    :return: The newly-created NetworkServiceScan.
    """
    network_service_uuid = ConversionHelper.string_to_unicode(
        network_service_uuid)
    to_return = NetworkServiceScan.new(
        network_service_id=network_service_uuid,
        started_at=DatetimeHelper.now(),
    )
    db_session.add(to_return)
    return to_return
Code Example #20
def task_prerun_handler(signal, sender, task_id, task, *args, **kwargs):
    """
    Handle any preparations required for a task before it runs.
    :param signal: The signal that was processed.
    :param sender: The entity that sent the task.
    :param task_id: The task's ID.
    :param task: The task itself.
    :param args: Positional arguments for the task.
    :param kwargs: Keyword arguments for the task.
    :return: None
    """
    from .tasks.base import DatabaseTask
    task._start_time = DatetimeHelper.now()
    if isinstance(task, DatabaseTask):
        if task._db_session is not None:
            logger.debug(
                "Task %s (%s) has a non-None db_session. Resetting now." %
                (task.name, task.id))
            task._db_session = None
Code Example #21
def handle_scanning_order_from_pubsub(self, org_uuid=None, targets=None):
    """
    Create and kick off a scanning order for the given organization based on the given list of
    targets to scan.
    :param org_uuid: The UUID of the organization to create the scan for.
    :param targets: A list of the targets to perform the scan on.
    :return: None
    """
    logger.error("HERE WE ARE CHIPPY CHAP: %s, %s" % (org_uuid, targets))
    try:
        organization = Organization.by_uuid(uuid=org_uuid,
                                            db_session=self.db_session)
    except sqlalchemy.orm.exc.NoResultFound:
        self.pubsub_manager.send_scan_error_message(
            "No organization was found for the UUID %s." % (org_uuid, ))
        return
    if not organization.scan_config:
        self.pubsub_manager.send_scan_error_message(
            "There was no scanning configuration associated with the organization %s."
            % (org_uuid, ))
        return
    self.db_session.execute("begin;")
    self.db_session.begin_nested()

    # Create the order

    admin_group = filter(lambda x: x.name == "org_admin",
                         organization.auth_groups)[0]
    admin_user = admin_group.users[0]
    new_order = Order.new(
        started_at=datetime.now(),
        user_email=admin_user.email,
        scoped_domains_count=0,
        scoped_endpoints_count=0,
        scoped_endpoints_size=0,
        has_been_placed=True,
        organization_id=org_uuid,
        user_id=admin_user.uuid,
    )
    self.db_session.add(new_order)
    print("ORDER IS %s (%s) " % (new_order, new_order.uuid))

    # Duplicate the ScanConfig and associate it with the order

    scan_config, new_models = organization.scan_config.duplicate()
    scan_config.order_id = new_order.uuid
    for new_model in new_models:
        self.db_session.add(new_model)

    # Process all of the targets for the order

    skipped_targets = []
    too_soon_targets = []
    domains = []
    networks = []

    for target in targets:
        target = target.strip()
        if RegexLib.domain_name_regex.match(target):
            domain_name = get_or_create_domain_name_for_organization(
                db_session=self.db_session,
                name=target,
                added_by="quick_scan",
                org_uuid=org_uuid,
                nest_transaction=True,
            )
            self.db_session.add(domain_name)
            time_since_scan = get_time_since_scanned(domain_name)
            if time_since_scan < config.task_domain_scanning_interval:
                too_soon_targets.append(target)
            else:
                new_order_domain = OrderDomainName.new(
                    name=target,
                    order_id=new_order.uuid,
                    domain_name_id=domain_name.uuid,
                )
                domains.append(new_order_domain)
        elif RegexLib.ipv4_cidr_regex.match(target):
            address, mask_length = target.split("/")
            mask_length = int(mask_length)
            network = get_or_create_network_for_organization(
                db_session=self.db_session,
                added_by="quick_scan",
                org_uuid=org_uuid,
                address=address,
                mask_length=mask_length,
                nest_transaction=True,
            )
            self.db_session.add(network)
            time_since_scan = get_time_since_scanned(network)
            if time_since_scan < config.task_network_scanning_interval:
                too_soon_targets.append(target)
            else:
                new_order_network = OrderNetwork.new(
                    network_cidr=target,
                    order_id=new_order.uuid,
                    network_id=network.uuid,
                )
                networks.append(new_order_network)
        elif RegexLib.ipv4_address_regex.match(target):
            network = get_or_create_network_for_organization(
                db_session=self.db_session,
                added_by="quick_scan",
                org_uuid=org_uuid,
                address=target,
                mask_length=32,
                nest_transaction=True,
            )
            self.db_session.add(network)
            time_since_scan = get_time_since_scanned(network)
            if time_since_scan < config.task_network_scanning_interval:
                too_soon_targets.append(target)
            else:
                new_order_network = OrderNetwork.new(
                    network_cidr=target,
                    order_id=new_order.uuid,
                    network_id=network.uuid,
                )
                networks.append(new_order_network)
        else:
            skipped_targets.append(target)

    # Check that everything is good to go and roll back if it's not

    total_count = len(domains) + len(networks)
    if total_count == 0:
        self.db_session.rollback()
        self.pubsub_manager.send_scan_error_message(
            "There were no targets defined for the scan.")
        return

    # Update all of the last scan times for the targets

    self.db_session.commit()
    self.db_session.execute("end;")
    for domain in domains:
        self.db_session.add(domain)
    for network in networks:
        self.db_session.add(network)
    self.db_session.commit()
    update_last_scanning_times_for_order(
        order_uuid=new_order.uuid,
        db_session=self.db_session,
        scan_time=DatetimeHelper.now(),
    )

    # Send success message to PubSub

    self.pubsub_manager.send_scan_success_message(
        org_uuid=org_uuid,
        order_uuid=str(new_order.uuid),
        skipped_targets=skipped_targets,
        too_soon_targets=too_soon_targets,
        domains=[x.name for x in domains],
        networks=[x.network_cidr for x in networks],
    )

    # Kick off the order

    handle_placed_order.delay(order_uuid=unicode(new_order.uuid))
Code Example #22
File: base.py  Project: aushack/ws-backend-community
def network_service_inspection_pass(self,
                                    service_uuid=None,
                                    org_uuid=None,
                                    schedule_again=True):
    """
    This task performs a single network service scan pass, doing all of the necessary things to
    check on the state of a network service.
    :param service_uuid: The UUID of the OrganizationNetworkService to monitor.
    :param org_uuid: The UUID of the organization to monitor the network service on behalf of.
    :param schedule_again: Whether or not to schedule another monitoring task.
    :return: None
    """
    logger.info(
        "Now starting pass for network service %s. Organization is %s." %
        (service_uuid, org_uuid))

    # TODO check to see if the network service has been dead for the past N times and don't requeue if it has

    should_scan = check_network_service_scanning_status(
        db_session=self.db_session, service_uuid=service_uuid)
    if not should_scan:
        logger.info(
            "Network service %s either is already being scanned or has been scanned too recently to continue now."
            % (service_uuid, ))
        return
    ip_address, port, protocol = self.get_endpoint_information(service_uuid)
    network_service_scan = create_new_network_service_scan(
        network_service_uuid=service_uuid,
        db_session=self.db_session,
    )
    task_signatures = []
    task_signatures.append(
        perform_network_service_inspection.si(
            org_uuid=org_uuid,
            service_uuid=service_uuid,
            scan_uuid=network_service_scan.uuid,
        ))
    task_signatures.append(
        analyze_network_service_scan.si(
            network_service_scan_uuid=network_service_scan.uuid,
            org_uuid=org_uuid,
        ))
    task_signatures.append(
        update_network_service_scan_elasticsearch.si(
            network_service_scan_uuid=network_service_scan.uuid,
            org_uuid=org_uuid,
            network_service_uuid=service_uuid,
        ))
    task_signatures.append(
        update_network_service_scan_completed.si(
            network_service_scan_uuid=network_service_scan.uuid,
            org_uuid=org_uuid,
            network_service_uuid=service_uuid,
        ))
    task_signatures.append(
        inspect_service_application.si(
            org_uuid=org_uuid,
            network_service_scan_uuid=network_service_scan.uuid,
            network_service_uuid=service_uuid,
        ))
    canvas_sig = chain(task_signatures)
    canvas_sig.apply_async()
    if not config.task_network_service_monitoring_enabled:
        logger.info(
            "Not scheduling another monitoring task as network service monitoring is disabled."
        )
    elif not schedule_again:
        logger.info(
            "Not scheduling another monitoring task as schedule_again was False."
        )
    else:
        scan_interval = get_network_service_scan_interval_for_organization(
            org_uuid=org_uuid,
            db_session=self.db_session,
        )
        next_time = DatetimeHelper.seconds_from_now(scan_interval)
        logger.info(
            "Queueing up an additional instance of %s in %s seconds (%s). Endpoint is %s:%s (%s)."
            %
            (self.name, scan_interval, next_time, ip_address, port, protocol))
        init_sig = network_service_inspection_pass.si(
            service_uuid=service_uuid,
            org_uuid=org_uuid,
            schedule_again=True,
        )
        init_sig.apply_async(eta=next_time)
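The canvas pattern in the example above (collect immutable Celery signatures with .si() and run them in order with chain) can be shown in isolation as below. The app, broker URL, and task names here are hypothetical placeholders, not tasks from the project; no worker is started, the sketch only demonstrates how the chain is built and dispatched.

from celery import Celery, chain

# Hypothetical app using the in-memory broker; the real project configures its own broker.
app = Celery("sketch", broker="memory://")


@app.task
def step_one(scan_uuid=None):
    # Placeholder standing in for a task such as perform_network_service_inspection.
    return scan_uuid


@app.task
def step_two(scan_uuid=None):
    # Placeholder standing in for a follow-up task such as analyze_network_service_scan.
    return scan_uuid


# Immutable signatures (.si) ignore the previous task's return value, so each step
# receives only the keyword arguments bound here, as in the example above.
task_signatures = [
    step_one.si(scan_uuid="example-uuid"),
    step_two.si(scan_uuid="example-uuid"),
]
canvas_sig = chain(task_signatures)
canvas_sig.apply_async()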
Code Example #23
def check_tcp_service_for_liveness(
        self,
        org_uuid=None,
        do_fingerprinting=True,
        do_ssl_inspection=True,
        scan_uuid=None,
        service_uuid=None,
):
    """
    Check to see if the given TCP network service is alive.
    :param org_uuid: The UUID of the organization to check the service on behalf of.
    :param do_fingerprinting: Whether or not to continue with service fingerprinting.
    :param do_ssl_inspection: Whether or not to gather information about SSL if the service
    is alive and supports SSL.
    :param scan_uuid: The UUID of the network service scan that this liveness check is associated
    with.
    :param service_uuid: The UUID of the network service to check for liveness.
    :return: None
    """
    ip_address, port, protocol = self.get_endpoint_information(service_uuid)
    logger.info(
        "Checking to see if TCP service at %s:%s is alive for organization %s. Scan is %s."
        % (ip_address, port, org_uuid, scan_uuid)
    )
    inspector = PortInspector(address=ip_address, port=port, protocol="tcp")
    service_alive = inspector.check_if_open()
    liveness_check = NetworkServiceLivenessModel.from_database_model_uuid(
        uuid=scan_uuid,
        db_session=self.db_session,
        is_alive=service_alive,
        checked_at=DatetimeHelper.now(),
    )
    liveness_check.save(org_uuid)
    logger.info(
        "TCP service at %s:%s %s alive. Updated Elasticsearch successfully."
        % (ip_address, port, "is" if service_alive else "is not")
    )
    next_tasks = []
    if not service_alive:
        logger.info(
            "TCP service at %s:%s was not alive. Not performing any additional inspection."
            % (ip_address, port)
        )
        return
    if do_ssl_inspection:
        next_tasks.append(inspect_tcp_service_for_ssl_support.si(
            org_uuid=org_uuid,
            network_service_uuid=service_uuid,
            network_service_scan_uuid=scan_uuid,
        ))
    if do_fingerprinting:
        next_tasks.append(fingerprint_tcp_service.si(
            org_uuid=org_uuid,
            service_uuid=service_uuid,
            scan_uuid=scan_uuid,
        ))
    if len(next_tasks) > 0:
        logger.info(
            "Kicking off %s tasks to continue investigation of TCP service at %s:%s for organization %s."
            % (len(next_tasks), ip_address, port, org_uuid)
        )
        canvas_sig = chain(next_tasks)
        self.finish_after(signature=canvas_sig)
    else:
        logger.info(
            "No further tasks to be performed after TCP liveness check at %s:%s for organization %s."
            % (ip_address, port, org_uuid)
        )
Code Example #24
 def _populate_dummy(cls, to_populate):
     from lib import RandomHelper, DatetimeHelper, WsFaker
     to_populate.is_alive = RandomHelper.flip_coin()
     to_populate.checked_at = DatetimeHelper.now()
     to_populate.liveness_cause = WsFaker.get_word()
     return to_populate
Code Example #25
 def _populate_dummy(cls, to_populate):
     from lib import RandomHelper, DatetimeHelper
     to_populate.is_alive = RandomHelper.flip_coin()
     to_populate.checked_at = DatetimeHelper.now()
     return to_populate
Code Example #26
 def __init__(self, org_uuid=None, org_name=None, *args, **kwargs):
     super(UserOrganizationSelectModel, self).__init__(*args, **kwargs)
     self.org_uuid = org_uuid
     self.org_name = org_name
     self.selected_at = DatetimeHelper.now()
Code Example #27
 def __init__(self, **kwargs):
     kwargs["value"] = DatetimeHelper.now()
     super(NowUpdateElasticsearchScript, self).__init__(**kwargs)
Code Example #28
 def _populate_dummy(cls, to_populate):
     from lib import WsFaker, RandomHelper
     to_populate.uses_wordpress = RandomHelper.flip_coin()
     to_populate.uses_iis = RandomHelper.flip_coin()
     to_populate.uses_apache = RandomHelper.flip_coin()
     to_populate.uses_nginx = RandomHelper.flip_coin()
     to_populate.total_header_count = WsFaker.get_random_int(minimum=1,
                                                             maximum=500)
     to_populate.unique_header_count = WsFaker.get_random_int(minimum=1,
                                                              maximum=500)
     to_populate.server_headers = WsFaker.get_server_header_values()
     to_populate.transactions_count = WsFaker.get_random_int(minimum=1,
                                                             maximum=500)
     to_populate.ok_count = WsFaker.get_random_int(minimum=1, maximum=500)
     to_populate.has_ok = to_populate.ok_count > 0
     to_populate.redirect_count = WsFaker.get_random_int(minimum=1,
                                                         maximum=500)
     to_populate.has_redirect = to_populate.redirect_count > 0
     to_populate.client_error_count = WsFaker.get_random_int(minimum=1,
                                                             maximum=500)
     to_populate.has_client_error = to_populate.client_error_count > 0
     to_populate.server_error_count = WsFaker.get_random_int(minimum=1,
                                                             maximum=500)
     to_populate.has_server_error = to_populate.server_error_count > 0
     to_populate.total_resource_size = WsFaker.get_random_int(
         minimum=100000, maximum=500000)
     to_populate.uses_tomcat_management_portal = RandomHelper.flip_coin()
     to_populate.has_screenshots = RandomHelper.flip_coin()
     to_populate.screenshots_count = WsFaker.get_random_int(minimum=1,
                                                            maximum=10)
     to_populate.main_screenshot_s3_bucket = WsFaker.get_s3_bucket()
     to_populate.main_screenshot_s3_key = WsFaker.get_s3_key()
     to_populate.response_count = WsFaker.get_random_int(minimum=1,
                                                         maximum=10)
     to_populate.redirect_301_count = WsFaker.get_random_int(minimum=1,
                                                             maximum=10)
     to_populate.redirect_302_count = WsFaker.get_random_int(minimum=1,
                                                             maximum=10)
     to_populate.all_responses_redirects = RandomHelper.flip_coin()
     to_populate.all_responses_server_errors = RandomHelper.flip_coin()
     to_populate.all_responses_client_errors = RandomHelper.flip_coin()
     to_populate.response_statuses = WsFaker.get_http_response_statuses()
     to_populate.hostname_resolves = RandomHelper.flip_coin()
     to_populate.resolved_ip_matches_hostname = RandomHelper.flip_coin()
     to_populate.response_content_types = WsFaker.get_response_content_types()
     to_populate.www_authenticate_headers = WsFaker.get_words()
     to_populate.has_www_authenticate_headers = RandomHelper.flip_coin()
     to_populate.has_basic_auth = RandomHelper.flip_coin()
     to_populate.has_digest_auth = RandomHelper.flip_coin()
     to_populate.has_ntlm_auth = RandomHelper.flip_coin()
     to_populate.basic_auth_realms = WsFaker.get_words()
     to_populate.has_server_headers = RandomHelper.flip_coin()
     to_populate.has_multiple_server_headers = RandomHelper.flip_coin()
     to_populate.all_responses_not_found = RandomHelper.flip_coin()
     to_populate.resolved_ip_address = WsFaker.get_ipv4_address()
     to_populate.ssl_certificate_cname = WsFaker.get_domain_name()
     to_populate.ssl_certificate_expired = RandomHelper.flip_coin()
     to_populate.ssl_certificate_is_valid = RandomHelper.flip_coin()
     to_populate.ssl_certificate_start_time = WsFaker.get_time_in_past()
     to_populate.ssl_certificate_invalid_time = WsFaker.get_time_in_future()
     to_populate.scan_completed_at = DatetimeHelper.now()
     to_populate.hostname_is_ip_address = RandomHelper.flip_coin()
     to_populate.has_ssl_certificate_data = RandomHelper.flip_coin()
     to_populate.ssl_certificate_md5_digest = WsFaker.get_md5_string()
     to_populate.open_ports = WsFaker.get_web_app_open_ports()
     to_populate.landing_header_redirect_location = WsFaker.get_url()
     to_populate.landing_meta_refresh_location = WsFaker.get_url()
     to_populate.landing_response_status = WsFaker.get_http_response_status()
     to_populate.landing_title = " ".join(WsFaker.get_words())
     to_populate.local_login_form_count = WsFaker.get_random_int()
     to_populate.local_login_form_https_count = WsFaker.get_random_int()
     to_populate.remote_login_form_count = WsFaker.get_random_int()
     to_populate.remote_login_form_https_count = WsFaker.get_random_int()
     to_populate.user_agent_fingerprints = WsFaker.get_user_agent_fingerprints()
     return to_populate
Code Example #29
File: base.py  Project: aushack/ws-backend-community
 def __init__(self, created=None):
     self._id = None
     if created is None:
         created = DatetimeHelper.now()
     self.created = created
Code Example #30
 def _to_es_model(self):
     from wselasticsearch.models import WebServiceReportModel
     to_return = WebServiceReportModel(
         uses_wordpress=self.uses_wordpress,
         uses_iis=self.uses_iis,
         uses_apache=self.uses_apache,
         uses_nginx=self.uses_nginx,
         uses_tomcat_management_portal=self.uses_tomcat_management_portal,
         total_header_count=self.response_header_count,
         unique_header_count=self.unique_header_count,
         transactions_count=self.transactions_count,
         ok_count=self.ok_count,
         has_ok=self.ok_count > 0,
         redirect_count=self.redirect_count,
         has_redirect=self.redirect_count > 0,
         client_error_count=self.client_error_count,
         has_client_error=self.client_error_count > 0,
         server_error_count=self.server_error_count,
         has_server_error=self.server_error_count > 0,
         total_resource_size=self.total_resource_size,
         server_headers=self.server_headers,
         has_screenshots=self.has_screenshots,
         screenshots_count=self.screenshots_count,
         main_screenshot_s3_bucket=self.main_screenshot["s3_bucket"] if self.main_screenshot else None,
         main_screenshot_s3_key=self.main_screenshot["s3_key"] if self.main_screenshot else None,
         response_count=self.response_count,
         redirect_301_count=self.redirect_301_count,
         redirect_302_count=self.redirect_302_count,
         all_responses_redirects=self.all_responses_redirects,
         all_responses_server_errors=self.all_responses_server_errors,
         all_responses_client_errors=self.all_responses_client_errors,
         response_statuses=self.response_statuses,
         hostname_resolves=self.hostname_resolves,
         resolved_ip_matches_hostname=self.resolved_ip_matches_hostname,
         response_content_types=self.response_content_types,
         www_authenticate_headers=self.www_authenticate_headers,
         has_www_authenticate_headers=self.has_www_authenticate_headers,
         has_basic_auth=self.has_basic_auth,
         has_digest_auth=self.has_digest_auth,
         has_ntlm_auth=self.has_ntlm_auth,
         basic_auth_realms=self.basic_auth_realms,
         has_server_headers=self.has_server_headers,
         has_multiple_server_headers=self.has_multiple_server_headers,
         all_responses_not_found=self.all_responses_not_found,
         resolved_ip_address=self.resolved_ip_address,
         scan_completed_at=DatetimeHelper.now(),
         hostname_is_ip_address=self.hostname_is_ip_address,
         open_ports=self.open_ports,
         landing_header_redirect_location=self.landing_header_redirect_location,
         landing_meta_refresh_location=self.landing_meta_refresh_location,
         landing_response_status=self.landing_response_status,
         landing_title=self.landing_title,
         local_login_form_count=self.local_login_form_count,
         local_login_form_https_count=self.local_login_form_https_count,
         remote_login_form_count=self.remote_login_form_count,
         remote_login_form_https_count=self.remote_login_form_https_count,
         user_agent_fingerprints=self.user_agent_fingerprints,
     )
     to_return.populate_from_ssl_support(self.ssl_support_report)
     return to_return