Exemplo n.º 1
0
def start_scan(auth_token, datasource_id, domain_id, user_email):
    """Kick off the Slack scan for a datasource.

    Creates one DatasourceScanners row per scanner type (apps, users,
    channels) and triggers a GET event so each scanner starts pulling
    its Slack entities.
    """
    session = db_connection().get_session()
    for scan_kind in (slack_constants.ScannerTypes.APPS.value,
                      slack_constants.ScannerTypes.USERS.value,
                      slack_constants.ScannerTypes.CHANNELS.value):
        record = DatasourceScanners()
        record.datasource_id = datasource_id
        record.scanner_type = scan_kind
        record.channel_id = str(uuid.uuid4())
        record.user_email = user_email
        record.started_at = datetime.utcnow()
        record.in_progress = 1
        session.add(record)
        db_connection().commit()
        params = {
            "dataSourceId": datasource_id,
            "domainId": domain_id,
            "scannerId": record.id,
            "change_type": slack_constants.AppChangedTypes.ADDED.value
        }
        messaging.trigger_get_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                    params, "slack")
Exemplo n.º 2
0
def post_process(db_session, auth_token, query_params):
    """After the ADDED apps pass finishes, rerun the scan for REMOVED apps."""
    removed = slack_constants.AppChangedTypes.REMOVED.value
    if query_params["change_type"] != removed:
        query_params["change_type"] = removed
        messaging.trigger_get_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                    query_params, "slack")
Exemplo n.º 3
0
def post_process(db_session, auth_token, query_params):
    """Start a Slack FILES scanner for every internal user of the datasource."""
    domain_id = query_params["domainId"]
    datasource_id = query_params["dataSourceId"]
    started_time = datetime.utcnow()
    members = db_session.query(DomainUser).filter(
        and_(
            DomainUser.datasource_id == datasource_id,
            DomainUser.type == constants.DirectoryEntityType.USER.value,
            DomainUser.member_type ==
            constants.EntityExposureType.INTERNAL.value)).all()
    for member in members:
        file_scanner = DatasourceScanners()
        file_scanner.datasource_id = datasource_id
        file_scanner.scanner_type = slack_constants.ScannerTypes.FILES.value
        file_scanner.channel_id = str(uuid.uuid4())
        file_scanner.user_email = member.email
        file_scanner.started_at = started_time
        file_scanner.in_progress = 1
        db_session.add(file_scanner)
        db_connection().commit()
        params = {
            'domainId': domain_id,
            'dataSourceId': datasource_id,
            'scannerId': str(file_scanner.id),
            'userId': member.user_id,
            'userEmail': member.email
        }
        messaging.trigger_get_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                    params, "slack")
Exemplo n.º 4
0
def create_datasource(auth_token, access_token, scope, user_email):
    """Create a GitHub datasource and its credentials, then start the scan.

    The datasource is attached to the logged-in user's domain; the GitHub
    "domain id" is derived from the user's email domain when available.
    """
    session = db_connection().get_session()
    login_user = session.query(LoginUser).filter(
        LoginUser.auth_token == auth_token).first()

    datasource = DataSource()
    datasource.datasource_id = str(uuid.uuid4())
    datasource.domain_id = login_user.domain_id
    datasource.display_name = login_user.domain_id
    datasource.creation_time = datetime.datetime.utcnow()
    datasource.datasource_type = constants.ConnectorTypes.GITHUB.value
    datasource.is_push_notifications_enabled = 0
    session.add(datasource)
    db_connection().commit()

    # Use the email's domain part when we have an email, else the login domain.
    github_domain_id = user_email.split('@')[1] if user_email else login_user.domain_id

    creds = DatasourceCredentials()
    creds.datasource_id = datasource.datasource_id
    creds.created_user = user_email
    creds.credentials = json.dumps({'domain_id': github_domain_id,
                                    'authorize_scope_name': scope,
                                    'token': access_token})
    session.add(creds)
    db_connection().commit()

    params = {"domainId": github_domain_id,
              "dataSourceId": datasource.datasource_id,
              "userEmail": login_user.email}
    messaging.trigger_get_event(urls.GITHUB_SCAN_UPDATE, auth_token, params, "github")
    
Exemplo n.º 5
0
def get_accesslogs(datasource_id, page_num=1):
    """Pull one page of Slack team access logs and record last-login times.

    Updates DomainUser.last_login_time for each login entry on the page.
    If more pages remain, triggers this endpoint again for the next page
    and returns ACCEPTED; otherwise (or on a failed API call) returns
    SUCCESS.
    """
    db_session = db_connection().get_session()
    slack_client = get_slack_client(datasource_id)
    login_user_list = slack_client.api_call("team.accessLogs",
                                            count=100,
                                            page=page_num)

    # api_call returns {"ok": bool, ...}; the former
    # `True if x['ok'] == True else False` dance was redundant.
    if login_user_list['ok']:
        current_page = login_user_list['page']
        total_pages = login_user_list['paging']['pages']
        for user in login_user_list['logins']:
            last_login = datetime.datetime.fromtimestamp(user['date_last'])
            user_id = user['user_id']
            db_session.query(DomainUser).filter(
                and_(DomainUser.datasource_id == datasource_id, DomainUser.user_id == user_id)). \
                update({DomainUser.last_login_time: last_login})
        db_connection().commit()

        if current_page != total_pages:
            # More pages to fetch; re-trigger asynchronously for the next one.
            query_param = {
                'datasource_id': datasource_id,
                'page_num': current_page + 1
            }
            messaging.trigger_get_event(urls.SLACK_ACCESSLOGS,
                                        constants.INTERNAL_SECRET, query_param,
                                        constants.ConnectorTypes.SLACK.value)
            return constants.ACCEPTED_STATUS_CODE

    return constants.SUCCESS_STATUS_CODE
Exemplo n.º 6
0
def create_datasource(auth_token, access_token, scopes, team_id, domain, email_domain_id):
    """Create a Slack datasource plus stored credentials and start the scan.

    Returns the newly created DataSource row.
    """
    created_at = datetime.datetime.utcnow()
    session = db_connection().get_session()
    login_user = session.query(LoginUser).filter(
        LoginUser.auth_token == auth_token).first()

    datasource = DataSource()
    datasource.domain_id = login_user.domain_id
    datasource.datasource_id = str(uuid.uuid4())
    datasource.display_name = domain
    datasource.creation_time = created_at
    datasource.is_push_notifications_enabled = 0
    datasource.datasource_type = constants.ConnectorTypes.SLACK.value
    datasource.source_id = team_id
    session.add(datasource)
    db_connection().commit()

    creds = DatasourceCredentials()
    creds.datasource_id = datasource.datasource_id
    creds.credentials = json.dumps({'team_id': team_id,
                                    'domain_id': email_domain_id,
                                    'domain_name': domain,
                                    'authorize_scope_name': scopes,
                                    'token': access_token})
    creds.created_user = login_user.email
    session.add(creds)
    db_connection().commit()

    params = {"domainId": email_domain_id,
              "dataSourceId": datasource.datasource_id,
              "userEmail": login_user.email}
    messaging.trigger_get_event(urls.SCAN_SLACK_UPDATE, auth_token, params, "slack")

    return datasource
Exemplo n.º 7
0
def request_scanner_data(auth_token, query_params):
    """Fetch one page of Slack scan data for a scanner and fan it out.

    Queries the scanner processor for the next page of entities, records
    progress counters, and either completes the scan (posts to
    SCAN_SLACK_UPDATE) when an empty page comes back, or posts the fetched
    entities in batches of 30 to SCAN_SLACK_ENTITIES for processing.
    """
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]

    db_session = db_connection().get_session()
    scanner = db_session.query(DatasourceScanners).filter(
        and_(DatasourceScanners.datasource_id == datasource_id,
             DatasourceScanners.id == scanner_id)).first()
    if not scanner:
        return

    response = get_scanner_processor(scanner.scanner_type).query(
        auth_token, query_params, scanner)
    # Guard against processors that omit "nextPageNumber" — the previous
    # direct key access raised KeyError (the GitHub variant already guards).
    next_page_token = response.get("nextPageNumber")
    if next_page_token:
        scanner.page_token = str(next_page_token)
        query_params["nextPageNumber"] = scanner.page_token
        messaging.trigger_get_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                    query_params, "slack")
    else:
        scanner.page_token = ""

    entities_list = response["payload"]
    fetched_entities_count = len(entities_list)

    # An empty page means this scanner is done.
    in_progress = 0 if fetched_entities_count < 1 else 1
    db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.total_count: DatasourceScanners.total_count + fetched_entities_count,
            DatasourceScanners.query_status: DatasourceScanners.query_status + 1})

    if in_progress == 0:
        db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: in_progress})
        db_connection().commit()
        messaging.trigger_post_event(urls.SCAN_SLACK_UPDATE, auth_token,
                                     query_params, {}, "slack")
        return

    # Bump the datasource-level metric column for this scanner type, if any.
    datasource_metric_column = get_datasource_column(scanner.scanner_type)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
                update({datasource_metric_column: datasource_metric_column + fetched_entities_count})
    db_connection().commit()

    batch_size = 30
    sent_member_count = 0
    while sent_member_count < fetched_entities_count:
        scanner_data = {
            "entities": entities_list[sent_member_count:sent_member_count + batch_size]
        }
        # If this is the last batch and there is no further page, send an
        # empty next page number so the processor knows the scan part ended.
        if fetched_entities_count - sent_member_count <= batch_size and not scanner.page_token:
            query_params["nextPageNumber"] = ""
        messaging.trigger_post_event(urls.SCAN_SLACK_ENTITIES, auth_token,
                                     query_params, scanner_data, "slack")
        sent_member_count += batch_size
Exemplo n.º 8
0
def request_scanner_data(auth_token, query_params):
    """Fetch one page of GitHub scan data for a scanner and fan it out.

    Queries the scanner processor for the next page of entities, records
    progress counters, and either completes the scan (posts to
    GITHUB_SCAN_UPDATE) when an empty page comes back, or posts the
    fetched entities in batches to GITHUB_SCAN_ENTITIES for processing.
    """
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]

    db_session = db_connection().get_session()
    scanner = db_session.query(DatasourceScanners).filter(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id).first()
    if not scanner:
        return

    response = None
    try:
        response = get_scanner_processor(scanner.scanner_type).query(auth_token, query_params, scanner)
    except Exception as ex:
        # On a query failure, flag the scanner as not in progress so the
        # scan does not hang forever, then bail out.
        Logger().exception("Exception occurred while querying scan data for - {} - {} ".format(query_params, ex))
        db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: 0})
        db_connection().commit()
        return
    # Persist the next page token and re-trigger ourselves for that page.
    next_page_token = response["nextPageNumber"] if "nextPageNumber" in response else None
    if next_page_token:
        scanner.next_page_token = str(next_page_token)
        query_params["nextPageNumber"] = scanner.next_page_token
        messaging.trigger_get_event(urls.GITHUB_SCAN_ENTITIES, auth_token, query_params, "github")
    else:
        scanner.next_page_token = ""

    entities_list = response["payload"]
    fetched_entities_count = len(entities_list)

    # An empty page means this scanner is done.
    in_progress = 0 if fetched_entities_count < 1 else 1
    db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.total_count: DatasourceScanners.total_count + fetched_entities_count, 
            DatasourceScanners.query_status: DatasourceScanners.query_status + 1})

    if in_progress == 0:
        db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: in_progress})
        db_connection().commit()
        messaging.trigger_post_event(urls.GITHUB_SCAN_UPDATE, auth_token, query_params, {}, "github")
        return

    # Bump the datasource-level metric column for this scanner type, if any.
    datasource_metric_column = get_datasource_column(scanner.scanner_type)
    if datasource_metric_column:
        db_session.query(DataSource).filter(DataSource.datasource_id == datasource_id). \
                update({datasource_metric_column: datasource_metric_column + fetched_entities_count})
    db_connection().commit()
    sent_member_count = 0
    # Processors may dictate a batch size; default to one batch for all entities.
    batch_size = response["batchSize"] if "batchSize" in response else fetched_entities_count
    while sent_member_count < fetched_entities_count:
        scanner_data = {}
        scanner_data["entities"] = entities_list[sent_member_count:sent_member_count + batch_size]
        #If this is the last set of users, in the process call, send the next page number as empty
        if fetched_entities_count - sent_member_count <= batch_size and not scanner.next_page_token:
            query_params["nextPageNumber"] = ""
        messaging.trigger_post_event(urls.GITHUB_SCAN_ENTITIES, auth_token, query_params, scanner_data, "github")
        sent_member_count += batch_size
Exemplo n.º 9
0
def start_scan(auth_token, datasource_id, domain_id, user_email):
    """Kick off the GSuite scan by starting USERS and GROUPS scanners."""
    session = db_connection().get_session()
    for scan_kind in (gsuite_constants.ScannerTypes.USERS.value,
                      gsuite_constants.ScannerTypes.GROUPS.value):
        entry = DatasourceScanners()
        entry.datasource_id = datasource_id
        entry.scanner_type = scan_kind
        entry.channel_id = str(uuid.uuid4())
        entry.user_email = user_email
        entry.started_at = datetime.utcnow()
        entry.in_progress = 1
        session.add(entry)
        db_connection().commit()
        params = {"dataSourceId": datasource_id,
                  "domainId": domain_id,
                  "scannerId": entry.id}
        messaging.trigger_get_event(urls.SCAN_GSUITE_ENTITIES, auth_token,
                                    params, "gsuite")
Exemplo n.º 10
0
def request_scanner_data(auth_token, query_params):
    """Fetch one page of GSuite scan data for a scanner and process it.

    Queries the scanner processor, records progress counters, and either
    finalizes the scan (update_scan) when an empty page comes back, or
    processes the fetched entities inline and re-triggers itself while a
    next page token exists.
    """
    datasource_id = query_params["dataSourceId"]
    scanner_id = query_params["scannerId"]
    
    db_session = db_connection().get_session()
    scanner = db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)).first()
    if not scanner:
        return
    
    response = None
    try:
        response = get_scanner_processor(scanner.scanner_type).query(auth_token, query_params, scanner)
    except Exception as ex:
        # On a query failure, flag the scanner as not in progress so the
        # scan does not hang forever, then bail out.
        Logger().exception("Exception occurred while querying scan data for - {} - {} ".format(query_params, ex))
        db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: 0})
        db_connection().commit()
        return

    # Persist the next page token (empty string when this was the last page).
    next_page_token = response["nextPageNumber"] if "nextPageNumber" in response else ""
    scanner.page_token = str(next_page_token)
    query_params["nextPageNumber"] = next_page_token

    entities_list = response["payload"]
    fetched_entities_count = len(entities_list)

    # An empty page means this scanner is done.
    in_progress = 0 if fetched_entities_count < 1 else 1
    db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.total_count: DatasourceScanners.total_count + fetched_entities_count, 
            DatasourceScanners.query_status: DatasourceScanners.query_status + 1})
    
    if in_progress == 0:
        db_session.query(DatasourceScanners).filter(and_(DatasourceScanners.datasource_id == datasource_id, DatasourceScanners.id == scanner_id)). \
            update({DatasourceScanners.in_progress: in_progress})
        update_scan(auth_token, datasource_id, db_session)
        db_connection().commit()
        #messaging.trigger_post_event(urls.SCAN_GSUITE_UPDATE, auth_token, query_params, {}, "gsuite")
        return
    
    db_connection().commit()
    
    # Process this page synchronously, then chase the next page if present.
    process_scanner_data(db_session, scanner, auth_token, query_params, {"entities": entities_list})
    if next_page_token:
        messaging.trigger_get_event(urls.SCAN_GSUITE_ENTITIES, auth_token, query_params, "gsuite")
Exemplo n.º 11
0
def handle_channel_expiration(page_num):
    """Renew GDrive push-notification subscriptions that are near expiry.

    Walks PushNotificationsSubscription rows 50 at a time. Rows that are
    valid for more than six more hours, or that are already mid-renewal,
    are skipped; the rest get a fresh channel id and are re-subscribed.
    Re-triggers itself with the next page number while full pages remain.
    """
    page_num = page_num if page_num else 0
    db_session = db_connection().get_session()
    subscription_list = db_session.query(PushNotificationsSubscription).offset(
        page_num * 50).limit(50).all()
    for row in subscription_list:
        access_time = datetime.datetime.utcnow()

        #If the subscription is not yet expired and expiry is more than 6 hours, dont resubscribe
        #It will happen in the next 6 hourly check
        if row.expire_at and row.expire_at > access_time and row.expire_at > (
                access_time + timedelta(seconds=21600)):
            continue

        #If the subscription is not yet expired and is going to expire in next 6 hours, then first unsubscribe
        # if row.expire_at and row.expire_at > access_time and row.expire_at < (access_time + timedelta(seconds=21600)):
        #     unsubscribe_subscription(row)

        #If subscription is in progress, then dont renew it in this cycle
        if row.in_progress:
            continue

        if row.notification_type == constants.GSuiteNotificationType.DRIVE_CHANGE.value:
            auth_token = None
            # "SVC" appears to mark service-account subscriptions that need no
            # user auth token — TODO confirm against the subscription writer.
            if not row.drive_root_id == "SVC":
                user = db_session.query(LoginUser).filter(
                    and_(LoginUser.domain_id == row.domain_id,
                         LoginUser.email == row.user_email)).first()
                auth_token = user.auth_token
            row.channel_id = str(uuid.uuid4())
            _subscribe_for_drive_change(db_session, auth_token, row, False)
        else:
            row.channel_id = str(uuid.uuid4())
            _subscribe_for_activity(db_session, row, False)

        db_connection().commit()

    #If there are more subscriptions, call the api again with next page number
    if len(subscription_list) == 50:
        messaging.trigger_get_event(urls.HANDLE_GDRIVE_CHANNEL_EXPIRATION_PATH,
                                    constants.INTERNAL_SECRET,
                                    {"page_num": page_num + 1}, "gsuite")

    return "Subscription renewal completed"
Exemplo n.º 12
0
def receive_notifications(payload):
    """Handle a Slack Events API callback.

    Answers the url_verification handshake, dispatches event_callback
    payloads to the matching activity handler, and refreshes access logs
    for the team's datasource at most once every six hours.
    """
    callback_type = payload["type"]
    if callback_type == "url_verification":
        # Slack endpoint verification: echo the challenge token back.
        challenge_token = payload["challenge"]
        return {"challenge": challenge_token}

    elif callback_type == "event_callback":
        event_type = payload["event"]["type"]
        handler = get_handler(event_type)
        handler.process_activity(payload)

    team_id = payload.get("team_id")
    if team_id:
        db_session = db_connection().get_session()
        slack_datasource = db_session.query(DataSource).filter(
            and_(
                DataSource.source_id == team_id, DataSource.datasource_type ==
                constants.ConnectorTypes.SLACK.value)).first()

        if slack_datasource:
            datasource_scanner = db_session.query(DatasourceScanners).filter(
                and_(
                    DatasourceScanners.scanner_type ==
                    slack_constants.ScannerTypes.USERS.value,
                    DatasourceScanners.datasource_id ==
                    slack_datasource.datasource_id)).first()
            # Guard: the users scanner row may not exist yet; previously this
            # raised AttributeError on a missing row (and TypeError when
            # updated_at was NULL).
            if datasource_scanner:
                last_updated_at = datasource_scanner.updated_at
                six_hours_ago = datetime.datetime.utcnow() - datetime.timedelta(
                    hours=6)
                if last_updated_at is None or last_updated_at < six_hours_ago:
                    datasource_scanner.updated_at = datetime.datetime.utcnow()
                    query_param = {
                        'datasource_id': datasource_scanner.datasource_id
                    }
                    messaging.trigger_get_event(
                        urls.SLACK_ACCESSLOGS, constants.INTERNAL_SECRET,
                        query_param, constants.ConnectorTypes.SLACK.value)

            db_connection().commit()
Exemplo n.º 13
0
def create_datasource(auth_token, payload):
    """Create a GSuite datasource for the logged-in user and start the scan.

    For dummy datasources, seeds demo data instead of scanning. Raises when
    a non-admin attempts to create a datasource while service-account mode
    is enabled. Returns the created DataSource, or None if the auth token
    does not match a login user.
    """
    datasource_id = str(uuid.uuid4())
    db_session = db_connection().get_session()

    existing_user = db_session.query(LoginUser).filter(
        LoginUser.auth_token == auth_token).first()
    if existing_user:
        datasource = DataSource()
        datasource.is_push_notifications_enabled = 0
        datasource.domain_id = existing_user.domain_id
        datasource.datasource_id = datasource_id
        datasource.is_dummy_datasource = True if payload.get(
            "isDummyDatasource") else False

        datasource.display_name = datasource.domain_id
        # we are fixing the datasoure type this can be obtained from the frontend
        datasource.datasource_type = "GSUITE"
        datasource.creation_time = datetime.datetime.utcnow()
        if datasource.is_dummy_datasource:
            datasource.is_serviceaccount_enabled = False
        else:
            datasource.is_serviceaccount_enabled = existing_user.is_serviceaccount_enabled

        # admin_response appears to be an error message when the user is NOT
        # an admin, and falsy when they are — TODO confirm against gutils.
        admin_response = gutils.check_if_user_isadmin(
            auth_token, existing_user.email, db_session)
        is_admin_user = False

        #If service account is enabled, non admin cannot create a data source
        if(datasource.is_serviceaccount_enabled and admin_response):
            raise Exception(
                 admin_response + " Action not allowed.")
        if not admin_response:
            is_admin_user = True

        if not is_admin_user:
            # Non-admins cannot scan users/groups; mark those as already done.
            datasource.user_scan_status = 1
            datasource.group_scan_status = 1

        if is_admin_user and not datasource.is_serviceaccount_enabled:
            # Since it is an admin user, update the domain name in domain table to strip off the full email
            domain_name = utils.get_domain_name_from_email(existing_user.email)
            db_session.query(Domain).filter(Domain.domain_id == existing_user.domain_id).update(
                {"domain_name": domain_name})

        db_session.add(datasource)
        db_connection().commit()
        if datasource.is_dummy_datasource:
            create_dummy_datasource(
                db_session, existing_user.domain_id, datasource_id)
        else:
            Logger().info("Starting the scan")
            query_params = { "domainId": datasource.domain_id,
                            "dataSourceId": datasource.datasource_id,
                            "userEmail": existing_user.email
                           }
            messaging.trigger_get_event(urls.SCAN_GSUITE_UPDATE, auth_token, query_params, "gsuite")

        return datasource
    else:
        return None
Exemplo n.º 14
0
def process(db_session, auth_token, query_params, scanner_data):
    """Persist a batch of GSuite directory groups and start MEMBERS scans.

    Bulk-inserts one DomainUser row per group plus one MEMBERS scanner per
    group, then re-queries the scanners by their freshly generated channel
    ids (to learn their DB-assigned ids) and triggers a members scan for
    each. Returns the number of groups processed, or 0 on failure.
    """
    domain_id = query_params["domainId"]
    datasource_id = query_params["dataSourceId"]
    groups_db_insert_data_dic = []
    group_email_list = []
    scanners_list = []
    scanner_channel_ids = []
    group_count = 0
    now = datetime.datetime.utcnow()
    for group_data in scanner_data["entities"]:
        group_count = group_count + 1
        group = {}
        group["datasource_id"] = datasource_id
        group["user_id"] = group_data["id"]
        groupemail = group_data["email"]
        group["email"] = groupemail
        group["full_name"] = group_data["name"]
        group["description"] = group_data.get('description')
        group_aliases = group_data.get('aliases')
        if group_aliases:
            group["aliases"] = ",".join(group_aliases)
        group["member_type"] = constants.EntityExposureType.INTERNAL.value
        group["type"] = constants.DirectoryEntityType.GROUP.value
        group_email_list.append(groupemail)
        groups_db_insert_data_dic.append(group)

        # The channel id doubles as a lookup key to recover the scanner's
        # DB-assigned id after the bulk insert below.
        channel_id = str(uuid.uuid4())
        scanner = {}
        scanner["datasource_id"] = datasource_id
        scanner["scanner_type"] = gsuite_constants.ScannerTypes.MEMBERS.value
        scanner["channel_id"] = channel_id
        scanner["user_email"] = groupemail
        scanner["started_at"] = now
        scanner["in_progress"] = 1
        scanners_list.append(scanner)
        scanner_channel_ids.append(channel_id)

    try:
        db_session.bulk_insert_mappings(models.DomainUser,
                                        groups_db_insert_data_dic)
        db_session.bulk_insert_mappings(models.DatasourceScanners,
                                        scanners_list)
        db_connection().commit()

        for scanner in db_session.query(DatasourceScanners).filter(
                and_(DatasourceScanners.datasource_id == datasource_id,
                     DatasourceScanners.channel_id.in_(
                         scanner_channel_ids))).all():
            query_params = {
                'domainId': domain_id,
                'dataSourceId': datasource_id,
                'scannerId': str(scanner.id),
                'groupEmail': scanner.user_email
            }
            messaging.trigger_get_event(urls.SCAN_GSUITE_ENTITIES, auth_token,
                                        query_params, "gsuite")

        return group_count
    except Exception as ex:
        Logger().exception(
            "Exception occurred while processing google directory groups for domain_id: {} - {}"
            .format(domain_id, ex))
        db_session.rollback()
        return 0
Exemplo n.º 15
0
def process(db_session, auth_token, query_params, scanner_data):
    """Persist a batch of GSuite directory users and start per-user scans.

    Bulk-inserts one DomainUser row per user plus FILES and APPS scanners
    per user, then re-queries the scanners by their freshly generated
    channel ids (to learn their DB-assigned ids) and triggers a scan for
    each. Returns the number of users processed, or 0 on failure.
    """
    domain_id = query_params["domainId"]
    datasource_id = query_params["dataSourceId"]

    user_db_insert_data_dic = []
    # NOTE: a new session was previously created here, shadowing the
    # db_session parameter; use the caller's session like the sibling
    # groups processor does.
    scanners_list = []
    scanner_channel_ids = []
    user_count = 0
    now = datetime.datetime.utcnow()
    for user_data in scanner_data["entities"]:
        user_count = user_count + 1
        user_email = user_data.get("primaryEmail")
        names = user_data["name"]
        user = {}
        user["datasource_id"] = datasource_id
        user["email"] = user_email
        user["first_name"] = names.get("givenName")
        user["last_name"] = names.get("familyName")
        user["full_name"] = names.get("fullName")
        user["is_admin"] = user_data.get("isAdmin")
        # Drop the trailing "Z" so the timestamp parses as naive UTC.
        user["creation_time"] = user_data["creationTime"][:-1]
        user["is_suspended"] = user_data.get("suspended")
        user["user_id"] = user_data["id"]
        user["photo_url"] = user_data.get("thumbnailPhotoUrl")
        aliases = user_data.get("aliases")
        user["customer_id"] = user_data.get("customerId")
        user["type"] = constants.DirectoryEntityType.USER.value
        user["last_login_time"] = user_data["lastLoginTime"][:-1]
        if aliases:
            user["aliases"] = ",".join(aliases)
        user["member_type"] = constants.EntityExposureType.INTERNAL.value
        user_db_insert_data_dic.append(user)

        # The channel id doubles as a lookup key to recover scanner ids
        # after the bulk insert below (shared by FILES and APPS scanners).
        channel_id = str(uuid.uuid4())
        file_scanner = {}
        file_scanner["datasource_id"] = datasource_id
        file_scanner[
            "scanner_type"] = gsuite_constants.ScannerTypes.FILES.value
        file_scanner["channel_id"] = channel_id
        file_scanner["user_email"] = user_email
        file_scanner["started_at"] = now
        file_scanner["in_progress"] = 1
        scanners_list.append(file_scanner)

        app_scanner = {}
        app_scanner["datasource_id"] = datasource_id
        app_scanner["scanner_type"] = gsuite_constants.ScannerTypes.APPS.value
        app_scanner["channel_id"] = channel_id
        app_scanner["user_email"] = user_email
        app_scanner["started_at"] = now
        app_scanner["in_progress"] = 1
        scanners_list.append(app_scanner)
        scanner_channel_ids.append(channel_id)

    try:
        db_session.bulk_insert_mappings(models.DomainUser,
                                        user_db_insert_data_dic)
        db_session.bulk_insert_mappings(models.DatasourceScanners,
                                        scanners_list)
        db_connection().commit()

        for scanner in db_session.query(DatasourceScanners).filter(
                and_(DatasourceScanners.datasource_id == datasource_id,
                     DatasourceScanners.channel_id.in_(
                         scanner_channel_ids))).all():
            Logger().info("AdyaUserScan - Starting for user - {}".format(
                scanner.user_email))
            file_query_params = {
                'domainId': domain_id,
                'dataSourceId': datasource_id,
                'scannerId': str(scanner.id),
                'userEmail': scanner.user_email,
                'ownerEmail': scanner.user_email
            }
            messaging.trigger_get_event(urls.SCAN_GSUITE_ENTITIES, auth_token,
                                        file_query_params, "gsuite")
            # Fix: log the scanner's user, not the stale loop variable from
            # the insert loop above (which always held the last user's email).
            Logger().info(
                "AdyaUserScan - Triggerred the files scanner for user - {}".
                format(scanner.user_email))

        return user_count

    except Exception as ex:
        Logger().exception(
            "Exception occurred while processing google directory users for domain_id: {} - {} "
            .format(domain_id, ex))
        db_session.rollback()
        return 0
Exemplo n.º 16
0
def process(db_session, auth_token, query_params, scanner_data):
    """Persist a batch of GitHub repositories and start collaborator scans.

    Bulk-inserts one Resource row per repository, then creates a
    REP_COLLABORATORS scanner per repository and triggers its scan.
    Returns the number of repositories processed (counted even if the
    subsequent insert fails and is rolled back).
    """
    #Process the repositories and organisations and initiate members scan here
    all_repos = scanner_data["entities"]
    datasource_id = query_params["dataSourceId"]
    domain_id = query_params["domainId"]

    #Update the Resource table and ResourcePermission table with the repository entries
    repo_list = []
    processed_repo_count = 0

    for repo in all_repos:

        owner_email = ''
        repo_owner_obj = repo["owner"]
        # GitHub may hide the owner's email; fall back to a synthetic one.
        if "email" in repo_owner_obj:
            owner_email = repo_owner_obj["email"]
        else:
            owner_email = github_utils.get_default_github_email(
                repo_owner_obj["id"], repo_owner_obj["login"])
        repo_dict = {}
        repo_dict["datasource_id"] = datasource_id
        repo_dict["resource_id"] = repo["id"]
        repo_dict["resource_name"] = repo["full_name"]
        repo_dict["resource_size"] = repo["size"]
        repo_dict["last_modified_time"] = datetime.datetime.strptime(
            repo["pushed_at"], "%Y-%m-%dT%H:%M:%SZ")
        repo_dict["creation_time"] = datetime.datetime.strptime(
            repo["created_at"], "%Y-%m-%dT%H:%M:%SZ")
        repo_dict["description"] = repo["description"]
        #TODO: If parent is available, then store parent also
        repo_dict["parent_id"] = repo["parent"]["id"] if repo["fork"] else None
        repo_dict["resource_owner_id"] = owner_email
        # Private repos are domain-exposed; public repos are public.
        repo_dict[
            "exposure_type"] = constants.EntityExposureType.DOMAIN.value if repo[
                "private"] else constants.EntityExposureType.PUBLIC.value
        repo_dict["resource_type"] = "repository"
        repo_list.append(repo_dict)
        processed_repo_count = processed_repo_count + 1

    try:
        if len(repo_list) > 0:
            db_session.bulk_insert_mappings(Resource, repo_list)
            db_connection().commit()

            for repo in repo_list:
                #Starting a scanner for each repository
                scanner = DatasourceScanners()
                scanner.datasource_id = datasource_id
                scanner.scanner_type = github_constants.ScannerTypes.REP_COLLABORATORS.value
                scanner.channel_id = str(uuid.uuid4())
                scanner.user_email = repo["resource_owner_id"]
                scanner.started_at = datetime.datetime.utcnow()
                scanner.in_progress = 1
                db_session.add(scanner)
                db_connection().commit()
                query_params = {
                    "dataSourceId": datasource_id,
                    "domainId": domain_id,
                    "repo_name": repo["resource_name"],
                    "scannerId": scanner.id,
                    "repo_id": repo["resource_id"]
                }
                messaging.trigger_get_event(urls.GITHUB_SCAN_ENTITIES,
                                            auth_token, query_params, "github")
    except Exception as ex:
        Logger().exception(
            "Exception occurred while processing repositories with exception - {}"
            .format(ex))
        db_session.rollback()
    return processed_repo_count
Exemplo n.º 17
0
def process(db_session, auth_token, query_params, scanner_data):
    """Persist a batch of GitHub organizations and start ORG_MEMBERS scans.

    Bulk-inserts one DomainUser row per organization, then creates an
    ORG_MEMBERS scanner per organization and triggers its scan. Returns
    the number of organizations processed.
    """
    datasource_id = query_params["dataSourceId"]
    domain_id = query_params["domainId"]
    all_orgs = scanner_data["entities"]
    new_orgs_list = []
    processed_org_count = 0
    for org in all_orgs:
        org_info = {}
        org_info["datasource_id"] = datasource_id
        org_info["full_name"] = org['name']
        name_split = org_info["full_name"].split(" ")
        if len(name_split) > 1:
            org_info["first_name"] = name_split[0]
            org_info["last_name"] = name_split[1]
        else:
            org_info["first_name"] = name_split[0]
            org_info["last_name"] = ''
        # GitHub may hide the org's email; fall back to a synthetic one.
        org_info["email"] = org["email"] if org[
            "email"] else github_utils.get_default_github_email(
                org["id"], org["login"])
        org_info["description"] = org["description"]
        org_info["type"] = constants.DirectoryEntityType.ORGANIZATION.value
        org_info["creation_time"] = datetime.datetime.strptime(
            org["created_at"], "%Y-%m-%dT%H:%M:%SZ")
        org_info["last_updated"] = datetime.datetime.strptime(
            org["updated_at"], "%Y-%m-%dT%H:%M:%SZ")
        org_info["user_id"] = org["id"]
        org_info["member_type"] = constants.EntityExposureType.INTERNAL.value

        new_orgs_list.append(org_info)
        processed_org_count = processed_org_count + 1

    try:
        if len(all_orgs) > 0:
            db_session.bulk_insert_mappings(DomainUser, new_orgs_list)
            db_connection().commit()

            # Fix: pair each org with its own mapped record — the previous
            # code read `org_info` left over from the loop above, so every
            # scanner got the LAST organization's email.
            for org, org_info in zip(all_orgs, new_orgs_list):
                #Starting a scanner for each organization
                scanner = DatasourceScanners()
                scanner.datasource_id = datasource_id
                scanner.scanner_type = github_constants.ScannerTypes.ORG_MEMBERS.value
                scanner.channel_id = str(uuid.uuid4())
                scanner.user_email = org_info["email"]
                scanner.started_at = datetime.datetime.utcnow()
                scanner.in_progress = 1
                db_session.add(scanner)
                db_connection().commit()
                query_params = {
                    "dataSourceId": datasource_id,
                    "domainId": domain_id,
                    "org_name": org["login"],
                    "scannerId": scanner.id
                }
                messaging.trigger_get_event(urls.GITHUB_SCAN_ENTITIES,
                                            auth_token, query_params, "github")

    except Exception as ex:
        Logger().exception(
            "Exception occurred while processing orgs with exception - {}".
            format(ex))
        db_session.rollback()

    return processed_org_count