Example #1
File: steps.py Project: kovariktomas/sner4
def discover_ipv6_enum(_, interval, queue):
    """enqueues ranged derived from storage registered ipv6 addresses"""

    if not should_run('discover_ipv6_enum', timeparse(interval)):
        return

    queue = Queue.query.filter(Queue.name == queue).one()
    targets = set()
    query = Host.query.filter(func.family(Host.address) == 6).order_by(Host.address)
    for host in query.all():
        exploded = IPv6Address(host.address).exploded
        # do not enumerate EUI-64 hosts/nets
        if exploded[27:32] == 'ff:fe':
            continue

        exploded = exploded.split(':')
        exploded[-1] = '0-ffff'
        target = ':'.join(exploded)

        targets.add(target)

    targets = filter_already_queued(queue, targets)
    queue_enqueue(queue, targets)

    if targets:
        current_app.logger.info(f'discover_ipv6_enum, queued {len(targets)} items')
    update_lastrun('discover_ipv6_enum')
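A minimal standalone sketch of the two checks above (EUI-64 skip and ranged-target derivation), using only the stdlib ipaddress module and pytimeparse; the addresses are made up:
from ipaddress import IPv6Address

from pytimeparse import timeparse

# interval strings are converted to seconds before the backoff check
assert timeparse('12h') == 43200

# EUI-64 derived addresses carry 'ff:fe' in the middle of the interface identifier
exploded = IPv6Address('2001:db8::211:22ff:fe33:4455').exploded
assert exploded[27:32] == 'ff:fe'   # would be skipped above

# any other host yields a '0-ffff' ranged target covering its last hextet
exploded = IPv6Address('2001:db8::10').exploded.split(':')
exploded[-1] = '0-ffff'
print(':'.join(exploded))           # 2001:0db8:0000:0000:0000:0000:0000:0-ffff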
Example #2
File: steps.py Project: kovariktomas/sner4
def rescan_services(_, interval, queue):
    """rescan services from storage; update known services info"""

    qref = Queue.query.filter(Queue.name == queue).one()

    now = datetime.utcnow()
    rescan_horizont = now - timedelta(seconds=timeparse(interval))
    query = Service.query.filter(or_(Service.rescan_time < rescan_horizont, Service.rescan_time == None))  # noqa: E501,E711  pylint: disable=singleton-comparison

    rescan, ids = [], []
    for service in windowed_query(query, Service.id):
        item = f'{service.proto}://{format_host_address(service.host.address)}:{service.port}'
        rescan.append(item)
        ids.append(service.id)
    # orm is bypassed for performance reasons in case of large rescans
    update_statement = Service.__table__.update().where(Service.id.in_(ids)).values(rescan_time=now)
    db.session.execute(update_statement)
    db.session.commit()
    db.session.expire_all()

    rescan = filter_already_queued(qref, rescan)
    queue_enqueue(qref, rescan)

    if rescan:
        current_app.logger.info(f'rescan_services, rescan {len(rescan)} items')
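The rescan-horizon computation in isolation; '1d' is just an illustrative interval value:
from datetime import datetime, timedelta

from pytimeparse import timeparse

now = datetime.utcnow()
rescan_horizont = now - timedelta(seconds=timeparse('1d'))
# services with rescan_time older than this horizon (or never scanned) get requeued
print(now - rescan_horizont)  # 1 day, 0:00:00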
Example #3
File: steps.py Project: kovariktomas/sner4
def discover_ipv6_dns(_, interval, netranges, queue):
    """enqueues all netranges into dns discovery queue"""

    if not should_run('discover_ipv6_dns', timeparse(interval)):
        return

    queue = Queue.query.filter(Queue.name == queue).one()
    count = 0
    for netrange in netranges:
        targets = filter_already_queued(queue, enumerate_network(netrange))
        count += len(targets)
        queue_enqueue(queue, targets)

    if count:
        current_app.logger.info(f'discover_ipv6_dns, queued {count} items')
    update_lastrun('discover_ipv6_dns')
Example #4
def parse_config():
    """Parse the environ and the config file to set options in globals"""
    with open("config.yml", "r") as config_fd:
        config_file = yaml_load(config_fd)
    config_cast = {
        "LOG_LEVEL": lambda value: getattr(logging, value),
        "WEBAPI_PORT": int,
        "JWT_EXPIRATION_TIME": lambda value: timedelta(seconds=timeparse(value)),
        "TOKEN_EXPIRATION_TIME": timeparse,
        "USE_SSL": parse_bool,
        "REDIS_PORT": int,
        "REDIS_DB": int,
        "REDIS_PASSWORD": lambda value: None if value is None else value,
        "POSTGRES_PORT": int,
        "POSTGRES_PASSWORD": lambda value: None if value is None else value
    }

    for key, value in config_file.items():
        globals()[key] = config_cast.get(key, str)(environ.get(key, value))
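How one option flows through the cast table, shown in isolation; the key and values here are hypothetical and the dict mirrors config_cast above:
from datetime import timedelta
from os import environ

from pytimeparse import timeparse

config_cast = {
    "JWT_EXPIRATION_TIME": lambda value: timedelta(seconds=timeparse(value)),
    "WEBAPI_PORT": int,
}

file_value = "15m"  # hypothetical value read from config.yml
# the environment overrides the file; keys without a cast fall back to str
casted = config_cast.get("JWT_EXPIRATION_TIME", str)(environ.get("JWT_EXPIRATION_TIME", file_value))
print(casted)  # 0:15:00 unless JWT_EXPIRATION_TIME is set in the environment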
Example #5
File: core.py Project: bodik/sner4
def run_interval_pipeline(config):
    """run interval pipeline"""

    name = config['name']
    interval = config['interval']
    lastrun_path = Path(current_app.config['SNER_VAR']) / f'lastrun.{name}'

    if lastrun_path.exists():
        lastrun = datetime.fromisoformat(lastrun_path.read_text())
        if (datetime.utcnow().timestamp() -
                lastrun.timestamp()) < timeparse(interval):
            return

    try:
        run_steps(config['steps'])
    except StopPipeline:
        # StopPipeline is emitted during tests to check the backoff interval
        pass

    lastrun_path.write_text(datetime.utcnow().isoformat())
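The lastrun backoff check on its own, with a hypothetical pipeline config and a temporary file standing in for SNER_VAR/lastrun.<name>:
from datetime import datetime
from pathlib import Path
from tempfile import gettempdir

from pytimeparse import timeparse

config = {'name': 'dev', 'interval': '10m', 'steps': []}  # hypothetical pipeline config
lastrun_path = Path(gettempdir()) / f"lastrun.{config['name']}"
lastrun_path.write_text(datetime.utcnow().isoformat())

lastrun = datetime.fromisoformat(lastrun_path.read_text())
elapsed = datetime.utcnow().timestamp() - lastrun.timestamp()
print(elapsed < timeparse(config['interval']))  # True -> the run would be skipped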
Example #6
def rescan_hosts(ctx, interval):
    """rescan hosts from storage; discovers new services on hosts"""

    now = datetime.utcnow()
    rescan_horizont = now - timedelta(seconds=timeparse(interval))
    query = Host.query.filter(or_(Host.rescan_time < rescan_horizont, Host.rescan_time == None))  # noqa: E711  pylint: disable=singleton-comparison

    rescan, ids = [], []
    for host in windowed_query(query, Host.id):
        rescan.append(host.address)
        ids.append(host.id)
    # orm is bypassed for performance reasons in case of large rescans
    update_statement = Host.__table__.update().where(Host.id.in_(ids)).values(rescan_time=now)
    db.session.execute(update_statement)
    db.session.commit()
    db.session.expire_all()

    ctx.data = rescan
    if rescan:
        current_app.logger.info(f'rescan_hosts, rescan {len(rescan)} items')

    return ctx
Example #7
def calc_time_from_issue_notes(proj_id, issueIID, proj_name):
    """
    Given a single ticket, read the ticket and all comments on it
    Get all the time info from an issue

    :param str projID : the project ID
    :param str issueIID : the issue ID
    :return: the JSON response as list
    :rtype: list
    """

    # build the URL endpoint and extract notes in the issue
    built_endpoint = proj_url + '/' + proj_id + '/issues/' + \
        issueIID + '/notes' + '?sort=asc&order_by=updated_at'

    notes_json_res = pull_api_response(built_endpoint)

    concat_notes = ""

    # keyed by note author; each value is a list of per-note time entries
    final_output_dict = {}

    # Time info holders
    pos_time = 0
    neg_time = 0

    # loop through each note object, extract Author and Comment Body
    for each_note in notes_json_res:
        note_body = each_note["body"]
        time_substr = 'of time spent'
        # regex patterns
        time_info_pattern = r'(^added|subtracted).*of\stime\sspent\sat\s\d+-\d+-\d+$'
        time_removed_pattern = r'(^removed\stime\sspent$)'

        # check whether the note body contains time-spent information, otherwise skip it
        if re.match(time_info_pattern, note_body):
            # extract username and date
            note_author = each_note["author"]["name"]
            date_time_created = each_note['created_at']

            # concatenate the note body with the author name
            concat_notes = (note_body) + ' ' + (note_author)

            # preprocess the date
            dt = dateutil.parser.parse(date_time_created)
            date_time_logged = '%4d-%02d-%02d' % (dt.year, dt.month, dt.day)

            # split off the leading verb ('added' / 'subtracted') and keep the time phrase
            split_at_indx = concat_notes.find(time_substr)
            verb, _, time_phrase = concat_notes[:split_at_indx - 1].partition(' ')

            # parse the time phrase using the timeparse module; returns seconds or None
            total_seconds = pytimeparse.timeparse(time_phrase)
            if total_seconds is None:
                continue

            if verb == 'added':
                pos_time = total_seconds
            else:
                neg_time = total_seconds

            final_output_dict.setdefault(note_author, []).append({
                'date': date_time_logged,
                'positivetime': pos_time,
                'negativetime': neg_time,
                'proj_name': proj_name,
                'proj_id': proj_id,
            })

        # 'removed time spent' notes are intentionally ignored: by design they do not
        # affect the time record
        if re.match(time_removed_pattern, note_body):
            pass

        pos_time, neg_time = 0, 0  # reset counters for the next note

    return final_output_dict
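The note parsing on a single made-up GitLab-style system note, independent of the API calls:
import re

import pytimeparse

note_body = 'added 1h 30m of time spent at 2021-05-04'  # made-up system note
note_author = 'Jane Doe'

time_info_pattern = r'(^added|subtracted).*of\stime\sspent\sat\s\d+-\d+-\d+$'
assert re.match(time_info_pattern, note_body)

concat_notes = note_body + ' ' + note_author
split_at_indx = concat_notes.find('of time spent')
verb, _, time_phrase = concat_notes[:split_at_indx - 1].partition(' ')
print(verb, pytimeparse.timeparse(time_phrase))  # added 5400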
Example #8
from datetime import timedelta
from logging import getLogger
from secrets import token_bytes
from uuid import uuid4

from pytimeparse import timeparse
from sanic import Blueprint
from sanic.exceptions import Forbidden, NotFound
from sanic.response import json, text
import scrypt
import jwt as jwtlib
from .. import config
from ..middlewares import authenticate, require_fields
from ..server import RDB, KVS, HTTP
from ..storage.models import ClientType, MsgQueueType
from ..exceptions import NotFoundError
from ..tools import generate_token

bp = Blueprint("auth")
logger = getLogger(__name__)
JWT_EXPIRATION_TIME = timedelta(
    seconds=timeparse(config.webapi.JWT_EXPIRATION_TIME))


@bp.route("/register", methods=["POST"])
@require_fields({"username", "email", "password"})
async def register(_req, username, email, password):
    """Create a new account"""
    userid = uuid4()
    hashed_password = scrypt.encrypt(token_bytes(64), password, maxtime=0.1)
    await RDB.create_user(userid, username, email, hashed_password)
    logger.info("Account created: %s", userid)
    return json({"userid": str(userid)})


@bp.route("/", methods=["POST"])
@require_fields({"login", "password"})
Example #9
def _parse_ignore_newer_than(self):
    """store the ignore_newer_than argument as a timedelta, if set"""
    if not self.args.ignore_newer_than:
        return
    self._ignore_newer_than = timedelta(
        seconds=timeparse(self.args.ignore_newer_than))
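A self-contained sketch of the same conversion with argparse; the --ignore-newer-than flag name is an assumption based on self.args.ignore_newer_than:
from argparse import ArgumentParser
from datetime import timedelta

from pytimeparse import timeparse

parser = ArgumentParser()
parser.add_argument('--ignore-newer-than')  # assumed flag mirroring self.args.ignore_newer_than
args = parser.parse_args(['--ignore-newer-than', '2 days'])

ignore_newer_than = timedelta(seconds=timeparse(args.ignore_newer_than))
print(ignore_newer_than)  # 2 days, 0:00:00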