Code Example #1
File: tasks.py Project: luissiqueira/SearchName
def log_delete_entity(entity):
    """
    :param entity: Uma instância de core.models.Entity
    :type entity: core.models.Entity
    """
    log = Log(entity_key=entity.number, kind=Log.DELETE)
    log.description = 'Name: %s' % entity.name
    log.save()
Code Example #2
 def process_response(self, request, response):
     if '/admin/' not in request.get_full_path():
         self.execution_time = time.time() - self.start_time
         log = Log(path=request.path,
                   method=request.method,
                   start_time=self.start_time,
                   time=self.execution_time)
         log.save()
     return response
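Note: this process_response hook reads self.start_time, so it assumes a companion process_request on the same middleware records when the request began. A minimal sketch of that assumed counterpart (the class name TimingLogMiddleware is hypothetical, not from the source project):

import time

class TimingLogMiddleware(object):
    def process_request(self, request):
        # Assumed counterpart: remember the request start time so that
        # process_response above can compute the execution time.
        self.start_time = time.time()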
Code Example #3
File: tasks.py Project: luissiqueira/SearchName
def log_update_entity(entity, old_values):
    """
    :type entity: core.models.Entity
    :type old_values: dict
    """
    log = Log(entity_key=entity.number, kind=Log.UPDATE)
    old_name = old_values['name'] if 'name' in old_values else ''
    log.description = 'Old name: %s\nNew name: %s' % (old_name, entity.name)
    log.save()
Code Example #4
File: main.py Project: iecruz/qa-bank
def logout():
    user = None
    if 'user' in session:
        user = session.pop('user')
    elif 'admin' in session:
        user = session.pop('admin')

    # Guard against a request with no active session entry,
    # which would otherwise leave `user` undefined.
    if user is not None:
        Log.insert(user_id=user['id'], action='LOGOUT').execute()
    return redirect(url_for('main.login'))
Code Example #5
File: views.py Project: kdelinx/landing
def landing_delete(request, id):
    landing = get_object_or_404(Landing, id=id)
    user = get_object_or_404(User, id=request.user.id)
    del_log = Log(user=user, log='Delete landing with domen - %s' % landing.domen)
    del_log.save()
    landing.delete()
    messages.error(request, 'Record was deleted!')
    return HttpResponseRedirect(
        reverse('landing:landing')
    )
Code Example #6
File: main.py Project: iecruz/bank-admin
def login():
    form = LoginForm(request.form)
    if form.validate_on_submit():
        user = User.get_or_none(User.email_address == form.email_address.data)
        if user and check_password_hash(user.password, form.password.data):
            Log.insert(user_id=user.id, action='LOGIN').execute()
            session['user'] = model_to_dict(user)

            flash("Welcome back, {}!".format(user.first_name))
            return redirect(request.args.get('next', url_for('main.index')))
    return render_template('main/login.html', form=form)
Code Example #7
File: views.py Project: kdelinx/landing
def landing_create(request):
    user = get_object_or_404(User, id=request.user.id)
    name_landing = request.POST.get('domen')
    form = CreateLanding(request.POST or None)
    if form.is_valid():
        form.save()
        new_log = Log(user=user, log='Added new landing with domain - %s' % name_landing)
        new_log.save()
        messages.success(request, 'New landing was added successful!')
        return HttpResponseRedirect(
            reverse('landing:landing')
        )
    else:
        print(form.errors)
    return render(request, 'core/add.html', {'form': form})
Code Example #8
    def emit(self, record):

        with db.atomic() as nested_txn:

            msg = self.format(record)
            level = self.level
            log_record = Log(message=msg, level=level)
            log_record.save()

            if date_diff > 0:

                delete_date = datetime.datetime.utcnow() - datetime.timedelta(
                    days=date_diff)
                delete_older_logs = Log.delete().where(Log.date < delete_date)
                delete_older_logs.execute()
Code Example #9
File: tasks.py Project: eol-uchile/edx-stats
 def make_row(row):
     user_id = 0 if math.isnan(
         row["context.user_id"]) else row["context.user_id"]
     try:
         log = Log(username=row["username"],
                   event_source=row["event_source"],
                   name=row["name"],
                   accept_language=row["accept_language"],
                   ip=row["ip"],
                   agent=row["agent"],
                   page=row["page"],
                   host=row["host"],
                   session=row["session"],
                   referer=row["referer"],
                   time=row["time"],
                   event=row["event"],
                   event_type=row["event_type"],
                   course_id=row["context.course_id"],
                   org_id=row["context.org_id"],
                   user_id=user_id,
                   path=row["context.path"])
         return log
     except Exception as e:
         # Log the actual exception message rather than the Exception class object.
         logger.warning(str(e))
         return None
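The row access pattern (row["username"], math.isnan on a possibly missing user id) suggests the rows come from a pandas DataFrame; a hedged usage sketch under that assumption (the DataFrame df and how it is loaded are hypothetical):

# Build Log instances from a DataFrame of raw tracking events,
# dropping rows that failed to parse.
logs = [log for log in df.apply(make_row, axis=1) if log is not None]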
Code Example #10
File: log.py Project: syegulalp/mercury
    def emit(self, record):

        with db.atomic() as nested_txn:

            msg = self.format(record)
            level = self.level
            log_record = Log(
                message=msg,
                level=level)
            log_record.save()

            if date_diff > 0:

                delete_date = datetime.datetime.utcnow() - datetime.timedelta(days=date_diff)
                delete_older_logs = Log.delete().where(
                    Log.date < delete_date)
                delete_older_logs.execute()
Code Example #11
File: system.py Project: syegulalp/mercury
def system_log():
    user = auth.is_logged_in(request)
    permission = auth.is_sys_admin(user)
    log = Log.select().order_by(Log.date.desc(), Log.id.desc())

    return listing(request, None, log,
                   'system_log', 'system_log',
                   user=user)
Code Example #12
File: views.py Project: MashroomsP/anny_blog
def log_write(request):
    user_agent = request.META.get('HTTP_USER_AGENT')
    if user_agent is not None and 'YandexMetrika' in user_agent:
        return None
    log_row = Log(
        ip = request.META.get('REMOTE_ADDR', '127.0.0.1'),
        port = int(request.META.get('REMOTE_PORT', '0')),
        method = request.META.get('REQUEST_METHOD', 'GET'),
        path = request.path,
        query_get = request.GET.__str__(),
        query_post = request.POST.__str__(),
        sessionid = request.COOKIES.get('sessionid', ''),
        http_referer = request.META.get('HTTP_REFERER', ''),
        http_user_agent = user_agent,
    )
    if request.user.is_authenticated():
        log_row.user = request.user

    log_row.save()
Code Example #13
File: main.py Project: iecruz/bank-admin
def index():
    time_deposits = TimeDeposit.select().where(
        (TimeDeposit.terminal_date <= datetime.now())
        & (TimeDeposit.deleted == False)).execute()

    for time_deposit in time_deposits:
        Account.update(account=Account.balance +
                       (time_deposit.amount * time_deposit.interest) +
                       time_deposit.amount).where(
                           Account.account_number ==
                           time_deposit.account_number).execute()

        TimeDeposit.update(deleted=True).where(
            TimeDeposit.id == time_deposit.id).execute()

    try:
        log = (Log.select().where((Log.action == 'LOGIN') & (
            Log.user_id == session['user']['id'])).order_by(
                Log.created_at.desc()).get()
               ).created_at.strftime('%d %B %Y %I:%M %p')
    except DoesNotExist:
        log = None
    return render_template('main/index.html', log=log)
Code Example #14
File: views.py Project: martinsv/velo.lv
def mailgun_webhook(request):
    if not mailgun_verify(settings.MAILGUN_ACCESS_KEY, request.POST.get('token'), request.POST.get('timestamp'), request.POST.get('signature')):
        return HttpResponse()
    event = Log.from_mailgun_request(request)
    return HttpResponse()
Code Example #15
File: system.py Project: ra2003/mercury
def system_log():
    user = auth.is_logged_in(request)
    permission = auth.is_sys_admin(user)
    log = Log.select().order_by(Log.date.desc(), Log.id.desc())

    return listing(request, None, log, 'system_log', 'system_log', user=user)
Code Example #16
    def upload_log_zip(self, request):
        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        zip_root = ''
        try:
            # Access S3 bucket via the boto3 library. Credentials stored in the env file
            s3 = boto3.resource(
                's3',
                aws_access_key_id=config('AWS_ACCESS_KEY'),
                aws_secret_access_key=config('AWS_SECRET_ACCESS_KEY'))

            # Write the request bytes to destination of 'upload.zip'
            with open('upload.zip', 'wb+') as destination:
                for chunk in request.FILES['file'].chunks():
                    destination.write(chunk)

            # Open and begin processing the uploaded files
            with ZipFile('upload.zip', 'r') as upload:

                # Extract the zip file to access the files
                upload.extractall()

                # The log files will be under a common 'root' directory
                zip_root = upload.namelist()[0]

                # Walk through the upper most directory
                for root, directories, files in os.walk(
                        os.path.join(base_dir, '../' + zip_root)):
                    for directory in directories:
                        # At this point, dir_root contains the path of zip root and directory
                        for dir_root, dirs, dir_files in os.walk(
                                os.path.join(base_dir,
                                             '../' + zip_root + directory)):
                            # Iterate through each file in the zip files
                            for dir_file in dir_files:
                                # We are only interested in processing and storing the moos, alog, and script files
                                # We want to store raw versions of these types of files in the S3 bucket

                                if '._moos' in dir_file:
                                    # Store raw file in S3
                                    # Open the file as binary data
                                    with open(
                                            os.path.join(
                                                base_dir,
                                                dir_root + '/' + dir_file),
                                            'rb') as file_data:
                                        # Place the file in the bucket
                                        s3.Bucket(
                                            'swarm-logs-bucket').put_object(
                                                Key='{}{}{}'.format(
                                                    zip_root, directory + '/',
                                                    dir_file),
                                                Body=file_data)

                                # If the file is .alog it needs to be parsed into json and stored in the db
                                if '.alog' in dir_file:

                                    # Store in S3 bucket
                                    with open(
                                            os.path.join(
                                                base_dir,
                                                dir_root + '/' + dir_file),
                                            'rb') as file_data:

                                        # Place the un-parsed file in the bucket
                                        s3.Bucket(
                                            'swarm-logs-bucket').put_object(
                                                Key='{}{}{}'.format(
                                                    zip_root, directory + '/',
                                                    dir_file),
                                                Body=file_data)

                                        # Parse into json
                                        # Web parser return json objects that contain metadata for the log and run objects
                                        # Basically only what you need to put in the database, and enough to get the files on the S3
                                        json_obj, runs_obj = parsers.web_parser(
                                            os.path.join(
                                                base_dir,
                                                dir_root + '/' + dir_file))
                                        index_json_obj = json.loads(json_obj)
                                        index_runs = json.loads(runs_obj)

                                        # Create pieces of objects to store them in the DB
                                        device_id = index_json_obj['device_id']
                                        file_path = zip_root + directory + '/' + dir_file + '.json'
                                        # print(file_path)
                                        date = index_json_obj['date']
                                        time = index_json_obj['time']

                                        # TODO specify timezone
                                        date_time = datetime.strptime(
                                            date + ' ' + time,
                                            '%d-%m-%Y %H:%M:%S')

                                        # Create the log object first, so it can be used in the run objects
                                        log_obj = Log(dateTime=date_time,
                                                      deviceID=device_id,
                                                      filePath=file_path)
                                        log_obj.save()

                                        # Iterate through the returned runs and store each in the DB
                                        for i in index_runs:
                                            run_id = i['run_id']

                                            # This is the filepath the will be on the bucket
                                            run_fp = zip_root + directory + '/' + dir_file + f'-run{run_id}.json'

                                            # Save the run data to db
                                            run_obj = Run(dateTime=date_time,
                                                          deviceID=device_id,
                                                          runID=run_id,
                                                          logID=log_obj,
                                                          filePath=run_fp)
                                            run_obj.save()

                                            run_file_path = os.path.join(
                                                base_dir,
                                                dir_root + '/' + dir_file +
                                                f'-run{run_id}.json')

                                            # Upload run json to bucket
                                            with open(run_file_path,
                                                      'rb') as run_file:
                                                s3.Bucket(
                                                    'swarm-logs-bucket'
                                                ).put_object(
                                                    Key='{}{}{}'.format(
                                                        zip_root,
                                                        directory + '/',
                                                        run_file.name.split(
                                                            '/')[-1]),
                                                    Body=run_file)

                                            # Upload the script files to the bucket
                                            if 'Narwhal' in run_file_path:
                                                run_script_path = run_file_path.replace(
                                                    '.json', '') + '.script'
                                                with open(
                                                        run_script_path,
                                                        'rb') as script_file:
                                                    s3.Bucket(
                                                        'swarm-logs-bucket'
                                                    ).put_object(
                                                        Key='{}{}{}'.format(
                                                            zip_root,
                                                            directory + '/',
                                                            script_file.name.
                                                            split('/')[-1]),
                                                        Body=script_file)
                                                    script_file.seek(0)
                                                    s3.Bucket(
                                                        'swarm-robotics-visualization'
                                                    ).put_object(
                                                        Key='scripts/{}{}{}'.
                                                        format(
                                                            zip_root,
                                                            directory + '/',
                                                            script_file.name.
                                                            split('/')[-1]),
                                                        Body=script_file)
                                    # Open and place the parsed json file in the bucket
                                    with open(
                                            os.path.join(
                                                base_dir, dir_root + '/' +
                                                dir_file + '.json'),
                                            'rb') as json_file:
                                        s3.Bucket(
                                            'swarm-logs-bucket').put_object(
                                                Key='{}{}{}'.format(
                                                    zip_root, directory + '/',
                                                    json_file.name.split('/')
                                                    [-1]),
                                                Body=json_file)
        except Exception as e:
            return Response({"Status": "Upload Failed. {}".format(e)},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        else:
            # Return the 200 response
            return Response({"Status": "Uploaded Successfully."},
                            status=status.HTTP_200_OK)
        finally:
            # Clean up the files and directories that get created
            try:
                os.remove(os.path.join(base_dir, '../upload.zip'))
            except OSError as error:
                print('Error removing upload.zip\n' + str(error))
            if zip_root != '':
                shutil.rmtree(os.path.join(base_dir, '../' + zip_root))

            # Walk the directory above to make sure the __MACOSX directory gets deleted if it is created
            for root, directories, files in os.walk(
                    os.path.join(base_dir, '../')):
                if '__MACOSX' in directories:
                    shutil.rmtree(os.path.join(base_dir, '../__MACOSX'))
                    break
Code Example #17
import logging

from settings import PRODUCT_NAME, DAYS_TO_KEEP_LOGS
import datetime

logger = logging.getLogger(PRODUCT_NAME)
logger.setLevel(logging.DEBUG)

from core.models import Log, db

log_record = Log()

try:
    date_diff = int(DAYS_TO_KEEP_LOGS)
except BaseException:
    date_diff = 0
    
class DBLogHandler(logging.Handler):
    
    def emit(self, record):
        
        with db.atomic() as nested_txn:
        
            msg = self.format(record)
            level = self.level
            log_record = Log(
                message=msg,
                level=level)
            log_record.save()            
            
            if date_diff > 0:

                # Prune log entries older than DAYS_TO_KEEP_LOGS days.
                delete_date = datetime.datetime.utcnow() - datetime.timedelta(
                    days=date_diff)
                delete_older_logs = Log.delete().where(Log.date < delete_date)
                delete_older_logs.execute()
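Once defined, a handler like this would typically be attached to the module-level logger created above; a minimal usage sketch (only names already present in the snippet, plus the standard logging API, are used):

# Route records emitted through `logger` into the Log table.
logger.addHandler(DBLogHandler())
logger.info('database log handler attached')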
Code Example #18
File: main.py Project: iecruz/bank-admin
def logout():
    Log.insert(user_id=session['user']['id'], action='LOGOUT').execute()
    session.pop('user')
    return redirect(url_for('main.login'))