Example #1
async def test_run_task_timeout(context):
    """`run_task` raises `ScriptWorkerTaskException` and kills the process
    after exceeding `task_max_timeout`.
    """
    temp_dir = os.path.join(context.config['work_dir'], "timeout")
    context.config['task_script'] = (
        sys.executable, TIMEOUT_SCRIPT, temp_dir
    )
    # With shorter timeouts we hit issues with the script not managing to
    # create all 6 files
    context.config['task_max_timeout'] = 5

    pre = arrow.utcnow().timestamp
    with pytest.raises(ScriptWorkerTaskException):
        await swtask.run_task(context, noop_to_cancellable_process)
    post = arrow.utcnow().timestamp
    # I don't love these checks, because timing issues may cause this test
    # to be flaky. However, I don't want a non-running or long-running test to pass.
    # Did this run at all?
    assert post - pre >= 5
    # Did this run too long? e.g. did it exit on its own rather than being killed?
    # If this is set too low (too close to the timeout), it may not be enough
    # time for kill_proc, kill_pid, and the `finally` block to run
    assert post - pre < 10
    # Did the script generate the expected output?
    files = {}
    for path in glob.glob(os.path.join(temp_dir, '*')):
        files[path] = (time.ctime(os.path.getmtime(path)), os.stat(path).st_size)
        print("{} {}".format(path, files[path]))
    for path in glob.glob(os.path.join(temp_dir, '*')):
        print("Checking {}...".format(path))
        assert files[path] == (time.ctime(os.path.getmtime(path)), os.stat(path).st_size)
    assert len(list(files.keys())) == 6
    # Did we clean up?
    assert context.proc is None
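TIMEOUT_SCRIPT itself is not shown above. A minimal sketch of what such a helper could look like, assuming it must create all six files well inside the 5-second task_max_timeout and then hang so that only the kill ends it (the script and file names here are made up):

# timeout_script.py -- hypothetical stand-in for TIMEOUT_SCRIPT
import os
import sys
import time

def main(temp_dir):
    os.makedirs(temp_dir, exist_ok=True)
    for i in range(6):
        # create the six files the test counts, well before the timeout fires
        with open(os.path.join(temp_dir, "file{}".format(i)), "w") as f:
            f.write(str(i))
        time.sleep(0.5)
    time.sleep(300)  # then hang, so run_task has to kill the process

if __name__ == "__main__":
    main(sys.argv[1])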
Example #2
    def post(self):
        '''make a new paste'''
        if 'paste' not in request.form:
            return json.dumps({'error': 'paste required'})
        user = None
        if 'api_key' in request.form:
            user = database.ApiKey.objects(key=request.form['api_key']).first()
            if user:
                user = user.user
            else:
                return json.dumps({'error': 'invalid api_key'})
        paste = database.Paste()
        paste.name = random_string()
        #deduplicate paste name
        while database.Paste.objects(name=paste.name).first():
            paste.name = random_string()
        paste.paste = request.form['paste']
        paste.language = request.form.get('language')  # TODO: autodetect language here
        paste.user = user
        paste.digest = sha1(paste.paste.encode('utf-8')).hexdigest()
        paste.time = arrow.utcnow().datetime
        if 'expiration' in request.form:
            #expiration needs to be a time in seconds, >0
            try:
                seconds = int(request.form['expiration'])
                if seconds < 0:
                    return json.dumps({'error': 'cannot expire in the past'})
                if seconds > 0:
                    paste.expire = arrow.utcnow().shift(seconds=+seconds)
            except ValueError:
                return json.dumps({'error': 'invalid expiration format, should be number of seconds'})

        paste.save()
        #domain is optional and not validated; to use one of the alternatives (vomitb.in, not-pasteb.in), set the domain before sending the paste
        return json.dumps({'success': 1, 'url': 'https://{domain}/{name}'.format(domain=request.form.get('domain', 'zifb.in'), name=paste.name)})
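A hedged client-side sketch of exercising this endpoint with requests; the route path is an assumption, only the form fields come from the handler above:

import requests

# hypothetical endpoint URL; only the form fields are taken from the handler
resp = requests.post("https://zifb.in/api/v1/paste", data={
    "paste": "print('hello')",
    "language": "python",
    "expiration": "3600",  # seconds from now; omit for no expiry
})
print(resp.json())  # {'success': 1, 'url': 'https://zifb.in/<name>'} on success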
Example #3
def upgrade():
    """ Add next_check and last_check columns to the projects table. """
    op.add_column(
        "projects",
        sa.Column(
            "last_check",
            sa.TIMESTAMP(timezone=True),
            default=arrow.utcnow().datetime,
            server_default=sa.func.current_timestamp(),
        ),
    )

    op.add_column(
        "projects",
        sa.Column(
            "next_check",
            sa.TIMESTAMP(timezone=True),
            default=arrow.utcnow().datetime,
            server_default=sa.func.current_timestamp(),
        ),
    )
    op.create_index(
        op.f("ix_projects_last_check"), "projects", ["last_check"], unique=False
    )
    op.create_index(
        op.f("ix_projects_next_check"), "projects", ["next_check"], unique=False
    )
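The matching downgrade() is not included above; following standard Alembic conventions it would drop the indexes and columns in reverse order (a sketch, not necessarily the project's actual migration):

def downgrade():
    """ Drop the next_check and last_check columns from the projects table. """
    op.drop_index(op.f("ix_projects_next_check"), table_name="projects")
    op.drop_index(op.f("ix_projects_last_check"), table_name="projects")
    op.drop_column("projects", "next_check")
    op.drop_column("projects", "last_check")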
Example #4
def format_between(tp1, tp2):
    t1 = arrow.get(tp1)
    t2 = arrow.get(tp2)
    now = arrow.utcnow()
    today = arrow.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)

    if t2 < now:
        return "Päättynyt"
    elif t1 < now < t2:
        left = t2 - now
        l_hours = int(left.total_seconds() / timedelta(hours=1).total_seconds())
        l_minutes = int((left.total_seconds() - timedelta(hours=l_hours).total_seconds()) / 60)
        if l_hours == 0:
            return "Menossa, aikaa jäljellä {} minuuttia".format(l_minutes)
        else:
            return "Menossa, aikaa jäljellä {} tuntia ja {} minuuttia".format(l_hours, l_minutes)

    elif now < t1 < (today + timedelta(days=3)):
        return "Alkaa {} ja päättyy {}.".format(
            format_single_helper(t1),
            format_single_helper(t2))
    else:
        return "Alkaa {} ja päättyy {}.".format(
            t1.to(settings.TIME_ZONE).format("DD.MM.YYYY klo. HH:mm", locale='fi_FI'),
            t2.to(settings.TIME_ZONE).format("DD.MM.YYYY klo. HH:mm", locale='fi_FI'))
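A hedged usage sketch (the Finnish messages mean roughly 'Ended', 'Ongoing, N hours and M minutes left', and 'Starts X and ends Y'):

# assumes format_between and its helpers are importable
now = arrow.utcnow()
print(format_between(now.shift(hours=-2), now.shift(hours=-1)))          # "Päättynyt"
print(format_between(now.shift(hours=-1), now.shift(minutes=+30)))       # ongoing, ~30 min left
print(format_between(now.shift(days=+1), now.shift(days=+1, hours=+2)))  # starts ... ends ...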
Example #5
 def get_url_with_papi_access(self, url, type, cookies=None, timeout=None, saveToDb=True):
     papiaccess = IndexerApiAccess(indexer=self.indexer, type=type, url=url, time=arrow.utcnow().datetime)
     try:
         papiaccess.username = request.authorization.username if request.authorization is not None else None
     except RuntimeError:
         #Thrown when we're searching, which runs in a thread; when downloading NFOs and the like this works
         pass
     indexerStatus = None
     try:
         time_before = arrow.utcnow()
         response = self.get(url, cookies=cookies, timeout=timeout)
         response.raise_for_status()
         time_after = arrow.utcnow()
         papiaccess.response_time = (time_after - time_before).seconds * 1000 + ((time_after - time_before).microseconds / 1000)
         papiaccess.response_successful = True
         indexerStatus = self.handle_indexer_success(saveIndexerStatus=saveToDb)
     except RequestException as e:
         self.error("Error while connecting to URL %s: %s" % (url, str(e)))
         papiaccess.error = "Connection failed: %s" % removeSensitiveData(str(e))
         response = None
         indexerStatus = self.handle_indexer_failure("Connection failed: %s" % removeSensitiveData(str(e)), saveIndexerStatus=saveToDb)
     finally:
         if saveToDb:
             papiaccess.save()
     return response, papiaccess, indexerStatus
Example #6
def test_should_get_builds_statuses(app, f_projects):
    p = f_projects[0]
    assert ProjectLogic.should_fetch_builds_statuses_for_project(get_fresh_p(p)) is False
    wp = get_fresh_p(p)
    wp.build_triggered_on = arrow.utcnow()
    app.db.session.add(wp)
    app.db.session.commit()
    assert ProjectLogic.should_fetch_builds_statuses_for_project(get_fresh_p(p)) is True

    bs = BuildStatus(
        repo_name="foobar",
        build_id="12345",
        href="http://example.com/afasdfgt",
        status="pending"
    )
    ProjectLogic.get_or_create_build_info_from_bs(p, bs)
    app.db.session.commit()
    assert ProjectLogic.should_fetch_builds_statuses_for_project(get_fresh_p(p)) is False

    wp = get_fresh_p(p)
    wp.build_triggered_on = arrow.utcnow()
    app.db.session.add(wp)
    app.db.session.commit()

    assert ProjectLogic.should_fetch_builds_statuses_for_project(get_fresh_p(p)) is True
Example #7
def test_get_projects_to_fetch_builds_status(app, f_projects):
    p = f_projects[0]
    assert len(ProjectLogic.get_projects_to_fetch_builds_status()) == 0

    wp = get_fresh_p(p)
    wp.build_triggered_on = arrow.utcnow()
    app.db.session.add(wp)
    app.db.session.commit()
    p_list = ProjectLogic.get_projects_to_fetch_builds_status()
    assert len(p_list) == 1
    assert p_list[0].id == p.id

    bs = BuildStatus(
        repo_name="foobar",
        build_id="12345",
        href="http://example.com/afasdfgt",
        status="pending"
    )
    ProjectLogic.get_or_create_build_info_from_bs(p, bs)
    app.db.session.commit()
    assert len(ProjectLogic.get_projects_to_fetch_builds_status()) == 0

    wp = get_fresh_p(p)
    wp.build_triggered_on = arrow.utcnow()
    app.db.session.add(wp)
    app.db.session.commit()
    p_list = ProjectLogic.get_projects_to_fetch_builds_status()
    assert len(p_list) == 1
    assert p_list[0].id == p.id
Example #8
def map_cis_fields(options, csr):
    """
    Map issuer options to DigiCert CIS fields/options.

    :param options:
    :param csr:
    :return:
    """
    if not options.get('validity_years'):
        if not options.get('validity_end'):
            options['validity_end'] = arrow.utcnow().shift(years=current_app.config.get('DIGICERT_DEFAULT_VALIDITY', 1))
        options['validity_years'] = determine_validity_years(options['validity_end'])
    else:
        options['validity_end'] = arrow.utcnow().shift(years=options['validity_years'])

    data = {
        "profile_name": current_app.config.get('DIGICERT_CIS_PROFILE_NAME'),
        "common_name": options['common_name'],
        "additional_dns_names": get_additional_names(options),
        "csr": csr,
        "signature_hash": signature_hash(options.get('signing_algorithm')),
        "validity": {
            "valid_to": options['validity_end'].format('YYYY-MM-DD')
        },
        "organization": {
            "name": options['organization'],
            "units": [options['organizational_unit']]
        }
    }

    return data
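determine_validity_years is referenced but not defined in this excerpt. A plausible sketch, assuming it simply counts whole years from now until validity_end (the real Lemur helper may differ):

def determine_validity_years(end_date):
    """Hypothetical helper: whole years from now until end_date, minimum 1."""
    now = arrow.utcnow()
    years = 1
    while now.shift(years=+years) < end_date:
        years += 1
    return years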
Example #9
    def _test_purge_expire(self, models):

        policy = self._get_policy(purge_interval=0.1)
        
        back_datetime = arrow.utcnow().shift(hours=-1).datetime
        models.GreylistEntry.create_entry(key='1.1.1.1', expire_time=back_datetime)
        
        green = gevent.spawn(policy.task_purge_expire, run_once=True)
        gevent.joinall([green], timeout=1.1)
        #gevent.kill(green)
        self.assertEqual(self._model_count(models.GreylistEntry), 0)
        
        back_datetime = arrow.utcnow().shift(hours=-25).datetime
        models.GreylistEntry.create_entry(key='1.1.1.1', timestamp=back_datetime)
        green = gevent.spawn(policy.task_purge_expire, run_once=True)
        gevent.joinall([green], timeout=1.1)
        #gevent.kill(green)
        self.assertEqual(self._model_count(models.GreylistEntry), 0)
        
        back_datetime = arrow.utcnow().shift(hours=-10).datetime
        #models.GreylistEntry(key='1.1.1.1', timestamp=back_datetime, protocol="").save()
        models.GreylistEntry.create_entry(key='1.1.1.1', timestamp=back_datetime)
        green = gevent.spawn(policy.task_purge_expire, run_once=True)
        gevent.joinall([green], timeout=1.1)
        #gevent.kill(green)
        self.assertEqual(self._model_count(models.GreylistEntry), 1)
Example #10
 def test_all_filters(self):
     meta = json.loads(INPUT1)
     # true case
     meta['SentTimestamp'] = str(arrow.utcnow().timestamp)
     self.assertTrue(self.filters.checkAllFilters(meta))
     # fail sending site
     meta['SendingSite'] = 'TEST3'
     self.assertFalse(self.filters.checkAllFilters(meta))
     # fail payload type
     meta = json.loads(INPUT1)
     meta['PayloadType'] = 'Report'
     self.assertFalse(self.filters.checkAllFilters(meta))
     # fail payload format
     meta = json.loads(INPUT1)
     meta['PayloadFormat'] = 'OtherPDF'
     self.assertFalse(self.filters.checkAllFilters(meta))
     # fail sensitivity
     meta = json.loads(INPUT1)
     meta['DataSensitivity'] = 'OUO'
     self.assertFalse(self.filters.checkAllFilters(meta))
     # fail restrictions
     meta = json.loads(INPUT1)
     meta['SharingRestrictions'] = 'AMBER'
     self.assertFalse(self.filters.checkAllFilters(meta))
     # fail recon policy
     meta = json.loads(INPUT1)
     meta['ReconPolicy'] = 'Touch'
     self.assertFalse(self.filters.checkAllFilters(meta))
     # fail file age
     meta = json.loads(INPUT1)
     meta['SentTimestamp'] = str(arrow.utcnow().shift(months=-2, seconds=-10).timestamp)
     self.assertFalse(self.filters.checkAllFilters(meta))
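INPUT1 is defined elsewhere in the test module. A hedged reconstruction, inferred purely from the fields this test mutates; every value is an assumption that merely has to pass the corresponding filter:

INPUT1 = json.dumps({
    "SentTimestamp": "0",            # replaced with a fresh timestamp in the true case
    "SendingSite": "TEST1",          # anything but 'TEST3'
    "PayloadType": "Alert",          # anything but 'Report'
    "PayloadFormat": "STIX",         # anything but 'OtherPDF'
    "DataSensitivity": "NONE",       # anything but 'OUO'
    "SharingRestrictions": "WHITE",  # anything but 'AMBER'
    "ReconPolicy": "DoNotTouch",     # anything but 'Touch'
})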
Example #11
    def process_tuple(self, tup):
        _user_id, _feature_id, _feature = tup.values

        # Output log to file.
        # log_bolt_feature = "\n[%s] Features %s for user %s: \n" % (arrow.utcnow(), _feature_id, _user_id) + \
        #                    "- Total duration:\t%s\n" \
        #                    "- Start time:\t%s\n" \
        #                    "- End time:\t%s\n" \
        #                    "- Most motion:\t%s\n" \
        #                    "- motion prob:\t%s\n" \
        #                    "- Most location lv1:\t%s\n" \
        #                    "- location lv1 prob:\t%s\n" \
        #                    "- Most location lv2:\t%s\n" \
        #                    "- location lv2 prob:\t%s\n" \
        #                    "- Max speed:\t%s\n" \
        #                    "- Min speed:\t%s\n" \
        #                    "- Average speed:\t%s\n" % _feature
        # log.debug(log_bolt_feature)

        temp = random.choice(self.event_list)
        temp["user"]["objectId"] = _user_id
        temp["timestamp"] = arrow.utcnow().timestamp
        temp["startTime"],temp["endTime"] = _feature[1], _feature[2]
        log.debug("hi")
        log.debug(str(arrow.utcnow()))
        log.debug(save_event(temp))
Example #12
def allSlots(): 
    
    if caching.get('all_slots', False):
        #we have a cached all_slots
        cached_time = caching.get('time_all_slots', arrow.utcnow().shift(hours=-1))
        if cached_time > arrow.utcnow().shift(minutes=-5):
            #use cache!
            print("cached all_slots")
            return caching['all_slots']
        else:
            #timing is off!
            print('timing!')
    #else
    print('uncached!')
    
    try:
        master_sheet = gc.open_by_key(MASTER_KEY).sheet1
        all_slots = master_sheet.get_all_records()
        
        #all_slots = r.json()['result']
    except Exception as e:
        return {"Error": str(e)}
    
    caching['all_slots'] = all_slots
    caching['time_all_slots'] = arrow.utcnow()
    return all_slots
Example #13
    def parse_date(self, agetd, entry):
        m = self.age_pattern.search(agetd.text)
        days = None
        hours = None
        if m:
            days = int(m.group("days1"))
            hours = int(m.group("days2")) * 2.4
        else:
            p = re.compile(r"(?P<hours>\d+) hours?")
            m = p.search(agetd.text)
            if m:
                days = 0
                hours = int(m.group("hours"))
        if hours is not None:
            pubdate = arrow.utcnow().shift(days=-days, hours=-1)  # hours because of timezone change below
            if hours > 0:
                pubdate = pubdate.shift(hours=-hours)
            pubdate = pubdate.to("+01:00")  # nzbindex server time, I guess?
            entry.epoch = pubdate.timestamp
            entry.pubdate_utc = str(pubdate)
            entry.age_days = (arrow.utcnow() - pubdate).days
            entry.age = str(entry.age_days) + "d"
            entry.age_precise = True  # Precise to 2.4 hours, should be enough for duplicate detection
            entry.pubDate = pubdate.format("ddd, DD MMM YYYY HH:mm:ss Z")

        else:
            self.error("Found no age info in %s" % str(agetd))
            raise IndexerResultParsingRowException("Unable to parse age")
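self.age_pattern is not shown. Since days2 is multiplied by 2.4 (one tenth of a day, in hours), it presumably captures a decimal age such as "5.3 days"; a hedged reconstruction:

import re

# hypothetical reconstruction of self.age_pattern: "5.3 days" -> days1=5, days2=3,
# where the fractional digit becomes hours (0.1 day == 2.4 h)
age_pattern = re.compile(r"(?P<days1>\d+)\.(?P<days2>\d) days?")
m = age_pattern.search("5.3 days")
if m:
    print(m.group("days1"), m.group("days2"))  # 5 3 -> 5 days + 7.2 hours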
Example #14
def get_records():
    import sqlite3
    from_date_str = request.args.get('from', time.strftime(
        "%Y-%m-%d 00:00"))  # Get the from date value from the URL
    to_date_str = request.args.get('to', time.strftime(
        "%Y-%m-%d %H:%M"))  # Get the to date value from the URL
    timezone = request.args.get('timezone', 'Etc/UTC')
    # This will return a string, if field range_h exists in the request
    range_h_form = request.args.get('range_h', '')
    range_h_int = "nan"  # initialise this variable with not a number

    print "REQUEST:"
    print request.args

    try:
        range_h_int = int(range_h_form)
    except:
        print "range_h_form not a number"

    print "Received from browser: %s, %s, %s, %s" % (from_date_str, to_date_str, timezone, range_h_int)

    # Validate date before sending it to the DB
    if not validate_date(from_date_str):
        from_date_str = time.strftime("%Y-%m-%d 00:00")
    if not validate_date(to_date_str):
        # Validate date before sending it to the DB
        to_date_str = time.strftime("%Y-%m-%d %H:%M")
    print('2. From: %s, to: %s, timezone: %s' % (from_date_str, to_date_str, timezone))
    # Create datetime object so that we can convert to UTC from the browser's
    # local time
    from_date_obj = datetime.datetime.strptime(from_date_str, '%Y-%m-%d %H:%M')
    to_date_obj = datetime.datetime.strptime(to_date_str, '%Y-%m-%d %H:%M')

    # If range_h is defined, we don't need the from and to times
    if isinstance(range_h_int, int):
        arrow_time_from = arrow.utcnow().shift(hours=-range_h_int)
        arrow_time_to = arrow.utcnow()
        from_date_utc = arrow_time_from.strftime("%Y-%m-%d %H:%M")
        to_date_utc = arrow_time_to.strftime("%Y-%m-%d %H:%M")
        from_date_str = arrow_time_from.to(timezone).strftime("%Y-%m-%d %H:%M")
        to_date_str = arrow_time_to.to(timezone).strftime("%Y-%m-%d %H:%M")
    else:
        # Convert datetimes to UTC so we can retrieve the appropriate records
        # from the database
        from_date_utc = arrow.get(from_date_obj, timezone).to(
            'Etc/UTC').strftime("%Y-%m-%d %H:%M")
        to_date_utc = arrow.get(to_date_obj, timezone).to(
            'Etc/UTC').strftime("%Y-%m-%d %H:%M")

    conn = sqlite3.connect('/var/www/lab_app/lab_app.db')
    curs = conn.cursor()
    curs.execute("SELECT * FROM temperatures WHERE rDateTime BETWEEN ? AND ? AND sensorID = 'Ambient'",
                 (from_date_utc.format('YYYY-MM-DD HH:mm'), to_date_utc.format('YYYY-MM-DD HH:mm')))
    temperatures = curs.fetchall()
    curs.execute("SELECT * FROM humidities WHERE rDateTime BETWEEN ? AND ? AND sensorID = 'Ambient'",
                 (from_date_utc.format('YYYY-MM-DD HH:mm'), to_date_utc.format('YYYY-MM-DD HH:mm')))
    humidities = curs.fetchall()
    conn.close()

    return [temperatures, humidities, timezone, from_date_str, to_date_str]
Example #15
def sitemap():
    pages = []
    for p in Post.published():
        pages.append({'url': url_for('.post', slug=p.slug, _external=True),
                      'last_modified': p.utc_pub_date,
                      })
    # FIXME: Be lazy with the semistatic pages and categories for the moment
    awhile_ago = arrow.utcnow().shift(days=-10)
    for cat in Category.query.all():
        pages.append({'url': url_for('.category', slug=cat.title.lower(), _external=True),
                      'last_modified': awhile_ago,
                      })
    for page in ('about', 'projects'):
        pages.append({'url': url_for('.semistatic', page=page, _external=True),
                      'last_modified': awhile_ago,
                      })
    posts = (Post.query
             .filter(Post.utc_pub_date < arrow.utcnow().datetime)
             .filter_by(status=Post.PUBLISHED).count())
    last_pub_date = Post.published().order_by(Post.utc_pub_date.desc())[0].utc_pub_date
    for archive_page in range(posts // POSTS_PER_ARCHIVE_PAGE):
        pages.append({'url': url_for('.archive', page=archive_page + 1, _external=True),
                      'last_modified': last_pub_date,
                      })
    return render_template('sitemap.xml', pages=pages)
Example #16
    def handle_indexer_failure(self, reason=None, disable_permanently=False, saveIndexerStatus=True):
        # Escalate level by 1. Set disabled-time according to level so that with increased level the time is further in the future
        try:
            indexer_status = self.indexer.status.get()
        except IndexerStatus.DoesNotExist:
            indexer_status = IndexerStatus(indexer=self.indexer)

        if indexer_status.level == 0:
            indexer_status.first_failure = arrow.utcnow()

        indexer_status.latest_failure = arrow.utcnow()
        indexer_status.reason = reason  # Overwrite the last reason if one is set, should've been logged anyway
        if disable_permanently:
            indexer_status.disabled_permanently = True
            self.info("Disabling indexer permanently until reenabled by user because the authentication failed")
        else:
            indexer_status.level = min(len(self.disable_periods) - 1, indexer_status.level + 1)
            indexer_status.disabled_until = arrow.utcnow().shift(minutes=+self.disable_periods[indexer_status.level])
            self.info(
                "Disabling indexer temporarily due to access problems. Will be reenabled %s"
                % indexer_status.disabled_until.humanize()
            )

        if saveIndexerStatus:
            self.saveIndexerStatus(indexer_status)

        return indexer_status
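To illustrate the escalation with an assumed disable_periods list (values in minutes; the real configuration may differ), each consecutive failure bumps the level and pushes disabled_until further out:

import arrow

disable_periods = [0, 15, 30, 60, 180]  # assumed values, in minutes
level = 0
for _ in range(3):
    level = min(len(disable_periods) - 1, level + 1)
    disabled_until = arrow.utcnow().shift(minutes=+disable_periods[level])
    print(level, disabled_until.humanize())  # "in 15 minutes", "in 30 minutes", ...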
Example #17
def test_publication_date(minimal_record_model, depid_pid, legacyjson_v1):
    """Test publication date."""
    for k in ['publication_date', 'embargo_date']:
        minimal_record_model[k] = arrow.utcnow().date() - timedelta(days=1)
        obj = legacyjson_v1.transform_record(
            depid_pid, minimal_record_model)['metadata']
        assert arrow.get(obj[k]).date() <= arrow.utcnow().date()
Example #18
    def post(self, catalog, name, title, summary, tags, content):
        """
        创建文章

        $input:
            catalog?str: 目录
            title?str: 标题
            name?str&optional: 名称
            summary?str&optional: 摘要
            tags:
              - str
            content?str: 内容
        $output:
            id?str: ID
        """
        if not name:
            name = title[:32]
        if not summary:
            summary = content[:160]
        id = "/".join([g.user["id"], catalog, name])
        resp = db.run(r.table("article").insert({
            "id": id,
            "author": g.user["id"],
            "catalog": catalog,
            "name": name,
            "title": title,
            "summary": summary,
            "tags": tags,
            "content": content,
            "date_create": arrow.utcnow().datetime,
            "date_modify": arrow.utcnow().datetime
        }))
        if resp["errors"]:
            abort(400, "Conflict", "创建失败: %s" % resp["first_error"])
        return {"id": id}
Example #19
 def initialize(self):
     default_vector = lambda: [
         # Length of duration
         arrow.utcnow().timestamp,
         # Start time of feature
         arrow.utcnow().timestamp,
         # End time of feature
         arrow.utcnow().timestamp,
         # Most possible motion type
         "",
         # Most possible motion prob
         -1.0,
         # Most possible location Lv1 type
         "",
         # Most possible location Lv1 prob
         -1.0,
         # Most possible location Lv2 type
         "",
         # Most possible location Lv2 prob
         -1.0,
         # Max speed
         float("inf"),
         # Min speed
         -1.0,
         # Average speed
         -1.0
     ]
     self.feature_vector = defaultdict(default_vector)
Example #20
 def refresh_user_timeline(self):
     if self._last_refreshed < arrow.utcnow().shift(seconds=-120):
         since_id = self._user_timeline[0].id if self._user_timeline else None
         statuses = self._api.user_timeline(self.id, since_id=since_id)
         statuses.extend(self._user_timeline)
         self._user_timeline = statuses
         self._last_refreshed = arrow.utcnow()
Example #21
def init_repo_data():
    import arrow
    import uuid
    session = DBSession()
    for i in range(1, 10):
        f = FileRouteModule()
        f.id = int(i)
        f.src_path = '/Users/Kevin/workspace/distributionfile/data'
        f.src_name = i
        f.src_extension = 'txt'
        f.retry_times = 0
        f.retry_interval = 0
        f.create_dt = arrow.utcnow().to('local').naive
        f.update_dt = arrow.utcnow().to('local').naive
        f.valid_from = arrow.utcnow().to('local').naive
        f.valid_to = arrow.utcnow().to('local').shift(days=+365).naive
        f.tar_path = 'tmp/'
        f.tar_name = '{file}_{YMD}'.format(file=i, YMD='{YMD}')
        f.transtype = 'SFTP'
        f.ftpname = '10.8.4.116'
        session.add(f)

    f = session.query(FileRouteModule).get(9)
    f.tar_name = '9_{YM}'

    session.commit()
    session.close()
Example #22
File: cli.py Project: fecori/Watson
def sync(watson):
    """
    Get the frames from the server and push the new ones.

    The URL of the server and the User Token must be defined via the
    `watson config` command.

    Example:

    \b
    $ watson config backend.url http://localhost:4242
    $ watson config backend.token 7e329263e329
    $ watson sync
    Received 42 frames from the server
    Pushed 23 frames to the server
    """
    last_pull = arrow.utcnow()
    pulled = watson.pull()
    click.echo("Received {} frames from the server".format(len(pulled)))

    pushed = watson.push(last_pull)
    click.echo("Pushed {} frames to the server".format(len(pushed)))

    watson.last_sync = arrow.utcnow()
    watson.save()
Example #23
def modify_workflow_json(workflow):
    tasks = {}

    for task in workflow['tasks']:
        cur_task = Viewer_Task(task)
        tasks[cur_task.name] = vars(cur_task)

    workflow['tasks'] = tasks

    submitted_time = arrow.get(workflow['submitted_time']).timestamp
    cur_time = arrow.utcnow().timestamp
    if workflow['completed_time'] == 'null':
        completed_time = arrow.utcnow().timestamp
    else:
        completed_time = arrow.get(workflow['completed_time']).timestamp

    calc = Decimal(completed_time - submitted_time)/Decimal(60.0)
    duration = max(float(calc.quantize(Decimal('0e-1'), rounding=decimal.ROUND_HALF_UP)), 0.0)
    workflow['duration'] = duration

    calc = Decimal(cur_time - submitted_time)/Decimal(60.0)
    time_before_now = max(float(calc.quantize(Decimal('0e-3'), rounding=decimal.ROUND_HALF_UP)), 0.0)
    workflow['time_before_now'] = time_before_now

    workflow['startTime'] = arrow.get(workflow['submitted_time']).to('US/Mountain').format('MM-DD-YYYY HH:mm:ss')
    workflow['modified'] = True

    return workflow
Example #24
    def test_receipts_report(self):
        self.create_tariff("default_tariff", default=True)
        customer_info = self.create_customer_by_self("*****@*****.**")
        db.session.commit()
        customer_id = customer_info["customer_id"]
        self.admin_client.customer.update_balance(customer_id, "100", "test withdraw for test mode", "RUB")

        self.admin_client.customer.update(customer_id, detailed_info={"passport_series_number": "1234 567 890",
                                                                      "passport_issued_by": "UFMS Russia",
                                                                      "passport_issued_date": "2013-01-01"})
        customer_db = Customer.get_by_id(customer_id)
        customer_db.confirm_email()

        self.admin_client.customer.make_prod(customer_id)
        self.admin_client.customer.update_balance(customer_id, "100", "test withdraw for prod mode", "RUB")

        start = utcnow().datetime - timedelta(days=30)
        end = utcnow().datetime + timedelta(hours=1)

        report = self.get_report("receipts", start, end, "csv")
        self.assertTrue(report)
        self.assertGreater(report.count(b";"), 5)
        report = [row for row in report.split(b"\r\n") if row]
        self.assertEqual(len(report), 2)  # header + balance update after prod

        self.get_report("receipts", start, end, "tsv")
Example #25
def filter_by_rule():
    from_time = request.args.get("start_time", arrow.utcnow().shift(days=-7))
    to_time = request.args.get("end_time", arrow.utcnow())
    start = int(request.args.get("from", 0))
    end = int(request.args.get("size", 100))
    start_time = int(arrow.get(from_time).float_timestamp * 1000)
    end_time = int(arrow.get(to_time).float_timestamp * 1000)
    false_positive = request.args.get("false_positive", "false")
    sort_order = ElasticSearchHelpers.create_sort(True)
    time_filter = ElasticSearchHelpers.create_timestamp_filter(start_time, end_time)
    query_filter = ElasticSearchHelpers.create_query_string_filter("false_positive:" + false_positive)
    try:
        query = ElasticSearchHelpers.create_elasticsearch_filtered_query(filtered_query=query_filter,
                                                                         timestamp_filter=time_filter,
                                                                         sort_order=sort_order)
        datastore = get_data_store()
        params = dict(from_=start)
        params["size"] = end
        params["_source"] = "check_id"
        results = datastore.search(query=query, params=params)
        rules = set()
        for result in results["hits"]["hits"]:
            rule = result["_source"]["check_id"]
            rules.add(rule)
        # sets are not JSON serializable
        response = make_response(json.dumps(list(rules)))
        response.headers["Content-Type"] = "application/json"
        return response
    except DataStoreException:
        return "Failed to retrieve commits", 500
Example #26
def test_store_elasticsearch_tokens_groups3(store, indicator):
    t = store.store.tokens.create({
        'username': '******',
        'groups': ['staff'],
        'write': True
    })

    t2 = store.store.tokens.create({
        'username': '******',
        'groups': ['staff2'],
        'read': True,
    })

    i = store.handle_indicators_create(t['token'], {
        'indicator': 'example.com',
        'group': 'staff',
        'provider': 'example.com',
        'tags': ['test'],
        'itype': 'fqdn',
        'lasttime': arrow.utcnow().datetime,
        'reporttime': arrow.utcnow().datetime

    }, flush=True)

    assert i

    i = store.handle_indicators_search(t2['token'], {'itype': 'fqdn'})
    i = json.loads(i)
    assert len(i) == 0

    i = store.handle_indicators_search(t2['token'], {'indicator': 'example.com'})
    i = json.loads(i)
    assert len(i) == 0
Example #27
    def last_metrics(cls):
        last_24_hours = arrow.utcnow().shift(hours=-24).datetime
        
        objects = cls.select().where(cls.timestamp >= last_24_hours)
        count = objects.count()
        if count == 0:
            return

        last_1_hour = arrow.utcnow().shift(hours=-1).datetime
        
        accepted = cls.select(fn.Sum(cls.accepts)).where(cls.timestamp >= last_24_hours).scalar()
        rejected = cls.select(fn.Sum(cls.rejects)).where(cls.timestamp >= last_24_hours).scalar()
        delay = cls.select(fn.Avg(cls.delay)).where(cls.timestamp >= last_24_hours, cls.accepts >= 0, cls.delay >= 0).scalar()
        
        metrics = {
            'count': count,
            'accepted': accepted or 0,
            'rejected': rejected or 0,
            'delay': delay or 0.0,
            'abandoned': objects.filter(cls.accepts==0, cls.timestamp<=last_1_hour).count(),
            #'count_accepts': objects.filter(accepts__gte=1).count(),
        }
        
        metrics['requests'] = metrics['accepted'] + metrics['rejected']
        
        return metrics
Example #28
    def validate_when(self, value):
        when = arrow.get(value).replace(year=arrow.utcnow().year)

        if when < arrow.utcnow():
            when = arrow.get(value).replace(year=arrow.utcnow().year + 1)

        return when.date()
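Standalone, the rollover rule reads like this: a month-and-day value whose date has already passed this year resolves to next year (the sample value is arbitrary):

import arrow

value = "2000-12-25"  # arbitrary sample; only the month and day matter
when = arrow.get(value).replace(year=arrow.utcnow().year)
if when < arrow.utcnow():
    when = arrow.get(value).replace(year=arrow.utcnow().year + 1)
print(when.date())  # Dec 25 of this year, or of next year once it has passed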
Example #29
 def test_expires_arrow_to_timestamp(self):
     """ Converting expiration arrow instance to timestamp """
     utc = arrow.utcnow()
     msk = arrow.utcnow().to('Europe/Moscow')
     timestamp = calendar.timegm(datetime.utcnow().utctimetuple())
     self.assertEqual(timestamp, time.expires_to_timestamp(utc))
     self.assertEqual(timestamp, time.expires_to_timestamp(msk))
Example #30
 def present_staff(self):
     """Fetch all present staff members"""
     resolutions = Resolution.query.join(Inquiry).filter(
         Resolution.resolved_at >= arrow.utcnow().shift(hours=-3),
         Inquiry.queue_id == self.id).all()
     staff = set()
     for resolution in resolutions:
         user = User.query.get(resolution.user_id)
         user.resolution = resolution
         ns = [res.resolved_at - res.created_at for res in Resolution.query.filter(
              Resolution.resolved_at >= arrow.utcnow().shift(hours=-6),
             Resolution.user_id == user.id
         )]
         if ns:
             total = ns[0]
             for n in ns[1:]:
                 total = n + total
             user.average = total/len(ns)
         else:
             user.average = 'n/a'
         current = Resolution.query.filter_by(user_id=user.id,
             resolved_at=None).first()
         user.status = 'free' if not current else 'busy'
         staff.add(user)
     return staff
Example #31
def fetch_price(country_code='NI', session=None):
    """Requests the most recent known power prices in Nicaragua grid

    Arguments:
    country_code (optional) -- ignored, only information for Nicaragua is returned
    session (optional)      -- request session passed in order to re-use an existing session

    Return:
    A list of dictionaries in the form:
    [
        {
          'countryCode': 'FR',
          'currency': 'EUR',
          'datetime': '2017-01-01T01:00:00Z',
          'price': 0.0,
          'source': 'mysource.com'
        },
        {
          'countryCode': 'FR',
          'currency': 'EUR',
          'datetime': '2017-01-01T00:00:00Z',
          'price': 0.0,
          'source': 'mysource.com'
        }
    ]
    """

    requests_obj = session or requests.session()
    response = requests_obj.get(PRICE_URL)
    response.encoding = 'utf-8'
    prices_html = response.text

    now_local_time = arrow.utcnow().to(TIMEZONE)
    midnight_local_time = arrow.utcnow().to(TIMEZONE).replace(hour=0,
                                                              minute=0,
                                                              second=0,
                                                              microsecond=0)

    hours_text = prices_html.split('<br />')

    data = []
    for hour_data in hours_text:
        if not hour_data:
            # there is usually an empty item at the end of the list, ignore it
            continue

        # hour_data is like "Hora 13:&nbsp;&nbsp;   84.72"
        hour = int(extract_text(hour_data, 'Hora ', ':'))
        price = float(extract_text(hour_data, '&nbsp;   '))

        price_date = midnight_local_time.replace(hour=hour)
        if price_date > now_local_time:
            # data for previous day is also included
            price_date = price_date.shift(days=-1)

        data.append({
            'countryCode': country_code,
            'datetime': price_date.datetime,
            'currency': 'USD',
            'price': price,
            'source': 'cndc.org.ni'
        })

    return data
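extract_text is used above but not defined in this excerpt. A plausible sketch, assuming it slices out the substring between two markers (the closing marker optional):

def extract_text(text, after, before=None):
    """Hypothetical helper: return the text following 'after', up to 'before' if given."""
    start = text.index(after) + len(after)
    if before is None:
        return text[start:].strip()
    return text[start:text.index(before, start)].strip()

# extract_text("Hora 13:&nbsp;&nbsp;   84.72", "Hora ", ":") -> "13"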
Example #32
async def clean_wf_cache(db: Database):
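    # 86500 s is one day (86400 s) plus a small grace margin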
    cutoff = arrow.utcnow().timestamp - 86500
    await db[db.db_nam].WarframeCache.delete_many({'Created': {'$lt': cutoff}})
Example #33
 def GetDayOffset(self, day):
     dow = -(int(arrow.utcnow().to("Europe/Dublin").format("d")) - 1) + day
     if int(arrow.utcnow().to("Europe/Dublin").format("d")) > 5:
         dow += 7
     day_arw = arrow.utcnow().to("Europe/Dublin").shift(days=+dow)
     return day_arw
Example #34
 async def tomorrow(self, ctx):
     """Classes for tomorrow."""
     day_arw = arrow.utcnow().to("Europe/Dublin").shift(days=+1)
     embed = self.GetDayArwSchedEmbed(day_arw)
     await ctx.send(embed=embed)
Example #35
    def get_filename(self):
        """ Returns the file name for the solves export. """

        return self.filename_template.format(
            username=self.username,
            date=arrow.utcnow().format('YYYY-MM-DD_HH-mm'))
Example #36
 def is_disabled_by_date(cls):
     is_disabled_expr = sa.sql.and_(
         cls.disabled_utc.isnot(None),
         cls.disabled_utc <= arrow.utcnow(),
     )
     return sa_sql.case([(is_disabled_expr, sa.true())], else_=sa.false())
Example #37
 def is_disabled_by_date(self):
     return self.disabled_utc is not None and self.disabled_utc <= arrow.utcnow()
Example #38
def utcnow():
    return arrow.utcnow().datetime
Example #39
def parse(expression, now=None, tz='UTC', type=None, roundDown=True):
    '''
        the main meat and potatoes of this whole thing
        takes our datemath expression and does our date math
        :param expression - the datemath expression
        :param now - what time is now; when will now be then?  soon
        :param tz - the timezone to work in, default 'UTC'
        :param type - if we are dealing with an arrow or datetime object
        :param roundDown - whether we should round up or round down.  default is roundDown=True, which means if it was 12:00:00, `/d` would be '00:00:00'; with roundDown=False, `/d` would be '23:59:59'
    '''
    if now is None:
        now = arrow.utcnow()

    if debug: print("Orig Expression: {0}".format(expression))

    math = ''
    time = ''

    if 'UTC' not in tz:
        if debug: print("will now convert tz to {0}".format(tz))
        now = now.to(tz)

    if expression == 'now':
        if debug: print("Now, no dm: {0}".format(now))
        if type:
            return getattr(now, type)
        else:
            return now
    elif re.match(r'\d{10,}', str(expression)):
        if debug: print('found an epoch timestamp')
        if len(str(expression)) == 13:
            raise DateMathException('Unable to parse epoch timestamps in millis, please convert to the nearest second to continue - i.e. 1451610061 / 1000')
        ts = arrow.get(int(expression))
        ts = ts.replace(tzinfo=tz)
        return ts
    elif expression.startswith('now'):
        ''' parse our standard "now+1d" kind of queries '''
        math = expression[3:]
        time = now
        if debug: print('now expression: {0}'.format(now))
    else:
        ''' parse out datemath with date, ex "2015-10-20||+1d"  '''
        if '||' in expression:
            timestamp, math = expression.split('||')
            time = parseTime(timestamp, tz)
        elif expression.startswith(('+', '-', '/')):
            '''
            this catches expressions that don't start with 'now' but we are assume are 'now', such as 
            '+1h', '-2/w', '/1d', '+2h-2m', etc
            '''
            math = expression
            time = now
        else:
            math = ''
            time = parseTime(expression, tz)

    if not math or math == '':
        rettime = time
    else:
        rettime = evaluate(math, time, tz, roundDown)

    if type:
        return getattr(rettime, type)
    else:
        return rettime
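A few hedged usage examples of the parser above (it returns arrow objects unless type is given):

print(parse('now-1h'))                            # one hour ago
print(parse('now+1h+5m'))                         # offsets chain left to right
print(parse('now/d'))                             # rounded down to the start of today
print(parse('2015-10-20||+1d', type='datetime'))  # anchored date plus one day, as a datetime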
Example #40
import sys
import logging
import arrow
import eventProducer

topic = 'ihr_atlas_probe_discolog'
# Start and end times of the one-day window
startTime = arrow.utcnow().replace(microsecond=0, second=0)
endTime = startTime.shift(days=1)

# Logging
FORMAT = '%(asctime)s %(processName)s %(message)s'
logging.basicConfig(format=FORMAT,
                    filename='ihr-kafka-disco-data-fetch.log',
                    level=logging.INFO,
                    datefmt='%Y-%m-%d %H:%M:%S')
logging.info("Started: %s" % sys.argv)

ep = eventProducer.EventProducer(topic)
# Get one day of live data
ep.startLive(endTime.timestamp)
Example #41
def test_close_1050():
	here = arrow.utcnow()
	there = here.shift(hours=+75)
	assert there.isoformat() == close_time(1050, 1000, here)
Example #42
 def run(self, token):
     self.start_time = arrow.utcnow().timestamp
     current_time = datetime.datetime.now().time().isoformat()
     self.log.info('Sending Client Startup Signal...')
     super().run(token)
Example #43
def test_close_0():
	here = arrow.utcnow()
	there = here.shift(hours=+1)
	print(here.isoformat())
	print(close_time(0, 600, here))
	assert there.isoformat() == close_time(0, 600, here)
Example #44
            else:
                val = float(-val)
        elif re.match('[a-zA-Z]+', char):
            now = calculate(now, val, unitMap(char))
        else:
            raise DateMathException(''''{}' is not a valid timeunit for expression: '{}' '''.format(char, expression))
        
        i += 1
    if debug: print("Fin: {0}".format(now))
    if debug: print('\n\n')
    return now



if __name__ == "__main__":
    if debug: print('NOW: {0}'.format(arrow.utcnow()))
    if debug: print('\n\n')
    #parse('now-1h')
    #parse('now+12h')
    #parse('now+1h')
    #parse('now+1h+1m')
    #parse('now+1h/d')
    #parse('now-2d/d')
    #parse('2012-01-01||+1M/d')
    #parse('now+1w/w')
    #parse('+1d/d')
    #parse('/h')
    #parse('/d')
    #parse('2014-11-18||+1M/M')
    #parse('2014-11-18||+1M/M+1h')
    #parse('2014-11-18||/w')
Example #45
def shift_by_some():
	here = arrow.utcnow()
	there = here.shift(hours=+1)
	there = there.shift(minutes=+27)
	assert there.isoformat() == shift_start_by(here.isoformat(), 1.45)
Example #46
def test_close_210():
	here = arrow.utcnow()
	there = here.shift(hours=+13)
	there = there.shift(minutes=+30)
	assert there.isoformat() == close_time(210, 200, here)
Example #47
    print("Got database")
    collection = db.dated
    print("Using sample collection")
except Exception as err:
    print("Failed")
    print(err)
    sys.exit(1)

#
# Insertions:  I commented these out after the first
# run successfully inserted them
#

record = {
    "type": "dated_memo",
    "date": arrow.utcnow().naive,
    "text": "This is a sample memo"
}
post_id = collection.insert_one(record).inserted_id

record = {
    "type": "dated_memo",
    "date": arrow.utcnow().replace(days=+1).naive,
    "text": "Sample one day later"
}
post_id = collection.insert_one(record).inserted_id

#
# Read database --- May be useful to see what is in there,
# even after you have a working 'insert' operation in the flask app,
# but they aren't very readable.  If you have more than a couple records,
Example #48
def test_open_0():
	here = arrow.utcnow().isoformat()
	assert here == open_time(0, 600, here)
Example #49
    def get_data(self, siteID, start_date, end_date, method, metrics):
        self.form_data.update({
            'body': {
                'siteID': siteID,
                'start_date': start_date,
                'end_date': end_date,
                'method': method,
                'metrics': metrics
            }
        })
        res = requests.post(self.get_data_url,
                            data=bytes(json.dumps(self.form_data),
                                       encoding='utf-8'))
        data = res.json()
        data = data["body"]["data"][0]["result"]["items"][1][0]
        return data


if __name__ == '__main__':
    username, password, token = ('xxxx', 'xxxx', 'xxxxxxxx')  #TODO
    bdtj = BDTJ(username, password, token)

    today = arrow.utcnow().to('local').format('YYYYMMDD')
    yesterday = arrow.utcnow().to('local').shift(days=-1).format('YYYYMMDD')
    method = 'overview/getTimeTrendRpt'
    metrics = 'pv_count,visitor_count,ip_count,bounce_ratio,avg_visit_time'

    for siteID in bdtj.siteIDs.values():
        data = bdtj.get_data(siteID, yesterday, today, method, metrics)
        print(data)
Example #50
def shift_by_0():
	here = arrow.utcnow()
	assert here.isoformat() == shift_start_by(here.isoformat(), 0)
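shift_start_by itself is not shown; from these tests its second argument reads as decimal hours (1.45 -> 1 h 27 min, 0 -> unchanged). A hedged reconstruction:

import arrow

def shift_start_by(iso_time, hours):
    """Hypothetical reconstruction: shift an ISO-8601 timestamp by fractional hours."""
    whole = int(hours)
    minutes = round((hours - whole) * 60)
    return arrow.get(iso_time).shift(hours=+whole, minutes=+minutes).isoformat()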
Example #51
File: flaw.py Project: rajivraj/RVD
    def markdown(self, disclose=False):
        """
        Return the markdown representation of the flaw

        Intended for generating reports, mainly PDF-based
        """
        # pylint: disable = line-too-long
        return_str = ""
        return_str += "# Vulnerability advisory: " + str(self.title) + "\n"
        return_str += "## General" + "\n"
        return_str += str(self.description) + "\n"
        return_str += "\n"
        return_str += "| Item | Value |" + "\n"
        return_str += "| ---- | ----- |" + "\n"
        return_str += "| RVD ID |" + str(self.id) + "|" + "\n"
        return_str += "| title |" + str(self.title) + "|" + "\n"
        return_str += "| type |" + str(self.type) + "|" + "\n"
        return_str += "| cwe |" + str(self.cwe) + "|" + "\n"
        return_str += "| cve |" + str(self.cve) + "|" + "\n"
        return_str += "| keywords |" + str(self.keywords) + "|" + "\n"
        return_str += "| vendor |" + str(self.vendor) + "|" + "\n"
        return_str += "| system |" + str(self.system) + "|" + "\n"

        return_str += "\n"
        # return_str += "\newpage"

        # severity
        return_str += "## Severity" + "\n"
        return_str += "\n"
        return_str += "| Item | Value |" + "\n"
        return_str += "| ---- | ----- |" + "\n"
        return_str += "| rvss-score | " + str(self.rvss_score) + " |" + "\n"
        return_str += "| rvss-vector | " + str(self.rvss_vector) + " |" + "\n"
        return_str += (
            "| severity-description | " + str(self.severity_description) + " |" + "\n"
        )
        return_str += "| cvss-score | " + str(self.cvss_score) + " |" + "\n"
        return_str += "| cvss-vector | " + str(self.cvss_vector) + " |" + "\n"

        # return_str += "\n"
        return_str += "\\newpage"

        # flaw
        return_str += "## The flaw" + "\n"
        return_str += (
            "This section describes de flaw in more detail and \
captures relevant elements of it. For full understanding of the \
taxonomy used for its categorization, refer to \
[our taxonomy](https://github.com/aliasrobotics/RVD/blob/master/docs/TAXONOMY.md)"
            + "\n"
        )
        return_str += "\n"
        return_str += "| Item | Value |" + "\n"
        return_str += "| ---- | ----- |" + "\n"
        return_str += "| phase | " + str(self.phase) + " |" + "\n"
        return_str += "| specificity | " + str(self.specificity) + " |" + "\n"
        return_str += (
            "| architectural-location | "
            + str(self.architectural_location)
            + " |"
            + "\n"
        )
        return_str += "| application | " + str(self.application) + " |" + "\n"
        return_str += "| subsystem | " + str(self.subsystem) + " |" + "\n"
        return_str += "| package | " + str(self.package) + " |" + "\n"
        return_str += "| languages | " + str(self.languages) + " |" + "\n"
        return_str += "| date-detected | " + str(self.date_detected) + " |" + "\n"
        return_str += "| detected-by | " + str(self.detected_by) + " |" + "\n"
        return_str += (
            "| detected-by-method | " + str(self.detected_by_method) + " |" + "\n"
        )
        return_str += (
            "| date-reported | "
            + str(arrow.utcnow().format("YYYY-MM-DD"))
            + " |"
            + "\n"
        )
        return_str += "| reported-by | " + str(self.reported_by) + " |" + "\n"
        return_str += (
            "| reported-by-relationship | "
            + str(self.reported_by_relationship)
            + " |"
            + "\n"
        )
        return_str += "| issue | " + str(self.issue) + " |" + "\n"
        return_str += "| links | " + str(self.links) + " |" + "\n"
        return_str += "| reproducibility | " + str(self.reproducibility) + " |" + "\n"
        return_str += "| trace | " + str(self.trace) + " |" + "\n"
        return_str += "| reproduction | " + str(self.reproduction) + " |" + "\n"
        return_str += (
            "| reproduction-image | " + str(self.reproduction_image) + " |" + "\n"
        )

        # additional_fields - flaw
        for key in self.additional_fields.keys():
            if isinstance(self.additional_fields[key], dict):
                if key == "flaw":
                    for key2 in self.additional_fields[key].keys():
                        return_str += (
                            "| "
                            + (key2)
                            + " | "
                            + str(self.additional_fields[key][key2])
                            + " | "
                            + "\n"
                        )

        return_str += "\\newpage" + "\n"

        # exploitation
        if disclose:
            return_str += "## Exploitation" + "\n"
            return_str += "\n"
            return_str += "| Item | Value |" + "\n"
            return_str += "| ---- | ----- |" + "\n"
            return_str += (
                "| description | " + str(self.description_exploitation) + "|" + "\n"
            )
            return_str += (
                "| exploitation-image | " + str(self.exploitation_image) + "|" + "\n"
            )
            return_str += (
                "| exploitation-vector | " + str(self.exploitation_vector) + "|" + "\n"
            )

            # TODO: make this prettier by separating this field from the table
            #  and into a dedicate yaml section of verbatim
            return_str += (
                "| exploitation-recipe | " + str(self.exploitation_recipe) + "|" + "\n"
            )
            # additional_fields - exploitation
            for key in self.additional_fields.keys():
                if isinstance(self.additional_fields[key], dict):
                    if key == "exploitation":
                        for key2 in self.additional_fields[key].keys():
                            return_str += (
                                "| "
                                + (key2)
                                + " | "
                                + str(self.additional_fields[key][key2])
                                + " | "
                                + "\n"
                            )
        else:
            return_str += "## Exploitation" + "\n"
            return_str += "\n"
            return_str += "| Item | Value |" + "\n"
            return_str += "| ---- | ----- |" + "\n"
            return_str += "| description | Not disclosed |" + "\n"
            return_str += "| exploitation-image | Not disclosed |" + "\n"
            return_str += "| exploitation-vector | Not disclosed |" + "\n"
            return_str += "| exploitation-recipe | Not disclosed |" + "\n"

        return_str += "\\newpage" + "\n"

        # mitigation
        if disclose:
            return_str += "## Mitigation" + "\n"
            return_str += "\n"
            return_str += "| Item | Value |" + "\n"
            return_str += "| ---- | ----- |" + "\n"
            return_str += (
                "| description | " + str(self.description_mitigation) + "|" + "\n"
            )
            return_str += "| pull-request | " + str(self.pull_request) + "|" + "\n"
            # additional_fields - mitigation
            for key in self.additional_fields.keys():
                if isinstance(self.additional_fields[key], dict):
                    if key == "mitigation":
                        for key2 in self.additional_fields[key].keys():
                            return_str += (
                                "| "
                                + (key2)
                                + " | "
                                + str(self.additional_fields[key][key2])
                                + " | "
                                + "\n"
                            )
        else:
            return_str += "## Mitigation" + "\n"
            return_str += "\n"
            return_str += "| Item | Value |" + "\n"
            return_str += "| ---- | ----- |" + "\n"
            return_str += "| description | Not disclosed |" + "\n"
            return_str += "| pull-request |  Not disclosed |" + "\n"

        return_str += "\n"

        # # additional_fields (others)
        # return_str += '## Mitigation' + "\n"
        # return_str += "\n"
        # return_str += '| Item | Value |' + "\n"
        # return_str += '| ---- | ----- |' + "\n"
        # for key in self.additional_fields.keys():
        #     if key in ['mitigation', 'exploitation', 'flaw']:  # the ones contemplated above with additional_fields
        #         continue
        #     if isinstance(self.additional_fields[key], dict):
        #         for key2 in self.additional_fields[key].keys():
        #             return_str +="| " + (key2) + " | " + str(self.additional_fields[key][key2]) + " | " + "\n"
        #     else:
        #         return_str +="| " + (key2) + " | " + str(self.additional_fields[key]) + " | " + "\n"
        # return_str += "\n"

        return return_str
Example #52
 async def execute(self, payload):
     """
     Runs necessary checks and executes the command function.
     :type payload: sigma.core.mechanics.payload.CommandPayload
     """
     if self.bot.ready:
         if payload.msg.guild:
             delete_command_message = payload.settings.get(
                 'delete_commands')
             if delete_command_message:
                 try:
                     await payload.msg.delete()
                 except (discord.Forbidden, discord.NotFound):
                     pass
             override = check_filter_perms(payload.msg, payload.settings,
                                           'arguments')
             if await self.check_black_args(payload.settings, payload.args):
                 if not any([
                         payload.msg.author.guild_permissions.administrator,
                         override
                 ]):
                     await self.respond_with_emote(payload.msg, '🛡')
                     return
         if not self.bot.cfg.dsc.bot and payload.msg.author.id != self.bot.user.id:
             self.log.warning(f'{payload.msg.author.name} tried using me.')
             return
         if not self.cd.is_cooling(payload.msg):
             if not await self.bot.cool_down.on_cooldown(
                     f'{self.name}_core', payload.msg.author):
                 await self.update_cooldown(payload.msg.author)
                 perms, guild_perms = await self.check_permissions(payload)
                 exec_timestamp = arrow.utcnow().float_timestamp
                 self.log_command_usage(payload.msg, payload.args,
                                        exec_timestamp)
                 self.cd.set_cooling(payload.msg)
                 if perms.permitted:
                     if guild_perms.permitted:
                         requirements = CommandRequirements(
                             self, payload.msg)
                         if requirements.reqs_met:
                             try:
                                 executed = False
                                 last_error = None
                                 client_os_broken_tries = 0
                                 while client_os_broken_tries < 3 and not executed:
                                     try:
                                         await self.add_detailed_stats(
                                             payload, exec_timestamp)
                                         await getattr(
                                             self.command,
                                             self.name)(self, payload)
                                         executed = True
                                     except CancelledError:
                                         pass
                                     except aiohttp.ClientOSError as err:
                                         last_error = err
                                         client_os_broken_tries += 1
                                         await asyncio.sleep(1)
                                 if not executed:
                                     raise last_error
                                 await add_cmd_stat(self)
                                 await self.add_usage_sumarum(payload.msg)
                                 self.bot.command_count += 1
                                 cmd_ev_pld = CommandEventPayload(
                                     self.bot, self, payload)
                                 event_task = self.bot.queue.event_runner(
                                     'command', cmd_ev_pld)
                                 self.bot.loop.create_task(event_task)
                             except self.get_exception() as e:
                                 error = SigmaError(self, e)
                                 await error.error_handler(payload)
                         else:
                             await self.respond_with_emote(payload.msg, '📝')
                             await self.send_requirements_error(
                                 requirements, payload)
                     else:
                         self.log.warning(
                             'ACCESS DENIED: This module or command is not allowed in this location.'
                         )
                         await self.respond_with_emote(payload.msg, '🔒')
                 else:
                     perms.log_unpermitted()
                     await self.respond_with_emote(payload.msg, '⛔')
                     if perms.response:
                         try:
                             await payload.msg.channel.send(
                                 embed=perms.response)
                         except (discord.Forbidden, discord.NotFound):
                             pass
             else:
                 await self.respond_with_emote(payload.msg, '❄')
         else:
             await self.respond_with_emote(payload.msg, '🕙')
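In the retry loop above, transient aiohttp.ClientOSError failures are retried up to three times with a one-second pause, while a swallowed CancelledError re-enters the loop without counting against that budget. A minimal standalone sketch of the same pattern (run_with_retries and coro_factory are illustrative names, not from the project):

import asyncio

import aiohttp


async def run_with_retries(coro_factory, attempts=3, delay=1):
    """Retry an awaitable built by coro_factory on aiohttp.ClientOSError."""
    last_error = None
    for _ in range(attempts):
        try:
            return await coro_factory()
        except aiohttp.ClientOSError as err:
            # remember the failure, back off briefly, then retry
            last_error = err
            await asyncio.sleep(delay)
    raise last_error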
Example #53
0
def habiticaRegister(username, email, password, our_uuid):
    user_dict = {}
    #if user is already in e-mission db, try to load user data
    if edb.get_habitica_db().find({'user_id': our_uuid}).count() == 1:
        try:
            result = habiticaProxy(our_uuid, 'GET', '/api/v3/user', None)
            user_dict = result.json()
            logging.debug("parsed json from GET habitica user = %s" %
                          user_dict)

        #if it fails, then user is in db but not in Habitica, so needs to create new account
        #FIX! Still need to test if this will throw an error correctly
        except urllib2.HTTPError:
            user_dict = newHabiticaUser(username, email, password, our_uuid)
            edb.get_habitica_db().update(
                {"user_id": our_uuid},
                {"$set": {
                    'metrics_data': {
                        'last_timestamp': arrow.utcnow().timestamp,
                        'bike_count': 0,
                        'walk_count': 0
                    },
                    'habitica_username': username,
                    'habitica_password': password,
                    'habitica_id': user_dict['data']['_id'],
                    'habitica_token': user_dict['data']['apiToken'],
                    'habitica_group_id': None
                }},
                upsert=True)
            if user_dict['data']['party']['_id']:
                edb.get_habitica_db().update(
                    {"user_id": our_uuid},
                    {"$set": {'habitica_group_id': user_dict['data']['party']['_id']}},
                    upsert=True)

        #now we have the user data in user_dict, so check if db is correct
        #Fix! should prob check here if our db is right, if it's in group, etc

    #if user is not in db, try to log in using email and password
    else:
        try:
            login_url = url + '/api/v3/user/auth/local/login'
            user_request = {
                'username': username,
                'email': email,
                'password': password
            }
            logging.debug("About to login %s" % user_request)
            login_response = requests.post(login_url, json=user_request)
            if login_response.status_code == 401:
                user_dict = newHabiticaUser(username, email, password,
                                            our_uuid)
            else:
                logging.debug("habitica http response from login = %s" %
                              login_response)
                user_auth = json.loads(login_response.text)
                logging.debug("parsed json from habitica has keys = %s" %
                              user_auth)
                #login only returns user auth headers, so now get authenticated user and put it in user_dict
                auth_headers = {
                    'x-api-user': user_auth['data']['id'],
                    'x-api-key': user_auth['data']['apiToken']
                }
                get_user_url = url + '/api/v3/user'
                result = requests.request('GET',
                                          get_user_url,
                                          headers=auth_headers,
                                          json={})
                logging.debug("result = %s" % result)
                result.raise_for_status()
                user_dict = result.json()
                user_dict['data']['apiToken'] = user_auth['data']['apiToken']
                logging.debug("parsed json from GET habitica user = %s" %
                              user_dict)

        #if it fails, then user is also not in Habitica, so needs to create new account and put it in user_dict
        #FIX!! throw except only if u returns a 401 error
        except Exception as e:
            #swallowed for now (see FIX above), but log it so a later
            #KeyError on user_dict['data'] can be traced back here
            logging.exception("habitica login/signup failed: %s" % e)

        logging.debug("habitica user to be created in our db = %s" %
                      user_dict['data'])
        #Now save new user (user_dict) to our db
        #Since we are randomly generating the password, we store it in case users
        #want to access their Habitica account from the browser
        #Need to create a way for them to retrieve their username/password
        #metrics_data is used to calculate points based on km biked/walked
        #last_timestamp is the last time the user got points, and bike/walk_count are the leftover km
        habitica_user_table = edb.get_habitica_db()
        habitica_user_table.insert({
            'user_id': our_uuid,
            'metrics_data': {
                'last_timestamp': arrow.utcnow().timestamp,
                'bike_count': 0,
                'walk_count': 0
            },
            'habitica_username': username,
            'habitica_password': password,
            'habitica_id': user_dict['data']['id'],
            'habitica_token': user_dict['data']['apiToken'],
            'habitica_group_id': None
        })

        #Since we have a new user in our db, create its default habits (walk, bike)
        setup_default_habits(our_uuid)
        #And invite new user to a group, or retrieve group id if user is already in one
        group_id = setup_party(our_uuid)
        edb.get_habitica_db().update({"user_id": our_uuid},
                                     {"$set": {
                                         'habitica_group_id': group_id
                                     }},
                                     upsert=True)
        user_dict['habitica_group_id'] = group_id
    return user_dict
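This example reads arrow.utcnow().timestamp as an integer property, which matches arrow releases before 1.0; in arrow 1.0 .timestamp became a method returning a float. A compatibility sketch, not part of the project:

import arrow

now = arrow.utcnow()
ts_float = now.timestamp()   # arrow >= 1.0: method, float seconds
ts_int = now.int_timestamp   # arrow >= 1.0: integer property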
Example #54
0
File: flaw.py Project: rajivraj/RVD
    def export_to_cve(self, filepath, version, mode, next_identifier):
        """
        Export flaw (self) to CVE JSON format in filepath

        :param filepath string, full path of the destination file
        :param version int, version of CVE JSON, only 4 supported for now
        :param mode string, public, reserved or reject
        :returns None
        """
        if mode != "public":
            raise NotImplementedError

        if version == 4:
            file = open(filepath, "w")
            file.write("{\n")
            # CVE_data_meta
            file.write('    "CVE_data_meta": {\n')
            file.write('        "ASSIGNER": "*****@*****.**",\n')
            file.write(
                '        "DATE_PUBLIC": "'
                + str(arrow.utcnow().format("YYYY-MM-DDTHH:mm:ss ZZ"))
                + '",\n'
            )
            file.write('        "ID": "' + str(next_identifier) + '",\n')
            file.write('        "STATE": "PUBLIC",\n')
            file.write('        "TITLE": "' + str(self.title) + '"\n')
            file.write("    },\n")
            # affects
            file.write('    "affects": {\n')
            file.write('        "vendor": {\n')
            file.write('            "vendor_data": [\n')
            file.write("                {\n")
            file.write('                    "product": {\n')
            file.write('                        "product_data": [\n')
            file.write("                            {\n")
            file.write(
                '                                "product_name": "'
                + str(
                    self.system.split(",")[0].split(":")[0]
                )  # pick only one element for now, TODO: redo
                + '",\n'
            )
            file.write('                                "version": {\n')
            file.write('                                    "version_data": [\n')
            file.write("                                        {\n")
            if ":" in self.system.split(",")[0]:
                file.write(
                    '                                            "version_value": "'
                    + str(self.system.split(",")[0].split(":")[1])
                    + '"\n'
                )
            else:
                file.write(
                    '                                            "version_value": ""\n'
                )
            file.write("                                        }\n")
            file.write("                                    ]\n")
            file.write("                                }\n")
            file.write("                            }\n")
            file.write("                        ]\n")
            file.write("                    },\n")
            file.write(
                '                    "vendor_name": "'
                + str(self.vendor.split(",")[0])
                + '"\n'
            )
            file.write("                }\n")
            file.write("            ]\n")
            file.write("        }\n")
            file.write("    },\n")

            # credit
            file.write('    "credit": [\n')
            file.write("        {\n")
            file.write('            "lang": "eng",\n')
            file.write('            "value": "' + str(self.detected_by) + '"\n')
            file.write("        }\n")
            file.write("    ],\n")

            # format
            file.write('    "data_format": "MITRE",\n')
            file.write('    "data_type": "CVE",\n')
            file.write('    "data_version": "4.0",\n')

            # description
            file.write('    "description": {\n')
            file.write('        "description_data": [\n')
            file.write("            {\n")
            file.write('                "lang": "eng",\n')
            file.write('                "value": "' + str(self.description) + '"\n')
            file.write("            }\n")
            file.write("        ]\n")
            file.write("    },\n")

            # generator
            file.write('    "generator": {\n')
            file.write('        "engine": "Robot Vulnerability Database (RVD)"\n')
            file.write("    },\n")

            # impact
            file.write('    "impact": {\n')
            file.write('        "cvss": {\n')
            file.write(
                '            "attackComplexity": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "AC"))
                + '",\n'
            )
            file.write(
                '            "attackVector": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "AV"))
                + '",\n'
            )
            file.write(
                '            "availabilityImpact": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "A"))
                + '",\n'
            )
            file.write('            "baseScore": ' + str(self.cvss_score) + ",\n")
            file.write(
                '            "baseSeverity": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "severity"))
                + '",\n'
            )
            file.write(
                '            "confidentialityImpact": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "C"))
                + '",\n'
            )
            file.write(
                '            "integrityImpact": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "I"))
                + '",\n'
            )
            file.write(
                '            "privilegesRequired": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "PR"))
                + '",\n'
            )
            file.write(
                '            "scope": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "S"))
                + '",\n'
            )
            file.write(
                '            "userInteraction": "'
                + str(self.cvss_vector_extract(self.cvss_vector, "UI"))
                + '",\n'
            )
            file.write('            "vectorString": "' + str(self.cvss_vector) + '",\n')
            file.write('            "version": "3.0"\n')
            file.write("        }\n")
            file.write("    },\n")

            # problem-type
            file.write('    "problemtype": {\n')
            file.write('        "problemtype_data": [\n')
            file.write("            {\n")
            file.write('                "description": [\n')
            file.write("                    {\n")
            file.write('                        "lang": "eng",\n')
            file.write('                        "value": "' + str(self.cwe) + '"\n')
            file.write("                    }\n")
            file.write("                ]\n")
            file.write("            }\n")
            file.write("        ]\n")
            file.write("    },\n")

            # references
            # NOTE: reference_data must be opened once, not once per link,
            # otherwise the emitted JSON is invalid for more than one reference
            file.write('    "references": {\n')
            file.write('        "reference_data": [\n')
            for i in range(len(self.links)):
                file.write("            {\n")
                file.write('                "name": "' + str(self.links[i]) + '",\n')
                file.write('                "refsource": "CONFIRM",\n')
                file.write('                "url": "' + str(self.links[i]) + '"\n')
                if i == (len(self.links) - 1):
                    file.write("            }\n")
                else:
                    file.write("            },\n")
            file.write("        ]\n")
            file.write("    },\n")

            # source
            file.write('    "source": {\n')
            file.write('        "defect": [\n')
            file.write('            "RVD#' + str(self.id) + '"\n')
            file.write("        ],\n")
            file.write('        "discovery": "EXTERNAL"\n')
            file.write("    }\n")

            # end
            file.write("}\n")
            file.close()

        else:
            raise NotImplementedError
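Hand-balancing braces and commas with file.write, as above, is fragile; assembling a dict and letting json.dump handle quoting and escaping expresses the same output more safely. A sketch of the top-level metadata only, with placeholder values:

import json

import arrow

cve = {
    "CVE_data_meta": {
        "ASSIGNER": "assigner@example.org",   # placeholder
        "DATE_PUBLIC": arrow.utcnow().format("YYYY-MM-DDTHH:mm:ss ZZ"),
        "ID": "CVE-XXXX-XXXX",                # placeholder identifier
        "STATE": "PUBLIC",
    },
    "data_format": "MITRE",
    "data_type": "CVE",
    "data_version": "4.0",
}

with open("cve.json", "w") as fh:
    json.dump(cve, fh, indent=4)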
Example #55
0
 def requires(self):
   start = arrow.get('20090601', 'YYYYMMDD').ceil('week')
   end = arrow.utcnow().ceil('week')
   for batch in arrow.Arrow.range('week', start, end):
     batch_file = join(SPL_BATCH_DIR, batch.format('YYYYMMDD') + '.ids')
     yield SPL2JSON(batch_file)
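Here .ceil('week') snaps each bound to the last microsecond of its week, and arrow.Arrow.range('week', start, end) then yields one Arrow per week, including the end point when it lands on the same grid. A small illustration (the four printed dates are one week apart):

import arrow

start = arrow.get('20090601', 'YYYYMMDD').ceil('week')
for week in arrow.Arrow.range('week', start, start.shift(weeks=+3)):
    print(week.format('YYYYMMDD'))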
Example #56
0
def utcnow_datetime_aware():
    """Returns timezone-aware datetime for the current UTC moment"""

    return arrow.utcnow().datetime
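The .datetime property returns a standard-library datetime whose tzinfo is UTC; the stdlib-only equivalent would be:

from datetime import datetime, timezone

now = datetime.now(timezone.utc)
assert now.tzinfo is not None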
Example #57
0
def scanfile(infile, ovrx={}, mforce=False):
    """
    Read audio file attributes, tags, and metadata for @infile
    """
    dasc = {}

    # get file parts
    xvreal = infile
    tdir, xv = os.path.split(xvreal)
    xvbase, xvext = os.path.splitext(xv)

    # get base & parent dir names
    tdir_base = os.path.split(tdir)[1]
    tdir_parent = os.path.split(os.path.split(tdir)[0])[1]

    logthis("Examining file:", suffix=xv, loglevel=LL.INFO)
    tstatus('scanfile', event='start', filename=xv)

    # Get xattribs
    fovr = {}
    fovr.update(ovrx)
    fovr.update(parse_xattr_overrides(xvreal))
    if 'ignore' in fovr:
        logthis("File has 'ignore' flag set via override; skipping",
                loglevel=LL.INFO)
        return None

    # Get file path information
    dasc['dpath'] = {'base': tdir_base, 'parent': tdir_parent, 'full': tdir}
    dasc['fpath'] = {
        'real': xvreal,
        'base': xvbase,
        'file': xv,
        'ext': xvext.replace('.', '')
    }

    # Stat, Extended Attribs, Ownership
    dasc['stat'] = util.dstat(xvreal)
    dasc['owner'] = {
        'user': util.getuser(dasc['stat']['uid']),
        'group': util.getgroup(dasc['stat']['gid'])
    }

    # Modification key (MD5 of inode number + mtime + filesize)
    mkey_id = util.getmkey(dasc['stat'])
    dasc['mkey_id'] = mkey_id
    dasc['status'] = 'new'

    # Get mediainfo & build codec string
    minfo = util.mediainfo(xvreal, config, format_lower=False)
    try:
        aud = minfo['audio'][0]
        if aud['format'].lower() == "mpeg audio":
            # the MPEG layer lives in format_profile (e.g. "Layer 3"),
            # not in the format string itself, so detect MP3/MP2 there
            profile = aud.get('format_profile', '')
            if profile.endswith('3'):
                acodec = "MP3"
            elif profile.endswith('2'):
                acodec = "MP2"
            else:
                acodec = "MPEG Audio"
                if profile:
                    acodec += " " + profile
        else:
            acodec = ""
            if minfo['general']['format'] != aud['format']:
                acodec = minfo['general']['format'] + " "
            acodec += aud['format']
            if 'format_profile' in aud:
                acodec += " " + aud['format_profile']
    except Exception as e:
        logexc(e, "Failed to determine codec")
        acodec = "Unknown"

    try:
        if 'bit_depth' in minfo['audio'][0]:
            bitdepth = minfo['audio'][0]['bit_depth']
        else:
            bitdepth = 16
    except Exception as e:
        logexc(e, "Failed to determine bit depth; assuming default 16-bit")
        bitdepth = 16

    # Open file with Mutagen
    try:
        mf = mutagen.File(xvreal)
    except OSError as e:
        logexc(e, "Failed to open file")
        return None
    except Exception as e:
        logexc(e, "Failed to parse tag data from file")
        return None
    # mutagen.File returns None, rather than raising, for unrecognized formats
    if mf is None:
        logthis("Unrecognized audio format:", suffix=xvreal, loglevel=LL.WARNING)
        return None

    # Determine MIME type
    try:
        amime = mf.mime[0]
    except (AttributeError, IndexError):
        try:
            amime = mimetypes.guess_type(xvreal)[0]
        except Exception:
            amime = None

    if amime is None:
        logthis("Failed to auto-detect MIME type for file:",
                suffix=xvreal,
                loglevel=LL.WARNING)
        amime = "audio/x-" + xvext.lower()
    logthis("Detected MIME-Type:", suffix=amime, loglevel=LL.DEBUG)

    # Set default subsong info
    dasc['subsong'] = {
        'index': 1,
        'start_time': 0.0,
        'duration': None,
        'cue': None
    }

    # Check for tag data
    tags = mf.tags
    if tags:
        tdate = parse_album_date(tags)
        if tdate is not None:
            tyear = tdate.format("YYYY")
            tstamp = tdate.timestamp
        else:
            tyear = None
            tstamp = None

        dasc['tags'] = {
            'artist': get_best_tag(tags, ('ARTIST', 'TPE1', 'TOPE', 'Author')),
            'album': get_best_tag(tags, ('ALBUM', 'TALB', 'TOAL', 'WM/AlbumTitle')),
            'title': get_best_tag(tags, ('TITLE', 'TIT2', 'TIT3', 'Title')),
            'year': tyear,
            'timestamp': tstamp,
            'genre': get_best_tag(tags, ('GENRE', 'TCON', 'WM/Genre')),
            'tracknum': parse_tracknum(tags, forceInt=True),
            'trackstr': parse_tracknum(tags, forceInt=False),
            'disc': get_best_tag(tags, ('DISCNUMBER', 'DISC', 'TPOS')),
            'album_artist': get_best_tag(tags, ('ALBUMARTIST', 'TXXX:ALBUM ARTIST',
                                                'TXXX:ALBUM_ARTIST', 'WM/AlbumArtist'))
        }
        dasc['alltags'] = get_all_tags_once(tags)
        dasc['format'] = {
            'format': acodec,
            'mime': amime,
            'channels': get_info_safe(mf.info, 'channels', 2),
            'sampling_rate': get_info_safe(mf.info, 'sample_rate', 44100),
            'encoding_settings': minfo['audio'][0].get('encoding_settings'),
            'writing_library': minfo['audio'][0].get('writing_library'),
            'bitrate': get_info_safe(mf.info, 'bitrate', 1411000),
            'bit_depth': bitdepth,
            'length': get_info_safe(mf.info, 'length')
        }

        if dasc['tags']['album_artist'] is None:
            dasc['tags']['album_artist'] = dasc['tags']['artist']
    else:
        dasc['tags'] = {}
        dasc['alltags'] = {}
        dasc['format'] = {}
        logthis("** NO DATA **", loglevel=LL.WARNING)

    # Record last time this entry was updated (UTC)
    last_up = arrow.utcnow().timestamp
    logthis("last_updated =", suffix=last_up, loglevel=LL.DEBUG)
    dasc['last_updated'] = last_up

    # Check for matching cue file...
    subsongs = []
    cuepath = dasc['dpath']['full'] + '/' + dasc['fpath']['base'] + '.cue'
    if os.path.exists(cuepath):
        cue = parse_cue_file(cuepath)
        if cue:
            # Set subsong CUE sheet source file
            dasc['subsong']['cue'] = dasc['fpath']['base'] + '.cue'
            dasc['subsong']['cue_realpath'] = cuepath

            logthis("Using CUE sheet for subsong indexing",
                    suffix=dasc['fpath']['base'] + '.cue',
                    loglevel=LL.VERBOSE)

            for ssindex, ssdata in cue['tracks'].items():
                try:
                    if cue['tracks'].get(ssindex + 1):
                        ssduration = (cue['tracks'][ssindex + 1]['index'][1][0] -
                                      cue['tracks'][ssindex]['index'][1][0])
                    else:
                        ssduration = (dasc['format']['length'] -
                                      cue['tracks'][ssindex]['index'][1][0])
                except Exception as e:
                    logexc(e, "Failed to calculate subsong duration")
                    ssduration = 0.0
                tsubsong = clone_master_track(dasc, ssdata, ssindex,
                                              ssduration)
                logthis(
                    u"[SUBSONG] Title: {title} / Track: {tracknum} ({trackstr}) / Artist: {artist} / Album: {album} / AlbumArtist: {album_artist} / Year: {year}"
                    .format(**tsubsong['tags']),
                    loglevel=LL.DEBUG)
                subsongs.append(tsubsong)

    if len(subsongs) == 0:
        logthis(
            u"Title: {title} / Track: {tracknum} ({trackstr}) / Artist: {artist} / Album: {album} / AlbumArtist: {album_artist} / Year: {year}"
            .format(**dasc['tags']),
            loglevel=LL.DEBUG)
        return dasc
    else:
        return subsongs
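Most of the probing above is delegated to mutagen, whose File() constructor sniffs the container and returns None, rather than raising, for formats it does not recognize. A minimal sketch of that pattern (the filename is illustrative):

import mutagen

mf = mutagen.File("track.flac")
if mf is not None:
    # mime types, stream info, and tags all hang off the returned object
    print(mf.mime[0], mf.info.length)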
Example #58
0
def scheduler(broker=None):
    """
    Creates a task from a schedule at the scheduled time and schedules next run
    """
    if not broker:
        broker = get_broker()
    db.close_old_connections()
    try:
        for s in Schedule.objects.exclude(repeats=0).filter(
                next_run__lt=timezone.now()):
            args = ()
            kwargs = {}
            # get args, kwargs and hook
            if s.kwargs:
                try:
                    # restricted to a dict() call, but eval still executes
                    # arbitrary expressions, so s.kwargs must be trusted input
                    kwargs = eval('dict({})'.format(s.kwargs))
                except SyntaxError:
                    kwargs = {}
            if s.args:
                args = ast.literal_eval(s.args)
                # single value won't eval to tuple, so:
                if not isinstance(args, tuple):
                    args = (args, )
            q_options = kwargs.get('q_options', {})
            if s.hook:
                q_options['hook'] = s.hook
            # set up the next run time
            if s.schedule_type != s.ONCE:
                next_run = arrow.get(s.next_run)
                while True:
                    if s.schedule_type == s.MINUTES:
                        next_run = next_run.replace(minutes=+(s.minutes or 1))
                    elif s.schedule_type == s.HOURLY:
                        next_run = next_run.replace(hours=+1)
                    elif s.schedule_type == s.DAILY:
                        next_run = next_run.replace(days=+1)
                    elif s.schedule_type == s.WEEKLY:
                        next_run = next_run.replace(weeks=+1)
                    elif s.schedule_type == s.MONTHLY:
                        next_run = next_run.replace(months=+1)
                    elif s.schedule_type == s.QUARTERLY:
                        next_run = next_run.replace(months=+3)
                    elif s.schedule_type == s.YEARLY:
                        next_run = next_run.replace(years=+1)
                    if Conf.CATCH_UP or next_run > arrow.utcnow():
                        break
                s.next_run = next_run.datetime
                s.repeats -= 1
            # send it to the cluster
            q_options['broker'] = broker
            q_options['group'] = q_options.get('group', s.name or s.id)
            kwargs['q_options'] = q_options
            s.task = django_q.tasks.async_task(s.func, *args, **kwargs)
            # log it
            if not s.task:
                logger.error(
                    _('{} failed to create a task from schedule [{}]').format(
                        current_process().name, s.name or s.id))
            else:
                logger.info(
                    _('{} created a task from schedule [{}]').format(
                        current_process().name, s.name or s.id))
            # default behavior is to delete a ONCE schedule
            if s.schedule_type == s.ONCE:
                if s.repeats < 0:
                    s.delete()
                    continue
                # but not if it has a positive repeats
                s.repeats = 0
            # save the schedule
            s.save()
    except Exception as e:
        logger.error(e)
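Shifting with replace(minutes=+1) and friends is the old arrow API, removed in arrow 0.15; current releases use shift() for relative moves and reserve replace() for setting absolute field values. On a modern arrow the schedule stepping would read, for example:

import arrow

next_run = arrow.utcnow().shift(hours=+1)             # relative move
on_the_hour = next_run.replace(minute=0, second=0)    # absolute fields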
Example #59
0
async def givecookie(cmd, message, args):
    if message.mentions:
        target = message.mentions[0]
        author_stamp = arrow.get(message.author.created_at).float_timestamp
        current_stamp = arrow.utcnow().float_timestamp
        time_diff = current_stamp - author_stamp
        if time_diff > 2592000:  # account must be at least 30 days old
            if message.author.id != target.id:
                if not target.bot:
                    if not cmd.bot.cool_down.on_cooldown(
                            cmd.name, message.author):
                        upgrade_file = cmd.db[
                            cmd.db.db_cfg.database].Upgrades.find_one(
                                {'UserID': message.author.id})
                        if upgrade_file is None:
                            cmd.db[cmd.db.db_cfg.database].Upgrades.insert_one(
                                {'UserID': message.author.id})
                            upgrade_file = {}
                        cookie_coll = cmd.db[cmd.db.db_cfg.database].Cookies
                        base_cooldown = 3600
                        if 'stamina' in upgrade_file:
                            stamina = upgrade_file['stamina']
                        else:
                            stamina = 0
                        cooldown = int(base_cooldown - ((base_cooldown / 100) *
                                                        (stamina * 0.2)))
                        file_check = cookie_coll.find_one(
                            {'UserID': target.id})
                        if not file_check:
                            cookies = 0
                            data = {'UserID': target.id, 'Cookies': 0}
                            cookie_coll.insert_one(data)
                        else:
                            cookies = file_check['Cookies']
                        cookies += 1
                        cookie_coll.update_one({'UserID': target.id},
                                               {'$set': {
                                                   'Cookies': cookies
                                               }})
                        cmd.bot.cool_down.set_cooldown(cmd.name,
                                                       message.author,
                                                       cooldown)
                        title = f'🍪 You gave a cookie to {target.display_name}.'
                        response = discord.Embed(color=0xd99e82, title=title)
                    else:
                        timeout_seconds = cmd.bot.cool_down.get_cooldown(
                            cmd.name, message.author)
                        if timeout_seconds > 60:
                            timeout_seconds = arrow.utcnow().timestamp + timeout_seconds
                            timeout = arrow.get(timeout_seconds).humanize()
                        else:
                            timeout = f'in {timeout_seconds} seconds'
                        timeout_title = f'🕙 You can give another cookie {timeout}.'
                        response = discord.Embed(color=0x696969,
                                                 title=timeout_title)
                else:
                    response = discord.Embed(
                        color=0xBE1931, title='❗ Bots don\'t eat cookies.')
            else:
                response = discord.Embed(
                    color=0xBE1931,
                    title='❗ Nope, can\'t give cookies to yourself.')
        else:
            response = discord.Embed(
                color=0xBE1931,
                title='❗ Sorry, your account is too young to give cookies.')
    else:
        response = discord.Embed(color=0xBE1931, title='❗ No user targeted.')
    await message.channel.send(embed=response)
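The cooldown message above leans on humanize(), which renders an Arrow relative to the present moment:

import arrow

# prints something like "in 5 minutes"
print(arrow.utcnow().shift(minutes=+5).humanize())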
Example #60
0
 def _searchdate(self, timezone, arrow=None):
     # the `arrow` parameter shadows the arrow module, so the bare utcnow()
     # below must come from `from arrow import utcnow`
     if not arrow:
         arrow = utcnow()
     return arrow.to(timezone).format('YYYY-MM-DD')
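Because .to() converts between timezones without changing the instant, the formatted date can differ from the UTC date near midnight, which is why the timezone is threaded through here:

import arrow

utc = arrow.utcnow()
print(utc.format('YYYY-MM-DD'), utc.to('US/Pacific').format('YYYY-MM-DD'))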