Example no. 1
    def process_request(self, request):
        """
        Writes the signed_request into the Session
        """
        fb = get_session(request)
        setattr(request, 'fb_session', fb)
        application = get_app_dict()

        logger.debug('Request Method = %s\n, AccessToken=%s' % (request.method, fb.access_token))

        if 'feincms' in settings.INSTALLED_APPS:
            # if feincms is installed, try to get the application from the page
            from facebook.feincms.utils import get_application_from_request
            page_app = get_application_from_request(request)
            if application:
                application = get_app_dict(page_app)

        # Temporary OAuth2.0 fix due to missing access_token in cookie sr:
        if 'access_token' in request.GET:
            fb.store_token(request.GET.get('access_token'))

        # default POST/GET request from facebook with a signed request
        if 'signed_request' in request.POST:
            parsed_request = parseSignedRequest(request.POST['signed_request'], application['SECRET'])
            logger.debug(u'got signed_request from facebook: %s' % parsed_request)
            if 'user' in parsed_request:
                language = parsed_request['user']['locale']
                logger.debug('language: %s' % language)
                request.LANGUAGE_CODE = language
                translation.activate(language)
            fb.signed_request = parsed_request
            logger.debug('stored signed_request')
            expires = None
            # rewrite important data
            if 'oauth_token' in parsed_request:
                expires = datetime.fromtimestamp(float(parsed_request['expires']))
                fb.store_token(parsed_request['oauth_token'], expires)
            elif 'access_token' in parsed_request:
                expires = datetime.fromtimestamp(float(parsed_request['expires']))
                fb.store_token(parsed_request['access_token'], expires)
            else:
                #The chance is good that there is already a valid token in the session. Remove it.
                fb.store_token(None)

            if 'user_id' in parsed_request:
                fb.user_id = parsed_request['user_id']

            else:
                logger.debug("Signed Request didn't contain public user info.")
            if expires:
                logger.debug('Signed Request issued at: %s' % datetime.fromtimestamp(float(parsed_request['issued_at'])))
        
        # auth via callback from facebook
        elif 'code' in request.GET and 'facebook' in request.META.get('HTTP_REFERER', u''):
            authenticate(request.REQUEST['code'], fb, application,
                         request.build_absolute_uri().split('?')[0] \
                            .replace(application['CANVAS-URL'], application['CANVAS-PAGE']))
Example no. 2
def auto_schedule(request):
    if not request.user.is_plant_manager:
        raise UserFacingException("You are not authorized to use the auto-scheduler.")
    items = json.loads(request.POST.get("items", "[]"))
    existing = json.loads(request.POST.get("existing", "{}"))
    start = request.POST.get("start", "")
    end = request.POST.get("end", "")
    scheduler_name = request.POST.get("scheduler", "")
    current_timezone = timezone.get_current_timezone()
    try:
        start = datetime.fromtimestamp(int(start), tz=current_timezone)
        end = datetime.fromtimestamp(int(end), tz=current_timezone)
    except ValueError:
        raise UserFacingException("Unable to schedule: invalid start/end time")
    if start >= end:
        raise UserFacingException(
            "Unable to schedule: end time is less than start time"
        )
    if not items:
        # Return empty schedule
        return "[]"
    to_schedule = []
    existing_items = {}
    try:
        owned_lines = request.user.owned_lines
        for item in items:
            to_schedule.append(
                scheduling.Item(
                    GoalItem.objects.get(id=item["id"]),
                    int(item["hours"]),
                    set(item["groups"]).intersection(owned_lines),
                )
            )
        for group, items in existing.items():
            if not group in owned_lines:
                continue
            existing_items[group] = [
                scheduling.ExistingItem(
                    GoalItem.objects.get(id=item["id"]),
                    datetime.fromtimestamp(int(item["start"]), tz=current_timezone),
                    datetime.fromtimestamp(int(item["end"]), tz=current_timezone),
                )
                for item in items
            ]
    except Exception:
        logger.exception("Invalid auto-schedule request")
        raise UserFacingException("Unable to schedule: invalid request.")
    try:
        scheduler = scheduling.get_scheduler(scheduler_name)
        result = scheduler(to_schedule, existing_items, start, end)
    except scheduling.ScheduleException as e:
        raise UserFacingException(str(e))
    return json.dumps(result)
Example no. 3
    def save_result(proj, db_conf, bench_conf, summary_lines):
        res = Result()
        res.db_conf = db_conf
        res.benchmark_conf = bench_conf
        res.project = proj
        res.timestamp = datetime.fromtimestamp(
            summary_lines['Current Timestamp (milliseconds)'] // 1000,
            timezone("UTC"))

        latency_dict = summary_lines['Latency Distribution']

        res.avg_latency = \
            float(latency_dict['Average Latency (microseconds)'])
        res.min_latency = \
            float(latency_dict['Minimum Latency (microseconds)'])
        res.p25_latency = \
            float(latency_dict['25th Percentile Latency (microseconds)'])
        res.p50_latency = \
            float(latency_dict['Median Latency (microseconds)'])
        res.p75_latency = \
            float(latency_dict['75th Percentile Latency (microseconds)'])
        res.p90_latency = \
            float(latency_dict['90th Percentile Latency (microseconds)'])
        res.p95_latency = \
            float(latency_dict['95th Percentile Latency (microseconds)'])
        res.p99_latency = \
            float(latency_dict['99th Percentile Latency (microseconds)'])
        res.max_latency = \
            float(latency_dict['Maximum Latency (microseconds)'])
        res.throughput = \
            float(summary_lines['Throughput (requests/second)'])
        res.git_hash = upload_hash
        res.result_ok = result_ok
        res.save()
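The listing above divides a millisecond epoch by 1000 before handing it to `datetime.fromtimestamp` together with a `timezone("UTC")` object. A minimal standalone sketch of that conversion, assuming `timezone` is pytz's, as the call style suggests:

from datetime import datetime
from pytz import timezone  # assumed; matches the timezone("UTC") call above

millis = 1609459200000                               # epoch in milliseconds
ts = datetime.fromtimestamp(millis // 1000, timezone("UTC"))
print(ts.isoformat())                                # 2021-01-01T00:00:00+00:00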
Example no. 4
def temperature(request):

    if request.method == 'GET':
        return HttpResponse('<html><head></head><body><form action="" method="post">'
                            '<input type="text" name="time" value="%s">'
                            '<input type="text" name="temperature" value="24.4">'
                            '<input type="text" name="hash" value="codeing">'
                            '<input type="submit" value="Submit">'
                            '</form></body></html>' % str(int(python_time.time())))  # TODO delete GET

    # check if there are active beers
    for beer in Beer.objects.all():
        if beer.is_brewing():
            break
    else:  # none of the beers is brewing; we do not save, but still respond to be nice ;)
        return HttpResponse('OK, not brewing though')

    received = request.POST['hash']
    temperature = request.POST['temperature']
    time = request.POST['time']
    correct = 'time=%s&temperature=%s&secret=%s' % (time, temperature, SECRET)
    correct = hashlib.md5(correct.encode()).hexdigest()
    if not received == correct:
        return HttpResponseForbidden()
    time = datetime.fromtimestamp(int(time), tz=utc)
    temp = MeasuredTemperature.objects.create(datetime=time, temperature=temperature)
    for beer in Beer.objects.all():
        if beer.is_brewing():
            beer.temperatures.add(temp)
    return HttpResponse('OK!')
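The view only stores a reading when the posted `hash` equals the MD5 of `time=...&temperature=...&secret=...` built with the shared `SECRET`. A sketch of how a sensor client could produce a matching payload; the secret and reading below are made up:

import hashlib
import time

SECRET = 'change-me'  # hypothetical shared secret; must match the server's

def signed_payload(temperature):
    ts = str(int(time.time()))
    raw = 'time=%s&temperature=%s&secret=%s' % (ts, temperature, SECRET)
    return {
        'time': ts,
        'temperature': temperature,
        'hash': hashlib.md5(raw.encode()).hexdigest(),
    }

# POST signed_payload('24.4') as form data to the temperature endpoint above.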
Example no. 5
 def handle_alert_clear_message(self):
     # type: () -> Text
     alert_clear_template = "Alert [alert_name]({alert_url}) has cleared at {trigger_time}!"
     trigger_time = datetime.fromtimestamp((self.payload['trigger_time']))
     alert_id, alert_name, alert_url, alert_runbook_url = self.parse_alert()
     content = alert_clear_template.format(alert_name=alert_name, alert_url=alert_url, trigger_time=trigger_time)
     return content
Example no. 6
def uwsgi_context(request):
    try:
        # noinspection PyPackageRequirements
        import uwsgi

        return {
            'UWSGI': {
                'enabled': True,
                'numproc': uwsgi.numproc,
                'buffer_size': uwsgi.buffer_size,
                'started_on': datetime.fromtimestamp(uwsgi.started_on, tz=utc),
                'numworkers': len(uwsgi.workers()),
                'masterpid': uwsgi.masterpid(),
                'total_requests': uwsgi.total_requests(),
                'request_id': uwsgi.request_id(),
                'worker_id': uwsgi.worker_id(),
            }
        }

    except ImportError:
        return {
            'UWSGI': {
                'enabled': False,
            }
        }
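For the dictionary above to reach templates, the function would normally be registered as a template context processor; a sketch of the settings entry, with a hypothetical module path:

# settings.py (sketch; 'myapp.context_processors' is a placeholder path)
TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.request',
            'myapp.context_processors.uwsgi_context',
        ],
    },
}]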
Example no. 7
 def convert_datetimefield_value(self, value, expression, connection, *args, **kwargs):
     if value is None:
         return None
     value = datetime.fromtimestamp(value)
     if settings.USE_TZ:
         value = timezone.make_aware(value, self.connection.timezone)
     return value
Example no. 8
    def get_posts_by_ids(cls, posts_ids):
        r = RedisLink.factory()

        users_store = {}

        posts = []
        for post_id in posts_ids:
            post_str = r.get('post:%s' % post_id)
            (user_id, create_time, status) = post_str.split('|')

            if user_id in users_store:
                user = users_store[user_id]
            else:
                user = User.fetch_one(user_id)
                users_store[user_id] = user

            # @todo Maybe this should be a Post object?
            posts.append({
                'id': post_id,
                'user_id': user_id,
                'create_time': datetime.fromtimestamp(float(create_time)),
                'status': status,
                'user': user
            })

        return posts
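Each Redis value here packs a post as `user_id|create_time|status`, with `create_time` stored as an epoch float. The split-and-convert step on a made-up record looks like this:

from datetime import datetime

post_str = '42|1609459200.0|published'     # made-up 'user_id|create_time|status' value
user_id, create_time, status = post_str.split('|')
create_time = datetime.fromtimestamp(float(create_time))
print(user_id, create_time, status)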
Example no. 9
 def handle_alert_clear_message(self):
     # type: () -> text_type
     alert_clear_template = "Alert [alert_name]({alert_url}) has cleared at {trigger_time}!"
     trigger_time = datetime.fromtimestamp((self.payload['trigger_time']))
     alert_id, alert_name, alert_url, alert_runbook_url = self.parse_alert()
     content = alert_clear_template.format(alert_name=alert_name, alert_url=alert_url, trigger_time=trigger_time)
     return content
Example no. 10
    def graphic_updated(self, data):
        channel = Channel.objects.get(cluster=data["channel"])

        if not channel.currentEpisode or not channel.currentEpisode.current_part:
            if "episode" in data:
                episode_slug = data["episode"].split("-")[-1]
                episode = Episode.objects.filter(slug=episode_slug)
                if len(episode) == 1:
                    episode = episode[0]
                    if len(episode.parts.all()) == 1:
                        part = episode.parts.all()[0]
                    elif "begin" in data:
                        begin = int(data["begin"])
                        begindt = datetime.fromtimestamp(begin)
                        for p in episode.parts.all():
                            if abs((p.begin() - begindt).seconds) < 120:
                                part = p
            if not part:
                error_handler("graphic_update: no current episode for %s" % channel.cluster, channel)
                return
        else:
            part = channel.currentEpisode.current_part

        g, created = Graphic.objects.get_or_create(type=data["type"], episode=part)
        if "image" in data:
            if created:
                g.file.save("", ContentFile(base64.b64decode(data["image"])))
            else:
                f = open(g.file.path, "w")
                f.write(base64.b64decode(data["image"]))
                f.close()
        elif "data" in data:
            g.data = data["data"]
            g.save()
Example no. 11
    def get_data(self):

        rss = self.get_rss()
        data_list = []

        for entry in rss['entries']:
            url = entry.get('link', '')
            text = entry.get('summary', '')
            title = entry.get('title', '')
            pub_date = entry.get('published_parsed') or entry.get(
                'published', None)

            if self.words_in_string(self.configuration.stop_words, text) or \
                    not self.words_in_string(self.configuration.keywords, text) and \
                    not self.words_in_string(self.configuration.keywords, title):
                continue

            try:
                source_datetime = datetime.fromtimestamp(time.mktime(pub_date))
            except TypeError:
                source_datetime = datetime.strptime(
                    pub_date, self.configuration.time_format).date()

            data_list.append({
                'url': url,
                'source_datetime': source_datetime,
                'text': self.normalize_text(text),
                'title': title
            })

        return data_list
Example no. 12
def admin_tan(request: HttpRequest):
    if not request.user.is_superuser:
        return HttpResponseRedirect('/admin/login')

    if 'action' in request.POST and request.POST['action'] == 'number':
        return JsonResponse({'number': TanRequest.active_requests().count()})

    if 'tan' in request.POST:
        tan = request.POST['tan']
        date = request.POST['date']
        tan = re.sub(r'[^a-zA-Z0-9]', "", tan)
        tan_request = TanRequest.objects.get(
            pk=datetime.fromtimestamp(float(date), tz=timezone.utc))
        if tan_request.answer or tan_request.expired:
            return render(request, 'admin_tan.html',
                          {'error': "TAN request expired"})
        tan_request.answer = tan
        tan_request.save()

        return render(request, 'admin_tan.html', {'tan': tan})

    tan_request = TanRequest.active_request()
    return render(
        request, 'admin_tan.html', {
            'tan_request': tan_request,
            'id': tan_request.date.timestamp() if tan_request else None
        })
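The view round-trips the request key as a plain timestamp: the template gets `tan_request.date.timestamp()` and the POST handler rebuilds the datetime with `datetime.fromtimestamp(..., tz=timezone.utc)`. A small sketch of that round trip using the standard-library `timezone.utc` (the view's `timezone` may be Django's; both denote UTC):

from datetime import datetime, timezone

created = datetime(2021, 6, 1, 12, 0, tzinfo=timezone.utc)
stamp = created.timestamp()                          # float handed to the template
restored = datetime.fromtimestamp(stamp, tz=timezone.utc)
assert restored == created                           # lossless round trip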
Example no. 13
    def save_result(proj, db_conf, bench_conf, summary_lines):
        res = Result()
        res.db_conf = db_conf
        res.benchmark_conf = bench_conf
        res.project = proj
        res.timestamp = datetime.fromtimestamp(
            summary_lines['Current Timestamp (milliseconds)'] // 1000,
            timezone("UTC")
        )

        latency_dict = summary_lines['Latency Distribution']

        res.avg_latency = \
            float(latency_dict['Average Latency (microseconds)'])
        res.min_latency = \
            float(latency_dict['Minimum Latency (microseconds)'])
        res.p25_latency = \
            float(latency_dict['25th Percentile Latency (microseconds)'])
        res.p50_latency = \
            float(latency_dict['Median Latency (microseconds)'])
        res.p75_latency = \
            float(latency_dict['75th Percentile Latency (microseconds)'])
        res.p90_latency = \
            float(latency_dict['90th Percentile Latency (microseconds)'])
        res.p95_latency = \
            float(latency_dict['95th Percentile Latency (microseconds)'])
        res.p99_latency = \
            float(latency_dict['99th Percentile Latency (microseconds)'])
        res.max_latency = \
            float(latency_dict['Maximum Latency (microseconds)'])
        res.throughput = \
            float(summary_lines['Throughput (requests/second)'])
        res.git_hash = upload_hash
        res.result_ok = result_ok
        res.save()
Example no. 14
 def default(self, obj):
     if hasattr(obj, 'isoformat'): #handles both date and datetime objects
         return obj.isoformat()
     elif type(obj) == time.struct_time:
         return datetime.fromtimestamp(time.mktime(obj)).isoformat()
     else:
         return json.JSONEncoder.default(self, obj)
Example no. 15
def get_reqeusts(valid_data: Dict, user: User) -> Result[List[Dict], HttpError]:
    if not valid_data:
        return Ok([request_converter.to_dict(r) for r in Request.objects.filter(customer=user)])

    radius = valid_data.get('radius')
    lat = valid_data.get('lat')
    long = valid_data.get('long')
    finished = valid_data.get('finished')
    assigned = valid_data.get('assigned')
    starts_after = valid_data.get('starts_after')
    if starts_after is not None:
        starts_after = datetime.fromtimestamp(starts_after)

    filters = dict_filter({
        'finished': finished,
        'start_time__gte': starts_after,
    })
    if assigned is not None:
        filters['assigned_to__isnull'] = not assigned

    request_query = Request.objects.filter(**filters)
    if radius is None or lat is None or long is None:
        return Ok([request_converter.to_dict(r) for r in request_query])
    return Ok([
        request_converter.to_dict(r) for r in request_query if
        haversine((lat, long), (r.latitude, r.longitude)) <= radius
    ])
Example no. 16
 def create_request(
     self,
     customer,
     duration: timedelta,
     latitude,
     longitude,
     description: str,
     title: str,
     start_time: datetime = None,
 ):
     if isinstance(start_time, int):
         start_time = datetime.fromtimestamp(start_time)
     if isinstance(duration, int):
         duration = timedelta(seconds=duration)
     request = self.model(
         customer=customer,
         duration=duration,
         latitude=latitude,
         longitude=longitude,
         description=description,
         title=title,
         start_time=start_time,
     )
     request.clean()
     request.save()
     if start_time is None:
         with self.request_mutex:
             self.pending_requests.append(request)
     return request
Example no. 17
def timestamps_to_dates(timestamps: list):
    """
    Converts the listed timestamps to their `datetime` string equivalents
    :param timestamps: timestamps to be converted to `datetime`
    :return: list of `datetime` strings built from `timestamps`
    """
    return list(
        map(lambda dt: str(datetime.fromtimestamp(float(dt))), timestamps))
Example no. 18
def update_notification_record(launch):
    notification = Notification.objects.get(launch=launch)
    notification.last_net_stamp = launch.netstamp
    notification.last_net_stamp_timestamp = datetime.now()
    logger.info('Updating Notification %s to timestamp %s' %
                (notification.launch.name,
                 datetime.fromtimestamp(
                     notification.launch.netstamp).strftime("%A %d %B %Y")))
    notification.save()
Example no. 19
def save_execution_time(start_ts, fn):
    end_ts = time.time()
    exec_time = end_ts - start_ts
    start_time = datetime.fromtimestamp(int(start_ts), timezone(TIME_ZONE))
    ExecutionTime.objects.create(module="celery.periodic_tasks",
                                 function=fn,
                                 tag="",
                                 start_time=start_time,
                                 execution_time=exec_time,
                                 result=None)
Example no. 20
def get_last_fetch_time(git_dir):
    with ChDir(git_dir):
        app.logger.info('[{}] : Git stat on git dir FETCH_HEAD file'.format(git_dir))
        try:
            mtime = os.stat('.git/FETCH_HEAD').st_mtime
            app.logger.debug('{} -> mtime: {}'.format(git_dir, mtime))
            return datetime.fromtimestamp(mtime)
        except FileNotFoundError as e:
            app.logger.warn('[{}] : FETCH_HEAD not found.'.format(git_dir))
            return None
Example no. 21
def datemailTodatetime(dateEmail):
    '''
    Returns a datetime object from an IMAP4 INTERNALDATE string
    '''

    date = None
    if dateEmail:
        date_tuple = email.utils.parsedate_tz(dateEmail)
        if date_tuple:
            date = datetime.fromtimestamp(email.utils.mktime_tz(date_tuple))
    return (date)
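A usage sketch for the helper above, with a sample RFC 2822 date string of the kind `email.utils.parsedate_tz` understands:

import email.utils
from datetime import datetime

raw = 'Mon, 20 Nov 1995 19:12:08 -0500'              # sample RFC 2822 date
date_tuple = email.utils.parsedate_tz(raw)
stamp = email.utils.mktime_tz(date_tuple)            # epoch seconds
print(datetime.fromtimestamp(stamp))                 # naive datetime in local time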
Example no. 22
 def save(self,
          force_insert=False,
          force_update=False,
          using=None,
          update_fields=None):
     if isinstance(self.start_time, (int, float)):
         self.start_time = datetime.fromtimestamp(int(self.start_time),
                                                  timezone(TIME_ZONE))
     super().save(force_insert=force_insert,
                  force_update=force_update,
                  using=using,
                  update_fields=update_fields)
Example no. 23
    def netstamp_changed(self, launch, notification, diff):
        logger.info('Netstamp change detected for %s - now launching in %d seconds.' % (launch.name, diff))
        date = datetime.fromtimestamp(launch.netstamp).replace(tzinfo=pytz.UTC)
        message = 'SCHEDULE UPDATE: %s now launching in %s at %s.' % (launch.name,
                                                                      seconds_to_time(diff),
                                                                      date.strftime("%H:%M %Z (%d/%m)"))

        old_diff = datetime.utcfromtimestamp(int(notification.last_net_stamp)) - datetime.now()
        if old_diff.total_seconds() < 86400:
            logger.info('Netstamp Changed and within window - sending mobile notification.')
            self.send_notification(launch, 'netstampChanged', notification)
        self.send_to_twitter(message, notification)

        notification.last_net_stamp = notification.launch.netstamp
        notification.last_net_stamp_timestamp = datetime.now()
        launch.save()

        # If launch is within 24 hours...
        if 86400 >= diff > 3600:
            logger.info('Launch is within 24 hours, resetting notifications.')
            notification.wasNotifiedTwentyFourHour = True
            notification.wasNotifiedOneHour = False
            notification.wasNotifiedTenMinutes = False

            notification.wasNotifiedTwentyFourHourTwitter = True
            notification.wasNotifiedOneHourTwitter = False
            notification.wasNotifiedTenMinutesTwitter = False
        elif 3600 >= diff > 600:
            logger.info('Launch is within one hour, resetting Ten minute notifications.')
            notification.wasNotifiedOneHour = True
            notification.wasNotifiedTwentyFourHour = True

            notification.wasNotifiedOneHourTwitter = True
            notification.wasNotifiedTwentyFourHourTwitter = True
        elif diff <= 600:
            logger.info('Launch is within ten minutes.')
            notification.wasNotifiedOneHour = True
            notification.wasNotifiedTwentyFourHour = True
            notification.wasNotifiedTenMinutes = True

            notification.wasNotifiedOneHourTwitter = True
            notification.wasNotifiedTwentyFourHourTwitter = True
            notification.wasNotifiedTenMinutesTwitter = True
        elif diff >= 86400:
            notification.wasNotifiedTwentyFourHour = False
            notification.wasNotifiedOneHour = False
            notification.wasNotifiedTenMinutes = False

            notification.wasNotifiedTwentyFourHourTwitter = False
            notification.wasNotifiedOneHourTwitter = False
            notification.wasNotifiedTenMinutesTwitter = False
        notification.save()
Example no. 24
def test_query(query, expected, create_full_user, client):
    user, password = create_full_user

    customers = [u for u, _ in generate_users(5)]
    workers = [u for u, _ in generate_users(5)]
    for u in chain(customers, workers):
        u.save()

    default_args = lambda: {
        'duration': fake.time_delta(),
        'latitude': fake.latitude(),
        'longitude': fake.longitude(),
        'description': fake.text(),
        'customer': random.choice(customers)
    }
    reqeusts = [
        Request(start_time=datetime.fromtimestamp(1000),
                assigned_to=random.choice(workers),
                finished=True,
                **default_args()),
        Request(start_time=datetime.fromtimestamp(30000),
                assigned_to=None,
                finished=False,
                **default_args())
    ]
    for r in reqeusts:
        r.clean()
        r.save()
    resp = client.get(f'/api/v1/request?{query}',
                      HTTP_AUTHORIZATION=encode_auth(user.email, password))
    assert resp.status_code == 200
    resp_dict = json.loads(resp.content)
    resp_requests = resp_dict['requests']
    assert len(resp_requests) == len(expected)
    assert sorted([request_converter.to_dict(reqeusts[i]) for i in expected],
                  key=lambda d: d['request_id']) == sorted(
                      resp_requests, key=lambda d: d['request_id'])
Example no. 25
def _before_exec_message_task(self, msg_handler, message: Message,
                              tguser: TgUser):
    tgmessage = TgMessage(
        tguser=tguser,
        tgchat=tguser.tgchat,
        tg_id=message.chat.id,
        from_tg_id=message.from_user.id,
        message_id=message.message_id,
        chat_type=message.chat.type,
        text=message.text or message.caption
        or 'content_type:%s' % message.content_type,
        message=base_utils.to_json(message),
        date=timezone.make_aware(datetime.fromtimestamp(message.date)),
    )
    self._exec_task(message_task, msg_handler['function'], tgmessage, message,
                    tguser)
Example no. 26
 def get_posts_by_keyword(self, keyword):
     feed = self.get_newsfeed(keyword)
     data_list = []
     for post in feed['items']:
         url = f'https://vk.com/wall{post["from_id"]}_{post["id"]}'
         pub_date = datetime.fromtimestamp(post.get('date'))
         text = post.get('text')
         if self.words_in_string(self.configuration.stop_words, text):
             continue
         if not text:
             continue
         data_list.append({
             'url': url,
             'source_datetime': pub_date,
             'text': text
         })
     return data_list
Example no. 27
    def to_representation(self, value):
        """
        If specified value is an epoch time, convert it first.

        :param value:
        :return:
        """

        # Handle epoch time
        timestamp = None
        if isinstance(value, str) and value.isdigit():
            timestamp = int(value)
        elif isinstance(value, Number):
            timestamp = value

        # ToDo: add additional time formats
        if timestamp:
            value = datetime.fromtimestamp(timestamp).isoformat()

        return value
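A standalone sketch of the same branching for use outside a serializer field; the function name is made up:

from datetime import datetime
from numbers import Number

def epoch_to_iso(value):
    timestamp = None
    if isinstance(value, str) and value.isdigit():
        timestamp = int(value)
    elif isinstance(value, Number):
        timestamp = value
    if timestamp:
        return datetime.fromtimestamp(timestamp).isoformat()
    return value

print(epoch_to_iso('1609459200'))   # local-time ISO 8601 string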
Example no. 28
def crawl_products_prices():
    print("#TASK: Running Products Crawl")

    data = requests.get(
        "https://api.hypixel.net/skyblock/bazaar?key={}".format(
            settings.HYPIXEL_API_KEY))
    data = json.loads(data.content)

    if data["success"] is not True:
        raise RuntimeError("API returned Success False!")

    data_time = make_aware(datetime.fromtimestamp(data["lastUpdated"] / 1000))

    products = {prod.asset_id: prod for prod in Product.objects.all()}

    bulk_prices = []
    for product in data["products"].values():
        if product["product_id"] not in products.keys():
            _prod = Product.objects.create(asset_id=product["product_id"])
            products[_prod.asset_id] = _prod

        bulk_prices.append(
            ProductPrice(
                product=products[product["product_id"]],
                sell_price=product["quick_status"]["sellPrice"],
                sell_volume=product["quick_status"]["sellVolume"],
                sell_orders=product["quick_status"]["sellOrders"],
                buy_price=product["quick_status"]["buyPrice"],
                buy_volume=product["quick_status"]["buyVolume"],
                buy_orders=product["quick_status"]["buyOrders"],
                created_at=data_time,
            ))

    ProductPrice.objects.bulk_create(bulk_prices)

    print("Finished crawl")
Example no. 29
 def parse_date(date_str):
     dt = re.search(r'\d+', date_str).group()
     return datetime.fromtimestamp(int(int(dt) / 1000.0))
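`parse_date` pulls the first run of digits out of the string and treats it as epoch milliseconds, which fits payloads like the `/Date(1496130000000)/` style some JSON APIs emit; the exact input format is an assumption here. A standalone sketch:

import re
from datetime import datetime

def parse_millis(date_str):
    # assumes the string embeds epoch milliseconds, e.g. '/Date(1496130000000)/'
    millis = int(re.search(r'\d+', date_str).group())
    return datetime.fromtimestamp(millis / 1000.0)

print(parse_millis('/Date(1496130000000)/'))         # local-time datetime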
Example no. 30
 def filter_by_maximum_itemed_date(items_query, timestamp):
     from datetime import datetime
     return items_query.filter(
         itemed_date__lte=datetime.fromtimestamp(timestamp))
Example no. 31
 def parse_violation(self, violation):
     # type: (Dict[str, Any]) -> Tuple[Text, Text]
     metric_name = violation['metric']
     recorded_at = datetime.fromtimestamp((violation['recorded_at']))
     return metric_name, recorded_at
Example no. 32
import logging

import pytz

from django.db import models
from django.conf import settings
from django.utils.datetime_safe import datetime
# assumed import: the _() calls below need a translation helper
from django.utils.translation import ugettext_lazy as _

from .managers import WeChatMemberGroupManager
from .managers import WeChatMemberManager
from .managers import WeChatMenuMatchRuleManager
from .managers import WeChatMenuButtonManager

from .api import get_user_list
from .api import get_user_info_by_union_id
from .api import get_all_groups

tz = pytz.timezone(settings.TIME_ZONE)

dt = lambda stamp: datetime.fromtimestamp(stamp, tz)

logger = logging.getLogger('wechat')


class WeChatMemberGroup(models.Model):
    group_id = models.IntegerField(verbose_name=_('group id'))
    name = models.CharField(max_length=128,
                            blank=True,
                            verbose_name=_('group name'))
    count = models.IntegerField(default=0, verbose_name=_('user count'))
    dt_created = models.DateTimeField(auto_now_add=True)
    dt_updated = models.DateTimeField(auto_now=True)

    objects = WeChatMemberGroupManager()
Example no. 33
def to_local_date(commit_date):
    return timezone.make_aware(datetime.fromtimestamp(mktime(time.gmtime(commit_date))), timezone.get_default_timezone())
Example no. 34
 def parse_violation(self, violation):
     # type: (Dict[str, Any]) -> Tuple[text_type, text_type]
     metric_name = violation['metric']
     recorded_at = datetime.fromtimestamp((violation['recorded_at']))
     return metric_name, recorded_at
Example no. 35
def parse_json(request):
    successful = 0
    fails = 0
    skips = 0
    count = 0
    fail_list = ""
    datadir = './data/'
    for filename in os.listdir(datadir):
        if filename.endswith('.json'):
            with open(os.path.join(datadir, filename)) as json_data:
                data = json.load(json_data)
                # successful = 0
                # fails = 0
                # count = 0
                # fail_list = ""

                data = data.get('ygData')
                msgId = data['msgId']
                data['sender'] = unescape(data.pop('from'))
                try:
                    data['postDate'] = datetime.fromtimestamp(int(data.pop('postDate')))
                except (KeyError, TypeError, ValueError):
                    print("{} no postDate".format(msgId))
                    continue
                # data['']

                # print(msgId)
                rawmsg = data.get('rawEmail')
                splitlines = rawmsg.splitlines()
                # for item in splitlines:
                #     print(unescape(item))

                # print("data loaded: {}, starting serializer".format(splitlines))

                # for thing in data:
                count += 1
                # .get() raises DoesNotExist for unseen messages; use exists() instead
                if not TrubEmail.objects.filter(msgId__exact=msgId).exists():
                    serializer = EmailSerializer(data=data)

                    # print("fields: {}".format(serializer))
                    if serializer.is_valid():
                        serializer.save()
                        successful += 1
                        print("ok {}".format(msgId))
                    else:
                        print("{} serializer was not valid".format(msgId))
                        # print(person)
                        # print(serializer.errors)
                        fail_list += repr(serializer.errors) + ","
                        fails += 1
                else:
                    skips += 1

    print(
        "all done with process. ok: {}. fails: {}. Out of {} total records".format(
            successful, fails, count))
    if fail_list != "":
        print("fails: {}".format(fail_list))
    return render(request, 'trubYahooArchive/importjson.html',
                  {'successful': successful,
                   'fails': fails,
                   'count': count,
                   'skips': skips,
                   'data': splitlines,
                   },
                  )
Example no. 36
 def convert_datefield_value(self, value, expression, connection, *args, **kwargs):
     if value is None:
         return None
     return datetime.fromtimestamp(int(value)).date()
Example no. 37
def handle_result_files(app, files, cluster_name):
    from celery import chain

    # Load summary file and verify that the database/version is supported
    summary = JSONUtil.loads(''.join(files['summary_data'].chunks()))
    dbms_type = DBMSType.type(summary['DBMS Type'])
    # FIXME! bad hack until I have time to get the PG 9.3 metric/knob data in
    # the same form
    dbms_version = "9.6"
    #     dbms_version = DBMSUtil.parse_version_string(
    #        dbms_type, summary['DBMS Version'])

    try:
        dbms_object = DBMSCatalog.objects.get(type=dbms_type,
                                              version=dbms_version)
    except ObjectDoesNotExist:
        return HttpResponse('{} v{} is not yet supported.'.format(
            summary['DBMS Type'], dbms_version))

    if dbms_object != app.dbms:
        return HttpResponse('The DBMS must match the type and version '
                            'specified when creating the application. '
                            '(expected=' + app.dbms.full_name + ') '
                            '(actual=' + dbms_object.full_name + ')')

    # Load parameters, metrics, benchmark, and samples
    db_parameters = JSONUtil.loads(''.join(
        files['db_parameters_data'].chunks()))
    db_metrics = JSONUtil.loads(''.join(files['db_metrics_data'].chunks()))
    benchmark_config_str = ''.join(files['benchmark_conf_data'].chunks())

    benchmark_config = BenchmarkConfig.objects.create_benchmark_config(
        app, benchmark_config_str, summary['Benchmark Type'].upper())

    db_conf_dict, db_diffs = DBMSUtil.parse_dbms_config(
        dbms_object.pk, db_parameters)
    db_conf = DBConf.objects.create_dbconf(
        app, JSONUtil.dumps(db_conf_dict, pprint=True, sort=True),
        JSONUtil.dumps(db_diffs), dbms_object)

    db_metrics_dict, met_diffs = DBMSUtil.parse_dbms_metrics(
        dbms_object.pk, db_metrics)
    dbms_metrics = DBMSMetrics.objects.create_dbms_metrics(
        app, JSONUtil.dumps(db_metrics_dict, pprint=True, sort=True),
        JSONUtil.dumps(met_diffs), benchmark_config.time, dbms_object)

    timestamp = datetime.fromtimestamp(
        int(summary['Current Timestamp (milliseconds)']) / 1000,
        timezone("UTC"))
    result = Result.objects.create_result(
        app, dbms_object, benchmark_config, db_conf, dbms_metrics,
        JSONUtil.dumps(summary, pprint=True, sort=True), timestamp)
    result.summary_stats = Statistics.objects.create_summary_stats(
        summary, result, benchmark_config.time)
    result.save()

    wkld_cluster = WorkloadCluster.objects.create_workload_cluster(
        dbms_object, app.hardware, cluster_name)
    param_data = DBMSUtil.convert_dbms_params(dbms_object.pk, db_conf_dict)
    external_metrics = Statistics.objects.get_external_metrics(summary)
    metric_data = DBMSUtil.convert_dbms_metrics(dbms_object.pk,
                                                db_metrics_dict,
                                                external_metrics,
                                                int(benchmark_config.time))

    ResultData.objects.create(result=result,
                              cluster=wkld_cluster,
                              param_data=JSONUtil.dumps(param_data,
                                                        pprint=True,
                                                        sort=True),
                              metric_data=JSONUtil.dumps(metric_data,
                                                         pprint=True,
                                                         sort=True))

    nondefault_settings = DBMSUtil.get_nondefault_settings(
        dbms_object.pk, db_conf_dict)
    app.project.last_update = now()
    app.last_update = now()
    if app.nondefault_settings is None:
        app.nondefault_settings = JSONUtil.dumps(nondefault_settings)
    app.project.save()
    app.save()

    path_prefix = MediaUtil.get_result_data_path(result.pk)
    paths = [
        (path_prefix + '.summary', 'summary_data'),
        (path_prefix + '.params', 'db_parameters_data'),
        (path_prefix + '.metrics', 'db_metrics_data'),
        (path_prefix + '.expconfig', 'benchmark_conf_data'),
    ]

    for path, content_name in paths:
        with open(path, 'w') as f:
            for chunk in files[content_name].chunks():
                f.write(chunk)

    if app.tuning_session is False:
        return HttpResponse("Store success!")

    response = chain(aggregate_target_results.s(result.pk), map_workload.s(),
                     configuration_recommendation.s()).apply_async()
    taskmeta_ids = [response.parent.parent.id, response.parent.id, response.id]
    result.task_ids = ','.join(taskmeta_ids)
    result.save()
    return HttpResponse("Store Success! Running tuner... (status={})".format(
        response.status))
Example no. 38
def humane_time(timestamp):
    """Render time (number of second from epoch) to an human readable string"""
    return format_date(datetime.fromtimestamp(timestamp))
Example no. 39
def handle_result_file(app,
                       files,
                       use="",
                       hardware="hardware",
                       cluster="cluster"):

    summary = "".join(files['summary_data'].chunks())
    summary_lines = json.loads(summary)
    db_conf = "".join(files['db_conf_data'].chunks())
    db_conf_lines = json.loads(db_conf)

    status_data = "".join(files['db_status_data'].chunks())
    res_data = "".join(files['sample_data'].chunks())
    if 'raw_data' in files:
        raw_data = "".join(files['raw_data'].chunks())
    else:
        raw_data = ""
    bench_conf_data = "".join(files['benchmark_conf_data'].chunks())

    w = Workload_info()
    dom = xml.dom.minidom.parseString(bench_conf_data)
    root = dom.documentElement
    w.isolation = (root.getElementsByTagName('isolation'))[0].firstChild.data
    w.scalefactor = (
        root.getElementsByTagName('scalefactor'))[0].firstChild.data
    w.terminals = (root.getElementsByTagName('terminals'))[0].firstChild.data
    w.time = (root.getElementsByTagName('time'))[0].firstChild.data
    w.rate = (root.getElementsByTagName('rate'))[0].firstChild.data
    w.skew = (root.getElementsByTagName('skew'))  #[0].firstChild.data
    if len(w.skew) == 0:
        w.skew = -1.0
    else:
        w.skew = w.skew[0].firstChild.data
    weights = root.getElementsByTagName('weights')
    trans = root.getElementsByTagName('name')
    trans_dict = {}
    for i in range(trans.length):
        trans_dict[trans[i].firstChild.data] = weights[i].firstChild.data
    trans_json = json.dumps(trans_dict)
    w.trans_weights = trans_json
    w.workload = bench_conf_data
    w.save()

    db_type = summary_lines['dbms'].upper()
    db_version = summary_lines['version']
    bench_type = summary_lines['benchmark'].upper()

    bench = Oltpbench_info()
    bench.summary = summary
    bench.dbms_name = db_type
    bench.dbms_version = db_version
    bench.res = res_data
    bench.status = status_data
    bench.raw = raw_data
    bench.cfg = db_conf
    bench.wid = w
    bench.user = app.user

    bench.hardware = hardware
    bench.cluster = cluster

    bench.save()

    if use.lower() == 'store':
        return HttpResponse("Store Success !")

    knob_params = KNOB_PARAMS.objects.filter(db_type=db_type)
    knob_dict = {}

    for x in knob_params:
        name = x.params
        tmp = Knob_catalog.objects.filter(name=name)
        knob_dict[name] = tmp[0].valid_vals

    cfgs = Oltpbench_info.objects.filter(user=app.user)

    target_Xs = []
    target_Ys = []

    for x in cfgs:
        target_x, target_y = process_config(x.cfg, knob_dict, x.summary)
        target_Xs.append(target_x)
        target_Ys.append(target_y)

    exps = Oltpbench_info.objects.filter(dbms_name=db_type,
                                         dbms_version=db_version,
                                         hardware=hardware)

    #print target_Xs
    #print target_Ys

    ### workload mapping

    clusters_list = []
    for x in exps:
        t = x.cluster
        if t not in clusters_list and t != 'unknown':
            clusters_list.append(t)

    workload_min = []
    X_min = []
    Y_min = []
    min_dist = 1000000

    for name in clusters_list:
        exps_ = Oltpbench_info.objects.filter(dbms_name=db_type,
                                              dbms_version=db_version,
                                              hardware=hardware,
                                              cluster=name)

        X = []
        Y = []
        for x_ in exps_:
            x, y = process_config(x_.cfg, knob_dict, x_.summary)
            X.append(x)
            Y.append(y)

        sample_size = len(X)
        ridges = np.random.uniform(0, 1, [sample_size])
        print "workload"
        y_gp = gp_workload(X, Y, target_Xs, ridges)
        dist = np.sqrt(sum(pow(np.transpose(y_gp - target_Ys)[0], 2)))
        if min_dist > dist:
            min_dist = dist
            X_min = X
            Y_min = Y
            workload_min = name

    bench.cluster = workload_min
    bench.save()

    globals = db_conf_lines['global']
    globals = globals[0]

    db_cnf_names = globals['variable_names']
    db_cnf_values = globals['variable_values']
    db_cnf_info = {}
    for i in range(len(db_cnf_names)):
        db_cnf_info[db_cnf_names[i]] = db_cnf_values[i]

    if not db_type in DBConf.DB_TYPES:
        return HttpResponse(db_type + "  db_type Wrong")

    features = LEARNING_PARAMS.objects.filter(db_type=db_type)
    LEARNING_VARS = []
    for f in features:
        LEARNING_VARS.append(re.compile(f.params, re.UNICODE | re.IGNORECASE))

    db_conf_list = []
    similar_conf_list = []
    for i in range(len(db_cnf_info)):
        key = db_cnf_info.keys()[i]
        value = db_cnf_info.values()[i]
        for v in LEARNING_VARS:
            if v.match(key):
                similar_conf_list.append([key, value])
        db_conf_list.append([key, value])

    db_conf_str = json.dumps(db_conf_list)
    similar_conf_str = json.dumps(similar_conf_list)

    try:
        db_confs = DBConf.objects.filter(configuration=db_conf_str,
                                         similar_conf=similar_conf_str)
        if len(db_confs) < 1:
            raise DBConf.DoesNotExist
        db_conf = db_confs[0]
    except DBConf.DoesNotExist:
        db_conf = DBConf()
        db_conf.creation_time = now()
        db_conf.name = ''
        db_conf.configuration = db_conf_str
        db_conf.application = app
        db_conf.db_type = db_type
        db_conf.similar_conf = similar_conf_str
        db_conf.save()
        db_conf.name = db_type + '@' + db_conf.creation_time.strftime(
            "%Y-%m-%d,%H") + '#' + str(db_conf.pk)
        db_conf.save()
    bench_conf_str = "".join(files['benchmark_conf_data'].chunks())

    try:
        bench_confs = ExperimentConf.objects.filter(
            configuration=bench_conf_str)
        if len(bench_confs) < 1:
            raise ExperimentConf.DoesNotExist
        bench_conf = bench_confs[0]
    except ExperimentConf.DoesNotExist:
        bench_conf = ExperimentConf()
        bench_conf.name = ''
        bench_conf.application = app
        bench_conf.configuration = bench_conf_str
        bench_conf.benchmark_type = bench_type
        bench_conf.creation_time = now()
        bench_conf.isolation = summary_lines['isolation_level'].upper()
        bench_conf.terminals = summary_lines['terminals']
        bench_conf.scalefactor = summary_lines['scalefactor']
        bench_conf.save()
        bench_conf.name = bench_type + '@' + bench_conf.creation_time.strftime(
            "%Y-%m-%d,%H") + '#' + str(bench_conf.pk)
        bench_conf.save()

    result = Result()
    result.db_conf = db_conf
    result.benchmark_conf = bench_conf
    result.application = app
    result.timestamp = datetime.fromtimestamp(
        int(summary_lines['timestamp_utc_sec']), timezone("UTC"))

    latency_dict = {}
    names = summary_lines['variable_names']
    values = summary_lines['variable_values']
    for i in range(len(names)):
        latency_dict[names[i]] = values[i]

    result.avg_latency = float(latency_dict['avg_lat_ms'])
    result.min_latency = float(latency_dict['min_lat_ms'])
    result.p25_latency = float(latency_dict['25th_lat_ms'])
    result.p50_latency = float(latency_dict['med_lat_ms'])
    result.p75_latency = float(latency_dict['75th_lat_ms'])
    result.p90_latency = float(latency_dict['90th_lat_ms'])
    result.p95_latency = float(latency_dict['95th_lat_ms'])
    result.p99_latency = float(latency_dict['99th_lat_ms'])
    result.max_latency = float(latency_dict['max_lat_ms'])
    result.throughput = float(latency_dict['throughput_req_per_sec'])
    result.creation_time = now()
    result.save()

    path_prefix = get_result_data_dir(result.pk)
    with open(path_prefix + '_sample', 'wb') as dest:
        for chunk in files['sample_data'].chunks():
            dest.write(chunk)
        dest.close()

    # TODO (DVA): fixme
    with open(path_prefix + '_raw', 'wb') as dest:
        if 'raw_data' in files:
            for chunk in files['raw_data'].chunks():
                dest.write(chunk)
        else:
            dest.write('')
        dest.close()

    myfile = "".join(files['sample_data'].chunks())
    input = json.loads(myfile)
    sample_lines = input['samples']

    for line in sample_lines:
        sta = Statistics()
        nums = line
        sta.result = result
        sta.time = int(float(nums[0]))
        sta.throughput = float(nums[2])
        sta.avg_latency = float(nums[3])
        sta.min_latency = float(nums[4])
        sta.p25_latency = float(nums[5])
        sta.p50_latency = float(nums[6])
        sta.p75_latency = float(nums[7])
        sta.p90_latency = float(nums[8])
        sta.p95_latency = float(nums[9])
        sta.p99_latency = float(nums[10])
        sta.max_latency = float(nums[11])
        sta.save()

    app.project.last_update = now()
    app.last_update = now()
    app.project.save()
    app.save()

    id = result.pk
    task = Task()
    task.id = id
    task.creation_time = now()
    print "run_ml"
    response = run_ml.delay(X_min, Y_min, knob_dict.keys())
    task.status = response.status
    task.save()
    #time limits  default  300s
    time_limit = Website_Conf.objects.get(name='Time_Limit')
    time_limit = int(time_limit.value)

    for i in range(time_limit):
        time.sleep(1)
        if response.status != task.status:
            task.status = response.status
            task.save()
        if response.ready():
            task.finish_time = now()
            break

    response_message = task.status
    if task.status == "FAILURE":
        task.traceback = response.traceback
        task.running_time = (task.finish_time - task.creation_time).seconds
        response_message += ": " + response.traceback
    elif task.status == "SUCCESS":
        res = response.result
        with open(path_prefix + '_new_conf', 'wb') as dest:
            dest.write(res)
            dest.close()

        task.running_time = (task.finish_time - task.creation_time).seconds
        task.result = res
        task.save()
        return HttpResponse(res)
    else:
        task.status = "TIME OUT"
        task.traceback = response.traceback
        task.running_time = time_limit
        response_message = "TIME OUT: " + response.traceback
    task.save()
    #     return  HttpResponse(task.status)
    return HttpResponse(response_message)
Example no. 40
def handle_result_files(session, files):
    from celery import chain

    # Combine into contiguous files
    files = {k: ''.join(v.chunks()) for k, v in files.iteritems()}

    # Load the contents of the controller's summary file
    summary = JSONUtil.loads(files['summary'])
    dbms_type = DBMSType.type(summary['database_type'])
    dbms_version = summary['database_version']  # TODO: fix parse_version_string
    workload_name = summary['workload_name']
    observation_time = summary['observation_time']
    start_time = datetime.fromtimestamp(
        int(summary['start_time']) / 1000,
        timezone("UTC"))
    end_time = datetime.fromtimestamp(
        int(summary['end_time']) / 1000,
        timezone("UTC"))
    try:
        # Check that we support this DBMS and version
        dbms = DBMSCatalog.objects.get(
            type=dbms_type, version=dbms_version)
    except ObjectDoesNotExist:
        return HttpResponse('{} v{} is not yet supported.'.format(
            dbms_type, dbms_version))

    if dbms != session.dbms:
        return HttpResponse('The DBMS must match the type and version '
                            'specified when creating the session. '
                            '(expected=' + session.dbms.full_name + ') '
                            '(actual=' + dbms.full_name + ')')

    # Load, process, and store the knobs in the DBMS's configuration
    knob_dict, knob_diffs = Parser.parse_dbms_knobs(
        dbms.pk, JSONUtil.loads(files['knobs']))
    tunable_knob_dict = Parser.convert_dbms_knobs(
        dbms.pk, knob_dict)
    knob_data = KnobData.objects.create_knob_data(
        session, JSONUtil.dumps(knob_dict, pprint=True, sort=True),
        JSONUtil.dumps(tunable_knob_dict, pprint=True, sort=True), dbms)

    # Load, process, and store the runtime metrics exposed by the DBMS
    initial_metric_dict, initial_metric_diffs = Parser.parse_dbms_metrics(
        dbms.pk, JSONUtil.loads(files['metrics_before']))
    final_metric_dict, final_metric_diffs = Parser.parse_dbms_metrics(
        dbms.pk, JSONUtil.loads(files['metrics_after']))
    metric_dict = Parser.calculate_change_in_metrics(
        dbms.pk, initial_metric_dict, final_metric_dict)
    initial_metric_diffs.extend(final_metric_diffs)
    numeric_metric_dict = Parser.convert_dbms_metrics(
        dbms.pk, metric_dict, observation_time)
    metric_data = MetricData.objects.create_metric_data(
        session, JSONUtil.dumps(metric_dict, pprint=True, sort=True),
        JSONUtil.dumps(numeric_metric_dict, pprint=True, sort=True), dbms)

    # Create a new workload if this one does not already exist
    workload = Workload.objects.create_workload(
        dbms, session.hardware, workload_name)

    # Save this result
    result = Result.objects.create_result(
        session, dbms, workload, knob_data, metric_data,
        start_time, end_time, observation_time)
    result.save()

    # Save all original data
    backup_data = BackupData.objects.create(
        result=result, raw_knobs=files['knobs'],
        raw_initial_metrics=files['metrics_before'],
        raw_final_metrics=files['metrics_after'],
        raw_summary=files['summary'],
        knob_log=knob_diffs,
        metric_log=initial_metric_diffs)
    backup_data.save()

    nondefault_settings = Parser.get_nondefault_knob_settings(
        dbms.pk, knob_dict)
    session.project.last_update = now()
    session.last_update = now()
    if session.nondefault_settings is None:
        session.nondefault_settings = JSONUtil.dumps(nondefault_settings)
    session.project.save()
    session.save()

    if session.tuning_session is False:
        return HttpResponse("Result stored successfully!")

    response = chain(aggregate_target_results.s(result.pk),
                     map_workload.s(),
                     configuration_recommendation.s()).apply_async()
    taskmeta_ids = [response.parent.parent.id, response.parent.id, response.id]
    result.task_ids = ','.join(taskmeta_ids)
    result.save()
    return HttpResponse("Result stored successfully! Running tuner... (status={})".format(
        response.status))
Example no. 41
 def get_version(self):
     return datetime.fromtimestamp(os.path.getmtime(self._get_file('db-ip/dbip-location.csv'))).strftime('%Y-%m-%d')
Example no. 42
 def get_version(self):
     return datetime.fromtimestamp(self._reader.metadata().build_epoch).strftime('%Y-%m-%d')
Example no. 43
def handle_result_files(app, files, cluster_name):
    from celery import chain

    # Load summary file and verify that the database/version is supported
    summary = JSONUtil.loads(''.join(files['summary_data'].chunks()))
    dbms_type = DBMSType.type(summary['DBMS Type'])
    # FIXME! bad hack until I have time to get the PG 9.3 metric/knob data in
    # the same form
    dbms_version = "9.6"
#     dbms_version = DBMSUtil.parse_version_string(
#        dbms_type, summary['DBMS Version'])

    try:
        dbms_object = DBMSCatalog.objects.get(
            type=dbms_type, version=dbms_version)
    except ObjectDoesNotExist:
        return HttpResponse('{} v{} is not yet supported.'.format(
            summary['DBMS Type'], dbms_version))

    if dbms_object != app.dbms:
        return HttpResponse('The DBMS must match the type and version '
                            'specified when creating the application. '
                            '(expected=' + app.dbms.full_name + ') '
                            '(actual=' + dbms_object.full_name + ')')

    # Load parameters, metrics, benchmark, and samples
    db_parameters = JSONUtil.loads(
        ''.join(files['db_parameters_data'].chunks()))
    db_metrics = JSONUtil.loads(''.join(files['db_metrics_data'].chunks()))
    benchmark_config_str = ''.join(files['benchmark_conf_data'].chunks())
    samples = ''.join(files['sample_data'].chunks())

    benchmark_config = BenchmarkConfig.objects.create_benchmark_config(
        app, benchmark_config_str, summary['Benchmark Type'].upper())

    db_conf_dict, db_diffs = DBMSUtil.parse_dbms_config(
        dbms_object.pk, db_parameters)
    db_conf = DBConf.objects.create_dbconf(
        app, JSONUtil.dumps(db_conf_dict, pprint=True, sort=True),
        JSONUtil.dumps(db_diffs), dbms_object)

    db_metrics_dict, met_diffs = DBMSUtil.parse_dbms_metrics(
            dbms_object.pk, db_metrics)
    dbms_metrics = DBMSMetrics.objects.create_dbms_metrics(
        app, JSONUtil.dumps(db_metrics_dict, pprint=True, sort=True),
        JSONUtil.dumps(met_diffs), benchmark_config.time, dbms_object)

    timestamp = datetime.fromtimestamp(
        int(summary['Current Timestamp (milliseconds)']) / 1000,
        timezone("UTC"))
    result = Result.objects.create_result(
        app, dbms_object, benchmark_config, db_conf, dbms_metrics,
        JSONUtil.dumps(summary, pprint=True, sort=True), samples,
        timestamp)
    result.summary_stats = Statistics.objects.create_summary_stats(
        summary, result, benchmark_config.time)
    result.save()
    Statistics.objects.create_sample_stats(samples, result)

    wkld_cluster = WorkloadCluster.objects.create_workload_cluster(
        dbms_object, app.hardware, cluster_name)
    param_data = DBMSUtil.convert_dbms_params(
        dbms_object.pk, db_conf_dict)
    external_metrics = Statistics.objects.get_external_metrics(summary)
    metric_data = DBMSUtil.convert_dbms_metrics(
        dbms_object.pk, db_metrics_dict, external_metrics,
        int(benchmark_config.time))

    ResultData.objects.create(result=result,
                              cluster=wkld_cluster,
                              param_data=JSONUtil.dumps(param_data,
                                                        pprint=True,
                                                        sort=True),
                              metric_data=JSONUtil.dumps(metric_data,
                                                         pprint=True,
                                                         sort=True))

    nondefault_settings = DBMSUtil.get_nondefault_settings(dbms_object.pk,
                                                           db_conf_dict)
    app.project.last_update = now()
    app.last_update = now()
    if app.nondefault_settings is None:
        app.nondefault_settings = JSONUtil.dumps(nondefault_settings)
    app.project.save()
    app.save()

    path_prefix = MediaUtil.get_result_data_path(result.pk)
    paths = [
        (path_prefix + '.samples', 'sample_data'),
        (path_prefix + '.summary', 'summary_data'),
        (path_prefix + '.params', 'db_parameters_data'),
        (path_prefix + '.metrics', 'db_metrics_data'),
        (path_prefix + '.expconfig', 'benchmark_conf_data'),
    ]

    for path, content_name in paths:
        with open(path, 'w') as f:
            for chunk in files[content_name].chunks():
                f.write(chunk)

    if 'raw_data' in files:
        with open('{}.csv.tgz'.format(path_prefix), 'w') as f:
            for chunk in files['raw_data'].chunks():
                f.write(chunk)

    if app.tuning_session is False:
        return HttpResponse("Store success!")

    response = chain(aggregate_target_results.s(result.pk),
                     map_workload.s(),
                     configuration_recommendation.s()).apply_async()
    taskmeta_ids = [response.parent.parent.id, response.parent.id, response.id]
    result.task_ids = ','.join(taskmeta_ids)
    result.save()
    return HttpResponse("Store Success! Running tuner... (status={})".format(
        response.status))
Example no. 44
 def date(self, entry):
     if 'published_parsed' in entry:
         dt = datetime.fromtimestamp(mktime(entry.published_parsed))
         # replace() returns a new datetime; keep the result
         dt = dt.replace(tzinfo=utc)
         return dt
     return None
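`datetime.replace` returns a new object rather than mutating the receiver, which is why the result is reassigned in the listing above. A minimal illustration:

from datetime import datetime, timezone

naive = datetime.fromtimestamp(0)                    # naive, local time
aware = naive.replace(tzinfo=timezone.utc)           # replace() returns a new datetime
assert naive.tzinfo is None and aware.tzinfo is timezone.utc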
Example no. 45
    def process_request(self, request):
        """
        Writes the signed_request into the Session 
        """
        fb = FBSession(request)
        application = get_app_dict()

        if "feincms" in settings.INSTALLED_APPS:
            # if feincms is installed, try to get the application from the page
            from facebook.feincms.utils import get_application_from_request

            page_app = get_application_from_request(request)
            if application:
                application = get_app_dict(page_app)

        # default POST/GET request from facebook with a signed request
        if "signed_request" in request.POST:
            parsed_request = parseSignedRequest(request.POST["signed_request"], application["SECRET"])
            logger.debug(u"got signed_request from facebook: %s" % parsed_request)
            if "language" in parsed_request:
                language = parsed_request["user"]["locale"]
                logger.debug("language: %s" % language)
                request.LANGUAGE_CODE = language
                translation.activate(language)
            fb.signed_request = parsed_request
            logger.debug("stored signed_request")
            expires = None
            # rewrite important data
            if "oauth_token" in parsed_request:
                expires = datetime.fromtimestamp(float(parsed_request["expires"]))
                fb.store_token(parsed_request["oauth_token"], expires)
            elif "access_token" in parsed_request:
                expires = datetime.fromtimestamp(float(parsed_request["expires"]))
                fb.store_token(parsed_request["access_token"], expires)
            else:
                # The chance is good that there is already a valid token in the session.
                fb.store_token(None)

            if "user_id" in parsed_request:
                fb.user_id = parsed_request["user_id"]

            else:
                logger.debug("Signed Request didn't contain public user info.")
            if expires:
                logger.debug(
                    "Signed Request issued at: %s" % datetime.fromtimestamp(float(parsed_request["issued_at"]))
                )

        # auth via callback from facebook
        elif "code" in request.REQUEST:
            args = dict(
                client_id=application["id"],
                client_secret=application["secret"],
                code=request.REQUEST["code"],
                redirect_uri=request.build_absolute_uri()
                .split("?")[0]
                .replace(application["CANVAS-URL"], application["CANVAS-PAGE"]),
            )

            response = urllib.urlopen("https://graph.facebook.com/oauth/access_token?" + urllib.urlencode(args))
            raw = response.read()
            parsed = urlparse.parse_qs(raw)  # Python 2.6 parse_qs is now part of the urlparse module
            if parsed.get("access_token", None):
                expires = datetime.fromtimestamp(float(parsed["expires"][-1]))
                fb.store_token(parsed["access_token"][-1], expires)
                logger.debug("Got access token from callback: %s. Expires at %s" % (parsed, expires))
            else:
                logger.debug("facebook did not respond an accesstoken: %s" % raw)