Exemple #1
0
    def dump_data(self):
        """Export hourly stats for self.user as a JSON time series and plot
        images, then render the static stats page.

        Returns (via Twisted returnValue) False when no public URL is
        configured, True otherwise.
        """
        if not self.public_url:
            returnValue(False)
        # History is fetched in chronological order so the pairwise diffs
        # below are hour-over-hour deltas.
        stats = yield find_stats({'user': self.user}, filter=sortasc('timestamp'), timeout=120)
        dates = [s['timestamp'] for s in stats]
        tweets = [s['tweets'] for s in stats]
        followers = [s['followers'] for s in stats]
        rts_diff = [s['rts_last_hour'] for s in stats]
        tweets_diff = [cur - prev for cur, prev in zip(tweets[1:], tweets[:-1])]
        followers_diff = [cur - prev for cur, prev in zip(followers[1:], followers[:-1])]
        # Cumulative RT count since the first sample.
        rts = []
        running = 0
        for hourly in rts_diff:
            running += hourly
            rts.append(running)

        # One entry per sample, keyed by unix timestamp; diff fields are
        # omitted for the final sample (no successor to diff against).
        jsondata = {}
        last = len(dates) - 1
        for i, date in enumerate(dates):
            ts = int(time.mktime(date.timetuple()))
            entry = {'tweets': tweets[i], 'followers': followers[i], 'rts': rts[i]}
            if i < last:
                entry['tweets_diff'] = tweets_diff[i]
                entry['followers_diff'] = followers_diff[i]
                entry['rts_diff'] = rts_diff[i + 1]
            jsondata[ts] = entry

        try:
            jsondir = os.path.join('web', 'data')
            if not os.path.exists(jsondir):
                os.makedirs(jsondir)
                os.chmod(jsondir, 0o755)
            with open(os.path.join(jsondir, 'stats_%s.json' % self.user), 'w') as outfile:
                write_json(jsondata, outfile)
        except IOError as e:
            loggerr("Could not write web/data/stats_%s.json : %s" % (self.user, e), action="stats")

        try:
            from plots import CumulativeCurve, DailyHistogram, WeekPunchCard
            imgdir = os.path.join('web', 'img')
            if not os.path.exists(imgdir):
                os.makedirs(imgdir)
                os.chmod(imgdir, 0o755)
            # Histograms/punchcards use the diff series, which is one element
            # shorter than dates, hence the dates[:-1] alignment.
            diff_dates = dates[:-1]
            CumulativeCurve(dates, tweets, 'Total tweets', imgdir, 'tweets_%s' % self.user)
            CumulativeCurve(dates, followers, 'Total followers', imgdir, 'followers_%s' % self.user)
            CumulativeCurve(dates, rts, 'Total RTs since %s' % dates[0], imgdir, 'rts_%s' % self.user)
            DailyHistogram(diff_dates, tweets_diff, 'New tweets', imgdir, 'new_tweets_%s' % self.user)
            DailyHistogram(diff_dates, followers_diff, 'New followers', imgdir, 'new_followers_%s' % self.user)
            DailyHistogram(diff_dates, rts_diff[1:], 'New RTs', imgdir, 'new_rts_%s' % self.user)
            WeekPunchCard(diff_dates, tweets_diff, 'Tweets punchcard', imgdir, 'tweets_card_%s' % self.user)
            WeekPunchCard(diff_dates, followers_diff, 'Followers punchcard', imgdir, 'followers_card_%s' % self.user)
            WeekPunchCard(diff_dates, rts_diff[1:], 'RTs punchcard', imgdir, 'rts_card_%s' % self.user)
        except Exception as e:
            loggerr("Could not write images in web/img for %s : %s" % (self.user, e), action="stats")

        data = {'user': self.user, 'url': self.public_url}
        self.render_template("static_stats.html", self.user, data)
        returnValue(True)
Exemple #2
0
    def dump_data(self):
        """Export stats for self.user as a JSON time series and plot images,
        then render the static stats template. No-op when self.url is unset.
        """
        if not self.url:
            return

        # Fetch the full history in chronological order so pairwise diffs
        # below are hour-over-hour deltas.
        self.db.authenticate(config.MONGODB['USER'], config.MONGODB['PSWD'])
        stats = list(self.db['stats'].find({'user': self.user}, sort=[('timestamp', pymongo.ASCENDING)]))
        dates = [s['timestamp'] for s in stats]
        tweets = [s['tweets'] for s in stats]
        followers = [s['followers'] for s in stats]
        rts_diff = [s['rts_last_hour'] for s in stats]
        tweets_diff = [cur - prev for cur, prev in zip(tweets[1:], tweets[:-1])]
        followers_diff = [cur - prev for cur, prev in zip(followers[1:], followers[:-1])]
        # Cumulative RT count since the first sample.
        rts = []
        running = 0
        for hourly in rts_diff:
            running += hourly
            rts.append(running)

        # One entry per sample keyed by unix timestamp; the final sample has
        # no successor, so its diff fields are omitted.
        jsondata = {}
        last = len(dates) - 1
        for i, date in enumerate(dates):
            ts = int(time.mktime(date.timetuple()))
            entry = {'tweets': tweets[i], 'followers': followers[i], 'rts': rts[i]}
            if i < last:
                entry['tweets_diff'] = tweets_diff[i]
                entry['followers_diff'] = followers_diff[i]
                entry['rts_diff'] = rts_diff[i + 1]
            jsondata[ts] = entry

        try:
            jsondir = os.path.join('web', 'data')
            if not os.path.exists(jsondir):
                os.makedirs(jsondir)
            with open(os.path.join(jsondir, 'stats_%s.json' % self.user), 'w') as outfile:
                write_json(jsondata, outfile)
        except IOError as e:
            loggerr("Could not write web/data/stats_%s.json : %s" % (self.user, e), action="stats")

        try:
            from plots import CumulativeCurve, DailyHistogram, WeekPunchCard
            imgdir = os.path.join('web', 'img')
            if not os.path.exists(imgdir):
                os.makedirs(imgdir)
            # Diff series are one element shorter than dates, hence dates[:-1].
            diff_dates = dates[:-1]
            CumulativeCurve(dates, tweets, 'Total tweets', imgdir, 'tweets_%s' % self.user)
            CumulativeCurve(dates, followers, 'Total followers', imgdir, 'followers_%s' % self.user)
            CumulativeCurve(dates, rts, 'Total RTs since %s' % dates[0], imgdir, 'rts_%s' % self.user)
            DailyHistogram(diff_dates, tweets_diff, 'New tweets', imgdir, 'new_tweets_%s' % self.user)
            DailyHistogram(diff_dates, followers_diff, 'New followers', imgdir, 'new_followers_%s' % self.user)
            DailyHistogram(diff_dates, rts_diff[1:], 'New RTs', imgdir, 'new_rts_%s' % self.user)
            WeekPunchCard(diff_dates, tweets_diff, 'Tweets punchcard', imgdir, 'tweets_card_%s' % self.user)
            WeekPunchCard(diff_dates, followers_diff, 'Followers punchcard', imgdir, 'followers_card_%s' % self.user)
            WeekPunchCard(diff_dates, rts_diff[1:], 'RTs punchcard', imgdir, 'rts_card_%s' % self.user)
        except Exception as e:
            loggerr("Could not write images in web/img for %s : %s" % (self.user, e), action="stats")

        self.render_template(os.path.join("web", "templates"), "static_stats.html")
Exemple #3
0
    def write_compiled_data(self):
        """Write the compiled data to the main compiled file and to a
        timestamped backup copy under the internal directory.

        Failures are reported on stdout rather than raised.
        """
        p = path.join(self.paths["internal"], "_compiled.sctpy")

        ftime = ftimefile(time())
        p2 = path.join(self.paths["internal"],
                       "_compiled_{}.sctpy".format(ftime))

        print("Saving compiled data to \"{}\"...".format(p), end="")

        try:
            # Context managers guarantee the handles are closed even when
            # write_json raises midway (the old code leaked them in that case).
            with open(p, "w") as cpfile:
                write_json(self.compiled, cpfile)

            with open(p2, "w") as cp2file:
                write_json(self.compiled, cp2file)

            print("Done")
        except Exception as e:
            # Narrowed from a bare `except:` (which also trapped
            # KeyboardInterrupt/SystemExit) and now reports the cause.
            print("Unable to write to file: {}".format(e))
Exemple #4
0
 def __init__(self, msg='Unauthorized'):
     """Build a 401 JSON response whose body carries *msg*."""
     payload = write_json({'status': 401, 'message': msg})
     Response.__init__(self, payload)
     self.status = 401
     self.content_type = 'application/json'
Exemple #5
0
 def __init__(self, msg='Not Found'):
     """Build a 404 JSON response whose body carries *msg*."""
     payload = write_json({'status': 404, 'message': msg})
     Response.__init__(self, payload)
     self.status = 404
     self.content_type = 'application/json'
# Keep only the charges accepted by the caller-supplied predicate.
filtered_charges = [charge for charge in charges["data"] if filtering_method(charge)]

# Add amounts, and refunded amounts to a large list for later summing
list_of_income = [entry['amount'] for entry in filtered_charges]
list_of_refund = [entry['amount_refunded'] for entry in filtered_charges]

# Sum the produced arrays to give totals (in USD cents).
# sum() returns 0 on an empty list, unlike the previous reduce() with no
# initializer, which raised TypeError when no charges matched the filter.
total_income = sum(list_of_income)
total_refund = sum(list_of_refund)

# Convert to strings and format
total_charges = len(filtered_charges)
income_USD = total_income / 100
refund_USD = total_refund / 100
new_total = (total_income - total_refund) / 100

# Display information about the refund
print("Total charges: " + str(total_charges))
print("Income money: $" + str(income_USD))
print("Refund money: $" + str(refund_USD))
print("Total money: $" + str(new_total))

# Remove file if it exists (kept for parity with the original run even
# though opening in 'w+' below would truncate it anyway)
if isfile(output_file_name):
    remove(output_file_name)

# Output JSON to file
with open(output_file_name, 'w+') as output_file:
    write_json(filtered_charges, output_file, indent=4, sort_keys=True)
Exemple #7
0
# Record the run-specific settings in the shared PARAMETERS dict.
# NOTE(review): `timestamp`, `args`, `varsList`, `variables`, `weightLSig`
# and `weightQSig` are presumably defined earlier in this script -- confirm.
PARAMETERS.update({
    "tag": timestamp.strftime("%d.%b.%Y_%H"),
    "log_file": os.path.join(args.dataset, "optimize_log_" + timestamp.strftime("%d.%b.%Y_%H") + ".txt"),
    "weight_string": varsList.weightStr,
    "cut_string": varsList.cutStr,
    "variables": variables,
    "weightLSig": sum(weightLSig),
    "weightQSig": sum(weightQSig)
    }
)

# Save used parameters to file
parameter_file = os.path.join(args.dataset, "parameters_" + PARAMETERS["tag"] + ".json")
with open(parameter_file, "w") as f:
    f.write(write_json(PARAMETERS, indent=2))
print("Parameters saved to dataset folder.")

# Start the logfile
# NOTE(review): the handle is left open on purpose -- later code outside
# this excerpt appears to keep writing to it; confirm it is closed at the
# end of the run.
logfile = open(PARAMETERS["log_file"], "w")
# Fixed-width header row for the hyperparameter-search log columns.
logfile.write("{:7}, {:7}, {:7}, {:7}, {:9}, {:14}, {:10}, {:7}\n".format(
      "Hidden",
      "Nodes",
      "Rate",
      "Batch",
      "Pattern",
      "Regulator",
      "Activation",
      "ROC"
    )
  )
Exemple #8
0
# Base URI of the locally running event service this script exercises.
uri = "http://localhost:6543"


def utc_now():
    """Return the current time as a timezone-aware datetime in UTC.

    NOTE(review): `utc` is imported elsewhere in this script (likely
    pytz.utc or an equivalent tzinfo) -- confirm.
    """
    return datetime.now(tz=utc)


# Smoke-test payload; startDate is the current UTC time rendered by the
# project's to_seconds_string helper.
first_event = {
     "title": "Hack-a-thon",
     "description": "Bakersfield's first hackathon.",
     "startDate": to_seconds_string(utc_now()),
}


# Create an event.
# NOTE(review): this is Python 2 code (see the `print` statement below);
# the X-EVENT-TOKEN header presumably authenticates the write -- confirm.
response = post(uri + "/events", data=write_json(first_event), headers={
    "X-EVENT-TOKEN": "0-secret"
})
# Fetch it back.
event = get(uri + "/events/" + str(response.json()['id'])).json()
# Assert the id is in the event id list.
assert event['id'] in get(uri + "/events").json()['events']
print get(uri + "/events").json()['events']


# Fetch it back.
assert event['id'] in get(uri + "/events", params={
    'offset': 0,
    'limit': 10**10,
    'starts_before_date': long(first_event['startDate']) + 1,
    'starts_on_or_after_date': first_event['startDate'],