def deploy():
    """Push the active git branch to the Heroku app mapped to it.

    Branch -> app mapping: master -> wigo2 (production), staging ->
    wigo2-stage, develop -> wigo2-dev.  Any other branch aborts with
    exit code 1.  Production deploys additionally require the unit
    test suite to pass first.
    """
    from git import Repo

    logconfig.configure('dev')

    repo = Repo('.')
    branch = repo.active_branch.name

    # Map the active branch to its deployment target; anything else is
    # not deployable.
    target = {
        'master': 'wigo2',
        'staging': 'wigo2-stage',
        'develop': 'wigo2-dev',
    }.get(branch)

    if target is None:
        logger.error('invalid branch for deployment, %s' % branch)
        sys.exit(1)

    # Gate production deploys on a passing test run.
    if target == 'wigo2' and os.system('nosetests -w tests/') != 0:
        logger.error('error running unit tests')
        sys.exit(1)

    # BUG FIX: next() without a default raises StopIteration when the
    # remote does not exist yet, making the create_remote() fallback
    # unreachable; supply None so the create path actually runs.
    remote = next((r for r in repo.remotes if r.name == target), None)
    if not remote:
        remote = repo.create_remote(target, '[email protected]:%s.git' % target)

    logger.info('deploying to remote %s, %s' % (remote.name, remote.url))
    os.system('git push %s %s:master' % (target, branch))
def migrate_top_friends():
    """Backfill each user's 'top friends' sorted set.

    Walks every user, then every friend of that user, and seeds any
    friend missing from the user's friends:top sorted set with an
    initial score of 1.  Progress is logged every 100 users.
    """
    logconfig.configure('dev')

    processed = 0
    for user_id, _user_score in wigo_db.sorted_set_iter(skey('user'), count=50):
        friend_iter = wigo_db.sorted_set_iter(
            skey('user', user_id, 'friends'), count=50)
        for friend_id, _friend_score in friend_iter:
            already_top = wigo_db.sorted_set_is_member(
                skey('user', user_id, 'friends', 'top'), friend_id)
            if not already_top:
                wigo_db.sorted_set_add(
                    skey('user', user_id, 'friends', 'top'), friend_id, 1)
        processed += 1
        if processed % 100 == 0:
            logger.info('fixed {} users'.format(processed))
def update_facebook_token_expirations():
    """Refresh the stored Facebook token expiration for stale users.

    For every user whose recorded token expiry has passed, ask Facebook
    for the token's real expiration and persist it when it differs.
    Users whose token is truly expired are skipped silently.
    """
    from server.services.facebook import Facebook, FacebookTokenExpiredException

    logconfig.configure('dev')

    for user in User.select():
        # Only re-check tokens whose recorded expiry has already passed.
        if not (user.facebook_token_expires < datetime.utcnow()):
            continue

        facebook = Facebook(user.facebook_token, user.facebook_token_expires)
        try:
            token_expires = facebook.get_token_expiration()
            if token_expires and token_expires != user.facebook_token_expires:
                user.facebook_token_expires = token_expires
                user.save()
                print('updated user {}'.format(user.id))
        except FacebookTokenExpiredException:
            # Token is dead on Facebook's side; nothing to record.
            pass
def get_options() -> Options:
    """Retrieve command-line arguments and options.

    Returns:
        Options: the validated input file list, output directory, and
            config file path.

    Raises:
        CellAnnealerError: if the start/finish interval is invalid, an
            input file in the interval is missing, or the output
            directory / config file does not exist.
    """
    args = parse_args()

    # configure logger
    verbosity = args.verbose - args.quiet
    logconfig.configure(args.log, verbosity)

    # Validate the interval up front.  start == finish is allowed (a
    # single input file), so the error message says "less than or equal
    # to" — the previous wording contradicted the check.
    if args.finish < args.start and args.finish >= 0:
        raise CellAnnealerError(
            'Invalid interval: start number must be less than or equal to finish number')
    if args.start < 0:
        raise CellAnnealerError(
            'Invalid interval: start number must be greater than or equal to zero')

    # Collect numbered input files from start to finish; a negative
    # finish means "open-ended: take files until one is missing".
    input_files = []
    for i in count(args.start):
        # check to see if the file exists
        path = Path(args.input % i)
        if path.exists() and path.is_file():
            input_files.append(path)
            # break out if reached finish option
            if i == args.finish:
                break
        elif args.finish < 0 and args.start != i:
            # Open-ended interval: the first missing file after at least
            # one collected file ends the sequence normally.
            break
        else:
            # Missing file inside a bounded interval (or the very first
            # file) is an error.
            raise CellAnnealerError(f'Input file not found: \'{path}\'')

    # ensure output directory exists
    if not (args.output.exists() and args.output.is_dir()):
        raise CellAnnealerError(
            f'Output directory not found: \'{args.output}\'')

    # ensure config file exists
    if not (args.config.exists() and args.config.is_file()):
        raise CellAnnealerError(f'Config file not found: \'{args.config}\'')

    return Options(input_files, args.output, args.config)
# -*- coding: utf-8 -*- import logging import os.path import platform import logconfig from lisp import cons, lbool, llist, lstring, read_lisp, symbol, write_lisp __all__ = ['SwankProtocol'] logconfig.configure() logger = logging.getLogger(__name__) class SwankProtocol(object): """Swank Protocol implementation for Python. The most important function here is the dispatch function that takes care of parsing lisp data to detect the correct method to call and its arguments. Once the appropiate method is called it also takes care of converting to python result to a lisp expression to be returned to the client. All other functions part of the Swank protocol wont do any Lisp conversion or parsing. All of them get what they need in python code and return Python results. The read_lisp and write_lisp take care of doing proper conversions for all datatypes.
from __future__ import absolute_import import logconfig from config import Configuration logconfig.configure(Configuration.ENVIRONMENT) import os import ujson import logging import requests import click from newrelic import agent from datetime import datetime from urlparse import urlparse from flask.ext.restful import abort from flask.ext.sslify import SSLify from rq_dashboard import RQDashboard from flask import Flask, render_template, g, request, jsonify, Response from flask.ext.admin import Admin from flask.ext.compress import Compress from flask.ext.restplus import apidoc from server import ApiSessionInterface from server.admin import UserModelView, GroupModelView, ConfigView, \ MessageView, EventModelView, WigoAdminIndexView, EventMessageView from server.rest import api_blueprint from server.tasks.uploads import wire_uploads_listeners from server.tasks.images import wire_images_listeners from server.tasks.notifications import wire_notifications_listeners
from lisp import LispReader from protocol import SwankProtocol from repl import repl import ulisp try: import SocketServer as socketserver except ImportError: # Python 3 support import socketserver __all__ = [ 'HEADER_LENGTH', 'SwankServerRequestHandler', 'SwankServer', 'serve' ] logconfig.configure() HEADER_LENGTH = 6 PROMPT = "ULISP> " LOCALS = {"__name__": "__console__", "__doc__": None} class SwankServerRequestHandler(socketserver.BaseRequestHandler): """Request handler for the SwankServer. Handle protocol requests from swank client by dispatching received data to SwankProtocol.dispatch and returns to the client whatever it replies. """ def __init__(self, request, client_address, server):
def initialize(create_tables=False, import_cities=False):
    """One-time environment setup for the dev deployment.

    Args:
        create_tables: create the key/value mirror tables in the RDBMS,
            plus the json expression indexes and relational views used
            for reporting queries.
        import_cities: rebuild the redis geo city index from the
            bundled geodis cities1000.json data file.

    Returns:
        The result of the final redis pipeline flush when import_cities
        is set, otherwise None.
    """
    logconfig.configure('dev')

    if create_tables:
        from server.rdbms import db, DataStrings, DataSets, DataSortedSets, DataExpires, DataIntSets, DataIntSortedSets

        db.create_tables([DataStrings, DataSets, DataIntSets, DataSortedSets,
                          DataIntSortedSets, DataExpires], safe=True)

        # Expression indexes and views over the json blobs so the
        # key/value data can be queried relationally.
        # BUG FIX: the data_strings_groups index had a trailing comma in
        # its column list ("(value->>'$type'), )"), a SQL syntax error
        # that would abort this whole DDL batch.
        db.execute_sql("""
            CREATE OR REPLACE FUNCTION timestamp_cast(VARCHAR) RETURNS TIMESTAMP AS
            'select cast($1 as timestamp)' LANGUAGE SQL IMMUTABLE RETURNS NULL ON NULL INPUT;

            CREATE INDEX data_strings_gin ON data_strings USING gin (value);

            CREATE INDEX data_strings_id ON data_strings(
              (value->>'$type'), CAST(value->>'id' AS BIGINT) DESC
            );

            CREATE INDEX data_strings_events ON data_strings(
              (value->>'$type'), CAST(value->>'expires' AS TIMESTAMP)
            ) WHERE value->>'$type' = 'Event';

            CREATE INDEX data_strings_eventmessages ON data_strings(
              (value->>'$type'), CAST(value->>'id' AS BIGINT) DESC
            ) WHERE value->>'$type' = 'EventMessage';

            CREATE INDEX data_strings_eventmessages_event_id ON data_strings(
              (value->>'$type'), CAST(value->>'event_id' AS BIGINT) DESC
            ) WHERE value->>'$type' = 'EventMessage';

            CREATE INDEX data_strings_groups ON data_strings(
              (value->>'$type')
            ) WHERE value->>'$type' = 'Group';

            CREATE INDEX data_strings_first_name ON data_strings(
              (value->>'$type'), LOWER(value->>'first_name') varchar_pattern_ops
            ) WHERE value->>'$type' = 'User';

            CREATE INDEX data_strings_last_name ON data_strings(
              (value->>'$type'), LOWER(value->>'last_name') varchar_pattern_ops
            ) WHERE value->>'$type' = 'User';

            CREATE INDEX data_int_sorted_sets_attendees_event_id ON data_int_sorted_sets(
              cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT)
            ) WHERE key ~ '\{event:\d+\}:attendees';

            CREATE INDEX data_int_sorted_sets_attendees_user_id ON data_int_sorted_sets(
              value
            ) WHERE key ~ '\{event:\d+\}:attendees';

            CREATE INDEX data_int_sorted_sets_votes_message_id ON data_int_sorted_sets(
              cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT)
            ) WHERE key ~ '\{eventmessage:\d+\}:votes';

            CREATE INDEX data_int_sorted_sets_votes_user_id ON data_int_sorted_sets(
              value
            ) WHERE key ~ '\{eventmessage:\d+\}:votes';

            CREATE INDEX data_int_sorted_sets_taps_user_id ON data_int_sorted_sets(
              cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT)
            ) WHERE key ~ '\{user:\d+\}:tapped';

            CREATE INDEX data_int_sorted_sets_taps_tapped_id ON data_int_sorted_sets(
              value
            ) WHERE key ~ '\{user:\d+\}:tapped';

            CREATE INDEX data_int_sorted_sets_invites_user_id ON data_int_sorted_sets(
              cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 4) as BIGINT)
            ) WHERE key ~ '\{event:\d+\}:user:\d+:invited';

            CREATE INDEX data_int_sorted_sets_invites_event_id ON data_int_sorted_sets(
              cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT)
            ) WHERE key ~ '\{event:\d+\}:user:\d+:invited';

            CREATE INDEX data_int_sorted_sets_invites_invited_id ON data_int_sorted_sets(
              value
            ) WHERE key ~ '\{event:\d+\}:user:\d+:invited';

            CREATE INDEX data_int_sorted_sets_friends_user_id ON data_int_sorted_sets(
              cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT)
            ) WHERE key ~ '\{user:\d+\}:friends';

            CREATE INDEX data_int_sorted_sets_friends_friend_id ON data_int_sorted_sets(
              value
            ) WHERE key ~ '\{user:\d+\}:friends';

            CREATE OR REPLACE VIEW users AS
              SELECT key,
                CAST(value->>'id' AS BIGINT) id,
                CAST(value->>'group_id' AS BIGINT) group_id,
                value->>'first_name' first_name,
                value->>'last_name' last_name,
                value->>'gender' gender,
                data_strings.value ->> 'role'::text AS "role",
                value->>'status' status,
                CAST(value->>'latitude' as float) latitude,
                CAST(value->>'longitude' as float) longitude,
                timestamp_cast(value->>'created') "created"
              FROM data_strings WHERE value->>'$type' = 'User';

            CREATE OR REPLACE VIEW groups AS
              SELECT key,
                CAST(value->>'id' AS BIGINT) id,
                value->>'name' "name",
                value->>'code' code,
                value->>'city_id' city_id,
                value->>'state' state,
                value->>'country' country,
                CAST(value->>'latitude' as float) latitude,
                CAST(value->>'longitude' as float) longitude
              FROM data_strings WHERE value->>'$type' = 'Group';

            CREATE OR REPLACE VIEW events AS
              SELECT key,
                CAST(value->>'id' AS BIGINT) id,
                CAST(value->>'owner_id' AS BIGINT) owner_id,
                CAST(value->>'group_id' AS BIGINT) group_id,
                value->>'name' "name",
                timestamp_cast(value->>'expires') "expires",
                (SELECT COUNT(key) FROM data_int_sorted_sets
                  WHERE key = format('{event:%s}:attendees', (data_strings.value->>'id'))) num_attendees,
                timestamp_cast(value->>'date') "date",
                value->>'privacy' "privacy"
              FROM data_strings WHERE value->>'$type' = 'Event';

            CREATE OR REPLACE VIEW taps AS
              SELECT key,
                cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT) user_id,
                value as tapped_id,
                to_timestamp(score) as created,
                modified
              FROM data_int_sorted_sets WHERE key ~ '\{user:\d+\}:tapped';

            CREATE OR REPLACE VIEW friends AS
              SELECT key,
                cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT) user_id,
                value as friend_id,
                cast(null as timestamp) as created,
                modified
              FROM data_int_sorted_sets WHERE key ~ '\{user:\d+\}:friends';

            CREATE OR REPLACE VIEW invites AS
              SELECT key,
                cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT) event_id,
                cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 4) as BIGINT) user_id,
                value as invited_id,
                to_timestamp(score) as created,
                modified
              FROM data_int_sorted_sets WHERE key ~ '\{event:\d+\}:user:\d+:invited';

            CREATE OR REPLACE VIEW eventmessages AS
              SELECT key,
                CAST(value->>'id' AS BIGINT) id,
                CAST(value->>'user_id' AS BIGINT) user_id,
                CAST(value->>'event_id' AS BIGINT) event_id,
                value->>'media' "media",
                value->>'media_mime_type' "media_mime_type",
                timestamp_cast(value->>'created') "created"
              FROM data_strings WHERE value->>'$type' = 'EventMessage';

            CREATE OR REPLACE VIEW attendees AS
              select key,
                cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT) event_id,
                value as user_id
              from data_int_sorted_sets where key ~ '\{event:\d+\}:attendees';

            CREATE OR REPLACE VIEW votes AS
              select key,
                cast(split_part(replace(replace(key, '{', ''), '}', ''), ':', 2) as BIGINT) message_id,
                value as user_id
              from data_int_sorted_sets where key ~ '\{eventmessage:\d+\}:votes';

            CREATE OR REPLACE VIEW current_week AS
              select xdate.*,
                xdate.start_ts at time zone 'US/Hawaii' at time zone 'UTC' as start_ts_utc,
                xdate.end_ts at time zone 'US/Hawaii' at time zone 'UTC' as end_ts_utc
              from (
                select xdate.xdate + make_interval(days := (case when xdate.dow <= 3
                         then 3-xdate.dow-7 else 3-xdate.dow end)) as start_ts,
                       xdate.xdate - interval '1 microseconds' as end_ts
                from (
                  select xdate.xdate, cast(extract(dow from xdate) as int) as dow
                  from (select date(current_timestamp at time zone 'US/Eastern') as xdate) xdate
                ) xdate
              ) xdate;
        """)

    if import_cities:
        from server.db import redis

        # Rebuild the geo index from scratch.
        redis.delete(WigoCity.getGeohashIndexKey())

        cities_file = os.path.join(geodis.__path__[0], 'data',
                                   'cities1000.json')

        with open(cities_file) as f:
            pipe = redis.pipeline()
            lines = 0
            imported = 0
            skipped = 0
            for line in f:
                try:
                    row = [x.encode('utf-8') for x in ujson.loads(line)]
                    loc = WigoCity(
                        continent_id=row[0], continent=row[1],
                        country_id=row[2], country=row[3],
                        state_id=row[4], state=row[5],
                        city_id=row[6], name=row[7],
                        lat=float(row[8]), lon=float(row[9]),
                        population=int(row[11]))

                    # Only index reasonably large cities.
                    if loc.population > 40000:
                        loc.save(pipe)
                        imported += 1
                    else:
                        skipped += 1

                    lines += 1
                    if (lines % 2000) == 0:
                        logger.info('imported {}, skipped {}'.format(
                            imported, skipped))
                        pipe.execute()
                except Exception as e:
                    # Best-effort import: log the bad line and continue.
                    logging.exception("Could not import line %s: %s", line, e)

            # Flush whatever remains in the pipeline.
            return pipe.execute()