class Command(BaseCommand):
    """Deprecated shim: warn, then forward everything to 'migrate_legacy_data'."""
    help = 'DEPRECATED: Migrates old data into the new django schema'

    option_list = BaseCommand.option_list + (
        make_option('--commit', action='store_true', dest='commit_changes',
                    default=False,
                    help='Commits the Changes to DB if all migrations are done right.'),
        make_option('--exclude', action='append', metavar='APP',
                    dest='excluded_apps', default=[],
                    help='Excludes the supplied app from beeing migrated.'),
        make_option('--logquery', action='store_true', dest='logquery',
                    default=False,
                    help='Print the corresponding Query for each migration.'),
    )

    def handle(self, *args, **options):
        # Emit the deprecation notice on stderr so piped output stays clean,
        # then delegate unchanged to the replacement command.
        sys.stderr.write(
            u"This command is deprecated in favour of 'migrate_legacy_data'\n\n")
        management.call_command('migrate_legacy_data', *args, **options)
class Command(BaseCommand):
    """Run the standard topic-modeling pipeline over one dataset."""
    help = "Extract topics for a dataset."
    args = "<dataset id>"
    option_list = BaseCommand.option_list + (
        make_option('--topics', dest='num_topics', default=30,
                    help='The number of topics to model'),
        make_option('--name', dest='name', default='my topic model',
                    help="The name for your keyword dictionary"),
    )

    def handle(self, dataset_id, *args, **options):
        # Validate the positional dataset id before doing any work.
        if not dataset_id:
            raise CommandError("Dataset id is required.")
        try:
            dataset_id = int(dataset_id)
        except ValueError:
            raise CommandError("Dataset id must be a number.")

        name = options.get('name')
        num_topics = options.get('num_topics')

        # Imported lazily so Django's app registry is ready first.
        from msgvis.apps.enhance.tasks import default_topic_context, standard_topic_pipeline

        context = default_topic_context(name, dataset_id=dataset_id)
        standard_topic_pipeline(context, dataset_id=dataset_id,
                                num_topics=int(num_topics))
class Command(BaseCommand):
    """Create a sample event in a Sentry project (for demos/tests)."""
    help = 'Creates a sample event in Sentry (if applicable)'
    option_list = BaseCommand.option_list + (
        make_option('--project', dest='project',
                    help="project ID or team-slug/project-slug"),
        make_option('--platform', dest='platform'),
    )

    def handle(self, **options):
        from django.conf import settings
        from sentry.constants import PLATFORM_LIST
        from sentry.models import Project
        from sentry.utils.samples import create_sample_event

        # Resolve the target project: settings default, numeric id, or
        # team-slug/project-slug.
        if not options['project']:
            project = Project.objects.get(id=settings.SENTRY_PROJECT)
        else:
            if options['project'].isdigit():
                project = Project.objects.get(id=options['project'])
            elif '/' in options['project']:
                t_slug, p_slug = options['project'].split('/', 1)
                project = Project.objects.get(slug=p_slug, team__slug=t_slug)
            else:
                raise CommandError('Project must be specified as team-slug/project-slug or a project id')

        # BUGFIX: validate the *effective* platform. Previously the membership
        # check ran on options['platform'] before the fallback, so omitting
        # --platform always raised 'Invalid platform' even though a fallback
        # to project.platform existed on the next line.
        platform = options['platform'] or project.platform
        if platform not in PLATFORM_LIST:
            raise CommandError('Invalid platform. Must specify one of: %s' % ', '.join(PLATFORM_LIST))

        event = create_sample_event(project, platform)
        if not event:
            raise CommandError('Unable to create an event for platform %r' % (str(platform),))

        self.stdout.write('Event created: %s' % (event.group.get_absolute_url(),))
class Command(BaseCommand):
    """Print (and optionally write/commit) the project's base proxy conf file."""
    help = 'Print out a base proxy conf file to use for this project.'
    option_list = BaseCommand.option_list + (
        make_option('--commit', action='store_true', dest='commit',
                    default=False,
                    help='Write the file out to the destination.'),
        make_option('--dry-run', action='store_true', dest='dry_run',
                    default=False,
                    # BUGFIX: corrected typo in help text ("distination").
                    help='Display the destination and changes to existing file if any.'),
        # NOTE(review): this default is evaluated once at module import, so a
        # long-lived process reuses the same timestamp for every run — confirm
        # whether a per-invocation timestamp is wanted.
        make_option('--message', dest='msg',
                    default='%s' % datetime.datetime.now(),
                    help='Optional commit message to include.'))

    def handle(self, *args, **options):
        # Delegate conf generation to the api layer and echo what it produced.
        output = api.proxy.write_main_conf(**options)
        self.stdout.write(output)
class Command(BaseCommand): help = '''Generates Session tweets for a given timeslot.''' option_list = BaseCommand.option_list + ( make_option('--event-id', action='store', dest='event_id', default=Event.objects.current().id, help='''The ID of the event to tweet sessions for '''), make_option('--timeslot', action='store', dest='timeslot', default='next', help='''The ISO datetime that the events being tweeted should start at'''), make_option( '--skip-if-delta', action='store', dest='skipdelta', default='600', help='''A timedelta in seconds that the timeslot should fall within in order to trigger tweet generation'''), ) def handle(self, *args, **options): event = Event.objects.get(pk=int(options.get('event_id'))) timeslot = options.get('timeslot') skipdelta = options.get('skipdelta') if skipdelta: skipdelta = datetime.timedelta( seconds=int(options.get('skipdelta'))) else: skipdelta = None if timeslot == 'next': sessions = Session.objects.next().filter(event=event) timeslot = sessions[0].start_time else: timeslot = dateparse(timeslot).replace( tzinfo=timezone.get_current_timezone()) if skipdelta is not None and timezone.now() + skipdelta < timeslot: print 'Sessions are too far in the future, aborting.' return try: tweet = SessionBlockTweet.objects.get(event=event, timeslot=timeslot, previous=None, sent_at=None) except SessionBlockTweet.DoesNotExist: print 'No tweets have been generated for this timeslot, or tweets have been sent already. Run ./manage.py generatetweets --event-id=%s --timeslot=%s and try again' % ( event.id, timeslot.isoformat()) return tweet.send() print 'Sent %d tweets for block %s.' % (tweet.total, timeslot.isoformat())
class Command(BaseCommand):
    """Load-generation helper: fire randomly chosen fake events at the internal
    Sentry project through the raven client, then report throughput."""
    help = 'Sends fake data to the internal Sentry project'
    option_list = BaseCommand.option_list + (
        make_option('--project', dest='project',
                    help="project ID or organization-slug/project-slug"),
        make_option('--num', dest='num_events', type=int),
    )

    def handle(self, **options):
        from django.conf import settings
        from raven.contrib.django.models import client
        from sentry.models import Project

        # Resolve the target project: settings default, numeric id, or
        # organization-slug/project-slug.
        if not options['project']:
            project = Project.objects.get(id=settings.SENTRY_PROJECT)
        else:
            if options['project'].isdigit():
                project = Project.objects.get(id=options['project'])
            elif '/' in options['project']:
                o_slug, p_slug = options['project'].split('/', 1)
                project = Project.objects.get(slug=p_slug, organization__slug=o_slug)
            else:
                raise CommandError('Project must be specified as organization-slug/project-slug or a project id')

        client.project = project.id

        self.stdout.write('Preparing to send events. Ctrl-C to exit.')
        time.sleep(2)

        functions = funcs()

        # max_events of -1 means "run until interrupted".
        if options['num_events']:
            max_events = options['num_events']
        else:
            max_events = -1

        s = time.time()
        r = 0
        try:
            while True:
                if r == max_events:
                    break
                if options['verbosity'] > 1:
                    self.stdout.write('Sending event..\n')
                # Each call sends one fake event through the raven client.
                random.choice(functions)(client)
                r += 1
        except KeyboardInterrupt:
            pass
        finally:
            # Report throughput even when the loop was interrupted.
            total_time = time.time() - s
            self.stdout.write('%d requests serviced in %.3fs\n' % (r, total_time))
            if r:
                avg = total_time / r
                ravg = 1 / avg
            else:
                avg = ravg = 0
            self.stdout.write('avg of %.3fs/req, %d req/s\n' % (avg, ravg))
class Command(BaseCommand):
    """Create a new experiment from a dictionary and initialize its conditions."""
    help = "Create a new experiment."
    args = '<dictionary_id> <output_folder>'
    option_list = BaseCommand.option_list + (
        make_option('-p', '--num_pairs', default=3, dest='num_pairs',
                    help='Num of pairs in each conditions'),
        make_option('-c', '--num_conditions', default=3, dest='num_conditions',
                    help='Num of conditions in this experiment'),
        make_option('-s', '--num_stages', default=3, dest='num_stages',
                    help='Num of stages in each condition'),
        make_option('-n', '--name', default='Experiment',
                    dest='experiment_name', help='Name of this experiment'),
    )

    def handle(self, dictionary_id, output_folder, **options):
        # Validate both positional arguments up front.
        if not dictionary_id:
            raise CommandError("Dictionary id is required.")
        try:
            dictionary_id = int(dictionary_id)
        except ValueError:
            raise CommandError("Dictionary id must be a number.")
        if not output_folder:
            raise CommandError("Output folder path is required.")

        # make sure the folder exists
        check_or_create_dir(output_folder)

        # Account credentials produced during initialization are logged here.
        with open("%s/user_accounts.log" % output_folder, "w") as output:
            experiment = experiment_models.Experiment(
                name=options.get('experiment_name'),
                dictionary_id=dictionary_id)
            experiment.save()
            experiment.initialize_experiment(
                num_conditions=options.get('num_conditions'),
                num_stages=options.get('num_stages'),
                num_pairs=options.get('num_pairs'),
                output=output)
class Command(BaseCommand): help = "Run tweet parser on a dataset. Results will be saved into files." args = '<dataset_id> <file_save_path>' option_list = BaseCommand.option_list + ( make_option('-a', '--action', default='all', dest='action', help='Action to run [all | dump | parse | lemmatize]'), make_option( '-p', '--path', default='/home/vagrant/textvisdrg/datasets/ark-tweet-nlp-0.3.2', dest='tweet_parser_path', help='Tweet parser path'), ) def handle(self, dataset_id, save_path, **options): action = options.get('action') tweet_parser_path = options.get('tweet_parser_path') if not dataset_id: raise CommandError("Dataset id is required.") try: dataset_id = int(dataset_id) except ValueError: raise CommandError("Dataset id must be a number.") if not save_path: raise CommandError("File save path is required.") check_or_create_dir(save_path) if action == 'all' or action == 'dump': from msgvis.apps.enhance.tasks import dump_tweets print "Dumping messages..." dump_tweets(dataset_id, save_path) if action == 'all' or action == 'parse': from msgvis.apps.enhance.tasks import parse_tweets output_path = "%s/parsed_tweets" % save_path check_or_create_dir(output_path) print "\n==========" print "Parsing messages..." parse_tweets(tweet_parser_path, save_path, output_path) if action == 'all' or action == 'lemmatize': from msgvis.apps.enhance.tasks import lemmatize_tweets input_path = "%s/parsed_tweets" % save_path output_path = "%s/converted_tweets" % save_path check_or_create_dir(output_path) print "\n==========" print "Lemmatizing messages..." lemmatize_tweets(input_path, output_path)
class Command(BaseCommand):
    """Print score differences between the active NORMAL report and the newest
    HIDDEN report of each matching submission."""
    args = ""
    help = _("Display differences between active and last hidden report for"
             " each submission")
    option_list = BaseCommand.option_list + (
        make_option('-r', '--round', action='store', type='int',
                    dest='round_id', help="Export only from this round"),
        make_option('-c', '--contest', action='store', type='string',
                    dest='contest_id', help="Export only from this contest"),
        make_option('-a', '--all', action='store_false', default=True,
                    dest='only_final',
                    help="Check scored submissions, not only final."),
    )

    def handle(self, *args, **options):
        # Start from user submissions only; narrow further per option.
        filters = Q(user__isnull=False)
        if options['contest_id']:
            contest = Contest.objects.get(id=options['contest_id'])
            filters &= Q(problem_instance__contest=contest)
        if options['round_id']:
            round = Round.objects.get(id=options['round_id'])
            filters &= Q(problem_instance__round=round)
        if options['only_final']:
            filters &= Q(submissionreport__userresultforproblem__isnull=False)

        submissions = Submission.objects.all().filter(filters).select_related()

        for submission in submissions:
            reports = submission.submissionreport_set
            try:
                old_report = reports.get(kind='NORMAL', status='ACTIVE')
                new_report = reports.filter(kind='HIDDEN').latest()
            except SubmissionReport.DoesNotExist:
                # Submissions missing either report kind are skipped.
                continue
            old_score = old_report.score_report.score
            new_score = new_report.score_report.score
            if old_score != new_score:
                print("%s: %s -> %s" % (submission, old_score, new_score))
class Command(BaseCommand): help = "Detect the language of a given set of messages" args = '<dataset_id> <file_save_path>' option_list = BaseCommand.option_list + ( make_option('-a', '--action', default='all', dest='action', help='Action to run [all | dump | parse | lemmatize]'), make_option('-p', '--path', default='/home/vagrant/emoticon-analysis/datasets/ldig', dest='ldig_path', help='LDIG path'), ) def handle(self, dataset_id, save_path, **options): action = options.get('action') ldig_path = options.get('ldig_path') if not dataset_id: raise CommandError("Dataset id is required.") try: dataset_id = int(dataset_id) except ValueError: raise CommandError("Dataset id must be a number.") if not save_path: raise CommandError("File save path is required.") check_or_create_dir(save_path) if action == 'all' or action == 'dump': from emoticonvis.apps.enhance.tasks import dump_tweets print "Dumping messages..." dump_tweets(dataset_id, save_path, 'lang_detection') if action == 'all' or action == 'detect': from emoticonvis.apps.enhance.tasks import run_lang_detection output_path = "%s/lang_detection_results" % save_path check_or_create_dir(output_path) print "Detect message languages..." run_lang_detection(ldig_path, save_path, output_path) if action == 'all' or action == 'smoothing_non_en_fr': from emoticonvis.apps.enhance.tasks import run_non_en_fr_lang_smoothing original_output_path = "%s/lang_detection_results" % save_path smoothed_output_path = "%s/En_Fr_only_results" % save_path check_or_create_dir(smoothed_output_path) print "Smooth out non-English and non-French languages..." run_non_en_fr_lang_smoothing(original_output_path, smoothed_output_path)
class Command(BaseCommand):
    """Serve the event-normalization handler on either a unix domain socket
    (--unix) or a TCP socket (--net); the two options are mutually exclusive."""
    help = 'Start a socket server for event normalization'
    option_list = BaseCommand.option_list + (
        make_option('--unix', dest='socket_file',
                    help='Unix socket to bind to. Example: "/tmp/normalize.sock"'),
        make_option('--net', dest='network_socket',
                    help='Network socket to bind to. Example: "127.0.0.1:1234"'),
    )

    def _check_socket_path(self, socket_file):
        # Refuse to clobber a regular file; a stale socket is unlinked so the
        # bind below can succeed.
        if os.path.exists(socket_file):
            file_mode = os.stat(socket_file).st_mode
            if not stat.S_ISSOCK(file_mode):
                raise CommandError('File already exists and is not a socket')
        # Make sure the socket does not already exist
        try:
            os.unlink(socket_file)
        except OSError:
            # Only re-raise if the unlink genuinely failed (path still there).
            if os.path.exists(socket_file):
                raise

    def handle(self, **options):
        socket_file = options.get('socket_file')
        network_socket = options.get('network_socket')
        if socket_file and network_socket:
            raise CommandError('Only one socket allowed at a time')
        elif socket_file:
            # NOTE(review): the abspath is stored on self but the original
            # (possibly relative) path is what gets checked and bound —
            # confirm which is intended.
            self.socket_file = os.path.abspath(socket_file)
            self._check_socket_path(socket_file)
            self.stdout.write('Binding to unix socket: %s' % (socket_file, ))
            server = SocketServer.UnixStreamServer(socket_file,
                                                   EventNormalizeHandler)
        elif network_socket:
            host, port = network_socket.split(':')
            port = int(port)
            self.stdout.write('Binding to network socket: %s:%s' % (host, port))
            server = SocketServer.TCPServer((host, port),
                                            EventNormalizeHandler)
        else:
            raise CommandError('No connection option specified')

        # Blocks forever handling connections.
        server.serve_forever()
def __init__(self, *args, **kwargs):
    """Extend the legacy optparse ``option_list`` with --model and --view.

    The ``hasattr`` guard keeps this working on newer Django versions, where
    BaseCommand no longer defines ``option_list`` (the argparse era).
    """
    if hasattr(self, 'option_list'):
        self.option_list = BaseCommand.option_list + (
            make_option('--model', dest='model',
                        help='specify the django model name',
                        metavar='MODEL', default=None),
            make_option('--view', dest='view',
                        help='specify the django view file name',
                        metavar='VIEW', default=None),
        )
    super(Command, self).__init__(*args, **kwargs)
class Command(BaseCommand):
    """Print a fresh password-reset link for every account matching a username."""
    help = 'Generate a link for a user to reset their password'
    option_list = BaseCommand.option_list + (make_option(
        '--noinput', dest='noinput', action='store_true', default=False,
        # NOTE(review): this help text appears copy-pasted from an
        # account-merge command, and 'noinput' is never read in handle() —
        # confirm whether the option can be dropped or the text corrected.
        help='Dont ask for confirmation before merging accounts.'), )

    def handle(self, username, **options):
        # with_valid_password=False: also match accounts whose password is
        # unusable, since those are exactly the ones needing a reset link.
        users = find_users(username, with_valid_password=False)
        if not users:
            sys.stdout.write("No account found with given username.\n")
            return

        for user in users:
            password_hash, created = LostPasswordHash.objects.get_or_create(
                user=user, )
            # Refresh expired hashes so the printed link is always usable.
            if not password_hash.is_valid():
                password_hash.date_added = timezone.now()
                password_hash.set_hash()
                password_hash.save()

            print('{} ({}) - {}'.format(
                user.username,
                user.email,
                password_hash.get_absolute_url(),
            ))
class Command(NoArgsCommand):
    """Render the daily-notifications template and email it as HTML."""
    help = "Django Emailer"
    option_list = NoArgsCommand.option_list + (
        make_option('--verbose', action='store_true'),
    )

    def handle_noargs(self, **options):
        import datetime
        from django.core.mail import send_mail
        from Users.userfunctions import ScheduleMessages
        from django.template.loader import get_template
        from django.template.context import Context
        from django.core.mail.message import EmailMessage

        t = get_template('emailTemplate.html')
        today = datetime.datetime.today()
        tomorrow = today + datetime.timedelta(1)
        day_after = tomorrow + datetime.timedelta(1)
        # NOTE(review): the three per-day message lists are computed but never
        # passed into the template context below — confirm whether the
        # template fetches them itself or these lines are dead code.
        today_messages = ScheduleMessages(today.month,today.year).get_message_for_day(today.day)
        tomorrow_messages = ScheduleMessages(tomorrow.month,tomorrow.year).get_message_for_day(tomorrow.day)
        day_after_messages = ScheduleMessages(day_after.month,day_after.year).get_message_for_day(day_after.day)
        #stri = "Tasks:- \n Today's:" + "\n" + today_messages + "\n" + "Tomorrow:" + "\n" + tomorrow_messages + " Day After: " + "\n" + day_after_messages
        c = Context({'today':today , 'tomorrow':tomorrow , 'day_after':day_after})
        msg = EmailMessage(subject = "Daily Notifications" , body = t.render(c),
                           from_email ='*****@*****.**',
                           to = ['*****@*****.**'])
        msg.content_subtype = "html"  # Main content is now text/html
        msg.send()
class Command(BaseCommand):
    """Run the TestFeature suite parametrized with a dataset's dictionary."""
    help = "Extract topics for a dataset."
    args = "<dataset id>"
    option_list = BaseCommand.option_list + (make_option(
        '--name', dest='name', default='my topic model',
        help="The name for your keyword dictionary"), )

    def handle(self, dataset_id, *args, **options):
        # Validate the positional dataset id first.
        if not dataset_id:
            raise CommandError("Dataset id is required.")
        try:
            dataset_id = int(dataset_id)
        except ValueError:
            raise CommandError("Dataset id must be a number.")

        name = options.get('name')
        dictionary = Dictionary.objects.filter(dataset_id=dataset_id).first()

        # Build a one-test suite parametrized with the dictionary and run it.
        suite = unittest.TestSuite()
        suite.addTest(ParametrizedTestCase.parametrize(TestFeature,
                                                       param=dictionary))
        unittest.TextTestRunner().run(suite)
class Command(BaseCommand):
    """Serve the event-normalization handler on a unix domain socket."""
    help = 'Start a socket server for event normalization'
    option_list = BaseCommand.option_list + (make_option(
        '--socket', dest='socket_file', help='Unix socket to bind to'), )

    def _check_socket_path(self, socket_file):
        """Ensure the path is bindable: refuse non-sockets, unlink stale sockets."""
        if os.path.exists(socket_file):
            mode = os.stat(socket_file).st_mode
            if not stat.S_ISSOCK(mode):
                raise CommandError('File already exists and is not a socket')
        # Remove any stale socket left behind by a previous run; only re-raise
        # when the path still exists after a failed unlink.
        try:
            os.unlink(socket_file)
        except OSError:
            if os.path.exists(socket_file):
                raise

    def handle(self, **options):
        socket_file = options.get('socket_file')
        if not socket_file:
            raise CommandError('Path to the socket file is required!')

        self.socket_file = os.path.abspath(socket_file)
        self._check_socket_path(socket_file)

        self.stdout.write('Binding to unix socket: %s' % (socket_file, ))
        server = SocketServer.UnixStreamServer(socket_file,
                                               EventNormalizeHandler)
        # Blocks forever handling connections.
        server.serve_forever()
class Command(BaseCommand): args = '<query>' help = 'Test the search engine' option_list = BaseCommand.option_list + (make_option( '-V', action="callback", callback=append_version, type="string"), ) def handle(self, *args, **kwargs): query = ' '.join(args) versions = kwargs.get('versions') db, enquiry = enquire(query, versions) mset = enquiry.get_mset(0, db.get_doccount()) pks = [match.document.get_data() for match in mset] # filter doesn't guarantee an order, so we need to get all the # possible models then look them up to get the ordering # returned by xapian. This hits the database all at once, rather # than pagesize times. extension_lookup = {} for extension in Extension.objects.filter(pk__in=pks): extension_lookup[str(extension.pk)] = extension extensions = [extension_lookup[pk] for pk in pks] for ext in extensions: print ext.name
class Command(LabelCommand):
    """Create a manager account from five labels, optionally base64-encoded."""
    option_list = BaseCommand.option_list + (
        make_option('--base64', action='store_true', dest='base64',
                    default=False,
                    help='Assume all input are base64 encoded.'),
    )
    help = "Create manager"

    def handle(self, *labels, **options):
        if len(labels) != 5:
            raise CommandError("Enter manager's email, username, password, first_name, last_name")

        # Optionally base64-decode each field, then decode all of them to
        # unicode in one pass (the fields arrive as UTF-8 bytes).
        if options.get('base64'):
            raw_values = [b64decode(label) for label in labels]
        else:
            raw_values = list(labels)
        email, username, password, first_name, last_name = [
            value.decode('utf8') for value in raw_values]

        from cm.models import UserProfile
        UserProfile.objects._create_manager(email, username, password,
                                            first_name, last_name)
class Command(BaseCommand):
    """Load the raw JCT tax-expenditure text files found under --path."""
    help = "Loads raw JCT TE data files from specified path"
    option_list = BaseCommand.option_list + (
        make_option("-p", "--path", dest="path", default=None),
    )

    def handle(self, *args, **options):
        if options['path'] is not None:
            path = options['path']
            # One parse_jct call per known source file.
            # NOTE(review): the meaning of the trailing integer arguments is
            # not visible here — confirm against parse_jct's signature.
            parse_jct(os.path.join(path, 'JCS-1-98.txt'), 2001, 9, 1)
            parse_jct(os.path.join(path, 'JCS-13-99.txt'), 2002, 9, 1)
            parse_jct(os.path.join(path, 'JCS-1-01.txt'), 2003, 9, 1)
            parse_jct(os.path.join(path, 'JCS-1-05.txt'), 2007, 4, 1)
            parse_jct(os.path.join(path, 'JCS-2-06.txt'), 2008, 4, 1)
            parse_jct(os.path.join(path, 'JCS-3-07.txt'), 2009, 4, 1)
            parse_jct(os.path.join(path, 'JCS-2-08_combined.txt'), 2010, 2, 1)
            parse_jct(os.path.join(path, 'JCS-1-10.txt'), 2011, 2, 5)
            parse_jct(os.path.join(path, 'JCS-3-10.txt'), 2012, 8, 3)
        else:
            print 'No --path to data files set.'
class Command(BaseCommand):
    """Back up the configuration of one switch, or of every switch with --all."""
    option_list = BaseCommand.option_list + (
        make_option('-a', '--all', action='store_true', dest='allswitches',
                    default=False, help="Backup all switches"),
    )
    args = '<backupfilesbasedirectory> [switch_name]'
    help = 'Backups the configuration of a switch (or all the swiches if --all is specified).'

    def handle(self, *args, **options):
        allswitches = options.get('allswitches', False)

        # Argument validation: a base directory is always required; a switch
        # name is required unless --all was given.
        if len(args) < 1:
            raise CommandError('Error. No base directory specified')
        elif (not allswitches) and (len(args) < 2):
            raise CommandError('Error. No switch name specified')

        basedir = args[0]
        if allswitches:
            for sw in Switch.objects.all():
                backupsw(self, basedir, sw)
        else:
            # BUGFIX: the lookup used a bare 'except: pass', which silently
            # swallowed *every* error (not just a missing switch), and tested
            # None with '=='. Catch only the expected DoesNotExist and use
            # identity comparison.
            try:
                sw = Switch.objects.get(name=args[1])
            except Switch.DoesNotExist:
                sw = None
            if sw is None:
                raise CommandError('Error. Switch %s not found' % args[1])
            backupsw(self, basedir, sw)
class Command(BaseCommand): """ Create or update all scores, based on existing votes. This is useful if you have to migrate your votes from a legacy table, or you want to change the weight of current votes, e.g.:: ./manage.y upsert_scores -w 5 """ option_list = BaseCommand.option_list + (make_option( '-w', "--weight", action='store', dest='weight', default=0, type='int', help=('The weight used to calculate average score.')), ) help = "Create or update all scores, based on existing votes." def handle(self, **options): if int(options.get('verbosity')) > 0: verbose = True counter = 0 buffer = set() for vote in models.Vote.objects.all(): content = (vote.content_type, vote.object_id, vote.key) if content not in buffer: if verbose: counter += 1 print u'#%d - model %s id %s key %s' % ( (counter, ) + content) models.upsert_score(content[:2], content[2], options['weight']) buffer.add(content)
class Command(BaseCommand):
    """
    Import the specified data directory into the default ModuleStore
    """
    help = 'Import the specified data directory into the default ModuleStore'
    option_list = BaseCommand.option_list + (make_option(
        '--nostatic', action='store_true',
        help='Skip import of static content'), )

    def handle(self, *args, **options):
        "Execute the command"
        if len(args) == 0:
            raise CommandError(
                "import requires at least one argument: <data directory> [--nostatic] [<course dir>...]"
            )

        data_dir = args[0]
        do_import_static = not (options.get('nostatic', False))
        # Remaining positional arguments select specific course directories;
        # None means "import everything under data_dir".
        if len(args) > 1:
            course_dirs = args[1:]
        else:
            course_dirs = None

        # FIX: dropped the unused 'dis=' keyword that was passed to .format()
        # but never referenced by the format string.
        print("Importing. Data_dir={data}, course_dirs={courses}".format(
            data=data_dir, courses=course_dirs))

        import_from_xml(modulestore('direct'), data_dir, course_dirs,
                        load_error_modules=False,
                        static_content_store=contentstore(), verbose=True,
                        do_import_static=do_import_static)
class Command(BaseCommand):
    """Load NAICS and PSC reference data, from explicit input files when given."""
    option_list = BaseCommand.option_list + (
        make_option("-n", "--naicsfile", dest="naics_infile", default=None),
        make_option("-p", "--pscfile", dest="psc_infile", default=None),
    )

    def handle(self, *args, **options):
        # BUGFIX: the previous bare try/except swallowed *every* error from
        # the file-based load (bad data, typos, DB errors) and silently fell
        # back to the default load. Choose the code path explicitly instead,
        # matching the other loader commands in this project.
        if options['naics_infile'] is not None:
            NAICS.objects.load_naics(options['naics_infile'])
        else:
            NAICS.objects.load_naics()

        if options['psc_infile'] is not None:
            PSC.objects.load_psc(options['psc_infile'])
        else:
            PSC.objects.load_psc()
class Command(BaseCommand):
    """Load TE categories from a text file where lines prefixed with '-' are
    children of the preceding unprefixed line."""
    help = "Loads TE categories from text file (e.g. scripts/data/tax_expenditures/data/omb_ap/omb_categories_2000.txt)"
    option_list = BaseCommand.option_list + (make_option(
        "-p", "--path", dest="path", default=None), )

    def handle(self, *args, **options):
        if options['path'] is not None:
            # A leading '-' marks a child (indented) category line.
            indent_regex = re.compile('^-.*')
            file = open(options['path'], 'r')
            parent = None
            last_item = None
            for line in file.readlines():
                line = line.strip()
                if indent_regex.match(line):
                    # NOTE(review): replace('-', '') strips *all* dashes,
                    # including any inside the category name — confirm
                    # whether only the leading one should go.
                    line = line.replace('-', '')
                    # Adopt the previous top-level item as the parent for
                    # this and all following indented lines.
                    if parent == None:
                        parent = last_item
                else:
                    # Back at top level: reset the parent.
                    parent = None
                last_item = Category.objects.create(name=unicode(line),
                                                    parent=parent,
                                                    budget_function=True)
        else:
            print(
                'No --path to categories file specified (probably need scripts/data/tax_expenditures/data/omb_ap/omb_categories_2000.txt).'
            )
            exit()
class Command(BaseCommand):
    """Import program descriptions from the file given via --file."""
    option_list = BaseCommand.option_list + (make_option(
        "-f", "--file", dest="file", default=None), )

    def handle(self, *args, **options):
        infile = options['file']
        # Without --file there is nothing to import.
        if infile is None:
            return
        ProgramDescription.objects.import_programs(infile)
class Command(NoArgsCommand):
    """Debug helper: dump the full Story queryset to stdout."""
    help = "Whatever you want to print here"
    option_list = NoArgsCommand.option_list + (
        make_option('--verbose', action='store_true'),
    )

    def handle_noargs(self, **options):
        stories = Story.objects.all()
        print(stories)
class Command(NoArgsCommand):
    """Daemonize the process via the classic unix double fork and record its pid."""
    option_list = NoArgsCommand.option_list + (
        make_option('--pidfile', dest='pidfile', help="Pidfile", default=None),
        # NOTE(review): 'logfile' is accepted but never used in this method —
        # confirm whether log redirection was intended.
        make_option('--logfile', dest='logfile', help="Log file", default=None),
    )

    def handle_noargs(self, **options):
        if options['pidfile']:
            # First fork: detach from the launching process.
            try:
                pid = os.fork()
                if pid > 0:
                    sys.exit(0)  # Exit first parent.
            except OSError, e:
                print >> sys.stderr, "fork #1 failed: (%d) %s" % (e.errno, e.strerror)
                sys.exit(1)

            # Decouple from parent environment.
            os.chdir('/')
            os.umask(0)
            os.setsid()

            # Do second fork.
            # Second fork prevents the daemon from re-acquiring a controlling
            # terminal (it is no longer a session leader).
            try:
                pid = os.fork()
                if pid > 0:
                    sys.exit(0)
            except OSError, e:
                print >> sys.stderr, "fork #2 failed: (%d) %s" % (e.errno, e.strerror)
                sys.exit(1)

            # Redirect standard file descriptors.
            si = open('/dev/null', 'r')
            so = open('/dev/null', 'a+')
            se = open('/dev/null', 'a+', 0)
            os.dup2(si.fileno(), sys.stdin.fileno())
            os.dup2(so.fileno(), sys.stdout.fileno())
            os.dup2(se.fileno(), sys.stderr.fileno())

            # NOTE(review): 'a+' appends, so stale pids accumulate in the
            # pidfile across restarts — confirm 'w' isn't wanted instead.
            f = open(options['pidfile'], 'a+')
            f.write(str(os.getpid()))
            f.close()
class Command(BaseCommand):
    """Load NAICS codes, from --file when given, otherwise from the default source."""
    option_list = BaseCommand.option_list + (make_option(
        "-f", "--file", dest="file", default=None), )

    def handle(self, *args, **options):
        infile = options['file']
        if infile is None:
            # No input file supplied: fall back to the default load.
            NAICSCode.objects.load_naics()
        else:
            NAICSCode.objects.load_naics(infile)
class Command(BaseCommand):
    """Load product/service codes, from --file when given, otherwise from the default source."""
    option_list = BaseCommand.option_list + (make_option(
        "-f", "--file", dest="file", default=None), )

    def handle(self, *args, **options):
        infile = options['file']
        if infile is None:
            # No input file supplied: fall back to the default load.
            ProductOrServiceCode.objects.load_psc()
        else:
            ProductOrServiceCode.objects.load_psc(infile)
class Command(NoArgsCommand):
    """Run one wake-up round: repeatedly call every armed user until they
    answer or the retry budget runs out, then disarm everyone in the batch."""
    help = "Excecute Chron Wake Up Chron Roulette for the current batch of wake-ups."
    option_list = NoArgsCommand.option_list + (make_option(
        '--verbose', action='store_true'), )

    def handle_noargs(self, **options):
        # Timestamp identifying this batch; it keys the Call rows and the
        # conference URLs below.
        schedule = datetime.now()
        # NOTE(review): replace() returns a *new* datetime and the result is
        # discarded, so 'schedule' keeps its microseconds — confirm whether
        # 'schedule = schedule.replace(microsecond=0)' was intended.
        schedule.replace(microsecond=0)
        confname = str(schedule.strftime("%d:%m:%y:%H:%M:%S"))
        confurl = settings.WEB_ROOT + "wakeuprequest/" + confname
        noanswerurl = settings.WEB_ROOT + 'answercallback/' + confname
        fallbackurl = settings.WEB_ROOT + 'fallback/' + confname
        self.stdout.write("Wake Up Chron Roulette Started - " + str(schedule),
                          ending='\n\n')

        towakeup = UserProfile.objects.filter(alarmon=True, activated=True)
        print towakeup

        # Creating all call objects
        for u in towakeup:
            c = Call()
            c.user = u.user
            c.datecreated = schedule
            c.save()

        tries = 0
        # Iterate until we don't have any more people we need to wake up, or our tries have ran out
        while towakeup and tries < maxTries:
            tries = tries + 1
            print "STARTING TRY", tries
            for p in towakeup:
                call_async(p.phone, confurl, fallbackurl, noanswerurl)
            # Give the calls time to complete before re-checking who answered.
            time.sleep(waitingtime)
            # raw_input('Press enter to continue')		#
            # Flush so that the changes reflect in the database
            flush_transaction()
            # Re-query: anyone whose call for this batch is still unanswered.
            towakeup = UserProfile.objects.filter(
                user__call__datecreated=schedule, user__call__answered=False)

        # Mark every still-unanswered call of this batch as snoozed.
        Call.objects.filter(datecreated=schedule,
                            answered=False).update(snoozed=True)
        # TODO Set Call's Snoozed boolean to true if they didn't answer by the time we arrive here
        # To finish turn everyone's alarm off
        UserProfile.objects.filter(user__call__datecreated=schedule).update(
            alarmon=False, anymatch=False)