Example #1
 def unsubscribe_from_basket(modeladmin, request, queryset):
     """Unsubscribe from Basket."""
     ts = [
         unsubscribe_from_basket_task.s(userprofile.email, [newsletter])
         for userprofile in queryset
     ]
     group(ts)()
     messages.success(request, 'Basket update started.')
Example #2
 def subscribe_to_basket(modeladmin, request, queryset):
     """Subscribe to Basket or update details of already subscribed."""
     ts = [
         subscribe_user_to_basket.s(userprofile.id, [newsletter])
         for userprofile in queryset
     ]
     group(ts)()
     messages.success(request, 'Basket update started.')
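Both actions assume a Celery task defined elsewhere and a `newsletter` value bound in an enclosing scope. A minimal, self-contained sketch of the same pattern, with hypothetical names standing in for the project's own:

from celery import group, shared_task

@shared_task
def subscribe_user_to_basket(userprofile_id, newsletters):
    # Stand-in body; the real task would call the Basket API here.
    return (userprofile_id, newsletters)

def subscribe_selected(userprofile_ids, newsletter):
    # One signature per profile; group(ts)() sends them all to the broker
    # at once and returns a GroupResult without waiting for completion.
    ts = [subscribe_user_to_basket.s(pk, [newsletter]) for pk in userprofile_ids]
    return group(ts)()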
Example #3
def import_observations():
    watercourse_dates_to_import = get_dates_to_import(WatercourseObservation, 'HOD.dat')
    srazsae_dates_to_import = get_dates_to_import(WeatherObservation, 'srazsae.dat')

    try:
        g = group(import_hod_observation.s(date) for date in watercourse_dates_to_import)
        g.apply_async()
    except Exception as e:
        logger.critical(e)

    try:
        g = group(import_srazsae_observation.s(date) for date in srazsae_dates_to_import)
        g.apply_async()
    except Exception as e:
        logger.critical(e)
Example #4
def process_models_with_chunks(task, iterable, count_per_chunk,
                               delta_countdown=None):
    def model_chunker(iterable, count_per_chunk):
        results_ids = []

        for obj in iterable.only('id'):
            if len(results_ids) == count_per_chunk:
                yield results_ids
                results_ids = []

            results_ids.append(obj.id)

        if results_ids:
            yield results_ids

    count = 0
    tasks = []
    for model_ids in model_chunker(iterable, count_per_chunk):
        options = None
        if delta_countdown:
            options = {
                'countdown': delta_countdown * count,
            }
        t = task.subtask((model_ids, iterable.model), options=options)
        tasks.append(t)
        count += 1

    return group(*tasks).apply_async()
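The delta_countdown option staggers the chunks so each starts later than the one before it. A hedged usage sketch; the task and model names below are illustrative, not from the source:

# Hypothetical call: split a queryset into lists of 500 ids and hand each
# list (plus the model class) to reindex_models, spacing the chunks ten
# seconds apart so the workers are not hit all at once.
result = process_models_with_chunks(
    reindex_models,            # task expecting (model_ids, model_class)
    Article.objects.all(),     # only the 'id' column is fetched per row
    count_per_chunk=500,
    delta_countdown=10,        # chunk N is scheduled with countdown=10*N
)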
Example #5
def scan_all_plugins(plugin_list, add_to_store=True):
    """
     Query all plugins for their info/inspect json block
    :parameter plugin_list: Pass in list of (plugin, scriptpath) pairs, and all will be instantiated an interrogated.
    Note: They do not need to be installed yet. Just name, script pairs, or name, script, context tuples.
    :parameter add_to_store: Add the plugins in the list to the store
    :returns: A dictionary of PluginInfo object for all of the successfully loaded modules keyed by their path
    """
    plugin_info = dict()

    # fire off sub-tasks for each plugin to be scanned and collect results
    plugin_scan_tasks = [
        scan_plugin.s(data, add_to_store) for data in plugin_list
    ]
    try:
        result = group(plugin_scan_tasks).apply_async().join(timeout=300)
    except Exception as exc:
        logger.error("Error while scanning plugins: " + str(exc))
    else:
        for i, data in enumerate(plugin_list):
            path = data[1]
            plugin_info[path] = result[i]

        logger.info("Rescanned %d plugins", len(plugin_list))

    return plugin_info
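The .join(timeout=300) call blocks until every scan returns or five minutes pass; on timeout it raises celery.exceptions.TimeoutError, which the broad except Exception above also catches. A minimal sketch handling that case explicitly, reusing the example's names:

from celery import group
from celery.exceptions import TimeoutError

try:
    results = group(plugin_scan_tasks).apply_async().join(timeout=300)
except TimeoutError:
    # A slow or stuck plugin scan surfaces here instead of hanging forever.
    results = []

Note that join() needs a configured result backend, and calling it from inside another task risks deadlock if the worker pool is saturated.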
Example #6
def owrb_well_logs_save(database=config.owrb_database, collection=config.owrb_welllog_collection):
    #dcommons = datacommons.toolkit(username,password)
    db = Connection(config.mongo_host)
    db[database][collection].remove()
    #set geometries
    polydata = []
    for itm in db.ows.watersheds.find():
        polydata.append(itm)
    aquifer_poly = []
    for itm in db.ows.aquifers.find():
        aquifer_poly.append(itm)
    #load owrb well logs
    res = urllib2.urlopen(config.well_logs_url)
    data = json.loads(res.read())
    stask = []
    taskname_tmpl = 'owsq.data.owrb.owrb_well_logs_portal'
    for site in data["features"]:
        row_data = site["properties"]
        row_data['geometry'] = site['geometry']
        rowid = db[database][collection].save(row_data)
        stask.append(subtask(taskname_tmpl, args=(rowid,)))
    print('Done with inserts, starting group jobs')
    job = group(stask)
    result = job.apply_async()
    aggregate_results = result.join()
    return "Success- All Well logs stored locally in Mongo(%s, %s) Total = %d" % (
    database, collection, sum(aggregate_results))
Example #7
def group_one(task, seq, *args, **kwargs):
    """
    Create a group of tasks; each task handles one element of seq
    """
    tasks = []

    if "attr_name" in kwargs:
        attr_name = kwargs['attr_name']
        del kwargs["attr_name"]
    else:
        attr_name = None

    for elem in seq:
        if attr_name:
            kwargs[attr_name] = elem
            tasks.append(
                task.subtask(args=list(args),
                             kwargs=dict(kwargs),
                             immutable=True))
        else:
            tasks.append(
                task.subtask(args=[elem] + list(args),
                             kwargs=dict(kwargs),
                             immutable=True))

    return group(tasks)
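Because kwargs is copied per element and every subtask is immutable, each task gets its own snapshot of the arguments. A hypothetical call, assuming a send_report task:

# Without attr_name each element is prepended to the positional args:
#   send_report('a@x.com', 2024), send_report('b@x.com', 2024)
group_one(send_report, ['a@x.com', 'b@x.com'], 2024).apply_async()

# With attr_name the element travels as a keyword argument instead:
#   send_report(year=2024, recipient='a@x.com'), ...
group_one(send_report, ['a@x.com', 'b@x.com'],
          year=2024, attr_name='recipient').apply_async()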
Example #8
def import_all_stations(day=None):
    stations = util.get_or_create_stations()

    try:
        # Call other Celery tasks as a group; see the Celery documentation for details.
        g = group(import_station.s(station.pk, day) for station in stations)
        g.apply_async()
    except Exception as e:
        logger.error(e)
Example #9
def data_download(data, basedir='/data/static/', clustered=False, **kwargs):
    '''
    Download multiple data sets from multiple data sources.
    Simple cart data example:
        {"SCI-1": {"quantity": 1, "id": "SCI-1", "name": "North Canadian River at Shawnee, OK (07241800)",
                   "parameter": "Discharge, cubic feet per second",
                   "query": "{'source':'USGS', 'webservice_type':'uv', 'sites':'07241800', 'parameterCd':'00060', 'startDT':'2007-10-01', 'endDT':'2013-04-04'}"}
        }
    query['source'] is used to import a module that provides a save function; the save function returns a URL to the file just downloaded.
    filezip creates a zip file from the list of URLs.
    The task returns a URL to the zip file of all data downloaded from the different sources.
    Note: each source currently runs as a single subtask; this could be extended so every source spawns multiple parallel subtasks.
    '''
    if not data:
        raise Exception('No Data')
    try:
        data = json.loads(data)
    except ValueError:
        data = ast.literal_eval(data)
    newDir = os.path.join(basedir, 'ows_tasks/', str(data_download.request.id))
    call(["mkdir", '-p', newDir])
    os.chdir(newDir)
    logger = open(os.path.join(newDir, 'task_log.txt'), 'w')
    # consolidate sources- creates list of shopping cart items
    data_by_source = {}
    for itm, value in data.items():
        value['query'] = ast.literal_eval(value['query'])
        if value['query']['source'] in data_by_source:
            data_by_source[value['query']['source']].append(value)
        else:
            data_by_source[value['query']['source']] = [value]
    stask = []
    taskname_tmpl = 'owsq.data.download.%s.save'
    for itm, value in data_by_source.items():
        logger.write(log_info_tpl % (itm, str(len(value)), 'Subtask Created'))
        stask.append(subtask(taskname_tmpl % (itm), args=(newDir, itm,), kwargs={'data_items': value}))
    job = group(stask)
    result = job.apply_async()
    logger.write(log_info_tpl1 % ('Subtask Submission', 'Subtask Running'))
    aggregate_results = result.join()
    logger.write(log_info_tpl1 % ('Data query Successful', 'Subtasks completed'))

    urls = []
    for res in aggregate_results:
        urls.extend(res)
    outname = zip_name_tpl % (datetime.datetime.now().isoformat())
    zipurl = 'http://%s/%s/%s' % (socket.gethostname(), 'request', outname)
    logger.write(log_info_tpl2 % ('Data Zip URL', zipurl, '30 days'))
    logger.close()
    if clustered:
        return filezip.makezip(urls, zip_name_tpl % (datetime.datetime.now().isoformat()),
                               os.path.join(basedir, 'request/'))
    else:
        return filezip.makezip(newDir, zip_name_tpl % (datetime.datetime.now().isoformat()),
                               os.path.join(basedir, 'request/'), local=True)
Example #10
def group_chunks(task, seq, n, *args, **kwargs):
    """
    Creates a group of tasks, each subtask has <n> elements to handle
    """
    tasks = []
    for i in xrange(0, len(seq), n):
        tasks.append(
            task.subtask(args=[seq[i:i + n]] + list(args),
                         kwargs=kwargs,
                         immutable=True))
    return group(tasks)
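A hypothetical usage, assuming a process_batch task that accepts a list:

# 1000 elements, 100 per subtask -> a group of 10 immutable subtasks, each
# invoked as process_batch(chunk) with its own 100-element slice.
group_chunks(process_batch, list(range(1000)), 100).apply_async()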
Example #11
def group_chunks(task, seq, n, *args, **kwargs):
    """
    Creates a group of tasks, each subtask has <n> elements to handle
    """
    tasks = []
    for i in xrange(0, len(seq), n):
        tasks.append(
            task.subtask(args=[seq[i:i + n]] + list(args), kwargs=kwargs,
                         immutable=True)
        )
    return group(tasks)
Example #12
 def run(self, *args, **kwargs):
     with transaction.atomic():
         tcg_service = CardCatalogSyncService()
         set_list = CardSet.objects.all()
         load_tasks = []
         for each_set in set_list:
             logger.info("Syncing prices for {}".format(each_set))
             tcg_data = tcg_service.get_prices_for_set(
                 set_id=each_set.tcgplayer_group_id)
             load_tasks.append(
                 log_set_pricing.s(card_set=each_set, tcg_data=tcg_data))
         task_group = group(load_tasks)
         group_complete = task_group.apply()
         logger.info('CARD SYNC TASK COMPLETE!')
Example #13
 def run(self, *args, **kwargs):
     logger.info('BEGINNING SCRYFALL SYNC TASK')
     bulk_data_url = ScryfallAPIService.get_bulk_data_url()
     scryfall_data = ScryfallAPIService.retrieve_bulk_data(bulk_data_url)
     if kwargs.get('test'):
         scryfall_data = scryfall_data[:2]
     load_tasks = []
     for card in scryfall_data:
         card_check = ScryfallCard.objects.filter(
             oracle_id=card.get('oracle_id')).first()
         if not card_check:
             load_tasks.append(get_or_create_scryfall_card.s(card))
     task_group = group(load_tasks)
     group_complete = task_group.apply()
     logger.info('SCRYFALL SYNC TASK COMPLETE!')
Example #14
File: tasks.py Project: dkns/kuma
def render_stale_documents(immediate=False, log=None):
    """Simple task wrapper for rendering stale documents"""
    lock = MemcacheLock('render-stale-documents-lock')
    if lock.acquired and not immediate:
        # fail loudly if this is running already
        # may indicate a problem with the schedule of this task
        raise StaleDocumentsRenderingInProgress

    stale_docs = Document.objects.get_by_stale_rendering()
    stale_docs_count = stale_docs.count()
    if stale_docs_count == 0:
        # no stale documents to render
        return

    if log is None:
        # fetch a logger in case none is given
        log = render_stale_documents.get_logger()

    log.info("Found %s stale documents" % stale_docs_count)
    response = None
    if lock.acquire():
        try:
            subtasks = []
            for doc in stale_docs:
                if immediate:
                    doc.render('no-cache', settings.SITE_URL)
                    log.info("Rendered stale %s" % doc)
                else:
                    subtask = render_document.subtask((doc.pk, 'no-cache',
                                                       settings.SITE_URL))
                    subtasks.append(subtask)
                    log.info("Deferred rendering for stale %s" % doc)
            if subtasks:
                task_group = group(tasks=subtasks)
                if waffle.switch_is_active('render_stale_documents_async'):
                    # kick off the task group asynchronously
                    task_group.apply_async()
                else:
                    # kick off the task group synchronously
                    result = task_group.apply()
                    response = result.join()
        finally:
            lock.release()
    return response
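The waffle switch picks between two dispatch modes with different semantics. A minimal sketch of the difference, reusing render_document from the example and assuming doc_pks and base_url are defined:

from celery import group

g = group(render_document.s(pk, 'no-cache', base_url) for pk in doc_pks)

# apply() runs every subtask serially in the current process and returns a
# GroupResult backed by EagerResults, so join() returns without blocking on
# the broker.
sync_values = g.apply().join()

# apply_async() only sends the subtasks to the broker; workers render the
# documents in parallel and the caller gets a pending GroupResult back.
pending = g.apply_async()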
Example #15
def render_stale_documents(immediate=False, log=None):
    """Simple task wrapper for rendering stale documents"""
    lock = MemcacheLock('render-stale-documents-lock')
    if lock.acquired and not immediate:
        # fail loudly if this is running already
        # may indicate a problem with the schedule of this task
        raise StaleDocumentsRenderingInProgress

    stale_docs = Document.objects.get_by_stale_rendering()
    stale_docs_count = stale_docs.count()
    if stale_docs_count == 0:
        # no stale documents to render
        return

    if log is None:
        # fetch a logger in case none is given
        log = render_stale_documents.get_logger()

    log.info("Found %s stale documents" % stale_docs_count)
    response = None
    if lock.acquire():
        try:
            subtasks = []
            for doc in stale_docs:
                if immediate:
                    doc.render('no-cache', settings.SITE_URL)
                    log.info("Rendered stale %s" % doc)
                else:
                    subtask = render_document.subtask(
                        (doc.pk, 'no-cache', settings.SITE_URL))
                    subtasks.append(subtask)
                    log.info("Deferred rendering for stale %s" % doc)
            if subtasks:
                task_group = group(tasks=subtasks)
                if waffle.switch_is_active('render_stale_documents_async'):
                    # kick off the task group asynchronously
                    task_group.apply_async()
                else:
                    # kick off the task group synchronously
                    result = task_group.apply()
                    response = result.join()
        finally:
            lock.release()
    return response
Example #16
 def run(self, *args, **kwargs):
     with transaction.atomic():
         tcg_service = CardCatalogSyncService()
         set_list = CardSet.objects.all()
         load_tasks = []
         for each_set in set_list:
             card_list = each_set.get_cards_for_set()
             tcg_data = tcg_service.retrieve_product_list_for_set(
                 each_set.tcgplayer_group_id)
             if kwargs.get('test'):
                 tcg_data = tcg_data[:2]
             if not card_list:
                 logger.info("Spawning task to create cards for {}".format(
                     each_set))
                 # If we have no cards at all for this set, it's a new set; create all of its cards
                 load_tasks.append(
                     create_all_new_cards.s(
                         card_set_id=each_set.id,
                         tcg_data=tcg_data,
                         sync_all_products=kwargs.get('sync_all_products')))
             elif len(card_list) != len(tcg_data):
                 logger.info("Checking cards in {}".format(each_set))
                 # If the length of these sets doesn't match, likely a new card was added to
                 # the set since last sync (mostly applicable to promo sets)
                 for tcg_card in tcg_data:
                     # Filter to see if the card exists in the current set card list
                     card = card_list.filter(
                         tcg_product_id=tcg_card.get('productId'))
                     if not card:
                         create = True
                         if not kwargs.get('sync_all_products'):
                             for exclusion in EXCLUDED_CARD_NAMES:
                                 if exclusion in tcg_card.get('name'):
                                     create = False
                             if tcg_card.get('name').endswith(
                                     'Deck') or not create:
                                 continue
                         # Card doesn't exist, so create it
                         load_tasks.append(
                             create_new_card.s(card_set_id=each_set.id,
                                               tcg_data=tcg_card))
         task_group = group(load_tasks)
         group_complete = task_group.apply()
         logger.info('CARD SYNC TASK COMPLETE!')
Example #17
def generate_monthly_account_statements():
    """Spawn sub-tasks to generate a *Monthly* ``Statement`` for accounts
       without a *Monthly* statement.
    """
    today = date.today()
    last_month = today - relativedelta(months=1)
    from_date = date(last_month.year, last_month.month, 1)
    to_date = date(today.year, today.month, 1) - relativedelta(days=1)
    account_list = Account.objects.exclude(
        statement__type=Statement.TYPE_MONTHLY,
        statement__from_date=from_date,
        statement__to_date=to_date)

    task_list = []
    for account in account_list:
        task_list.append(
            generate_monthly_statement.s(account.id, from_date, to_date))

    return group(task_list)()
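The relativedelta arithmetic always spans exactly the previous calendar month, including month-length edge cases. A worked example, runnable on its own:

from datetime import date
from dateutil.relativedelta import relativedelta

# Pretend the task runs on 2024-03-15.
today = date(2024, 3, 15)
last_month = today - relativedelta(months=1)                        # 2024-02-15
from_date = date(last_month.year, last_month.month, 1)              # 2024-02-01
to_date = date(today.year, today.month, 1) - relativedelta(days=1)  # 2024-02-29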
Example #18
def test_solution(solution):
    try:
        compile_solution(solution)
        solution.grader_message = 'Testing'

        taskList = []
        tests = solution.problem.test_set.all()
        for t in tests:
            curSubTask = run_test.si(solution, t)
            taskList.append(curSubTask)

        res = chord(group(taskList), save_result.s(solution))
        res.apply_async()

    except subprocess.CalledProcessError:
        solution.grader_message = 'Compilation error (syntax)'
    except subprocess.TimeoutExpired:
        solution.grader_message = 'Compilation error (timeout)'
    finally:
        solution.save()
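A chord is a group header plus a callback that receives the ordered list of the header's results. A self-contained sketch with stand-in task bodies (the real run_test and save_result belong to the project):

from celery import chord, group, shared_task

@shared_task
def run_test(solution_id, test_id):
    return {'test': test_id, 'passed': True}   # stand-in result

@shared_task
def save_result(results, solution_id):
    # results is the list of run_test return values, in header order.
    return sum(1 for r in results if r['passed'])

# save_result.s(sol_id) is called as save_result(results, sol_id).
chord(group(run_test.si(1, t) for t in (1, 2, 3)))(save_result.s(1))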
Example #19
def group_one(task, seq, *args, **kwargs):
    """
    Create a group of tasks; each task handles one element of seq
    """
    tasks = []

    if "attr_name" in kwargs:
        attr_name = kwargs['attr_name']
        del kwargs["attr_name"]
    else:
        attr_name = None

    for elem in seq:
        if attr_name:
            kwargs[attr_name] = elem
            tasks.append(task.subtask(args=list(args), kwargs=dict(kwargs),
                         immutable=True))
        else:
            tasks.append(task.subtask(args=[elem] + list(args),
                         kwargs=dict(kwargs), immutable=True))

    return group(tasks)
Example #20
def test_solution(solution):
    try:
        compile_solution(solution)
        solution.grader_message = 'Testing'

        taskList = []
        print('adding tests')
        print(sys.getsizeof(solution))
        tests = Test.objects.filter(problem_id=solution.problem_id)
        for t in tests:
            curSubTask = run_test.si(solution.id, t.id)
            taskList.append(curSubTask)

        res = chord(group(taskList), save_result.s(solution))
        res.apply_async()
        print('tests added')

    except subprocess.CalledProcessError:
        solution.grader_message = 'Compilation error (syntax)'
    except subprocess.TimeoutExpired:
        solution.grader_message = 'Compilation error (timeout)'
    finally:
        solution.save()
Example #21
    def form_valid(self, form):
        super(ScaleImageView, self).form_valid(form)
        # One immutable scaling subtask per target size.
        subtasks = [image_tasks.scale_image.si(self.object.image.name, size)
                    for size in (50, 100, 150, 200, 250, 300, 400)]

        upon_completion = email_tasks.send_email.si("*****@*****.**",
                                                    [self.object.notify],
                                                    "Yo",
                                                    "All your images are scaled")

        # Dispatch the subtasks once, as the header of a chord whose callback
        # sends the notification email after every resize has finished.
        chord_result = chord(group(subtasks))(upon_completion)

        # The callback's AsyncResult keeps the header's GroupResult as its
        # parent, so the individual task ids can be read from there.
        task_ids = [t.task_id for t in chord_result.parent.results]

        return self.render_to_response(self.get_context_data(
            form=form, task_ids=json.dumps(task_ids), success=True))
Example #22
def scan_all_plugins(plugin_list, add_to_store=True):
    """
     Query all plugins for their info/inspect json block
    :parameter plugin_list: Pass in list of (plugin, scriptpath) pairs, and all will be instantiated an interrogated.
    Note: They do not need to be installed yet. Just name, script pairs, or name, script, context tuples.
    :parameter add_to_store: Add the plugins in the list to the store
    :returns: A dictionary of PluginInfo object for all of the successfully loaded modules keyed by their path
    """
    plugin_info = dict()

    # fire off sub-tasks for each plugin to be scanned and collect results
    plugin_scan_tasks = [scan_plugin.s(data, add_to_store) for data in plugin_list]
    try:
        result = group(plugin_scan_tasks).apply_async().join(timeout=300)
    except Exception as exc:
        logger.error("Error while scanning plugins: " + str(exc))
    else:
        for i, data in enumerate(plugin_list):
            path = data[1]
            plugin_info[path] = result[i]

        logger.info("Rescanned %d plugins", len(plugin_list))

    return plugin_info
Example #23
def import_imdb_list(titles, list_id):
    return group(
        add_movie_to_list.subtask((title, list_id))
        for title in titles).apply_async()
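A hypothetical call; joining the GroupResult requires a configured result backend:

result = import_imdb_list(['Heat', 'Ronin'], list_id=42)
result.join(timeout=60)   # blocks until every add_movie_to_list finishes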
Example #24
 def unsubscribe_from_basket(modeladmin, request, queryset):
     """Unsubscribe from Basket."""
     ts = [unsubscribe_from_basket_task.s(userprofile.email, [newsletter])
           for userprofile in queryset]
     group(ts)()
     messages.success(request, 'Basket update started.')
Example #25
 def subscribe_to_basket(modeladmin, request, queryset):
     """Subscribe to Basket or update details of already subscribed."""
     ts = [subscribe_user_to_basket.s(userprofile.id, [newsletter])
           for userprofile in queryset]
     group(ts)()
     messages.success(request, 'Basket update started.')