Example #1
    def __new__(cls, name, bases, attrs):
        super_new = super(TaskType, cls).__new__
        task_module = attrs["__module__"]

        # Abstract class, remove the abstract attribute so
        # any class inheriting from this won't be abstract by default.
        if attrs.pop("abstract", None) or not attrs.get("autoregister", True):
            return super_new(cls, name, bases, attrs)

        # Automatically generate missing name.
        if not attrs.get("name"):
            task_module = sys.modules[task_module]
            task_name = ".".join([task_module.__name__, name])
            attrs["name"] = task_name

        # Because of the way imports happen (recursively),
        # this may or may not be the first time the task tries to register
        # with the framework. There should only be one class for each task
        # name, so we always return the registered version.

        task_name = attrs["name"]
        if task_name not in tasks:
            task_cls = super_new(cls, name, bases, attrs)
            tasks.register(task_cls)
        return tasks[task_name].__class__
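A minimal sketch (not part of the example) of the registration behaviour the comments above describe, assuming the Celery 2.x-era `celery.task.Task` base class built with this `TaskType` metaclass and the `celery.registry.tasks` registry; `AddTask` is a made-up task:

from celery.task import Task
from celery.registry import tasks

class AddTask(Task):                  # name defaults to "<module>.AddTask"
    def run(self, x, y):
        return x + y

first_definition = AddTask

class AddTask(Task):                  # re-evaluated, e.g. via a recursive import
    def run(self, x, y):
        return x + y

# __new__ always hands back the class already in the registry, so both
# class statements resolve to the same object and only one entry exists.
assert AddTask is first_definition
assert AddTask.name in tasks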
Example #3
    def __new__(cls, name, bases, attrs):
        new = super(TaskType, cls).__new__
        task_module = attrs.get("__module__") or "__main__"

        # Abstract class: abstract attribute should not be inherited.
        if attrs.pop("abstract", None) or not attrs.get("autoregister", True):
            return new(cls, name, bases, attrs)

        # Automatically generate missing/empty name.
        autoname = False
        if not attrs.get("name"):
            try:
                module_name = sys.modules[task_module].__name__
            except KeyError:
                # Fix for manage.py shell_plus (Issue #366).
                module_name = task_module
            attrs["name"] = '.'.join([module_name, name])
            autoname = True

        # Because of the way imports happen (recursively),
        # this may or may not be the first time the task tries to register
        # with the framework.  There should only be one class for each task
        # name, so we always return the registered version.
        task_name = attrs["name"]
        if task_name not in tasks:
            task_cls = new(cls, name, bases, attrs)
            if autoname and task_module == "__main__" and task_cls.app.main:
                task_name = task_cls.name = '.'.join([task_cls.app.main, name])
            tasks.register(task_cls)
        task = tasks[task_name].__class__
        return task
Example #4
    def setup_tasks(self, consumer_dict):
        for calendar_id, transformers in consumer_dict.iteritems():
            for Transformer in transformers:
                _tasks.append(self.create_task(calendar_id, Transformer()))

        for Task in _tasks:
            tasks.register(Task)
Example #7
    def test_periodic_taskmeta(self):
        tasks.register(TestPeriodicTask)
        p = self.createPeriodicTaskMeta(TestPeriodicTask.name)
        # check that repr works.
        self.assertTrue(unicode(p).startswith("<PeriodicTask:"))
        self.assertFalse(p in PeriodicTaskMeta.objects.get_waiting_tasks())
        # Have to avoid save() because it applies the auto_now=True.
        PeriodicTaskMeta.objects.filter(name=p.name).update(
                last_run_at=datetime.now() - TestPeriodicTask.run_every)
        self.assertTrue(p in PeriodicTaskMeta.objects.get_waiting_tasks())
        self.assertTrue(isinstance(p.task, TestPeriodicTask))

        p.delay()
Example #8
    def test_periodic_taskmeta(self):
        tasks.register(TestPeriodicTask)
        p = self.createPeriodicTaskMeta(TestPeriodicTask.name)
        # check that repr works.
        self.assertTrue(unicode(p).startswith("<PeriodicTask:"))
        self.assertFalse(p in PeriodicTaskMeta.objects.get_waiting_tasks())
        p.last_run_at = datetime.now() - (TestPeriodicTask.run_every +
                timedelta(seconds=10))
        p.save()
        self.assertTrue(p in PeriodicTaskMeta.objects.get_waiting_tasks())
        self.assertTrue(isinstance(p.task, TestPeriodicTask))

        p.delay()
Example #9
 >>> class TwitterUpdateTask(WebhookSignal):
 ...     name = "myapp.tasks.TwitterWebhookSignal"
 ... 
 ...     def run(self, username, password, message, **kwargs):
 ...         import twitter
 ...         api = twitter.Api(username=username, password=password)
 ...         api.PostUpdate(message)
 >>> tasks.register(TwitterUpdateTask)
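A hedged follow-up, not part of the original snippet: assuming the Celery 2.x classmethod calling API, the registered task can then be dispatched asynchronously, with the keyword arguments handed to `run`:

 >>> TwitterUpdateTask.delay(username="john", password="secret",
 ...                         message="Hello from the webhook task")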
Example #10
        items = soup.findAll('item')

        for item in items:
            post_type = item.find('wp:post_type').string
            post_status = item.find('wp:status').string

            if post_type == 'attachment':
                get_media(item, uri_parser, user)
                # Note! This script assumes all the attachments come before
                # posts and pages in the xml. If this ends up changing,
                # do two loops, one with attachments and the second with posts and pages.
            elif post_type == 'post' and post_status == 'publish':
                get_posts(item, uri_parser, user)
            elif post_type == 'page' and post_status == 'publish':
                get_pages(item, uri_parser, user)

        if user.email:
            context_instance = {
                'SITE_GLOBAL_SITEDISPLAYNAME': get_setting('site', 'global', 'sitedisplayname'),
                'SITE_GLOBAL_SITEURL': get_setting('site', 'global', 'siteurl'),
            }
            subject = ''.join(render_to_string(('notification/wp_import/short.txt'), context_instance).splitlines())
            body = render_to_string(('notification/wp_import/full.html'), context_instance)

            #send_mail(subject, body, settings.DEFAULT_FROM_EMAIL, [user.email], fail_silently=False)
            email = EmailMessage(subject, body, settings.DEFAULT_FROM_EMAIL, [user.email])
            email.content_subtype = 'html'
            email.send(fail_silently=True)

tasks.register(WPImportTask)
Example #11
    def transfer(self, name, local, remote, **kwargs):
        """
        `name` is the filename, `local` the local backend instance, `remote` 
        the remote backend instance. 
        
        Returns `True` when the transfer succeeded, `False` if not. Retries 
        the task when returning `False`.
        """
        try:
            remote.save(name, local.open(name))
            return True
        except Exception, e:
            logger = self.get_logger(**kwargs)
            logger.exception("Unable to save '%s' to remote storage. About "
                "to retry." % name)
            logger.exception(e)
            return False

class TransferAndDelete(Transfer):
    def transfer(self, name, local, remote, **kwargs):
        result = super(TransferAndDelete, self).transfer(name, local, remote, **kwargs)

        if result:
            local.delete(name)
        
        return result

tasks.register(Transfer)
tasks.register(TransferAndDelete)
Example #12
from celery import task
from celery.task import PeriodicTask
from celery.registry import tasks
from datetime import timedelta
from datetime import datetime
from django.core import management


# Test only
# Deprecated, please see settings.py CELERYBEAT_SCHEDULE
class MyTask(PeriodicTask):
    run_every = timedelta(minutes=1)

    def run(self, **kwargs):
        self.get_logger().info("Time now: " + datetime.now())
        print("Time now: " + datetime.now())


tasks.register(MyTask)


#Test only
@task()
def add(x, y):
    print 'add****'
    return x + y


@task()
def update_index():
    management.call_command('update_index')
Example #13
from celery.registry import tasks

from friends.contrib.suggestions.backends import importers
from friends.contrib.suggestions.backends.runners import AsyncRunner
from friends.contrib.suggestions.settings import RUNNER

if issubclass(RUNNER, AsyncRunner):
    tasks.register(importers.GoogleImporter)
    tasks.register(importers.FacebookImporter)
    tasks.register(importers.TwitterImporter)
    tasks.register(importers.YahooImporter)
    tasks.register(importers.LinkedInImporter)
Example #14
                url_boleto = gera_boleto_bradesco(user.id, inv)

                email = user.email
                if email is not None and len(email) > 0:
                    bcc_list = ['*****@*****.**', user.email]
                else:
                    bcc_list = ['*****@*****.**']
                msg = EmailMessage()
                msg.subject = 'Teste: Cobrança de mensalidade'
                #temp = request.META
                # if request is None:
                msg.body = render_to_string(
                    'async_tasks/email_cobranca_mensalidade.html', locals())
                # else:
                #    from celery.schedules import discard_all
                #    discard_all()
                #    return render_to_response('async_tasks/email_cobranca_mensalidade.html', locals())
                # msg.from = 'GestoPSI <*****@*****.**>'
                msg.to = ['*****@*****.**', ]
                msg.bcc = bcc_list
                msg.content_subtype = "html"  # Main content is now text/html
                msg.send()

        logger.info("CheckAndCharge Finished.\n\n")

tasks.register(CheckAndCharge)


def check_and_charge():
    CheckAndCharge().run()
Example #15
def job(_context, class_, name=None):
    if name is not None:
        class_.name = name
    tasks.register(class_)
Example #16
        raise ImproperlyConfigured("when used celery<3.1, djcelery is required!")

    if 'djcelery' not in settings.INSTALLED_APPS:
        from django.core.exceptions import ImproperlyConfigured
        raise ImproperlyConfigured("djcelery not in INSTALLED_APPS!")

from beproud.django.notify import notify_now


class Notify(Task):

    def run(self, targets, notify_type, extra_data={}, include_media=None, exclude_media=[],
            max_retries=3, retry_countdown=10, **kwargs):
        try:
            return notify_now(
                targets,
                notify_type,
                extra_data=extra_data,
                include_media=include_media,
                exclude_media=exclude_media,
            )
        except Exception, e:
            return self.retry(
                exc=e,
                countdown=retry_countdown,
                max_retries=max_retries,
            )


tasks.register(Notify)
Example #17
from django.conf import settings
from django.core.cache import cache
from django.core.files.storage import get_storage_class

from celery.registry import tasks
from celery.task import Task

MAX_RETRIES = getattr(settings, 'QUEUED_REMOTE_STORAGE_RETRIES', 5)
RETRY_DELAY = getattr(settings, 'QUEUED_REMOTE_STORAGE_RETRY_DELAY', 60)

class SaveToRemoteTask(Task):
    max_retries = MAX_RETRIES
    default_retry_delay = RETRY_DELAY

    def run(self, name, local, remote, cache_key, **kwargs):
        local_storage = get_storage_class(local)()
        remote_storage = get_storage_class(remote)()

        try:
            remote_storage.save(name, local_storage.open(name))
        except:
            # something went wrong while uploading the file, retry
            self.retry([name, local, remote, cache_key], **kwargs)
            return False

        cache.set(cache_key, True)
        return True

tasks.register(SaveToRemoteTask)
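A hedged usage sketch for the task above (not from the original module; the dotted storage-backend paths and cache key are placeholders, and the Celery 2.x `delay` classmethod is assumed):

cache_key = 'queued_storage/avatars/avatar.png'     # hypothetical cache key
SaveToRemoteTask.delay(
    'avatars/avatar.png',                           # name of the file to copy
    'django.core.files.storage.FileSystemStorage',  # local backend, dotted path
    'storages.backends.s3boto.S3BotoStorage',       # remote backend, dotted path
    cache_key,
)
# cache.get(cache_key) becomes True once the upload has succeeded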
Example #18
from celery.task import Task
from celery.registry import tasks
from subprocess import *
from django_wikinetwork import settings

class AnalyseTask(Task):
    def run(self, lang, options):
        import os
        from glob import glob
        
        logger = self.get_logger()
        logger.info("Running: %s" % (lang,))
        
        files = '%s/%swiki-*_rich.pickle' % (settings.DATASET_PATH, lang,)
        
        # find the most recent file
        fn = sorted(glob(files))[-1]
        logger.info("Running: %s, filename: %s" % (lang, fn))
        
        cmd = "/sra0/sra/setti/Source/wiki-network/analysis.py --as-table --group %s %s" % (' '.join(options), fn)
        logger.info(cmd)
        
        p = Popen(cmd, shell=True, stderr=PIPE)
        
        return fn
        
        
tasks.register(AnalyseTask)
Example #19
from celery import task
from celery.task import PeriodicTask
from celery.registry import tasks
from datetime import timedelta
from datetime import datetime
from django.core import management    

# Test only
# Deprecated, please see settings.py CELERYBEAT_SCHEDULE
class MyTask(PeriodicTask):
    run_every = timedelta(minutes=1)

    def run(self, **kwargs):
        self.get_logger().info("Time now: " + datetime.now())
        print("Time now: " + datetime.now())

tasks.register(MyTask)

#Test only
@task()
def add(x, y):
    print 'add****'
    return x + y

@task()
def update_index():
    management.call_command('update_index')
Example #20
from celery.task import Task
from celery.registry import tasks
from django.core.files.base import ContentFile


class AsyncSaveInBackup(Task):
    def run(self, primary_storage, backup_storage, filename):
        primary_file = primary_storage.open(filename)
        real_filename = primary_file.real_filename
        primary_file = ContentFile(primary_file.read())
        primary_file.name = real_filename
        backup_storage.save(filename, primary_file)


class AsyncDeleteFromBackup(Task):
    def run(self, backup_storage, filename):
        backup_storage.delete(filename)


tasks.register(AsyncSaveInBackup)
tasks.register(AsyncDeleteFromBackup)
Example #21
        self.partners_gates = {}
        for p in Partner.objects.select_related().filter(gate__isnull=False):
            self.partners_gates[p.id] = self.gate_interfaces[
                p.gate.gate_module]

    def run(self, queue_item):
        status_message = str(datetime.now())

        queue_item.status = STATUS_SENDING
        queue_item.status_message = status_message
        queue_item.save()

        gate = self.partners_gates[queue_item.partner_id]

        try:
            gate.send(queue_item)
            queue_item.status = STATUS_OK
            queue_item.status_message = ''
        except ProviderFailure as ex:
            queue_item.status = STATUS_PROVIDER_FAILURE
            queue_item.status_message = str(ex.encode('utf8'))
        except Exception as ex:
            queue_item.status = STATUS_INNER_FAILURE
            queue_item.status_message = str(ex.encode('utf8'))
        finally:
            queue_item.save()


tasks.register(SendSms)
Example #22
from celery.task import Task
from celery.registry import tasks

from threadless_router.router import Router

from aremind.apps.adherence.models import Feed


class ReminderSchedulerTask(Task):
    def run(self):
        router = Router()
        app = router.get_app("aremind.apps.adherence")
        app.cronjob()


tasks.register(ReminderSchedulerTask)


class FeedUpdatesTask(Task):
    def run(self):
        return Feed.objects.fetch_feeds()


tasks.register(FeedUpdatesTask)
Example #23
from celery.registry import tasks
from celery.task import Task
from celery import task
from django.core import management


# This is having some problems with celery 3.0.13 and mod_wsgi
class CreateInstancesTask(Task):
    def run(self, pk):
        management.call_command('create_instances', verbosity=0, pk=pk)


tasks.register(CreateInstancesTask)


@task()
def create_instances_task(pk):
    management.call_command('create_instances', verbosity=0, pk=pk)
Example #24
from theme.models import Theme

class ThemeDelegate(Task):
	""" This task will be responsible for getting the ball rolling for finding 
	    information for this task
	"""
	name = "theme_delegate"

	def run(self, theme_id):
		if theme_id:
			try:
				theme = Theme.objects.get(pk=theme_id)
				event_type = theme.event_type
				event_types = [event_type] if event_type else []
				print "Calling basic agent to search for theme: %s" % theme
				gsa = GazetteerSearchAlgorithm()
				ba = BasicAgent()
				reports = ba.search(event_types=event_types, algorithm=gsa)
				if reports:
					print "We found %s reports based on our theme %s" % (len(reports), theme)
				else:
					print "No reports found for our theme %s" % theme
			except Theme.DoesNotExist:
				print "No theme exists with id %s" % theme_id


tasks.register(ThemeDelegate)
Example #25
from celery.registry import tasks
from datetime import datetime
from brigitte.repositories.models import RepositoryUpdate

class UpdateGitoliteTask(Task):
    default_retry_delay = 10
    max_retries = 3

    def run(self, **kwargs):
        logger = self.get_logger(**kwargs)

        try:
            logger.info('updating gitolite..')
            if RepositoryUpdate.objects.filter(is_exported=False).count() > 0:
                from brigitte.repositories.utils import update_gitolite
                update_gitolite()
                RepositoryUpdate.objects.filter(is_exported=False).update(
                    is_exported=True,
                    exported=datetime.now()
                )
                logger.info('updated!')
            else:
                logger.info('no update needed!')
            return True
        except Exception, exc:
            logger.error('failed: %s' % exc)
            self.retry([], kwargs, exc=exc)
            return False

tasks.register(UpdateGitoliteTask)
Example #26
from celery.task     import Task
from celery.registry import tasks
from emp.apps.videos.utils    import convert_uploaded_video

""" Task to be started from videos.views.video_upload to process uploaded video
Process contained in videos.utils """
class ProcessVideoTask(Task):
	def run(self, video_id, **kwargs):
		convert_uploaded_video(video_id) # convert video, pass in Video id for further processing

tasks.register(ProcessVideoTask)
Example #27
                profile.mobile_phone,
                profile.email2,
                profile.url,
                profile.url2,
                profile.dob,
                profile.ssn,
                profile.spouse,
                profile.department,
                profile.education,
                profile.student,
                profile.remember_login,
                profile.exported,
                profile.direct_mail,
                profile.notes,
                profile.admin_notes,
                profile.referral_source,
                profile.hide_in_search,
                profile.hide_address,
                profile.hide_email,
                profile.hide_phone,
                profile.first_responder,
                profile.agreed_to_tos,
                profile.original_username,
                '\n',
            ]
            data_rows.append(data_row)

        return render_excel(filename, field_list, data_rows, '.csv')

tasks.register(ExportProfilesTask)
Example #28
class AddProject_Run(Task):
    def run(self,svn_path='',git_path='',svn_user='',svn_password=''):
        
        try:
     

            resultsFile=os.path.join(ANSIBLE_TEMP,str(self.request.id)+'.out').replace('\\','/')
            result=open(resultsFile,'w')
            if not os.path.exists(git_path):
                os.mkdir(git_path)
            cmd='svn checkout {svn_path} --username {svn_user} --password {svn_password} {git_path}'.format(svn_path=svn_path,svn_user=svn_user,svn_password=svn_password,git_path=git_path)
            print cmd
            p=subprocess.Popen(cmd, shell=True, universal_newlines=True, stdin=subprocess.PIPE,stdout=result, stderr=result)
            p.wait()

            result.close()
            #t=AnsibleTask.objects.get(task_id=self.request.id)
           
           # t.status='完成'
           # t.save()
        except Exception, e:
            #t=AnsibleTask.objects.get(task_id=self.request.id)
           
            #t.status='错误'
            #t.save()
            print e

        return "OK"
        
tasks.register(AddProject_Run)
Example #29
#            processing.trigger_update(logger=logger)
#        else:
#            logger.info('start internal poller')
#            processing.poll_internal(logger=logger)

#tasks.register(PollingTask)


class PollingTaskExternal(PeriodicTask):
    run_every = timedelta(seconds=5)

    def run(self, **kwargs):
        logger = self.get_logger(**kwargs)
        logger.info('PollingTaskExternal')
        logger.info('start external poller')
        processing.poll_external(logger=logger)


tasks.register(PollingTaskExternal)


class ProcessResponses(PeriodicTask):
    run_every = timedelta(seconds=5)

    def run(self, **kwargs):
        logger = self.get_logger(**kwargs)
        logger.info('ProcessResponses')
        processing.process_responses(logger)


tasks.register(ProcessResponses)
Example #30
from tendenci.apps.site_settings.utils import get_setting
from tendenci.apps.wp_exporter.models import XMLExport
from tendenci.apps.wp_exporter.utils import gen_xml

class WPExportTask(Task):

    def run(self, form, user, **kwargs):
        xml = gen_xml(form.cleaned_data)
        file_content = ContentFile(xml.content.encode(encoding='UTF-8',errors='strict'))

        export = XMLExport()
        export.author = user
        export.xml.save('export.xml', file_content, save=True)

        if user.email:
            context_instance = {
                'SITE_GLOBAL_SITEDISPLAYNAME': get_setting('site', 'global', 'sitedisplayname'),
                'SITE_GLOBAL_SITEURL': get_setting('site', 'global', 'siteurl'),
                'export': export,
            }
            subject = ''.join(render_to_string(('notification/wp_export/short.txt'), context_instance).splitlines())
            body = render_to_string(('notification/wp_export/full.html'), context_instance)

            #send_mail(subject, body, settings.DEFAULT_FROM_EMAIL, [user.email], fail_silently=False)
            email = EmailMessage(subject, body, settings.DEFAULT_FROM_EMAIL, [user.email])
            email.content_subtype = 'html'
            email.send(fail_silently=True)

tasks.register(WPExportTask)
Example #31
try:
    from celery.task import PeriodicTask
    from celery.registry import tasks
except ImportError:
    PeriodicTask = object
    tasks = None

from datetime import timedelta
import logging
from .models import Answer
from . import settings as local_settings

logger = logging.getLogger('crowdsourcing.tasks')


class SyncFlickr(PeriodicTask):
    run_every = timedelta(minutes=5)

    def run(self, *args, **kwargs):
        logger.debug("Syncing flickr")
        Answer.sync_to_flickr()


if tasks and not local_settings.SYNCHRONOUS_FLICKR_UPLOAD:
    tasks.register(SyncFlickr)
Example #32
from datetime import datetime
from celery.task import Task
from celery.registry import tasks
from tendenci.apps.locations.importer.tasks import ImportLocationsTask

tasks.register(ImportLocationsTask)
Example #33
        text, user.phone_number, user.name)).save()


class ContentProducerTask(Task):
    def run(self, content_producer_pk):
        content = ContentProducer.objects.get(pk=content_producer_pk)
        self.text = content.text_body
        conditions = content.conditions.all()
        end_execution = datetime.now() + timedelta(seconds=15)

        users = SimpleUser.objects.all()
        # Build the query with the given conditions
        for condition in conditions:
            filters = Q(**{condition.field: condition.value})
            users = users.filter(filters)

        jobs = TaskSet(tasks=[
            send_sms.subtask((
                content.text_body,
                user,
            ),
                             expires=end_execution) for user in users
        ])
        jobs.apply_async()

        # Check for time limit exceeded
        # Then, revoke evey task waiting for execution


tasks.register(ContentProducerTask)
Example #34
class ScaleScheduler(PeriodicTask):

    run_every = timedelta(seconds=5)  # Used for tick frequency

    def is_due(self, last_run_at):
        for app in Application.objects.exclude(
                to_be_run_at__gt=datetime.now()):
            app.to_be_run_at = datetime.now() + timedelta(
                seconds=app.scale_interarrival)
            app.save()
            Scale.delay(app)
        return (False,
                self.timedelta_seconds(ScaleScheduler.run_every.run_every))


tasks.register(ScaleScheduler)


class Scale(Task):
    def run(self, app):
        # Create an instance of the logger
        log = get_logger('vdi')

        # Create the cluster object to help us manage the cluster
        cluster = AppCluster(app.pk)

        # Clean up all idle users on all nodes for this application cluster
        log.debug('APP NAME %s' % app.name)
        cluster.logout_idle_users()

        log.debug("Checking for active clusters")
Example #35
from django_wikinetwork import settings


class AnalyseTask(Task):
    def run(self, lang, options):
        import os
        from glob import glob

        logger = self.get_logger()
        logger.info("Running: %s" % (lang, ))

        files = '%s/%swiki-*_rich.pickle' % (
            settings.DATASET_PATH,
            lang,
        )

        # find the most recent file
        fn = sorted(glob(files))[-1]
        logger.info("Running: %s, filename: %s" % (lang, fn))

        cmd = "/sra0/sra/setti/Source/wiki-network/analysis.py --as-table --group %s %s" % (
            ' '.join(options), fn)
        logger.info(cmd)

        p = Popen(cmd, shell=True, stderr=PIPE)

        return fn


tasks.register(AnalyseTask)
Example #36
                xml_content = xml_response.read()

                #
                docket_xml_filepath = "%s%s" % (XML_DOWNLOAD_FOLDER_PATH,
                                                docket_filename)
                with open(docket_xml_filepath, 'w') as docket_file:
                    docket_file.write(xml_content)

                with open(result_csv_filename, 'a') as result_csv_file:
                    result_writer = csv.writer(result_csv_file,
                                               delimiter=',',
                                               quotechar='"')
                    result_writer.writerow([counter, docket_filename])

        # Renaming the file for status COMPLETE
        os.rename(result_csv_filename, completed_csv_filename)

    return


tasks.register(downloader)


def main():
    args = get_args()
    downloader.delay(recap_download_csv_file_name=args.docket_csv)


if __name__ == '__main__':
    main()
Example #37
        if response.status != 200 or response.reason != 'OK':
            raise EventTracker.FailedEventRequest(
                "The tracking request failed. "
                "Non-200 response code was: "
                "[%s] reason: [%s]" % (response.status, response.reason)
            )

        # Successful requests will generate a log
        response_data = response.read()
        if response_data != '1':
            return False

        return True

tasks.register(EventTracker)


class PeopleTracker(EventTracker):
    endpoint = mp_settings.MIXPANEL_PEOPLE_ENDPOINT
    event_map = {
        'set': '$set',
        'add': '$increment',
        'track_charge': '$append',
    }

    def run(
        self, event_name, properties=None, token=None, test=None, **kwargs
    ):
        """
        Track an People event occurrence to mixpanel through the API.
Example #38
        execute the check.

        Return tuple of (RESULT, MSG).
        MSG is empty for UP states.
        """

        plugin = get_cls_by_name(plugin_cls_name)()
        plugin.set_logger(logger or get_task_logger('django'))

        msg = None
        try:
            msg = plugin.run_check(settings)
        except PluginImplementationError as e:
            return (Service.STATE_UNKNOWN, e.message)
        except PluginConfigurationError as e:
            return (Service.STATE_UNKNOWN, "Plugin {} is misconfigured: {}".format(
                plugin_cls_name, e.message))
        except ServiceCheckFailed as e:
            return (Service.STATE_UNKNOWN, "Service check failed: " + e.reason)
        except ServiceIsDown as e:
            return (Service.STATE_DOWN, e.message)
        except ServiceHasWarning as e:
            return (Service.STATE_WARNING, e.message)
        except Exception as e:
            return (Service.STATE_UNKNOWN, 'Unknown exception: {}: {}'.format(
                e.__class__, e))   

        return (Service.STATE_OK, msg or '')

tasks.register(ArgusChecker)
Example #39
@task()
def fetch_rate(symbol_from, symbol_to):
    logger.info('Requesting currency rate from %s to %s ', symbol_from,
                symbol_to)
    try:
        ratehandler = urlopen(RATE_URL % (symbol_from, symbol_to))
        new_rate = Decimal(ratehandler.read().rstrip())
        ratehandler.close()

        cache.set('%s%s' % (symbol_from, symbol_to), new_rate,
                  CURRENCY_RATE_VALIDITY)

        try:
            rate = CurrencyRate.objects.get(source=symbol_from,
                                            destination=symbol_to)
        except ObjectDoesNotExist:
            rate = CurrencyRate(source=symbol_from, destination=symbol_to)

        rate.rate = new_rate
        rate.last_update = datetime.utcnow()
        rate.save()
        return new_rate
    except URLError:
        logger.error('Failed to get currency rate from %s to %s', symbol_from,
                     symbol_to)
        raise MoneypitException('Failed to get currency')


tasks.register(fetch_rate)
Example #40
from celery.contrib.abortable import Task
from celery.registry import tasks
from sample.scripts.stream import StreamManage
from pprint import pprint

class StreamTask(Task):
	
	sm = None
	th = None
	
	def __init__(self):
		pass
		
	def run(self, coords, **kwargs):
		try:
			print "StreamTask ::: Stream already instantiated, %s" % coords['stop']
			try:
				self.th = self.sm.stream(coords, obj = self.th)
			except AttributeError, e:
				print "StreamTask ::: the asshole is not an instance %s" % e
				self.retry(args = [coords], exc=e, countdown=30, kwargs=kwargs)
		except KeyError, e:				
			print "StreamTask ::: Stream instantiated, for exception %s" % e
			self.sm = StreamManage()
			self.th = self.sm.stream(coords)
		print "From sample.task  Task is %s" % self.request.id + " the returned result from script called is %s" % self.th + " instance of Straming is %s" % self.sm
		return 'task executed'
		
tasks.register(StreamTask)
Example #41
            url = Response.generateMediaURL(photo)
            try:
                # valid = image_validator.validate(tag, url)
                # if valid == 1:
                # 	logger.info(database.write_item("Pictures", "url", tag=tag, id=id, url=url))
                # else:
                # 	logger.debug("HERE " + str(valid))
                # 	raise
                database.write_item("Pictures", "url", tag=tag, id=id, url=url)
            except:
                continue
            id += 1
        logger.info("All images for %s is stored to database" % tag)


tasks.register(StoreAllPhotos)

# Entire file needs to be refactored


# task object: stores all variables and methods which are purpose-specific
# Template - to be further implemented with template functions
class Task(object):
    def __init__(self):
        logger.info("Task created")

    def Run():
        logger.info("Start task run")


# Refactor to remove all static methods
Example #42
class SendTestRabbitMQ2(Task):
    def run(self, N, args):
        return


class SendResponse(Task):
    def run(self):
        with Connection('amqp://*****:*****@localhost:5672//') as conn:
            try:
                worker = Worker(conn)
                worker.run()
            except KeyboardInterrupt:
                print('bye bye')


@task
def Deliver(self, args):
    print(args)


tasks.register(Deliver)
tasks.register(CreateDeviceTask)
tasks.register(UpdatePersonalAccount)
tasks.register(SendChangePersonalAccountReport)
tasks.register(SendGetPayBillReport)
tasks.register(SendAuthReport)
tasks.register(SendAuthReportChecker)
tasks.register(SendTestRabbitMQ)
tasks.register(SendResponse)
Example #43
                                                            ", ".join(errors)),
                        state="ERROR")

        errors = []
        if not repo_utils.rebalance_sync_schedule(errors):
            for error in errors:
                self.update(error, state="ERROR")

        if self.errors:
            raise TaskExecutionError("Created %s (%s), but encountered errors: "
                                     "%s" %
                                     (name, repo_id, self.errors))
        else:
            return "Successfully created repo %s" % repo['name']

tasks.register(CreateRepo)


class CloneRepo(TrackedTask):
    def run(self, clone_id, name=None, parent=None, groups=None, filters=None):
        if groups is None:
            groups = []
        if filters is None:
            filters = []

        repoapi = RepositoryAPI()

        try:
            repoapi.clone(parent['id'], clone_id, name,
                          relative_path=clone_id)
            self.update("Cloned %s to %s" % (parent['id'], clone_id))
Example #44
    def run(self, dry_run=False, reset=False):
        logger = self.get_logger()
        logger.info("Starting aggregates task")
        update_all_aggregates(dry_run, reset)
        logger.info("Stopping aggregates task")


class DailyAlertsTask(PeriodicTask):
    alert_frequency = 'daily'

    name = 'openrural.dailyalerts'
    run_every = crontab(hour=0, minute=1)

    def run(self):
        logger = self.get_logger()
        logger.info('Starting %s alerts task' % self.alert_frequency)
        send_alerts(['-f', self.alert_frequency])
        logger.info('Stopping %s alerts task' % self.alert_frequency)


class WeeklyAlertsTask(DailyAlertsTask):
    alert_frequency = 'weekly'

    name = 'openrural.weeklyalerts'
    run_every = crontab(hour=1, minute=1, day_of_week='wednesday')


tasks.register(AggregatesTask)
tasks.register(DailyAlertsTask)
tasks.register(WeeklyAlertsTask)
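A side note on the two kinds of schedule used by the periodic tasks in these examples (a sketch, not part of the original module): Celery's `crontab` describes absolute times of day, while a plain `timedelta` describes a relative interval.

from datetime import timedelta
from celery.schedules import crontab

crontab(hour=0, minute=1)                            # every day at 00:01
crontab(hour=1, minute=1, day_of_week='wednesday')   # Wednesdays at 01:01
timedelta(days=1)                                    # once a day, relative to the last run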
Example #45
    name = 'openrural.incidents'
    run_every = timedelta(days=1)

    def run(self, clear=False):
        logger = self.get_logger()
        logger.info("Starting incidents task")
        WhitevilleIncidentsScraper(clear=clear).run()
        logger.info("Stopping incidents task")


class WhitevilleArrestsTask(PeriodicTask):

    name = 'openrural.arrests'
    run_every = timedelta(days=1)

    def run(self, clear=False):
        logger = self.get_logger()
        logger.info("Starting arrests task")
        WhitevilleArrestsScraper(clear=clear).run()
        logger.info("Stopping arrests task")

tasks.register(CorporationsTask)
tasks.register(AddressesTask)
tasks.register(PropertyTransactionsTask)
tasks.register(RestaurantInspectionsTask)
tasks.register(WhitevilleNewsTask)
tasks.register(WhitevilleSchoolsTask)
tasks.register(WhitevilleIncidentsTask)
tasks.register(WhitevilleArrestsTask)
Example #46
        
        try:
     

            resultsFile=os.path.join(ANSIBLE_TEMP,str(self.request.id)+'.out').replace('\\','/')
            hostFile=os.path.join(ANSIBLE_TEMP,str(self.request.id)+'.hosts').replace('\\','/')
            f=open(hostFile,'w')
            for ip in hosts:
                f.write(ip+'\n')
            f.close()
            result=open(resultsFile,'w')
            #cmd='ansible -i "{host}" all -m shell -f 100 -a "{cmd}"'.format(host=hostFile,cmd=cmd)
            cmd='{cmd} -i "{host}"'.format(host=hostFile,cmd=cmd)
            print cmd
            p=subprocess.Popen(cmd, shell=True, universal_newlines=True, stdin=subprocess.PIPE,stdout=result, stderr=result)
            p.wait()
            result.close()
            t=AnsibleTask.objects.get(task_id=self.request.id)
           
            t.status='完成'  # "completed"
            t.save()
        except Exception, e:
            t=AnsibleTask.objects.get(task_id=self.request.id)
           
            t.status='错误'  # "error"
            t.save()

        return "OK"
        
tasks.register(Ansible_Run)
Example #47
import requests
import logging

from celery.decorators import task
from celery.registry import tasks
from celery.task import Task

class IssueTask(Task):
	def __init__(self):
		logging.info("My log")

	def run(self, url, method, urlData):
		try:
			if (method == "POST"):
				logging.info(url)
				logging.info("Here to POST!")
				r = requests.post("http://localhost:8000/creative/", data=urlData)
				logging.info(r.status_code)
			else:
				requests.get(url, data=urlData)
		except:
			logging.info("Something gone wrong")

tasks.register(IssueTask)
Example #48
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        """
        If the processing failed we should mark that in the database.

        Assuming that the exception raised is a subclass of
        BaseProcessingFail, we can use that to get more information
        about the failure and store that for conveying information to
        users about the failure, etc.
        """
        entry_id = args[0]
        mark_entry_failed(entry_id, exc)

        entry = mgg.database.MediaEntry.query.filter_by(id=entry_id).first()
        json_processing_callback(entry)
        mgg.database.reset_after_request()

    def after_return(self, *args, **kwargs):
        """
        This is called after the task has returned, we should clean up.

        We need to rollback the database to prevent ProgrammingError exceptions
        from being raised.
        """
        # In eager mode we get DetachedInstanceError, we do rollback on_failure
        # to deal with that case though when in eager mode.
        if not celery.app.default_app.conf['CELERY_ALWAYS_EAGER']:
            mgg.database.reset_after_request()


tasks.register(ProcessMedia)
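For context (a hedged, generic sketch rather than MediaGoblin code): Celery itself invokes these hooks around task execution, with roughly the signatures below.

class ExampleTask(Task):
    def run(self, entry_id):
        pass                    # the actual work

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        pass                    # called only if run() raised an exception

    def after_return(self, status, retval, task_id, args, kwargs, einfo):
        pass                    # always called once the task has returned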
Example #49
            ni, created = NewsItem.objects.get_or_create(user=post.user_to,
                                                         post=post)
        elif isinstance(post, SharePost):
            ni, created = NewsItem.objects.get_or_create(user=post.user_to,
                                                         post=post)
        elif isinstance(post, PagePost):
            users = post.page.get_lovers_active()
            for user in users:
                ni, created = NewsItem.objects.get_or_create(user=user,
                                                             post=post)
        else:
            ni, created = NewsItem.objects.get_or_create(user=post.user_to,
                                                         post=post)


tasks.register(UpdateNewsFeeds)


class DeleteNewsFeeds(Task):
    def run(self, post, user=None, **kwargs):
        from models import NewsItem, FriendPost
        #rdb.set_trace()
        post_wrapper = post
        if user:
            users = [user]
        else:
            users = post_wrapper.get_involved()
        for user in users:
            post_news = NewsItem.objects.filter(user=user,
                                                post=post_wrapper.post)
            if post_news:
Example #50
from celery.task import Task
from celery.registry import tasks
from django.core import management


class DeleteInactiveUsers(Task):
    def run(self, **kwargs):
        management.call_command('cleanupregistration',
                                verbosity=0,
                                interactive=False)


tasks.register(DeleteInactiveUsers)


class SendReminderToInactiveUsers(Task):
    def run(self, **kwargs):
        management.call_command('send_reminder',
                                verbosity=0,
                                interactive=False)


tasks.register(SendReminderToInactiveUsers)
Example #51
from celery.task import Task
from celery.registry import tasks

from stats.models import DataPoint

class DataPointUpdate(Task):
    def run(self, key, value, date, **kwargs):
        point, created = DataPoint.objects.get_or_create(key=key, date_added=date)
        if value > 0:
            point.value += value
        elif value < 0:
            point.value -= abs(value)
        point.save()
tasks.register(DataPointUpdate)
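A hedged usage sketch (assuming the Celery 2.x `delay` classmethod; the key and value are made up):

import datetime
DataPointUpdate.delay('signups', 1, datetime.date.today())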
Example #52
from celery.registry import tasks
from celery.task import Task
from django.template.loader import render_to_string
from django.utils.html import strip_tags

from django.core.mail import EmailMultiAlternatives


class SignUpTask(Task):
    def product(x, y):
        return x * y


tasks.register(SignUpTask)
Example #53
                call_command('compilemessages')
                os.chdir(curr)

            logger.info("handling template")
            # handling the template exception
            os.chdir("templates")
            # TODO: fix a way for unix platforms
            os.system ("xcopy /S /Y ..\locale\*.* .\locale")
            call_command('makemessages',all=True)
            os.system ("xcopy /S /Y .\locale\*.* ..\locale")

            logger.info("coming back to curr folder")
            os.chdir(curr)
            call_command('compilemessages')
            '''
        except Exception, e:
            logger.info ( "Exception:  %s" % str( e) )
            raise e
        finally:
             os.chdir(curr)
        return True


tasks.register( findMovieByTitleTask )
tasks.register( findPersonByNameTask )
tasks.register( findShowByIDTask )
tasks.register( fillActorDataTask)
tasks.register( fillShowEpisodesTask )

tasks.register(maintenanceTranslateTask)
tasks.register(maintenanceReIndexTask)
Example #54
        html_url += "&news=%s" % form.cleaned_data.get('news')
        html_url += "&news_days=%s" % form.cleaned_data.get('news_days')
        html_url += "&jobs=%s" % form.cleaned_data.get('jobs')
        html_url += "&jobs_days=%s" % form.cleaned_data.get('jobs_days')
        html_url += "&pages=%s" % form.cleaned_data.get('pages')
        html_url += "&pages_days=%s" % form.cleaned_data.get('pages_days')

        if template.zip_file:
            if hasattr(settings, 'USE_S3_STORAGE') and settings.USE_S3_STORAGE:
                zip_url = unicode(template.get_zip_url())
            else:
                zip_url = unicode("%s%s" % (site_url, template.get_zip_url()))
        else:
            zip_url = unicode()

        #sync with campaign monitor
        try:
            t = CST(template_id=template.template_id)
            t.update(unicode(template.name), html_url, zip_url)
        except BadRequest, e:
            messages.add_message(
                request, messages.ERROR,
                'Bad Request %s: %s' % (e.data.Code, e.data.Message))
            return redirect('campaign_monitor.campaign_generate')
        except Exception, e:
            messages.add_message(request, messages.ERROR, 'Error: %s' % e)
            return redirect('campaign_monitor.campaign_generate')


tasks.register(CampaignGenerateTask)
Example #55
from celery.registry import tasks
from celery.task import Task



class SignupTask(Task):
    def run(self,user):
        return "manikandan"


tasks.register(SignupTask)
Example #56
                msg = EmailMultiAlternatives(subject, text_content, from_email,  [to])
                msg.attach_alternative(html_content, "text/html")
                msg.send()




class SignUpTask(Task):

    def run(self, user):
        """This method is responsible for asynchronously 
           sending welcome email to users when they sign up. 
        
        Keyword agruments:
        user- user object who just signed up

        """
        email = user.email
        user = user
        subject, from_email, to = 'Welcome', '*****@*****.**', email
        html_content = render_to_string('email_signup.html', {'user':user.first_name})
        text_content = strip_tags(html_content)
        # create the email, and attach the HTML version as well.
        msg = EmailMultiAlternatives(subject, text_content, from_email,  [to])
        msg.attach_alternative(html_content, "text/html")
        msg.send()
        

# register the task with celery
tasks.register(SignUpTask)
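A hedged usage note (assuming the Celery 2.x `delay` classmethod): a signup view would typically queue the email right after creating the user, for example:

SignUpTask.delay(new_user)    # new_user: the freshly created auth User instance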
Example #57
                )
                ravenclient.captureException()
            except Profile.DoesNotExist:
                # user must have profile.
                logger.critical(
                    "Caught DoesNotExist exception for {}, user_id{}, \
                    in {}".format(
                        Profile.__name__,
                        user_id, __file__
                    ), exc_info=True
                )
                ravenclient.captureException()
            except Exception:
                logger.critical(
                    "Caught exception in {}".format(__file__),
                    exc_info=True
                )
                ravenclient.captureException()
            return False
        else:
            logger.error(
                "Email type not given, {}, user_id {}".
                format(
                    __file__,
                    user["id"]
                )
            )

# Registered the task
tasks.register(BaseSendMail)