def get_rotation_script(request):
    workspace = request.session.get('workspace')
    # r_script = Pipeline.objects.filter(workspace=workspace, name='image renditions').distinct()
    triggers_name = 'upload'
    items = request.POST.getlist('items')
    items = Item.objects.filter(pk__in=items)
    rotation = request.POST.get('rotation')
    params = {'adapt_image': {'rotation': int(rotation)}}
    try:
        _run_pipelines(items, triggers_name, request.user, workspace, params)
        # Actually launch all processes added by _run_pipelines() (after
        # committing the transaction, to make the new data visible to the
        # MProcessor).
        transaction.commit()
        processor.run()
    except Exception as ex:
        logger.exception(ex)
        return HttpResponse(simplejson.dumps({'success': False}))
    return HttpResponse(simplejson.dumps({'success': True}))
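# A hedged usage example, not from the source: it assumes this view is routed
# at '/get_rotation_script/' (the URLconf is not shown) and that a session
# workspace and the 'demo' user already exist.
from django.test.client import Client

client = Client()
client.login(username='demo', password='demo')  # assumed credentials
# Rotate items 1 and 2 by 90 degrees through the 'upload' pipelines.
response = client.post('/get_rotation_script/', {
    'items': [1, 2],    # Item primary keys, read via request.POST.getlist()
    'rotation': '90',   # parsed with int() by the view
})
print(response.content)  # '{"success": true}' unless _run_pipelines() failed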
def add_items_to_ws(request):
    try:
        item_ids = request.POST.getlist('item_id')
        try:
            ws_id = request.POST.get('ws_id')
        except Exception as err:
            logger.debug('request POST get ws_id err: %s' % err)
        ws = Workspace.objects.get(pk=ws_id)
        user = request.user
        current_ws = request.session['workspace']
        remove = request.POST.get('remove')
        move_public = request.POST.get('move_public', 'false')
        items = Item.objects.filter(pk__in=item_ids)
        # item_imported is only consumed by the commented-out inbox code below.
        item_imported = []
        run_items = []
        for item in items:
            if _add_items_to_ws(item, ws, current_ws, remove):
                run_items.append(item)
        _run_pipelines(run_items, 'upload', user, ws)
        if remove == 'true':
            _remove_items(request, current_ws, items)
        #if len(item_imported) > 0:
        #    imported = Node.objects.get(depth=1, label='Imported', type='inbox', workspace=ws)
        #    time_imported = time.strftime("%Y-%m-%d", time.gmtime())
        #    node = Node.objects.get_or_create(label=time_imported, type='inbox', parent=imported, workspace=ws, depth=2)[0]
        #    node.items.add(*item_imported)
        resp = simplejson.dumps({'success': True, 'errors': []})
        # Actually launch all processes added by _run_pipelines() (after
        # committing the transaction, to make the new data visible to the
        # MProcessor).
        transaction.commit()
        processor.run()
        return HttpResponse(resp)
    except Exception as ex:
        # The outer try had no handler in the snippet; this mirrors the
        # error response used by get_rotation_script() above.
        logger.exception(ex)
        return HttpResponse(simplejson.dumps({'success': False, 'errors': [str(ex)]}))
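# Both views end with the same commit-then-launch sequence. A minimal sketch
# of a shared helper, assuming _run_pipelines() takes params as an optional
# fifth argument and returns the identifiers of the scheduled processes (as
# execpipes()/runmany() below suggest); the name _run_and_launch is
# illustrative, not from the source.
def _run_and_launch(items, trigger, user, workspace, params=None):
    # Schedule the pipeline processes for these items.
    if params is None:
        ret = _run_pipelines(items, trigger, user, workspace)
    else:
        ret = _run_pipelines(items, trigger, user, workspace, params)
    # Commit first so the MProcessor can see the newly created rows,
    # then wake it up to execute the scheduled processes.
    transaction.commit()
    processor.run()
    return ret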
def execpipes(self, trigger, filepaths):
    items = _create_items(filepaths, 'original', self.user, self.ws)
    ret = _run_pipelines(items, trigger, self.user, self.ws)
    print('Executed processes %s' % ' '.join(ret))
def runmany(self, times, trigger, pk):
    # Load the same item repeatedly to schedule its pipelines `times` times.
    items = [Item.objects.get(pk=pk) for x in xrange(int(times))]
    ret = _run_pipelines(items, trigger, self.user, self.ws)
    print('Executed processes %s' % ' '.join(ret))
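# execpipes() and runmany() read self.user and self.ws, so they presumably
# live on a shell/management-command-style class. A minimal sketch of such a
# host, assuming the user and workspace are looked up by primary key; the
# class name and constructor are illustrative, not from the source.
from django.contrib.auth.models import User

class PipelineShell(object):
    def __init__(self, user_pk, ws_pk):
        self.user = User.objects.get(pk=user_pk)
        self.ws = Workspace.objects.get(pk=ws_pk)

    # execpipes() and runmany() as defined above would be methods here;
    # e.g. PipelineShell(1, 5).runmany(10, 'upload', 42) would schedule
    # the 'upload' pipelines for item 42 ten times.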