# Example #1 (score: 0)
def upload_session_finished(request):
    """Finalize an upload session.

    Imports the files staged under ``/tmp/<session>`` into the current
    workspace, commits the transaction so the MProcessor can see the new
    rows, launches the queued processes and answers with a JSON payload
    carrying the number of items being processed and the label of the
    inbox node they were filed under.

    NOTE(review): the matching ``except`` for the outer ``try`` is not
    visible in this chunk -- the block appears truncated here.
    """
    try:
        # Local import -- presumably to avoid a circular import at module
        # load time; TODO confirm.
        from treeview.models import Node
        session = request.POST['session']
        workspace = request.session.get('workspace')
        user = User.objects.get(pk = request.session['_auth_user_id'])
        
        # Uploaded files for this session are staged in a per-session
        # temporary directory.
        tmp_dir = '/tmp/'+ session
        logger.debug('tmp_dir %s'%tmp_dir)


        inbox_label = None
        if os.path.exists(tmp_dir):
            # NOTE(review): items_deleted is never used below.
            items_deleted ,processes = import_dir(tmp_dir, user, workspace)

            uploaded = workspace.tree_nodes.get(depth = 1, label = 'Uploaded', type = 'inbox')
            try:
                # NOTE(review): logged at ERROR level although it reads like
                # debug output -- confirm intent.
                logger.error('processi : %s' % len(processes))
                if len(processes) > 0:
                    # Locate the inbox node under 'Uploaded' holding the
                    # first target of the first process started above.
                    inbox = Node.objects.get(parent = uploaded, items = processes[0].processtarget_set.all()[0].target_id)
                
                    logger.debug('----------------------inbox %s'%inbox)
                    inbox_label = inbox.label
                else: 
                    inbox_label = None
            
            except Exception, ex:
                logger.exception(ex)
                inbox_label = None
                #something strange, no inbox found
            

            # Total number of individual process targets queued by
            # import_dir() -- reported back to the client.
            item_in_progress = 0
            for process in processes:
                item_in_progress += process.processtarget_set.all().count()

            # Actually launch all processes added by import_dir() (after
            # committing the transaction, to make new data visible by the
            # MProcessor)
            transaction.commit()
            processor.run()

            # Best-effort removal of the staging directory; rmdir only
            # succeeds if import_dir() emptied it.
            try:
                os.rmdir(tmp_dir)
            except Exception, ex:
                logger.exception(ex)
 
            return HttpResponse(simplejson.dumps({'success': True, 'uploads_success': item_in_progress, 'inbox': inbox_label}))
# Example #2 (score: 0)
def get_rotation_script(request):
    workspace = request.session.get('workspace')
    # r_script = Pipeline.objects.filter(workspace = workspace, name = 'image renditions').distinct()
    triggers_name = 'upload'
    items = request.POST.getlist('items')
    items = Item.objects.filter(pk__in = items)
    rotation =  request.POST.get('rotation')
    params = {'adapt_image':{'rotation': int(rotation)}}
    try:
        _run_pipelines(items, triggers_name, request.user, workspace, params)

        # Actually launch all processes added by _run_pipelines() (after
        # committing the transaction, to make new data visible by the
        # MProcessor)
        transaction.commit()
        processor.run()
    except Exception, ex:
        logger.exception(ex)
        return HttpResponse(simplejson.dumps({'success': False}))
# Example #3 (score: 0)
def add_items_to_ws(request):
    """Copy (or move) a set of items into another workspace.

    Reads the item ids and the destination workspace id from POST, adds
    each item to the destination via ``_add_items_to_ws``, re-runs the
    'upload' pipelines for the items that were actually added, optionally
    removes them from the current workspace, then commits and kicks the
    MProcessor. Answers with a JSON success payload.

    NOTE(review): the matching ``except`` for the outer ``try`` is not
    visible in this chunk -- the block appears truncated here.
    """
    try:
        item_ids = request.POST.getlist('item_id')
        # NOTE(review): QueryDict.get() does not raise for a missing key,
        # so this inner try/except looks redundant -- confirm.
        try:
            ws_id = request.POST.get('ws_id')
        except Exception, err:
            logger.debug('request POST get ws_id err:  %s' % err)
            
        
        ws = Workspace.objects.get(pk = ws_id)
        user = request.user
        current_ws = request.session['workspace']
        remove = request.POST.get('remove')
        move_public = request.POST.get('move_public', 'false')
        items = Item.objects.filter(pk__in = item_ids)        
        
        # NOTE(review): item_imported is never populated or used (the code
        # that consumed it is commented out below).
        item_imported = []
        run_items = []
        
        # Only items actually added to the destination workspace get their
        # pipelines re-run.
        for item in items:
            if _add_items_to_ws(item, ws, current_ws, remove):
                run_items.append(item)

        _run_pipelines(run_items, 'upload', user, ws)

        # 'remove' arrives as the string 'true'/'false' from the client.
        if remove == 'true':
            _remove_items(request, current_ws, items)
                
        #if len(item_imported) > 0:
            #imported = Node.objects.get(depth = 1,  label = 'Imported',  type = 'inbox',  workspace = ws)
            #time_imported = time.strftime("%Y-%m-%d", time.gmtime())
            #node = Node.objects.get_or_create(label = time_imported,  type = 'inbox',  parent = imported,  workspace = ws,  depth = 2)[0]
            #node.items.add(*item_imported)
        
        resp = simplejson.dumps({'success': True, 'errors': []})

        # Actually launch all processes added by _run_pipelines() (after
        # committing the transaction, to make new data visible by the
        # MProcessor)
        transaction.commit()
        processor.run()

        return HttpResponse(resp)
# Example #4 (score: 0)
def upload_archive(request):
    try:
        import tempfile
        
        file_name = unquote(request.META['HTTP_X_FILE_NAME'])
        output_file_path = os.path.join('/tmp/', new_id())
        
        if not isinstance(output_file_path, unicode):
            file_name = unicode(output_file_path, 'utf-8')
        _write_file(request, output_file_path)
        if file_name.endswith('.zip'):
            import zipfile
            archive = zipfile.ZipFile(output_file_path, 'r')
        elif file_name.endswith('.tar') or file_name.endswith('.gz'):
            import tarfile
            archive = tarfile.TarFile(output_file_path, 'r')
        
        
        extracting_dir = tempfile.mkdtemp()
        
        logger.debug('extracting into %s...'%extracting_dir)
        archive.extractall(extracting_dir)
        logger.debug('extracted')
        import_dir(extracting_dir, request.user, request.session['workspace'], make_copy = True, recursive = True, force_generation = False, link = False, remove_orphans=False)
        shutil.rmtree(extracting_dir, True)

        # Actually launch all processes added by import_dir() (after
        # committing the transaction, to make new data visible by the
        # MProcessor)
        transaction.commit()
        processor.run()

        return HttpResponse(simplejson.dumps({'success': True}))

    except Exception, ex:
        transaction.rollback()
        logger.exception(ex)
        raise ex