def git_annex_copy_to(repository_path):
    u"""Push the binary contents to the remote repository."""
    # TODO: Next release with dynamic "origin"
    command = "git annex copy --fast --to origin"
    logger.info(command + " ")
    process = subprocess.Popen(command, shell=True, cwd=repository_path)
    process.wait()
def handle(self, *args, **options):
    u"""Synchronize each named repository and record the last synced revision.

    For every repository name in ``args``: run the full sync pipeline
    (git sync, mucua list, users, objects, deleted media) and then store
    the current HEAD SHA in ``lastSync.txt`` for the next run.
    Returns False if a repository name does not exist.
    """
    for repository in args:
        try:
            repository_instance = Repository.objects.get(name=repository)
        except Repository.DoesNotExist:
            return False
        logger.debug('Repository sync started ... [sync_repository]')
        repository_instance.sync_repository()
        logger.debug('Repository sync ......... [update_mucua_list]')
        update_mucuas_list(repository_instance)
        logger.debug('Repository sync .... [create_user_from_files]')
        create_user_from_files(repository_instance)
        logger.debug('Repository sync . [create_objects_from_files]')
        create_objects_from_files(repository_instance)
        logger.debug('Repository sync ...... [remove_deleted_media]')
        remove_deleted_media(repository_instance)
        # Update the lastSync marker file with the current HEAD SHA.
        # NOTE: the quotes in --pretty=format:'%H' end up in the output;
        # get_latest_media() strips them when reading the marker back.
        path = os.path.join(REPOSITORY_DIR, repository_instance.name)
        output = subprocess.check_output(
            ["git", "log", "--pretty=format:'%H'", "-n 1"], cwd=path)
        logger.debug(u"%s: %s" % (_('Revision is'), output))
        logger.info('<<<')
        # Context manager guarantees the marker file is closed even if
        # the write fails (the original leaked the handle).
        with open(os.path.join(path, 'lastSync.txt'), 'w+') as last_sync_mark:
            last_sync_mark.write(output)
def git_ls_remote(remote, repository_path):
    u"""Check whether a git remote is reachable.

    Returns the exit code of ``git ls-remote`` (0 when reachable).
    """
    logger.info('git ls-remote ' + remote)
    cmd = 'git ls-remote ' + remote
    pipe = subprocess.Popen(cmd, shell=True, cwd=repository_path)
    # poll() returns immediately and usually yields None before the
    # command finishes; wait() blocks until completion so returncode
    # is actually populated (original bug: returned None).
    pipe.wait()
    return pipe.returncode
def git_annex_metadata(file_name, repository_path):
    u"""Show a file's git-annex metadata; returns the raw JSON stdout.

    The command is passed as an argument list with ``shell=False`` so
    file names containing spaces or shell metacharacters are handled
    safely (the original interpolated the name into a shell string).
    """
    cmd = ["git", "annex", "metadata", file_name, "--json"]
    logger.info(" ".join(cmd))
    pipe = subprocess.Popen(cmd, cwd=repository_path,
                            stdout=subprocess.PIPE)
    output, error = pipe.communicate()
    return output
def check_functional_tags(request, tags):
    """Check whether any of the given tags are functional tags.

    For each functional tag found, collect its description and its
    interface code; unknown tags are only logged. The result is
    returned as a JSON response.
    """
    response_data = {}
    base_dir = os.path.join(os.path.dirname(__file__), 'functional_tags')
    for tag in tags.split('/'):
        if not os.path.isdir(os.path.join(base_dir, tag)):
            logger.info('no functionalities for tag: ' + tag)
            continue
        descriptor = os.path.join(base_dir, tag, tag + '.json')
        response_data[tag] = {}
        if os.path.isfile(descriptor):
            with open(descriptor) as json_data:
                content = json.load(json_data)
            response_data[tag]['description'] = content['description']
            response_data[tag]['code'] = _get_functional_tag(tag)
        else:
            response_data[tag]['error'] = 'Functional tag\'s json descriptor not provided!'
    return HttpResponse(json.dumps(response_data), mimetype=u'application/json')
def git_annex_merge(repository_path):
    u"""Merge the repository, reconciling any divergence between the
    local and the remote clone."""
    command = "git annex merge "
    logger.info(command)
    subprocess.Popen(command, shell=True, cwd=repository_path).wait()
def git_annex_copy_to(repository_path):
    u"""Send the binary contents to the remote repository (origin)."""
    # TODO: Next release with dynamic "origin"
    cmd = 'git annex copy --fast --to origin'
    logger.info(cmd + ' ')
    subprocess.Popen(cmd, shell=True, cwd=repository_path).wait()
def search_tags(request, repository, mucua, args):
    """Search tags matching one or more string fragments.

    Returns a plain JSON list of names for autocomplete requests
    (``?q=``), otherwise a serialized DRF response.
    """
    logger.info(mucua)
    logger.info(args)
    # hack for the autocomplete widget:
    # also accepts URLs like /[repo]/[mucua]/tags/search/?q=[args]
    args_q = request.GET.get('q')
    if args_q is not None:
        args = args_q
    # strip sort/limit suffixes from the URL and split into fragments
    args = args.split('/sort')[0]
    args = args.split('/limit')[0]
    args = args.split('/')
    # Narrow the queryset with every fragment. The original loop
    # rebound `tags` on each iteration, so only the LAST fragment was
    # ever used; chaining filters applies all of them.
    tags = Tag.objects.all()
    for fragment in args:
        tags = tags.filter(name__contains=fragment)
    if args_q is not None:
        response_data = [t.name for t in tags]
        return HttpResponse(json.dumps(response_data),
                            mimetype=u'application/json')
    serializer = TagSerializer(tags, many=True)
    return Response(serializer.data)
def git_annex_get(repository_path):
    u"""Fetch all binary contents from the remote repository."""
    # TODO: Next release with possibility to choice what to get
    command = 'git annex get .'
    logger.info(command)
    subprocess.Popen(command, shell=True, cwd=repository_path).wait()
def default_lang(request):
    """Return the application's default language code as JSON."""
    logger.info('default_lang')
    payload = {'defaultLang': LANGUAGE_CODE}
    return HttpResponse(json.dumps(payload), mimetype=u'application/json')
def get_media_size(instance):
    u"""Return the media file size as a human readable string."""
    cmd = "git annex info " + instance.get_file_name() + " --json"
    # stdout must be captured, otherwise communicate() returns None and
    # json.loads() below fails (original bug: no stdout=PIPE).
    pipe = subprocess.Popen(cmd, shell=True, cwd=get_file_path(instance),
                            stdout=subprocess.PIPE)
    output, error = pipe.communicate()
    # Default value: a parse failure would otherwise leave media_size
    # unbound and raise NameError further down.
    media_size = ["0", "bytes"]
    try:
        media_size = json.loads(output)["size"].split(" ")
    except (ValueError, KeyError):
        logger.info("Error while trying to get file size.")
    size_list = {
        "bytes": "B",
        "kilobyte": "KB",
        "kilobytes": "KB",
        "megabyte": "MB",
        "megabytes": "MB",
        "gigabyte": "GB",
        "gigabytes": "GB",
        "terabyte": "TB",
        "terabytes": "TB",
    }
    # Fall back to the raw unit name for units git-annex may report
    # that we do not map, instead of raising KeyError.
    media_size[1] = size_list.get(media_size[1], media_size[1])
    media_str = media_size[0] + " " + media_size[1]
    return media_str
def available_langs(request):
    """Return the list of available languages as JSON."""
    logger.info('availableLangs')
    payload = {'availableLangs': LANGUAGES}
    return HttpResponse(json.dumps(payload), mimetype=u'application/json')
def get_media_size(instance):
    u"""Return the media file size as a human readable string."""
    cmd = 'git annex info ' + instance.get_file_name() + ' --json'
    output = subprocess.check_output(cmd, shell=True,
                                     cwd=get_file_path(instance))
    # Default value: without it a parse failure below leaves media_size
    # unbound and the unit lookup raises NameError (original bug).
    media_size = ['0', 'bytes']
    try:
        media_size = json.loads(output)['size'].split(' ')
    except (ValueError, KeyError):
        logger.info('Error while trying to get file size.')
    size_list = {'bytes': 'B',
                 'kilobyte': 'KB',
                 'kilobytes': 'KB',
                 'megabyte': 'MB',
                 'megabytes': 'MB',
                 'gigabyte': 'GB',
                 'gigabytes': 'GB',
                 'terabyte': 'TB',
                 'terabytes': 'TB'}
    # Unknown units pass through unchanged instead of raising KeyError.
    media_size[1] = size_list.get(media_size[1], media_size[1])
    media_str = media_size[0] + ' ' + media_size[1]
    return media_str
def get_media_size(instance):
    u"""Return the media file size as a human readable string."""
    cmd = 'git annex info ' + instance.get_file_name() + ' --json'
    output = subprocess.check_output(cmd, shell=True,
                                     cwd=get_file_path(instance))
    # Fallback when the size cannot be parsed from git-annex output.
    media_size = ["0", "bytes"]
    try:
        media_size = json.loads(output)['size'].split(' ')
    except ValueError:
        logger.info('Error while trying to get file size.')
    except KeyError:
        logger.info('Key error while trying to get file size.')
    # 'kilobyte' (singular) was missing from the map; unknown units now
    # fall back to the raw name instead of raising KeyError.
    size_list = {'bytes': 'B',
                 'kilobyte': 'KB',
                 'kilobytes': 'KB',
                 'megabyte': 'MB',
                 'megabytes': 'MB',
                 'gigabyte': 'GB',
                 'gigabytes': 'GB',
                 'terabyte': 'TB',
                 'terabytes': 'TB'
                 }
    media_size[1] = size_list.get(media_size[1], media_size[1])
    media_str = media_size[0] + ' ' + media_size[1]
    return media_str
def media_file_rename(instance, new_file_name):
    u"""Rename the media file inside the repository.

    The original built a shell string without a space between the old
    and the new name ("git mv oldnew"), so the rename always failed.
    An argument list (shell=False) also keeps file names containing
    spaces or shell metacharacters safe.
    """
    logger.info('Media renamed: ' + new_file_name)
    cmd = ['git', 'mv',
           os.path.basename(instance.media_file.name),
           new_file_name]
    pipe = subprocess.Popen(cmd, cwd=get_file_path(instance))
    pipe.wait()
def git_annex_merge(repository_path):
    u"""Run *git annex merge*, reconciling the local clone with the
    remote one."""
    cmd = 'git annex merge '
    logger.info(cmd)
    process = subprocess.Popen(cmd, shell=True, cwd=repository_path)
    process.wait()
def git_ls_remote(remote, repository_path):
    u"""Check whether a git remote is reachable.

    Returns the exit code of ``git ls-remote`` (0 when reachable).
    """
    logger.info("git ls-remote " + remote)
    cmd = "git ls-remote " + remote
    pipe = subprocess.Popen(cmd, shell=True, cwd=repository_path)
    # The original used poll(), which does not block and usually yields
    # None before the command has finished; wait() guarantees that
    # returncode is populated.
    pipe.wait()
    return pipe.returncode
def authenticate(self, username=None, password=None):
    u"""Authenticate a mocambola from its JSON file on disk.

    The username is expected in the form
    ``mocambola@mucua.repository.term``; the corresponding
    ``<username>.json`` file under MOCAMBOLA_DIR is deserialized and
    the password checked against its stored hash. Returns the Django
    User (created on first login) or None on any failure.
    """
    match = re.findall("(.*)@(.*)\.(.*)\.(.*)$", username)
    if match:
        (current_mocambola, current_mucua, current_repository, term) = match[0]
        # check that the mucua and the repository are valid
        try:
            current_mucua = Mucua.objects.get(description=current_mucua)
        except Mucua.DoesNotExist:
            return None
        try:
            current_repository = Repository.objects.get(name=current_repository)
        except Repository.DoesNotExist:
            return None
    else:
        print "invalid address"
        return None
    # Get file from MOCAMBOLA_DIR
    mocambola_path = os.path.join(str(REPOSITORY_DIR),
                                  str(current_repository),
                                  str(current_mucua),
                                  MOCAMBOLA_DIR)
    print "Mocambola Path: " + mocambola_path
    for jmocambola in os.listdir(mocambola_path):
        if jmocambola == username + ".json":
            # Deserialize the customized User object
            mocambola_json_file = open(os.path.join(mocambola_path,
                                                    jmocambola))
            data = JSONParser().parse(mocambola_json_file)
            u = User()
            serializer = UserSerializer(u, data=data)
            if serializer.errors:
                logger.debug(u"%s %s" % (_("Error deserialing"),
                                         serializer.errors))
            serializer.is_valid()
            current_user = serializer.object
            login_valid = username == current_user.username
            pwd_valid = check_password(password, current_user.password)
            if login_valid and pwd_valid:
                logger.info(u"%s %s %s" % (_("User"), current_mocambola,
                                           _("logged in")))
                try:
                    user = User.objects.get(username=username)
                except User.DoesNotExist:
                    logger.debug(u"%s" %
                                 (_("Exception caught, UserDoesNotExist")))
                    # Create a new user. Note that we can set password
                    # to anything, because it won't be checked; the
                    # password from settings.py will.
                    user = User(
                        username=username,
                        password=current_user.password,
                        is_staff=current_user.is_staff,
                        is_superuser=current_user.is_superuser,
                    )
                    user.save()
                return user
            else:
                logger.info(u"%s %s %s" %
                            (_("User"), current_mocambola,
                             _("doesn't exist or password is wrong!")))
                return None
            # NOTE(review): unreachable — both branches above return.
            return True
            # end of if
        # end of for
    return None
def git_commit(file_title, author_name, author_email, repository_path):
    u"""Commit in the repository, setting the author data.

    The command is passed as an argument list (shell=False) so quotes
    or shell metacharacters in the title/author no longer break — or
    inject into — the shell command line.
    """
    cmd = ['git', 'commit',
           '--author=' + author_name + ' <' + author_email + '>',
           '-m', file_title]
    logger.info(' '.join(cmd))
    pipe = subprocess.Popen(cmd, cwd=repository_path)
    pipe.wait()
def git_annex_metadata_del(file_name, repository_path, key, value):
    u"""Remove one metadata value (``key -= value``) from a file.

    An argument list (shell=False) is used so quotes or shell
    metacharacters in the value cannot break or inject into the shell
    command line, which the original single-quote wrapping allowed.
    """
    field = (key + '-=' + value).encode('UTF-8')
    cmd = ['git', 'annex', 'metadata', file_name, '-s', field]
    logger.info(u'Removing metadata with: git annex metadata %s -s %s-=%s'
                % (file_name, key, value))
    pipe = subprocess.Popen(cmd, cwd=repository_path)
    pipe.wait()
def git_commit(file_title, author_name, author_email, repository_path,
               file_path):
    u"""Commit one path in the repository, setting the author data.

    An argument list (shell=False) is used so quotes or shell
    metacharacters in the title/author/path no longer break — or
    inject into — the shell command line.
    """
    cmd = ['git', 'commit',
           '--author=' + author_name + ' <' + author_email + '>',
           '-m', file_title,
           '--', file_path]
    logger.info(' '.join(cmd))
    pipe = subprocess.Popen(cmd, cwd=repository_path)
    pipe.wait()
def git_annex_drop(media):
    u"""Force-drop the local copy of a media file; returns the command
    output."""
    file_name = os.path.basename(media.media_file.name)
    cmd = "git annex drop --force " + file_name
    logger.debug("Dropping filepath: " + get_file_path(media) +
                 media.get_file_name())
    process = subprocess.Popen(cmd, shell=True, cwd=get_file_path(media),
                               stdout=subprocess.PIPE)
    out, err = process.communicate()
    logger.debug(err)
    logger.info(out)
    return out
def git_get_SHA(repository_path):
    u"""Return the SHA of the repository's latest revision (HEAD).

    stdout must be captured explicitly: the original Popen call did not
    set ``stdout=subprocess.PIPE``, so communicate() returned None and
    the log concatenation below raised TypeError.
    """
    logger.info("git rev-parse HEAD")
    cmd = "git rev-parse HEAD"
    pipe = subprocess.Popen(cmd, shell=True, cwd=repository_path,
                            stdout=subprocess.PIPE)
    output, error = pipe.communicate()
    logger.debug(">>> Revision is: " + output)
    return output
def git_get_SHA(repository_path):
    u"""Return the SHA of the repository's latest revision (HEAD).

    Fixes the original call, which lacked ``stdout=subprocess.PIPE``:
    communicate() then returned None and the log concatenation raised
    TypeError.
    """
    logger.info('git rev-parse HEAD')
    cmd = 'git rev-parse HEAD'
    pipe = subprocess.Popen(cmd, shell=True, cwd=repository_path,
                            stdout=subprocess.PIPE)
    output, error = pipe.communicate()
    logger.debug('>>> Revision is: ' + output)
    return output
def git_annex_metadata(file_name, repository_path):
    u"""Return the git-annex metadata of a file as a JSON string."""
    command = 'git annex metadata ' + file_name + ' --json'
    logger.info(command)
    process = subprocess.Popen(command, shell=True, cwd=repository_path,
                               stdout=subprocess.PIPE)
    stdout, stderr = process.communicate()
    return stdout
def git_annex_get(repository_path, media_path):
    u"""Fetch one binary content from the remote repository.

    Returns the stdout of ``git annex get``. The original neither
    captured stdout (communicate() yielded None) nor had a return
    statement, despite its docstring promising the command output.
    """
    # TODO: Next release with possibility to choice what to get
    cmd = 'git annex get ' + media_path
    logger.info(cmd)
    pipe = subprocess.Popen(cmd, shell=True, cwd=repository_path,
                            stdout=subprocess.PIPE)
    output, error = pipe.communicate()
    return output
def get_media_type_by_filename(file_path):
    """Return the media type for a file based on its MIME type.

    Returns False when the file cannot be read or its MIME type is not
    accepted.
    """
    try:
        mime = magic.from_file(file_path, mime=True)
    except IOError:
        logger.info('Error while reading uploaded file.')
        # Without this early return `mime` would be unbound below
        # (original bug: NameError instead of a clean failure).
        return False
    if mime in VALID_MIMETYPES:
        return VALID_MIMETYPES[mime]
    logger.debug('Mime: ' + mime)
    logger.info('Mime type not accepted.')
    return False
def get_media_type_by_filename(file_path):
    """Return the media type for a file based on its MIME type.

    Returns False when the file cannot be read or its MIME type is not
    accepted.
    """
    try:
        mime = magic.from_file(file_path, mime=True)
    except IOError:
        logger.info('Error while reading uploaded file.')
        # Early return: otherwise `mime` is unbound below and the
        # function raises NameError instead of failing cleanly.
        return False
    if mime in VALID_MIMETYPES:
        return VALID_MIMETYPES[mime]
    logger.debug('Mime: ' + mime)
    logger.info('Mime type not accepted.')
    return False
def git_annex_status(repository_path):
    u"""Return git-annex repository information as a JSON string."""
    logger.info('git annex info/status')
    # the subcommand changed name as of version 5
    if float(git_annex_version()) <= 5:
        command = 'git annex status --json'
    else:
        command = 'git annex info --json'
    process = subprocess.Popen(command, shell=True, cwd=repository_path,
                               stdout=subprocess.PIPE)
    return process.stdout.read()
def git_annex_drop(media):
    u"""Force-drop the local copy of a media file and return the
    command's output."""
    target = os.path.basename(media.media_file.name)
    command = 'git annex drop --force ' + target
    logger.debug('Dropping filepath: ' + get_file_path(media) +
                 media.get_file_name())
    pipe = subprocess.Popen(command, shell=True,
                            cwd=get_file_path(media),
                            stdout=subprocess.PIPE)
    output, error = pipe.communicate()
    logger.debug(error)
    logger.info(output)
    return output
def update_mucuas_list(repository):
    u"""Create a Mucua record for every mucua available in the
    repository that is not yet known locally."""
    for entry in get_available_mucuas(None, repository):
        description = str(entry[1].split(' ')[0])
        uuid = str(entry[0])
        try:
            Mucua.objects.get(uuid=uuid)
            logger.info("Vi a mucua " + description + ", UUID: " + uuid)
        except Mucua.DoesNotExist:
            Mucua(description=description, uuid=uuid).save()
            logger.info("Criei a mucua " + description + ", UUID: " + uuid)
def get_latest_media(repository=DEFAULT_REPOSITORY): u"""Retorna uma lista de caminhos dos novos medias no repositório, desde a ultima sincronização (last_sync).""" try: current_repository = Repository.objects.get(name=repository) except Repository.DoesNotExist: return [] try: last_sync_mark = open( os.path.join(repository_dir, current_repository.name, 'lastSync.txt'), 'r+') last_sync = last_sync_mark.readline() last_sync = last_sync.replace("'", "") print "Alterações a partir do commit: " + last_sync except IOError: cwd = os.path.join(repository_dir, current_repository.name) p1 = subprocess.Popen(['git', 'rev-list', 'HEAD'], cwd=cwd, stdout=PIPE) p2 = subprocess.Popen(['tail', '-n 1'], stdin=p1.stdout, stdout=PIPE) output, error = p2.communicate() last_sync = output.rstrip() # Este é um exemplo do comando para pegar os ultimos medias desde # last_sync # cmd = 'git diff --pretty="format:" --name-only ' + # last_sync + 'HEAD' \ + '| sort | uniq | grep json | grep -v # mocambolas' cwd = os.path.join(repository_dir, current_repository.name) p1 = subprocess.Popen([ 'git', 'log', '--diff-filter=AM', '--pretty=format:', '--name-only', last_sync + '..HEAD' ], cwd=cwd, stdout=PIPE) p2 = subprocess.Popen(["sort"], stdin=p1.stdout, stdout=PIPE) p3 = subprocess.Popen(["uniq"], stdin=p2.stdout, stdout=PIPE) p4 = subprocess.Popen(["grep", "json"], stdin=p3.stdout, stdout=PIPE) p5 = subprocess.Popen(["grep", "-v", "mocambola"], stdin=p4.stdout, stdout=PIPE) output, error = p5.communicate() updated = [line.strip('\n') for line in output.splitlines()] logger.info("Updated:\n%s", updated) deleted = [line.strip('\n') for line in get_deleted_media()] logger.info("Deleted:\n%s", deleted) return list(set(updated) - set(deleted))
def handle(self, *args, **options):
    """Broadcast this mucua's address over UDP once per second
    (discovery beacon). Runs forever."""
    PORT = 50505
    MAGIC = "bbx-discover"
    sock = socket(AF_INET, SOCK_DGRAM)  # UDP socket
    sock.bind(('', 0))
    sock.setsockopt(SOL_SOCKET, SO_BROADCAST, 1)  # broadcast socket
    mucua = Mucua.objects.get(description=DEFAULT_MUCUA)
    mucua_uri = (mucua.description + '|' + 'ssh://' + DEFAULT_IP +
                 '/data/bbx/repositories/mocambos')
    while True:
        sock.sendto(MAGIC + mucua_uri, ('<broadcast>', PORT))
        logger.info('Enviado anuncio da mucua!')
        sleep(1)
def handle(self, *args, **options):
    """Announce this mucua forever: send a discovery datagram on the
    broadcast address every second."""
    PORT = 50505
    MAGIC = "bbx-discover"
    s = socket(AF_INET, SOCK_DGRAM)  # UDP socket
    s.bind(('', 0))
    s.setsockopt(SOL_SOCKET, SO_BROADCAST, 1)  # enable broadcast
    mucua = Mucua.objects.get(description=DEFAULT_MUCUA)
    mucua_uri = (mucua.description + '|' + 'ssh://' + DEFAULT_IP +
                 '/data/bbx/repositories/mocambos')
    payload = MAGIC + mucua_uri
    while True:
        s.sendto(payload, ('<broadcast>', PORT))
        logger.info('Enviado anuncio da mucua!')
        sleep(1)
def request_copy(self, save=True):
    u"""Create a local-copy request for this media.

    Requests are named after the media uuid and written to
    ``<repository>/<mucua>/requests/<uuid>``; the file currently
    contains only the media's path inside the repository. Finally the
    binary content is fetched asynchronously via git-annex.
    """
    self.set_is_local()
    if not self.is_local:
        self.is_requested = True
        if save:
            self.save()
        try:
            requests_path = os.path.join(REPOSITORY_DIR,
                                         self.get_repository(),
                                         DEFAULT_MUCUA,
                                         'requests')
            if not os.path.exists(requests_path):
                os.makedirs(requests_path)
            request_filename = os.path.join(requests_path, self.uuid)
            logger.info("REQUESTING: " + request_filename)
            # "with" guarantees the handle is closed — the original
            # called `request_file.close` WITHOUT parentheses, so the
            # file was never actually closed.
            with open(request_filename, 'a') as request_file:
                request_file.write(self.media_file.path)
            # TODO: Need to git add
            logger.debug("ADDING REQUEST: " +
                         os.path.basename(request_filename))
            logger.debug("ADDED ON: " + os.path.dirname(request_filename))
            from repository.models import git_add
            git_add(os.path.basename(request_filename),
                    os.path.dirname(request_filename))
        except IOError:
            logger.info(u'Alo! I can\'t write request file!')
        logger.debug("get_file_path: " + get_file_path(self))
        logger.debug("media_file.name: " +
                     os.path.basename(self.media_file.name))
        async_result = git_annex_get.delay(
            get_file_path(self),
            os.path.basename(self.media_file.name))
def get_latest_media(repository=DEFAULT_REPOSITORY): u"""Retorna uma lista de caminhos dos novos medias no repositório, desde a ultima sincronização (last_sync).""" try: current_repository = Repository.objects.get( name=repository) except Repository.DoesNotExist: return [] try: last_sync_mark = open( os.path.join(repository_dir, current_repository.name, 'lastSync.txt'), 'r+') last_sync = last_sync_mark.readline() last_sync = last_sync.replace("'", "") print "Alterações a partir do commit: " + last_sync except IOError: cwd = os.path.join(repository_dir, current_repository.name) p1 = subprocess.Popen(['git', 'rev-list', 'HEAD'], cwd=cwd, stdout=PIPE) p2 = subprocess.Popen(['tail', '-n 1'], stdin=p1.stdout, stdout=PIPE) output, error = p2.communicate() last_sync = output.rstrip() # Este é um exemplo do comando para pegar os ultimos medias desde # last_sync # cmd = 'git diff --pretty="format:" --name-only ' + # last_sync + 'HEAD' \ + '| sort | uniq | grep json | grep -v # mocambolas' cwd = os.path.join(repository_dir, current_repository.name) p1 = subprocess.Popen( ['git', 'log', '--diff-filter=AM', '--pretty=format:', '--name-only', last_sync + '..HEAD'], cwd=cwd, stdout=PIPE ) p2 = subprocess.Popen(["sort"], stdin=p1.stdout, stdout=PIPE) p3 = subprocess.Popen(["uniq"], stdin=p2.stdout, stdout=PIPE) p4 = subprocess.Popen(["grep", "json"], stdin=p3.stdout, stdout=PIPE) p5 = subprocess.Popen(["grep", "-v", "mocambola"], stdin=p4.stdout, stdout=PIPE) output, error = p5.communicate() updated = [line.strip('\n') for line in output.splitlines()] logger.info("Updated:\n%s", updated) deleted = [line.strip('\n') for line in get_deleted_media()] logger.info("Deleted:\n%s", deleted) return list(set(updated) - set(deleted))
def handle(self, *args, **options):
    """Synchronize media tags with the tags stored in git-annex
    metadata.

    With no argument, all non-local media are processed; with an
    integer argument N, only media dated within the last N days.
    """
    if len(args) == 0:
        medias = Media.objects.filter(is_local=False)
        logger.info("Atualizando todos as copias locais dos medias")
    else:
        since = int(args[0])
        some_day_ago = timezone.now().date() - timedelta(days=since)
        logger.info("Atualizando os medias desde o dia: " +
                    str(some_day_ago))
        medias = Media.objects.filter(date__gte=some_day_ago)
    for media in medias:
        try:
            # Synchronize/update tags.
            #
            # 1) Add all tags found in the git-annex metadata and not
            # already present on the media.
            # 2) If tags from other mucuas have been deleted (are missing in
            # the git_annex metadata tags), remove them from this media.
            tags_on_media = set(git_annex_list_tags(media))
            existing_tags = set(
                (t.namespace, t.name) for t in media.tags.all())
            # Add new tags to media
            for t in tags_on_media - existing_tags:
                # Add tag - search for existing, if none found create new tag.
                namespace, name = t
                try:
                    tag = Tag.objects.get(name=unicode(name),
                                          namespace=unicode(namespace))
                except Tag.DoesNotExist:
                    tag = Tag(name=name, namespace=namespace)
                    tag.save()
                media.tags.add(tag)
            # Remove tags that were removed on remote media
            for t in existing_tags - tags_on_media:
                namespace, name = t
                tag = Tag.objects.get(name=name, namespace=namespace)
                media.tags.remove(tag)
            # is_syncing=True: avoid re-triggering sync side effects on
            # save — assumed from the keyword name; TODO confirm.
            media.save(is_syncing=True)
        except OSError, e:
            logger.debug('Requested media not found: ' + media.name)
def handle(self, *args, **options):
    """Synchronize media tags with the tags stored in git-annex
    metadata (see inline steps below).

    With no argument, all non-local media are processed; with an
    integer argument N, only media dated within the last N days.
    """
    if len(args) == 0:
        medias = Media.objects.filter(is_local=False)
        logger.info("Atualizando todos as copias locais dos medias")
    else:
        since = int(args[0])
        some_day_ago = timezone.now().date() - timedelta(days=since)
        logger.info("Atualizando os medias desde o dia: " +
                    str(some_day_ago))
        medias = Media.objects.filter(date__gte=some_day_ago)
    for media in medias:
        try:
            # Synchronize/update tags.
            #
            # 1) Add all tags found in the git-annex metadata and not
            # already present on the media.
            # 2) If tags from other mucuas have been deleted (are missing in
            # the git_annex metadata tags), remove them from this media.
            tags_on_media = set(git_annex_list_tags(media))
            existing_tags = set((t.namespace, t.name)
                                for t in media.tags.all())
            # Add new tags to media
            for t in tags_on_media - existing_tags:
                # Add tag - search for existing, if none found create new tag.
                namespace, name = t
                try:
                    tag = Tag.objects.get(name=unicode(name),
                                          namespace=unicode(namespace))
                except Tag.DoesNotExist:
                    tag = Tag(name=name, namespace=namespace)
                    tag.save()
                media.tags.add(tag)
            # Remove tags that were removed on remote media
            for t in existing_tags - tags_on_media:
                namespace, name = t
                tag = Tag.objects.get(name=name, namespace=namespace)
                media.tags.remove(tag)
            # is_syncing=True: presumably suppresses sync side effects
            # in Media.save() — TODO confirm against the model.
            media.save(is_syncing=True)
        except OSError, e:
            logger.debug('Requested media not found: ' + media.name)
def request_copy(self, save=True):
    u"""Create a local-copy request for this media.

    Requests are named after the media uuid and stored under
    ``<repository>/<mucua>/requests/``; the file contains the media's
    path inside the repository. The binary content is then fetched
    asynchronously via git-annex.
    """
    self.set_is_local()
    if not self.is_local:
        self.is_requested = True
        if save:
            self.save()
        try:
            requests_path = os.path.join(REPOSITORY_DIR,
                                         self.get_repository(),
                                         DEFAULT_MUCUA, 'requests')
            if not os.path.exists(requests_path):
                os.makedirs(requests_path)
            request_filename = os.path.join(requests_path, self.uuid)
            logger.info("REQUESTING: " + request_filename)
            # The original called `request_file.close` WITHOUT
            # parentheses, so the file handle was never closed; the
            # context manager fixes the leak.
            with open(request_filename, 'a') as request_file:
                request_file.write(self.media_file.path)
            # TODO: Need to git add
            logger.debug("ADDING REQUEST: " +
                         os.path.basename(request_filename))
            logger.debug("ADDED ON: " + os.path.dirname(request_filename))
            from repository.models import git_add
            git_add(os.path.basename(request_filename),
                    os.path.dirname(request_filename))
        except IOError:
            logger.info(u'Alo! I can\'t write request file!')
        logger.debug("get_file_path: " + get_file_path(self))
        logger.debug("media_file.name: " +
                     os.path.basename(self.media_file.name))
        async_result = git_annex_get.delay(
            get_file_path(self),
            os.path.basename(self.media_file.name))
def change_interface_lang(request):
    """Change the frontend's default language by rewriting the language
    code inside ``bbx/static/js/config.js`` and rebuilding templates
    and static files.
    """
    logger.info('change default lang')
    new_lang = request.POST.get('new_lang').encode('utf-8')
    current_lang = request.POST.get('current_lang').encode('utf-8')
    response_data = {'new_lang': new_lang}
    logger.info(current_lang)
    logger.info(new_lang)
    # change default language at bbx application
    bbx_config_file = os.path.join(os.getcwd(), 'bbx/static/js/config.js')
    # NOTE: the values were already encoded above. The original encoded
    # them a second time, which raises UnicodeDecodeError for non-ASCII
    # language codes on Python 2, so the redundant re-encode is removed.
    lines = []
    with open(bbx_config_file) as infile:
        for line in infile:
            lines.append(line.replace(current_lang, new_lang))
    with open(bbx_config_file, 'w') as outfile:
        for line in lines:
            outfile.write(line)
    # update_templates and collectstatic must run for the change to
    # actually show up in the interface
    from django.core.management import call_command
    call_command('update_templates', new_lang, interactive=False)
    call_command('collectstatic', interactive=False)
    return HttpResponse(json.dumps(response_data),
                        mimetype=u'application/json')
def change_interface_lang(request):
    """Switch the frontend's default language: rewrite the language
    code in ``bbx/static/js/config.js``, then regenerate templates and
    static files.
    """
    logger.info('change default lang')
    new_lang = request.POST.get('new_lang').encode('utf-8')
    current_lang = request.POST.get('current_lang').encode('utf-8')
    response_data = {'new_lang': new_lang}
    logger.info(current_lang)
    logger.info(new_lang)
    # change default language at bbx application
    bbx_config_file = os.path.join(os.getcwd(), 'bbx/static/js/config.js')
    # The values are already UTF-8 byte strings; the original encoded
    # them AGAIN here, which raises UnicodeDecodeError for non-ASCII
    # language codes on Python 2 — the redundant re-encode is removed.
    lines = []
    with open(bbx_config_file) as infile:
        for line in infile:
            lines.append(line.replace(current_lang, new_lang))
    with open(bbx_config_file, 'w') as outfile:
        for line in lines:
            outfile.write(line)
    # update_templates and collectstatic must run afterwards
    from django.core.management import call_command
    call_command('update_templates', new_lang, interactive=False)
    call_command('collectstatic', interactive=False)
    return HttpResponse(json.dumps(response_data),
                        mimetype=u'application/json')
def _get_functional_tag(tag):
    """Collect the interface code files of a functional tag.

    Returns a dict mapping each file's base name (without extension)
    to the file's contents.
    """
    code = {}
    functional_tags_folder = os.path.join(os.path.dirname(__file__),
                                          'functional_tags')
    # Must be a tuple: ('README') is just the string 'README', and the
    # `in` test below would then do SUBSTRING matching, wrongly
    # excluding any filename that happens to be a substring of it.
    IGNORE_FILES = ('README',)
    # list every code file of the folder
    # TODO: remove the hard-coded 'interface'
    interface_folder = os.path.join(functional_tags_folder, tag,
                                    'interface')
    for filename in os.listdir(interface_folder):
        if (os.path.isfile(os.path.join(interface_folder, filename))
                and filename not in IGNORE_FILES):
            name = filename.rsplit('.', 1)[0]
            with open(os.path.join(interface_folder, filename), 'r') as f:
                main_data = f.read()
            logger.info(main_data)
            code[name] = main_data
    return code
def handle_uploaded_image(instance, image_path, current_path):
    """Limit uploaded image files to the default maximum size.

    Returns the path of the resized file, the unchanged
    ``current_path`` when no resize is needed, or False on error.
    """
    filename = os.path.join(get_file_path(instance),
                            instance.get_file_name())
    image = Image.open(image_path)
    width, height = image.size
    # guard clause: nothing to do if the image is within limits
    if width <= IMAGE_MAX_SIZE and height <= IMAGE_MAX_SIZE:
        return current_path
    # fix: create folder for thumbnail (os.path.dirname replaces the
    # original manual split/join of the path)
    file_folder = os.path.dirname(filename)
    if not os.path.exists(file_folder):
        logger.info('creating folder: ' + file_folder)
        os.makedirs(file_folder)
    try:
        # thumbnail() resizes in place, preserving the aspect ratio
        image.thumbnail((IMAGE_MAX_SIZE, IMAGE_MAX_SIZE), Image.ANTIALIAS)
        image.save(filename)
        return filename
    except IOError:
        logger.info('Handle_Uploaded_Image: Error while reading uploaded file.')
        return False
def git_annex_add(file_name, repository_path):
    u"""Add a file to the *git-annex* repository.

    The command is passed as an argument list (shell=False) so file
    names containing spaces or shell metacharacters are handled safely
    — the original interpolated the name into a shell string.
    """
    logger.info('git annex add ' + file_name)
    pipe = subprocess.Popen(['git', 'annex', 'add', file_name],
                            cwd=repository_path)
    pipe.wait()
def git_annex_sync(repository_path):
    u"""Synchronize the repository with its remote clones."""
    command = 'git annex sync'
    logger.info(command)
    subprocess.Popen(command, shell=True, cwd=repository_path).wait()
def remove_deleted_media(repository=DEFAULT_REPOSITORY):
    """Delete the Django Media objects whose files were removed
    according to the git log."""
    try:
        repository = Repository.objects.get(name=repository)
    except Repository.DoesNotExist:
        return None
    logger.info(u">>> %s" % _("CLEANING"))
    logger.info(u"%s: %s" % (_("Repository"), repository))
    from media.models import Media
    try:
        for deleted_media in get_deleted_media(repository):
            logger.info(u"%s: %s" % (_("Deleting media"), deleted_media))
            try:
                fingerprint = os.path.join(
                    repository_dir, repository.get_name(),
                    os.path.splitext(deleted_media)[0])
                logger.info(u"%s: %s" % (_("Fingerprint"), fingerprint))
                matches = Media.objects.filter(
                    media_file__startswith=fingerprint)
                matches[0].delete()
                logger.info(u"%s" % _("Media deleted."))
            except (Media.DoesNotExist, IndexError):
                logger.info(u"%s" % _("Media doesn't exist"))
    except Media.DoesNotExist:
        logger.info(u"%s" % _("Delete problem"))
def git_annex_sync(repository_path):
    u"""Run *git annex sync* against the remote clones."""
    cmd = "git annex sync"
    logger.info(cmd)
    process = subprocess.Popen(cmd, shell=True, cwd=repository_path)
    process.wait()
def git_annex_metadata_del(file_name, repository_path, key, value):
    u"""Remove one metadata value (``key -= value``) from a file.

    Uses an argument list (shell=False) so quotes or shell
    metacharacters in the value cannot break or inject into the shell
    command line, as the original single-quote wrapping allowed.
    """
    field = (key + "-=" + value).encode("UTF-8")
    cmd = ["git", "annex", "metadata", file_name, "-s", field]
    logger.info(u"Removing metadata with: git annex metadata %s -s %s-=%s"
                % (file_name, key, value))
    pipe = subprocess.Popen(cmd, cwd=repository_path)
    pipe.wait()
def git_pull(repository_path):
    u"""Run *git pull*, bringing the local repository up to date."""
    command = 'git pull '
    logger.info(command)
    subprocess.Popen(command, shell=True, cwd=repository_path).wait()
def git_push(repository_path):
    u"""Run *git push*, updating the origin repository."""
    command = 'git push '
    logger.info(command)
    subprocess.Popen(command, shell=True, cwd=repository_path).wait()
def media_detail(request, repository, mucua, pk=None, format=None):
    """
    Retrieve, create, update or delete a media instance.
    """
    # TODO: Use object permissions for more fine grained control.
    # For now, do a more primitive check that the user is authenticated.
    if request.method != 'GET' and not request.user.is_authenticated():
        raise PermissionDenied
    # Resolve mucua/repository from the URL; fall back to the defaults
    # (and redirect) when either does not exist.
    redirect_page = False
    if mucua == 'rede':
        mucua = request.DATA['origin']
    try:
        mucua = Mucua.objects.get(description=mucua)
    except Mucua.DoesNotExist:
        mucua = Mucua.objects.get(description=DEFAULT_MUCUA)
        redirect_page = True
    try:
        repository = Repository.objects.get(name=repository)
    except Repository.DoesNotExist:
        repository = Repository.objects.get(name=DEFAULT_REPOSITORY)
        redirect_page = True
    # redirect
    if redirect_page:
        return HttpResponseRedirect(redirect_base_url + repository.name +
                                    '/' + mucua.description + '/media/')
    author = request.user
    # If a primary key was given, load the media before dispatching on
    # the request method (GET, PUT, POST, DELETE).
    if pk:
        # get media
        try:
            media = Media.objects.get(uuid=pk)
        except Media.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method == 'GET':
        # get media
        if pk == '':
            """ get media Se pk nao existe, chama token (preparacao para insercao) # TODO: mover? """
            media_token(request, repository, mucua)
        if pk != '':
            serializer = MediaSerializer(media)
            return Response(serializer.data)
    elif request.method == 'PUT':
        """ update media """
        if pk == '':
            return HttpResponseRedirect(
                redirect_base_url + repository.name + '/' +
                mucua.description + '/bbx/search')
        media.name = request.DATA['name']
        media.note = request.DATA['note']
        media.type = request.DATA['type']
        media.license = request.DATA['license']
        media.date = request.DATA['date']
        # workaround for date problem: the day encoded in the file path
        # must match the day stored in the date field
        file_day = str(media.media_file).split('/')[-2]
        date_day = str(media.date)[8:10]
        if file_day != date_day:
            media.date = media.date[0:8] + file_day + media.date[10:]
        media.save()
        if media.id:
            tags = request.DATA['tags'].split(',')
            media.tags.clear()
            add_and_synchronize_tags(media, tags, mucua)
            return Response(_("updated media - OK"),
                            status=status.HTTP_201_CREATED)
        else:
            return Response(_("error while creating media"),
                            status=status.HTTP_400_BAD_REQUEST)
        # NOTE(review): unreachable — both branches above return, and
        # `serializer` is not bound at this point.
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'POST':
        """ create a new media """
        if request.DATA['author'] != '':
            author = request.DATA['author']
        else:
            author = request.user
        try:
            author = User.objects.get(username=author)
        except User.DoesNotExist:
            author = User.objects.get(username=request.user)
        try:
            mucua = Mucua.objects.get(description=request.DATA['origin'])
        except Mucua.DoesNotExist:
            mucua = Mucua.objects.get(description=DEFAULT_MUCUA)
        media = Media(repository=repository, origin=mucua, author=author,
                      note=request.DATA['note'],
                      type=request.DATA['type'],
                      license=request.DATA['license'],
                      name=request.DATA.get('name', ''),
                      date=(request.DATA['date']
                            if request.DATA['date'] != '' else get_now()),
                      uuid=generate_UUID()
                      )
        logger.info('processing upload')
        # multiple upload
        for filename, file in request.FILES.iteritems():
            file_name = request.FILES[filename].name
            media.format = file_name.split('.')[-1].lower()
            if media.name == '':
                media.name = get_media_name_by_filename(file_name)
            if hasattr(request.FILES[filename], 'temporary_file_path'):
                # if file bigger than 2.5MB, is stored in /tmp
                tmp_file = request.FILES[filename].temporary_file_path()
            else:
                # if lesser than 2.5MB, is stored on memory
                tmp_file = '/tmp/' + media.uuid
                f = open(tmp_file, 'w')
                f.write(request.FILES[filename].read())
                f.close()
            media.type = get_media_type_by_filename(tmp_file)
            if media.type == 'imagem':
                media.media_file = handle_uploaded_image(
                    media, tmp_file, request.FILES[filename])
            else:
                media.media_file = request.FILES[filename]
            media.save()
        if media.id:
            # get tags by list or separated by ','
            tags = request.DATA['tags'].split(',')
            add_and_synchronize_tags(media, tags, mucua)
            # save again to trigger the post_save signal
            media.save()
            serializer = MediaSerializer(media)
            return Response(serializer.data,
                            status=status.HTTP_201_CREATED)
        else:
            return Response(_("error while creating media"),
                            status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'DELETE':
        media.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)