def get(self, request, *args, **kwargs):
    if request.method == 'GET':
        guid = request.GET.get('guid', None)
        if guid is None:
            return self.render_to_response(
                self._results_to_context(('missing guid', False, [], )))
        if not guid:
            return self.render_to_response(
                self._results_to_context((NO_ERR_RESP, False, [], )))
    else:
        return self.render_to_response(
            self._results_to_context(('not a get request', False, [], )))

    try:
        address = AddrObj.objects.get(pk=guid)
    except AddrObj.DoesNotExist:
        return self.render_to_response(
            self._results_to_context(('wrong guid', False, [], )))

    city = self._get_city_obj(address)
    if city is None:
        return self.render_to_response(
            self._results_to_context((NO_ERR_RESP, False, [], )))

    areas = AddrObj.objects.filter(parentguid=city.pk, shortname='р-н')
    if areas:
        return self.render_to_response(
            self._results_to_context((
                NO_ERR_RESP,
                False,
                ((force_unicode(a.pk), force_unicode(a)) for a in areas),
            ))
        )
    return self.render_to_response(
        self._results_to_context((NO_ERR_RESP, False, [], )))
def render(self, name, value, attrs=None):
    output = []
    f_type = 'apk' if 'download_path' in attrs['id'] else 'img'
    output.append(r"""
    <div style="margin-bottom: 20px;" class="">
        <button type="button" class="btn btn-primary uploadbtn">
            <span class="glyphicon glyphicon-cloud-upload"></span>点击上传</button>
        <input id="fileupload" type="file" name="%s" class="fileupload"
               style="display: none;" data-url="/admin/progressbaruploadview/" accept="%s">
        <div class="imgbox" style="float:right;margin-left: 10px"></div>
        <input name="%s" class="valstorage" type="text" style="display: none;">
    </div>
    <div class="modal fade" id="modal-progress" data-backdrop="static" data-keyboard="false">
        <div class="modal-dialog">
            <div class="modal-content">
                <div class="modal-header">
                    <h4 class="modal-title">上传中...</h4>
                </div>
                <div class="modal-body">
                    <div class="progress">
                        <div class="progress-bar" role="progressbar" style="width: 0;">0</div>
                    </div>
                </div>
            </div>
        </div>
    </div>
    """ % (force_unicode(name),
           force_unicode(self.file_type_dict[f_type]),
           force_unicode('id_' + name)))
    return mark_safe(u''.join(output))
def __init__(self, model, name, short_description=None, default=""):
    ChangeListColumn.__init__(self, short_description, None)
    self.field_name = name
    try:
        field, model_, self.direct, self.m2m = \
            model._meta.get_field_by_name(name)
    except FieldDoesNotExist:
        descriptor = getattr(model, name)
        field = descriptor.related
        self.direct = False
        self.m2m = True
    if self.direct:
        self.field = field
        self.model = field.model
        self.opts = self.model._meta
        if not self.m2m:
            self.admin_order_field = name
    else:
        self.field = field.field
        self.model = field.parent_model
        self.opts = field.parent_model._meta
    if self.short_description is None:
        if self.direct:
            self.short_description = force_unicode(field.verbose_name)
        else:
            self.short_description = force_unicode(name.replace("_", " "))
    self.default = default
def humanize_datetime(datetime, weekday=True, time=False):
    u"""Format a datetime into a human-readable string.

    Example:
        >>> from datetime import datetime
        >>> from dateutil.relativedelta import relativedelta
        >>> TODAY = datetime.today()
        >>> print humanize_datetime(TODAY, extract_today=False)
        今日
        >>> # Delay with `seconds=+30`
        >>> print humanize_datetime(TODAY+relativedelta(hours=+2, seconds=+30), extract_today=True)
        2時間後
        >>> # Delay with `seconds=+30`
        >>> print humanize_datetime(TODAY-relativedelta(hours=+2, seconds=+30), extract_today=True)
        2時間前
    """
    TODAY = _datetime.today()
    date = force_unicode(datetime.strftime("%m/%d"))
    if weekday:
        weekday = force_unicode(datetime.strftime("(%a)"))
    else:
        weekday = u""
    if time:
        time = u" %s" % force_unicode(datetime.strftime("%H:%M"))
    else:
        time = u""
    if datetime.year == TODAY.year:
        if datetime.month == TODAY.month:
            if datetime.day == TODAY.day:
                return u"今日%s%s" % (weekday, time)
            elif datetime.day - TODAY.day == 1:
                return u"明日%s%s" % (weekday, time)
            elif datetime.day - TODAY.day == -1:
                return u"昨日%s%s" % (weekday, time)
            elif datetime.day - TODAY.day == 2:
                return u"明後日%s%s" % (weekday, time)
            elif datetime.day - TODAY.day == -2:
                return u"一昨日%s%s" % (weekday, time)
            elif datetime.day - TODAY.day == 7:
                return u"一週間後%s%s" % (weekday, time)
            elif datetime.day - TODAY.day == -7:
                return u"一週間前%s%s" % (weekday, time)
            else:
                delta = _relativedelta(datetime, TODAY)
                if delta.days > 0:
                    suffix = u"後"
                    days = delta.days
                else:
                    suffix = u"前"
                    days = abs(delta.days) + 1
                return u"%d日%s%s%s" % (days, suffix, weekday, time)
        else:
            return "%s%s%s" % (date, weekday, time)
    return "%s/%s%s%s" % (datetime.year, date, weekday, time)
def get_form_delta(form):
    """
    Determines the changes made by a newly submitted form.

    Provided delta values should be at least semi-human readable.

    :param form: The form that is being checked for changes.
    :return: A dictionary of changes. The format is:
        {
            form_field_name: {
                'initial': The starting value,
                'new': The updated value,
            }
        }
    """
    delta = {}
    if form.changed_data:
        for field in form.changed_data:
            # There are two places that initial data can come from:
            # form.initial or form.fields[field].initial. Django
            # favors form.initial in their code, so this code prefers
            # form.initial first as well.
            initial_val = form.initial.get(field, form.fields[field].initial)
            initial_val = form.fields[field].to_python(initial_val)
            new_val = form.data.get(field, '')
            new_val = form.fields[field].to_python(new_val)

            if isinstance(form.fields[field], MultipleFileField):
                # Multiple file added results in a MultiValueDict.
                # MultiValueDict.get() just gets the last item in the list,
                # so we need to use MultiValueDict.getlist() to account
                # for all the added attachments.
                if isinstance(form.files, MultiValueDict):
                    initial_val = None
                    new_val = [f.name for f in form.files.getlist(field)]
            elif hasattr(form.fields[field], 'choices'):
                # Coerce the keys to unicode, since the data values
                # we will be getting back should be unicode.
                choices = form.fields[field].choices
                choices_dict = {unicode(x[0]): x[1] for x in choices}
                initial_val = choices_dict.get(initial_val, initial_val)
                new_val = choices_dict.get(new_val, new_val)

            if (initial_val != new_val) and (initial_val or new_val):
                delta[field] = {
                    'initial': force_unicode(initial_val),
                    'new': force_unicode(new_val)
                }
    return delta
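# Usage sketch for get_form_delta (illustrative only -- NoteForm is a
# hypothetical form, not part of the original module; assumes
# ``from django import forms``):
#
#     class NoteForm(forms.Form):
#         title = forms.CharField()
#
#     form = NoteForm(data={'title': u'new title'}, initial={'title': u'old title'})
#     get_form_delta(form)
#     # -> {'title': {'initial': u'old title', 'new': u'new title'}}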
def make_addr(obj):
    if obj.aolevel > 3:
        short_addr.append(force_unicode(obj))
    if obj.aolevel > 1:
        try:
            parent = AddrObj.objects.get(aoguid=obj.parentguid)
        except AddrObj.DoesNotExist:
            return
        else:
            full_addr.append(force_unicode(parent))
            make_addr(parent)
def save(self, filename, raw_contents):
    filename = self.get_available_name(filename)
    success = self.bucket.put_object(filename, raw_contents)
    if (success.status == 200):
        return force_unicode(filename.replace('\\', '/'))
    else:
        print("FAILURE writing file {filename}".format(filename=filename))
def _update_address(self):
    full_addr = [force_unicode(self.address)]
    short_addr = []

    def make_addr(obj):
        if obj.aolevel > 3:
            short_addr.append(force_unicode(obj))
            # level = int(obj.aolevel)
            # attr = self._LEVELS[level]
            # if hasattr(self, attr):
            #     setattr(self, attr, obj)
        if obj.aolevel > 1:
            try:
                parent = AddrObj.objects.get(aoguid=obj.parentguid)
            except AddrObj.DoesNotExist:
                return
            else:
                full_addr.append(force_unicode(parent))
                make_addr(parent)

    make_addr(self.address)
    self.full_address = ", ".join(full_addr[::-1])
    self.short_address = ", ".join(short_addr[::-1])
def add_form_widget_attr(field, attr_name, attr_value, replace=0):
    """
    Adds widget attributes to a bound form field.

    This is helpful if you would like to add a certain class to all your forms
    (i.e. `form-control` to all form fields when you are using Bootstrap)::

        {% load libs_tags %}
        {% for field in form.fields %}
            {% add_form_widget_attr field 'class' 'form-control' as field_ %}
            {{ field_ }}
        {% endfor %}

    The tag will check if the attr already exists and only append your value.
    If you would like to replace existing attrs, set `replace=1`::

        {% add_form_widget_attr field 'class' 'form-control' replace=1 as field_ %}

    """
    if not replace:
        attr = field.field.widget.attrs.get(attr_name, '')
        attr += force_unicode(attr_value)
        field.field.widget.attrs[attr_name] = attr
        return field
    else:
        field.field.widget.attrs[attr_name] = attr_value
        return field
def _render_option_parts(self, selected_choices, option_value, option_label):
    option_value = force_unicode(option_value)
    app_label, classname = model_to_app_and_classname(option_value)
    attributes = {
        'data-actionurl': self.changelist_url_getter.get_url(
            self.admin_site_name, app_label, classname)
    }
    if option_value in selected_choices:
        attributes['selected'] = 'selected'
    return {
        'value': escape(option_value),
        'attributes': attributes,
        'label': conditional_escape(force_unicode(option_label))
    }
def make_addr(obj):
    if obj.aolevel > 3:
        short_addr.append(force_unicode(obj))
        # level = int(obj.aolevel)
        # attr = self._LEVELS[level]
        # if hasattr(self, attr):
        #     setattr(self, attr, obj)
    if obj.aolevel > 1:
        try:
            parent = AddrObj.objects.get(aoguid=obj.parentguid)
        except AddrObj.DoesNotExist:
            return
        else:
            full_addr.append(force_unicode(parent))
            make_addr(parent)
def save(self, filename, raw_contents):
    filename = self.get_available_name(filename)
    ret, err = qiniu.io.put(self.uptoken, filename, raw_contents, self.extra)
    if err is not None:
        print "Error: %s" % err
        print "FAILURE writing file %s" % (filename)
    else:
        return force_unicode(filename.replace('\\', '/'))
def inner(self, action):
    object = getattr(action, func.__name__)
    return {
        'url': self.url(object),
        'id': self.id(object),
        'objectType': self.object_type(object),
        'displayName': force_unicode(object),
    }
def __call__(self, obj):
    value = getattr(obj, self.field_name)
    if value:
        text = force_unicode(value)
        if len(text) > self.max_length:
            text = text[:self.max_length] + self.tail
        return text
    else:
        return self.default
def make_list(o):
    if o.aolevel > 1:
        try:
            parent = self.queryset.get(aoguid=o.parentguid)
        except self.queryset.model.DoesNotExist:
            return
        else:
            lst.append(force_unicode(parent))
            make_list(parent)
def render_option(self, selected_choices, option_value, option_label):
    # Note: the various self.filter_* methods are dynamically added to the
    # widget by the field.
    option_value = force_unicode(option_value)
    selected_html = (option_value in selected_choices) and u' selected="selected"' or ''
    data_lookup = self.filter_type_determination(option_value)
    data_filter_value = self.filter_receptacle_determination(
        option_value, filter_type=self.filter_type, filter_lookup=option_value)
    data_autocomplete_lookup = self.filter_autocomplete_lookup_determination(option_value)
    data_lookup = data_lookup and ' data-filtertype="%s"' % (data_lookup, ) or ""
    data_filter_value = data_filter_value and ' data-filtervalue="%s"' % (data_filter_value, ) or ""
    data_autocomplete_lookup = data_autocomplete_lookup and \
        ' data-filterautocomplete="%s"' % (
            reverse('ajax_lookup', kwargs={'channel': data_autocomplete_lookup}), ) or ""
    return u'<option value="%s"%s%s%s%s>%s</option>' % (
        escape(option_value), selected_html, data_lookup, data_filter_value,
        data_autocomplete_lookup, conditional_escape(force_unicode(option_label)))
def _update_address(self):
    full_addr = [force_unicode(self.address)]
    short_addr = []

    def make_addr(obj):
        if obj.aolevel > 3:
            short_addr.append(obj)
        if obj.aolevel > 1:
            try:
                parent = AddrObj.objects.get(aoguid=obj.parentguid)
            except AddrObj.DoesNotExist:
                return
            else:
                full_addr.append(force_unicode(parent))
                make_addr(parent)

    make_addr(self.address)
    self.full_address = ', '.join(full_addr[::-1])
    self.short_address = ', '.join(
        force_unicode(obj) for obj in short_addr[::-1] if obj.aolevel > 4)
def safe_filename(name, allowed_chars=None):
    """ simple filename sanitizer

    >>> safe_filename(u'some str@ng3 File 12,0)a nAm#.txt')
    u'some_strng3_File_120)a_nAm.txt'
    >>> safe_filename(u'registered\xae sign.txt')
    u'registered_sign.txt'
    """
    allowed_chars = allowed_chars or DEFAULT_SAFE_FILENAME_CHARS
    try:
        name = force_unicode(name).encode('translit/one/ascii')
    except (UnicodeDecodeError, UnicodeEncodeError, TypeError, LookupError):
        name = force_unicode(name).encode('ascii', 'ignore')
    name = name.replace(' ', '_')
    name = ''.join(c for c in name if c in allowed_chars)
    fn, ext = os.path.splitext(name)
    if not fn:
        raise ValueError('Cannot sanitize filename')
    return force_unicode(name)
def save(self, filename, raw_contents):
    filename = self.get_available_filename(filename)
    if not hasattr(self, 'mogile_class'):
        self.mogile_class = None
    # Write the file to mogile
    success = self.client.send_file(filename, StringIO(raw_contents), self.mogile_class)
    if success:
        print("Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0]))
    else:
        print("FAILURE writing file %s" % (filename))
    return force_unicode(filename.replace('\\', '/'))
def render(self, name, value, attrs=None):
    try:
        textname = NonPermanent.objects.get(parent_id=value).__unicode__()
        nid = value
        value = textname
    except:
        value = ''
    final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
    if value != '':
        # Only add the 'value' attribute if a value is non-empty.
        final_attrs['value'] = force_unicode(self._format_value(nid))
    output = []
    output.append(u'<input%s />' % flatatt(final_attrs))
    output.append(
        u'<input readonly="true" type="text" id="display_%s" value="%s" size="40" /> '
        % (self._format_value(final_attrs['id']),
           force_unicode(self._format_value(value))))
    output.append(
        u'<a href="#" id="link_%s" onclick="this.href=\'/admin/dide/nonpermanent/list/\'+\'?id=\'+django.jQuery(this).attr(\'id\');return focusOrOpen(this, \'Αναπληρωτές\',{\'width\': 500, \'height\': 600});">Επιλογή</a> '
        % self._format_value(final_attrs['id']))
    output.append(
        u'<a href="/admin/dide/nonpermanent/add/" class="add-another" id="add_%s" onclick="return showAddAnotherPopup(this);"> <img src="/static/admin/img/icon_addlink.gif" width="10" height="10" alt="Προσθέστε κι άλλο"></a>'
        % self._format_value(final_attrs['id']))
    return mark_safe(''.join(output))
def contact_record_val_to_str(value):
    """
    Translates a field value from a contact record into a human-readable
    string.

    Dates are formatted "ShortMonth Day, Year Hour:Minute AMorPM".
    Times are formatted "XX Hours XX Minutes".
    If the value matches a contact type choice it's translated to the
    verbose form.
    """
    value = (value.strftime('%b %d, %Y %I:%M %p') if type(value) is datetime
             else value.strftime('%H hours %M minutes') if type(value) is time
             else force_unicode(value))

    contact_types = dict(CONTACT_TYPE_CHOICES)
    if value in contact_types:
        value = contact_types[value]
    return value
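# Usage sketch for contact_record_val_to_str (illustrative only, assuming the
# module-level datetime/time imports and CONTACT_TYPE_CHOICES used above):
#
#     >>> from datetime import datetime
#     >>> contact_record_val_to_str(datetime(2014, 3, 5, 14, 30))
#     'Mar 05, 2014 02:30 PM'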
def __init__(self, **params):
    """ Init finder. Each instance should be used only once.

    @keyword query: user query
    """
    self._query = None
    self._words = None
    self.checked = False
    self.engine = None
    if 'query' in params:
        self._query = force_unicode(params['query'])
        del params['query']
    self._params = params
def _txt_for_val(self, value):
    if not value:
        return
    obj = self.queryset.get(pk=value)
    lst = [force_unicode(obj)]

    def make_list(o):
        if o.aolevel > 1:
            try:
                parent = self.queryset.get(aoguid=o.parentguid)
            except self.queryset.model.DoesNotExist:
                return
            else:
                lst.append(force_unicode(parent))
                make_list(parent)

    make_list(obj)
    return ', '.join(lst[::-1])
def wikilinks(source, args):
    u"""Filter that converts [[Name]] into an internal wiki link.

    \[[Name]] is escaped and left unlinked.
    """
    html_class = u'wikilink'

    def repl(m):
        prefix = m.group("prefix")
        value = m.group("value")
        try:
            url = reverse('wikis-entry-detail',
                          kwargs={'project': args.project.slug, 'slug': value})
            return u"""%(prefix)s<a href="%(url)s" class="%(class)s">%(value)s</a>""" % {
                'prefix': prefix,
                'url': url,
                'class': html_class,
                'value': value
            }
        except:
            return m.group(0)

    value = force_unicode(source)
    value = re.sub(WIKILINK, repl, value)
    value = re.sub(NON_WIKILINK, r'\1', value)
    return mark_safe(value)
def delete_view(self, request, object_id, extra_context=None):
    opts = self.model._meta
    app_label = opts.app_label
    try:
        obj = self.model._default_manager.get(pk=object_id)
    except self.model.DoesNotExist:
        obj = None
    if not self.has_delete_permission(request, obj):
        raise PermissionDenied
    if obj is None:
        raise Http404('%s object with primary key %r does not exist.'
                      % (force_unicode(opts.verbose_name), escape(object_id)))
    if obj.published == False:
        raise Http404('can not carry out the operation because the object %s is unpublished'
                      % (force_unicode(opts.verbose_name)))

    using = router.db_for_write(self.model)
    (deleted_objects, perms_needed, protected) = get_deleted_objects(
        [obj], opts, request.user, self.admin_site, using)

    # if request.POST is set, the user already confirmed deletion
    if not request.POST:
        print 'not request.POST'
    else:
        print 'request.POST'
        if perms_needed:
            raise PermissionDenied
        dataFileTemp = DataFileTemp.objects.get(filename__exact=obj.filename)
        #experimentalfilecontempDatafiletempQuerySet = ExperimentalfilecontempDatafiletemp.objects.filter(datafiletemp=dataFileTemp)
        '''
        for efct in experimentalfilecontempDatafiletempQuerySet:
            try:
                experimentalParCond = ExperimentalParCond.objects.get(tag__exact=efct.experimentalfilecontemp.tag)
            except ObjectDoesNotExist as error:
                pass
        '''
        publication = None
        try:
            dataFile = DataFile.objects.get(code__exact=dataFileTemp.code)
            publication = dataFile.publication
        except ObjectDoesNotExist as error:
            dataFile = None
        if dataFile is None:
            raise Http404('%s object not published yet.'
                          % (force_unicode(dataFileTemp.filename)))

        dataFilePropertyQuerySet = None
        try:
            dataFilePropertyQuerySet = DataFileProperty.objects.filter(datafile=dataFile)
        except ObjectDoesNotExist as error:
            dataFilePropertyQuerySet = None
        if dataFilePropertyQuerySet is None:
            raise Http404('%s object not published yet.'
                          % (force_unicode(dataFile.code)))
        for dfp in dataFilePropertyQuerySet:
            dataFilePropertyObj = DataFileProperty()
            dataFilePropertyObj = dfp
            print "dataFilePropertyObj.delete()"
            print dataFilePropertyObj.datafile.filename
            dataFilePropertyObj.delete()

        pathslist = Path.objects.all()
        path = Path()
        for cifdir in pathslist:
            path = cifdir
            if os.path.isdir(path.cifs_dir):
                break
        ciffilein = os.path.join(path.cifs_dir_valids, obj.filename)
        ciffileout = os.path.join(path.cifs_dir, dataFile.filename)
        print "archivo a borrar"
        print ciffileout
        try:
            if os.path.isfile(ciffileout):
                os.remove(ciffileout)
        except Exception as e:
            raise Http404('%s object not published yet.' % (force_unicode(e)))
            #print(e)

        print "dataFile.delete()"
        print dataFile.filename
        print "publication.delete()"
        print publication.title
        dataFile.delete()
        publication.delete()

        obj_display = str(obj)
        #self.delete_model(request, obj)
        obj.published = False
        obj.save()
        self.log_deletion(request, obj, obj_display)
        self.message_user(
            request,
            ('The %(name)s "%(obj)s" was deleted successfully.') % {
                'name': force_unicode(opts.verbose_name),
                'obj': force_unicode(obj_display)
            })
        if not self.has_change_permission(request, None):
            return HttpResponseRedirect(
                reverse('admin:index', current_app=self.admin_site.name))
        return HttpResponseRedirect(
            reverse('admin:%s_%s_changelist' % (opts.app_label, opts.module_name),
                    current_app=self.admin_site.name))

    object_name = force_unicode(opts.verbose_name)
    print "perms_needed"
    print perms_needed
    print protected
    if perms_needed or protected:
        title = "Cannot delete %(name)s" % {"name": object_name}
    else:
        title = "Are you sure?"

    context = {
        "title": title,
        "object_name": object_name,
        "object": obj,
        "deleted_objects": deleted_objects,
        "perms_lacking": perms_needed,
        "opts": opts,
        "root_path": self.admin_site,
        "app_label": app_label,
    }
    context.update(extra_context or {})
    return render_to_response(
        self.delete_confirmation_template or [
            "admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
            "admin/%s/delete_confirmation.html" % app_label,
            "admin/delete_confirmation.html"
        ], context, context_instance=template.RequestContext(request))
# --- Clean old files from current input directory
if not self.dryrun:
    # Here is the simplest implementation to manage secondary files,
    # i.e. files that have not been endpoint files.
    # These files could have been used as a dependency file by one or
    # more imports.
    # We need to manage two cases:
    # - the case of a file that is a dependency of two endpoint files
    # - the case of a file that is a dependency of an endpoint file that
    #   has gone in error
    # Both these cases would be better handled with transactions, but
    # we consider that the transaction implementation in Django is not
    # advanced enough for these complex cases (m2m, post_save, etc.)
    # (See for example ticket #14051 in Django Trac)
    # When the Implementor has used Config.open to manage these files,
    # they already have been moved away
    for f in self.listdir(input):
        input_file_path = os.path.join(input, f)
        done_file_path = os.path.join(done, f)
        if os.path.isdir(input_file_path):
            continue
        grace_period = self.GRACE_PERIOD
        st_mtime = os.stat(input_file_path).st_mtime
        age = time() - st_mtime
        if age > grace_period:
            log.info(u"Removing old file from input dir: %s"
                     % force_unicode(input_file_path))
            move_file(input_file_path, done_file_path)

if hasattr(self, 'postprocess'):
    self.postprocess(instances)
def id(self, object):
    return force_unicode(object.pk)
def parse_extra_classes(extra_classes):
    if isinstance(extra_classes, basestring):
        extra_classes = force_unicode(extra_classes)
        extra_classes = map(string.strip, extra_classes.split(' '))
    return extra_classes
def process_recursively(self, path=""):
    """Recursively inspect :attribute:`BaseConfig.input_dir` and process files
    using BFS

    Recursively inspect :attribute:`BaseConfig.input_dir`, load the builder
    class through :method:`BaseConfig.load_builder` and run processing"""
    instances = None
    if hasattr(self, 'postprocess'):
        instances = []
    log.info(u'process_recursively %s' % path)
    input, work, error, done = self.paths(path)
    if not os.path.exists(work):
        os.makedirs(work)
    if not os.path.exists(error):
        os.makedirs(error)
    if not os.path.exists(done):
        os.makedirs(done)
    # input_dir should exist
    log.info(u'work_path %s' % work)
    for f in self.listdir(input):
        # Relative file path from current path
        partial_file_path = os.path.join(path, f)
        # Absolute file path
        input_file_path = os.path.join(input, f)
        # For now, do not process non utf-8 file names
        #FIXME
        if not is_utf8(f):
            error_file_path = os.path.join(self.error_dir(), f)
            move_file(input_file_path, error_file_path)
        if os.path.isdir(input_file_path):
            self.process_recursively(partial_file_path)
        else:
            if not os.path.exists(input_file_path):
                # the file might have been already moved
                # by a nested builder
                continue
            # --- Check file age
            # The idea is to prevent processing a file that is too recent, to
            # avoid processing files while they are still being downloaded
            # into the input dir and to minimize the risk of missing
            # dependency files.
            # If you don't care about this, just do not set it in settings
            min_age = self.QUARANTINE  # seconds
            if min_age > 0:
                st_mtime = os.stat(input_file_path).st_mtime
                age = time() - st_mtime
                if age < min_age:
                    log.info(u"Skipping too recent file %s"
                             % force_unicode(input_file_path))
                    continue
            # --- Load and process builder for file
            builder = self.load_builder(partial_file_path)
            if builder is None:
                log.info(u'skip file %s' % force_unicode(input_file_path))
                continue
            else:
                log.info(u'match %s' % force_unicode(partial_file_path))
            if not self.dryrun:
                try:
                    new_instances, unhandled_errors = builder.process_and_save()
                except StopConfig, e:
                    # this is a user controlled exception
                    msg = u'Import stopped for %s' % self
                    log.warning(msg, exc_info=sys.exc_info())
                    to_dir = self.error_dir()
                    break
                except PostponeBuilder, e:
                    # The Implementor has asked to postpone the current process
                    msg = u'Builder postponed for %s' % self
                    log.warning(msg, exc_info=sys.exc_info())
                    # Do not move files, keep them for next run
                    to_dir = self.input_dir()
                    continue  # to next file
                except Exception, e:
                    msg = u'builder processing of %s failed' % input_file_path
                    log.error(msg, exc_info=sys.exc_info())
                    to_dir = self.error_dir()
                else:
                    if hasattr(self, 'postprocess') and new_instances:
                        instances.append(new_instances)
                    to_dir = unhandled_errors and self.error_dir() \
                        or self.done_dir()
                finally:
def get_results(self, request, term, page, context):
    filter_params = None
    level = 0
    result_parts = []
    result = []
    parts = term.split(',')
    parts_len = len(parts)

    # Check the hierarchy of every object before the last comma
    if parts_len > 1:
        for part in parts[:-1]:
            socr_term, obj_term = part.strip().split(' ', 1)
            socr_term = socr_term.rstrip('.')
            part_qs = AddrObj.objects.filter(shortname__iexact=socr_term,
                                             formalname__iexact=obj_term)
            if level > 0:
                part_qs = part_qs.filter(parentguid=result_parts[level - 1].aoguid)
            if len(part_qs) == 1:
                level += 1
                result_parts.append(part_qs[0])
            elif len(part_qs) > 1:
                raise Exception('Много вариантов???')
            else:
                return EMPTY_RESULT

    # The part after the last comma is checked more thoroughly
    last = parts[-1].lstrip()
    last_len = len(last)

    # Is this an abbreviation followed by the start of an object name?
    if ' ' in last:
        socr_term, obj_term = last.split(' ', 1)
        socr_term = socr_term.rstrip('.')
        sqs = SocrBase.objects.filter(scname__icontains=socr_term).distinct()
        if level > 0:
            sqs = sqs.filter(level__gt=result_parts[-1].aolevel)
        sqs_len = len(sqs)
        obj_term = obj_term.strip()
        if sqs_len > 1:
            levels = []
            socrs = []
            for s in sqs:
                levels.append(s.level)
                socrs.append(s.scname)
            filter_params = dict(
                aolevel__in=set(levels),
                shortname__in=set(socrs),
            )
        elif sqs_len == 1:
            filter_params = dict(
                aolevel=sqs[0].level,
                shortname=sqs[0].scname,
            )
        else:
            pass
        if filter_params:
            if obj_term:
                filter_params.update(formalname__icontains=obj_term)
            if level > 0:
                filter_params.update(parentguid=result_parts[-1].aoguid,
                                     aolevel__gt=result_parts[-1].aolevel)
    # Is this just an abbreviation?
    elif last_len < 10:
        sqs = SocrBase.objects.filter(scname__icontains=last)
        if level > 0:
            sqs = sqs.filter(level__gt=result_parts[-1].aolevel)
        sqs_len = len(sqs)
        if sqs_len:
            result = ((None, s.scname) for s in sqs)
    else:
        filter_params = dict(
            formalname__icontains=last
        )
        if level > 0:
            filter_params.update(parentguid=result_parts[-1].aoguid,
                                 aolevel__gt=result_parts[-1].aolevel)

    prefix = ', '.join((r.get_formal_name() for r in result_parts)) if result_parts else ''

    if result:
        if prefix:
            return NO_ERR_RESP, False, ((k, '{0}, {1}'.format(prefix, v))
                                        for k, v in result)
        return NO_ERR_RESP, False, result

    if filter_params is not None:
        result = AddrObj.objects.order_by('aolevel').filter(**filter_params)[:10]
        if prefix:
            return (
                NO_ERR_RESP,
                False,
                ((force_unicode(l.pk), '{0}, {1}'.format(prefix, l), {'level': l.aolevel})
                 for l in result)
            )
        else:
            return NO_ERR_RESP, False, ((force_unicode(l.pk), l.full_name(5, True),
                                         {'level': l.aolevel}) for l in result)

    return EMPTY_RESULT
def save_model(self, request, obj, form, change):
    try:
        #print request.POST
        message_bit = ""
        #print form.changed_data  # list name of field was changed
        #print change  # True or False
        if not request.POST.has_key('_addanother') and \
                not request.POST.has_key('_continue') and \
                not request.POST.has_key('_save'):
            print "pass"
        elif request.POST.has_key('_addanother'):
            print request.POST.get('_addanother', False)
        elif request.POST.has_key('_continue'):
            print request.POST.get('_continue', False)
        elif request.POST.has_key('_save'):
            print request.POST.get('_save', False)

        if request.POST.get('published', False) != False:
            try:
                obj.datepublished = datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S')
                dataFileTemp = DataFileTemp.objects.get(filename__exact=obj.filename)
                experimentalfilecontempDatafiletempQuerySet = \
                    ExperimentalfilecontempDatafiletemp.objects.filter(datafiletemp=dataFileTemp)
                experimentalParCond = ExperimentalParCond()
                for efct in experimentalfilecontempDatafiletempQuerySet:
                    try:
                        experimentalParCond = ExperimentalParCond.objects.get(
                            tag__exact=efct.experimentalfilecontemp.tag)
                        #experimentalParCond = None
                    except ObjectDoesNotExist as error:
                        print "Error({0}): {1}".format(99, error.message)
                        experimentalParCond.tag = efct.experimentalfilecontemp.tag
                        experimentalParCond.name = efct.experimentalfilecontemp.name
                        experimentalParCond.description = efct.experimentalfilecontemp.description
                        experimentalParCond.units = efct.experimentalfilecontemp.units
                        experimentalParCond.units_detail = efct.experimentalfilecontemp.units_detail
                        experimentalParCond.save()

                publicArticle = PublArticle()
                publicArticle.title = dataFileTemp.publication.title
                publicArticle.authors = dataFileTemp.publication.authors
                publicArticle.journal = dataFileTemp.publication.journal
                publicArticle.year = dataFileTemp.publication.year
                publicArticle.volume = dataFileTemp.publication.volume
                publicArticle.issue = dataFileTemp.publication.issue
                publicArticle.first_page = dataFileTemp.publication.first_page
                publicArticle.last_page = dataFileTemp.publication.last_page
                publicArticle.reference = dataFileTemp.publication.reference
                publicArticle.pages_number = dataFileTemp.publication.pages_number
                publicArticle.save()

                dataFile = DataFile()
                top = DataFile.objects.order_by('-code')[0]
                code = top.code + 1
                dataFile.code = code
                dataFile.filename = str(code) + ".mpod"
                dataFile.cod_code = dataFileTemp.cod_code
                dataFile.phase_generic = dataFileTemp.phase_generic
                dataFile.phase_name = dataFileTemp.phase_name
                dataFile.chemical_formula = dataFileTemp.chemical_formula
                dataFile.publication = publicArticle
                dataFile.save()

                obj.datafile = dataFile
                dataFileTemp.code = dataFile.code
                dataFileTemp.save()

                dataFilePropertyTemp = DataFilePropertyTemp.objects.get(datafile=dataFileTemp)
                dataFileProperty = DataFileProperty()
                property = Property()
                try:
                    property = Property.objects.get(
                        tag__exact=dataFilePropertyTemp.property.tag)
                except ObjectDoesNotExist as error:
                    #print "Error({0}): {1}".format(99, error.message)
                    property.tag = dataFilePropertyTemp.property.tag
                    property.name = dataFilePropertyTemp.property.name
                    property.description = dataFilePropertyTemp.property.description
                    property.tensor_dimensions = dataFilePropertyTemp.property.tensor_dimensions
                    property.units = dataFilePropertyTemp.property.units
                    property.units_detail = dataFilePropertyTemp.property.units_detail
                    property.save()
                dataFileProperty.property = property
                dataFileProperty.datafile = dataFile
                dataFileProperty.save()
                obj.save()

                pathslist = Path.objects.all()
                pathexist = 0
                filepath = ""
                path = Path()
                for cifdir in pathslist:
                    path = cifdir
                    if os.path.isdir(path.cifs_dir):
                        break
                ciffilein = os.path.join(path.cifs_dir_valids, obj.filename)
                ciffileout = os.path.join(path.cifs_dir, dataFile.filename)
                print dataFile.filename
                #line.replace("data_" + obj.filename, "data_" + dataFile.code), end='')
                datacode = "data_" + obj.filename.replace('.mpod', ' ')
                newdatacode = "data_" + str(dataFile.code)
                print datacode
                print newdatacode
                with open(ciffilein) as infile, open(ciffileout, 'w') as outfile:
                    for line in infile:
                        l = line.rstrip('\n')
                        if l in datacode:
                            print line
                            line = newdatacode + '\n'
                            outfile.write(line)
                        else:
                            outfile.write(line)

                messageCategoryDetailQuerySet1 = MessageCategoryDetail.objects.filter(
                    messagecategory=MessageCategory.objects.get(pk=2))  # 2 for user notification
                for mcd in messageCategoryDetailQuerySet1:
                    messageCategoryDetail = MessageCategoryDetail()
                    messageCategoryDetail = mcd
                    messageMail = MessageMail.objects.get(pk=messageCategoryDetail.message.pk)
                    if messageMail.pk == 7:
                        configurationMessage = ConfigurationMessage.objects.get(message=messageMail)
                        smtpconfig = configurationMessage.account
                        my_use_tls = False
                        if smtpconfig.email_use_tls == 1:
                            my_use_tls = True
                        connection = get_connection(
                            host=smtpconfig.email_host,
                            port=int(smtpconfig.email_port),
                            username=smtpconfig.email_host_user,
                            password=smtpconfig.email_host_password,
                            use_tls=my_use_tls)
                        current_site = get_current_site(request)
                        dataitem = "dataitem"
                        forwardslash = "/"
                        message = render_to_string(
                            'notification_to_user_file_published.html', {
                                'regards': messageMail.email_regards,
                                'email_message': messageMail.email_message,
                                'user': obj.authuser,
                                'domain': current_site.domain,
                                'code': dataFile.code,
                                'dataitem': dataitem,
                                'forwardslash': forwardslash,
                            })
                        print message
                        send_mail(messageMail.email_subject, message,
                                  smtpconfig.email_host_user, [obj.authuser.email],
                                  connection=connection)
            except ObjectDoesNotExist as error:
                #print "Error({0}): {1}".format(99, error.message)
                messages.add_message(request, messages.ERROR,
                                     "Error %s " % error.message)
            else:
                opts = self.model._meta
                obj_display = force_unicode(obj)
                """
                self.log_change(request, obj, message)
                self.log_addition(request, obj)
                self.log_deletion(request, obj, obj_display)
                """
                #self.message_user(request, ('The %(name)s "%(obj)s" was changed successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj_display)})
    except Exception, e:
        messages.add_message(request, messages.ERROR, "Error %s " % e.message)
def _save(self, filename, contents):
    success = self.client.send_file(filename, contents)
    if not success:
        raise IOException("Unable to save file: %s" % filename)
    return force_unicode(filename)
def __call__(self, obj):
    value = getattr(obj, self.field_name)
    if value is not None:
        return force_unicode(value)
    else:
        return self.default
def title(self, action):
    return force_unicode(action)
def markdown2(value):
    return markdown(force_unicode(value),
                    extras=['wiki-tables', 'fenced-code-blocks'])
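# Usage sketch for the markdown2 filter (assumes the module imports ``markdown``
# from the markdown2 package, which provides the 'wiki-tables' and
# 'fenced-code-blocks' extras used above):
#
#     >>> markdown2(u'**bold** text')
#     u'<p><strong>bold</strong> text</p>\n'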
def get_results(self, request, term, page, context):
    filter_params = None
    level = 0
    result_parts = []
    result = []
    parts = term.split(',')
    parts_len = len(parts)

    # Check the hierarchy of every object before the last comma
    if parts_len > 1:
        for part in parts[:-1]:
            socr_term, obj_term = part.strip().split(' ', 1)
            socr_term = socr_term.rstrip('.')
            part_qs = AddrObj.objects.filter(shortname__iexact=socr_term,
                                             formalname__iexact=obj_term)
            if level > 0:
                part_qs = part_qs.filter(parentguid=result_parts[level - 1].aoguid)
            if len(part_qs) == 1:
                level += 1
                result_parts.append(part_qs[0])
            elif len(part_qs) > 1:
                raise Exception('Много вариантов???')
            else:
                raise Exception('Empty')
                return EMPTY_RESULT

    # The part after the last comma is checked more thoroughly
    last = parts[-1].lstrip()
    last_len = len(last)

    # Is this an abbreviation followed by the start of an object name?
    if ' ' in last:
        socr_term, obj_term = last.split(' ', 1)
        socr_term = socr_term.rstrip('.')
        sqs = SocrBase.objects.filter(
            scname__istartswith=socr_term).distinct()
        if level > 0:
            sqs = sqs.filter(level__gt=result_parts[-1].aolevel)
        sqs_len = len(sqs)
        obj_term = obj_term.strip()
        if sqs_len > 1:
            levels = []
            socrs = []
            for s in sqs:
                levels.append(s.level)
                socrs.append(s.scname)
            filter_params = dict(
                aolevel__in=set(levels),
                shortname__in=set(socrs),
            )
        elif sqs_len == 1:
            filter_params = dict(
                aolevel=sqs[0].level,
                shortname=sqs[0].scname,
            )
        else:
            pass
        if filter_params:
            if obj_term:
                filter_params.update(formalname__istartswith=obj_term)
            if level > 0:
                filter_params.update(parentguid=result_parts[-1].aoguid,
                                     aolevel__gt=result_parts[-1].aolevel)
    # Is this just an abbreviation?
    elif last_len < 10:
        sqs = SocrBase.objects.filter(scname__istartswith=last)
        if level > 0:
            sqs = sqs.filter(level__gt=result_parts[-1].aolevel)
        sqs_len = len(sqs)
        if sqs_len:
            result = ((None, s.scname) for s in sqs)
    else:
        filter_params = dict(formalname__istartswith=last)
        if level > 0:
            filter_params.update(parentguid=result_parts[-1].aoguid,
                                 aolevel__gt=result_parts[-1].aolevel)

    prefix = ', '.join(
        (force_unicode(r) for r in result_parts)) if result_parts else ''

    if result:
        if prefix:
            return NO_ERR_RESP, False, ((k, '{0}, {1}'.format(prefix, v))
                                        for k, v in result)
        return NO_ERR_RESP, False, result

    if filter_params is not None:
        result = AddrObj.objects.order_by('aolevel').filter(
            **filter_params)[:30]
        if prefix:
            return (NO_ERR_RESP, False,
                    ((force_unicode(l.pk),
                      '{0}, {1}'.format(prefix, force_unicode(l)),
                      {'level': l.aolevel}) for l in result))
        else:
            return NO_ERR_RESP, False, ((force_unicode(l.pk), force_unicode(l),
                                         {'level': l.aolevel}) for l in result)

    return NO_ERR_RESP, False, []
def _get_body_from_instance(self, instance):
    bodies = []
    for field in instance._meta.fields:
        if not isinstance(field, (CharField, TextField)):
            continue
        bodies.append(force_unicode(getattr(instance, field.name)))
    return "\n".join(bodies)