def get_option_value(optionset_admin, db_option, current_only):
    """
    Given an Option object, return its value for the current language.
    """
    name = smart_str(db_option.name)
    if name not in optionset_admin.options:
        return None
    field = optionset_admin.options[name]
    if not db_option.lang_dependant:
        return field.to_python(db_option.value) if db_option.value else ''
    value_dict = {}
    for key, value in json.loads(db_option.value).items():
        value_dict[smart_str(key)] = value
    if current_only:
        curr_lang = get_language()
        if curr_lang in value_dict:
            return field.to_python(value_dict[curr_lang]) if value_dict[curr_lang] else ''
    else:
        for key in value_dict:
            value_dict[key] = field.to_python(value_dict[key])
        return value_dict
def pullThread(self, msgl):
    """ Gets all messages for a thread. """
    data = self.get(msgl)
    msgs = data.findAll("span", {"class": "username-inbox"})
    this_id = ''
    ids, texts = [], []
    for msg in msgs:
        this_id = msg.find("a")['href'].split("=")[1]
        txt = msg.parent.parent.findAll("div")[1].text
        if "has expressed interest in you" not in txt:
            texts.append(smart_str(txt))
            ids.append(smart_str(this_id))
    for link in data.findAll("a"):
        if "viewallmessages" in link['href'] and not data.find("span", {"class": "text-warning"}):
            newlink = "http://www.pof.com/" + link['href']
            ids2, texts2 = self.pullThread(newlink)
            ids = ids2 + ids
            texts = texts2 + texts
            break
    if not msgs:
        return [], []
    else:
        return ids, texts
def urlencode(self, safe=None):
    """
    Returns an encoded string of all query string arguments.

    :arg safe: Used to specify characters which do not require quoting, for
        example::

            >>> q = QueryDict('', mutable=True)
            >>> q['next'] = '/a&b/'
            >>> q.urlencode()
            'next=%2Fa%26b%2F'
            >>> q.urlencode(safe='/')
            'next=/a%26b/'
    """
    output = []
    if safe:
        encode = lambda k, v: '%s=%s' % (quote(k, safe), quote(v, safe))
    else:
        encode = lambda k, v: urlencode({k: v})
    for k, list_ in self.lists():
        k = smart_str(k, self.encoding)
        output.extend([encode(k, smart_str(v, self.encoding)) for v in list_])
    return '&'.join(output)
def changeBusinessMan(request):
    """ Modify the businessMan attribute of a given instance. """
    if request.method == "POST":
        if request.POST.get('change', '') == u"businessMan":
            thisUser = request.user
            if not checkPerm(thisUser, 'businessMan'):
                return HttpResponse('failed')
            thisVm = smart_str(request.POST.get('host'))
            oldValue = smart_str(request.POST.get('oldvalue', ''))
            newValue = smart_str(request.POST.get('newvalue', ''))
            vm = Vm(thisVm)
            try:
                vm.update(businessMan=newValue)
                log = LogRequest(request.user)
                logContent = '(modify businessMan) %s --> %s' % \
                    (oldValue, newValue)
                log.save(logContent)
                return HttpResponse('successful')
            except:
                return HttpResponse('failed')
        else:
            raise Http404
    else:
        raise Http404
def convert_query(self, query, num_params):
    # kinterbasdb tries to convert the passed SQL to string.
    # But if the connection charset is NONE, ASCII or OCTETS it will fail.
    # So we convert it to string first.
    if num_params == 0:
        return smart_str(query, self.encoding)
    return smart_str(query % tuple("?" * num_params), self.encoding)
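# Hedged usage sketch (not part of the original module; the query text below
# is made up for illustration): it shows what the placeholder substitution in
# convert_query produces: each Django-style "%s" marker is replaced with
# kinterbasdb's "?" paramstyle before the string is encoded.
def _demo_convert_query():
    query = "SELECT * FROM account WHERE id = %s AND name = %s"
    num_params = 2
    converted = query % tuple("?" * num_params)
    assert converted == "SELECT * FROM account WHERE id = ? AND name = ?"
    return converted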
def json(request, *args, **kwargs):
    """
    The oembed endpoint, or the url to which requests for metadata are passed.
    Third parties will want to access this view with URLs for your site's
    content and be returned OEmbed metadata.
    """
    # coerce to dictionary
    params = dict(request.GET.items())
    callback = params.pop('callback', None)
    url = params.pop('url', None)
    if not url:
        return HttpResponseBadRequest('Required parameter missing: URL')
    try:
        provider = oembed.site.provider_for_url(url)
        if not provider.provides:
            raise OEmbedMissingEndpoint()
    except OEmbedMissingEndpoint:
        raise Http404('No provider found for %s' % url)
    query = dict([(smart_str(k), smart_str(v)) for k, v in params.items() if v])
    try:
        resource = oembed.site.embed(url, **query)
    except OEmbedException, e:
        raise Http404('Error embedding %s: %s' % (url, str(e)))
def rec_list(obj):
    obj.title = smart_str("— " * obj.step) + smart_str(obj.title)
    mass_object.append(obj)
    children = Category.objects.filter(parent=obj)
    for child in children:
        rec_list(child)
def put_object_headers(response, meta, restricted=False, token=None):
    response['ETag'] = meta['checksum']
    response['Content-Length'] = meta['bytes']
    response.override_serialization = True
    response['Content-Type'] = meta.get('type', 'application/octet-stream')
    response['Last-Modified'] = http_date(int(meta['modified']))
    if not restricted:
        response['X-Object-Hash'] = meta['hash']
        response['X-Object-UUID'] = meta['uuid']
        if TRANSLATE_UUIDS:
            meta['modified_by'] = \
                retrieve_displayname(token, meta['modified_by'])
        response['X-Object-Modified-By'] = smart_str(
            meta['modified_by'], strings_only=True)
        response['X-Object-Version'] = meta['version']
        response['X-Object-Version-Timestamp'] = http_date(
            int(meta['version_timestamp']))
        for k in [x for x in meta.keys() if x.startswith('X-Object-Meta-')]:
            response[smart_str(
                k, strings_only=True)] = smart_str(meta[k], strings_only=True)
        for k in (
                'Content-Encoding', 'Content-Disposition', 'X-Object-Manifest',
                'X-Object-Sharing', 'X-Object-Shared-By', 'X-Object-Allowed-To',
                'X-Object-Public'):
            if k in meta:
                response[k] = smart_str(meta[k], strings_only=True)
    else:
        for k in ('Content-Encoding', 'Content-Disposition'):
            if k in meta:
                response[k] = smart_str(meta[k], strings_only=True)
def _sync_ldap_members(connection, group, ldap_info, count=0, max_count=1):
    if count >= max_count:
        return None

    # Find all users and groups of group.
    users_info = connection.find_users_of_group(ldap_info['dn'])
    groups_info = connection.find_groups_of_group(ldap_info['dn'])
    posix_members = ldap_info['posix_members']

    for user_info in users_info:
        LOG.debug("Synchronizing user %s with group %s" % (smart_str(user_info['dn']), smart_str(group.name)))
        try:
            user = ldap_access.get_ldap_user(username=user_info['username'])
            group.user_set.add(user)
        except User.DoesNotExist:
            LOG.debug("Synchronizing user %s with group %s failed. User does not exist." % (smart_str(user_info['dn']), smart_str(group.name)))

    for group_info in groups_info:
        LOG.debug("Synchronizing group %s" % smart_str(group_info['dn']))
        try:
            group = Group.objects.get(name=group_info['name'])
            _sync_ldap_members(connection, group, group_info, count + 1, max_count)
        except Group.DoesNotExist:
            LOG.debug("Synchronizing group %s failed. Group does not exist." % smart_str(group.name))

    for posix_member in posix_members:
        LOG.debug("Synchronizing posix user %s with group %s" % (smart_str(posix_member), smart_str(group.name)))
        users_info = connection.find_users(posix_member, search_attr='uid',
                                           user_name_attr=desktop.conf.LDAP.USERS.USER_NAME_ATTR.get(),
                                           find_by_dn=False)
        for user_info in users_info:
            try:
                user = ldap_access.get_ldap_user(username=user_info['username'])
                group.user_set.add(user)
            except User.DoesNotExist:
                LOG.debug("Synchronizing posix user %s with group %s failed. User does not exist." % (smart_str(posix_member), smart_str(group.name)))
def get_profile_fields(obj):
    reg = obj.user
    affiliate = reg.profile.get_registration().wsgc_affiliate
    if not affiliate:
        affiliate = reg.profile.get_registration().wsgc_affiliate_other
    race = [r.name for r in reg.profile.race.all()]
    fields = [
        reg.profile.salutation,
        smart_str(
            reg.first_name, encoding='utf-8', strings_only=False, errors='strict'
        ),
        smart_str(
            reg.profile.second_name, encoding='utf-8', strings_only=False, errors='strict'
        ),
        smart_str(
            reg.last_name, encoding='utf-8', strings_only=False, errors='strict'
        ),
        reg.email, reg.profile.email_auxiliary(),
        reg.profile.phone_primary, reg.profile.phone_mobile,
        reg.profile.address1, reg.profile.address2, reg.profile.city,
        reg.profile.state, reg.profile.postal_code,
        reg.profile.address1_current, reg.profile.address2_current,
        reg.profile.city_current, reg.profile.state_current,
        reg.profile.postal_code_current,
        reg.profile.date_of_birth, reg.profile.gender,
        ' '.join(race), reg.profile.tribe,
        reg.profile.disability, reg.profile.disability_specify,
        reg.profile.employment, reg.profile.military, reg.profile.us_citizen,
        reg.profile.registration_type,
        affiliate,
    ]
    return fields
def profile_sync(request):
    profile = Profile.objects.get(user=request.user)
    data_encoded = urllib.urlencode([
        ('first_name', smart_str(profile.first_name)),
        ('last_name', smart_str(profile.last_name)),
        ('phone', smart_str(profile.phone)),
        ('mobile', smart_str(profile.mobile)),
        ('email_1', smart_str(profile.email)),
    ])
    if profile.atsid:
        url = '/'.join([ATS_URI, 'candidate/%s/update/'])
        r = urllib2.Request(url % (profile.atsid), data=data_encoded)
        y = urllib2.urlopen(r)
        #response = y.read()
        #y.close()
        #result = simplejson.loads(response)
    else:
        url = '/'.join([ATS_URI, 'candidate/new/'])
        r = urllib2.Request(url, data=data_encoded)
        y = urllib2.urlopen(r)
        response = y.read()
        y.close()
        result = simplejson.loads(response)
        profile.atsid = result['candidate']
        profile.save()
    return HttpResponseRedirect('/jobs/')
def export_xls_zip_all_classes(StudentAdmin, request, queryset):
    import openpyxl
    from django.utils.encoding import smart_str
    from openpyxl.utils import get_column_letter
    from openpyxl.styles import Alignment, Protection
    from openpyxl.worksheet.datavalidation import DataValidation
    from openpyxl.writer.excel import save_virtual_workbook

    response = HttpResponse(content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
    response['Content-Disposition'] = 'attachment; filename=journal.xlsx'
    wb = openpyxl.Workbook()
    wb.remove(wb.active)
    pos = 1
    # column headers (Russian): ID, full name, class, grade, comment
    columns = [
        (u"ID", 15),
        (u"ФИО", 20),
        (u"Класс", 20),
        (u"Оценка", 10),
        (u"Комментарий", 40),
    ]
    dv = DataValidation(type="decimal", operator="between", formula1=0, formula2=10, allow_blank=True)
    for parallel in Parallel.objects.all():
        for group in parallel.class2_set.all():
            isinstance(group, Class2)  # no-op; presumably kept as a type hint for the IDE
            ws = wb.create_sheet(str(parallel) + str(group.name), pos)
            ws.protection.sheet = True
            ws.protection.password = '******'
            pos += 1
            row_num = 0
            for col_num in xrange(len(columns)):
                c = ws.cell(row=row_num + 1, column=col_num + 1)
                c.value = columns[col_num][0]
                #c.style.font.bold = True
                # set column width
                ws.column_dimensions[get_column_letter(col_num + 1)].width = columns[col_num][1]
            for student in group.student_set.all():
                row_num += 1
                row = [
                    student.pk,
                    smart_str(student.fname + " " + student.lname + ' ' + student.fathers_name),
                    smart_str(str(parallel.name) + str(group.name)),
                    smart_str(''),
                    smart_str(''),
                ]
                for col_num in xrange(len(row)):
                    c = ws.cell(row=row_num + 1, column=col_num + 1)
                    c.alignment = Alignment(wrap_text=True)
                    lock = False
                    if col_num + 1 <= 3:
                        lock = True
                    c.protection = Protection(locked=lock)
                    c.value = row[col_num]
    response.write(save_virtual_workbook(wb))
    return response
def download_file(file_name):
    path_to_file = EX_DIR + '\\candidateDetails\\' + file_name
    my_file = open(path_to_file, 'rb').read()
    response = HttpResponse(my_file, content_type="text/html")
    response['Content-Disposition'] = 'attachment; filename=%s' % smart_str(file_name)
    response['X-Sendfile'] = smart_str(path_to_file)
    return response
def get_version_path(value, version_prefix):
    """
    Construct the PATH to an Image version.
    Value has to be a server path relative to MEDIA_ROOT.

    version_filename = filename + version_prefix + ext
    Returns a path relative to MEDIA_ROOT.
    """
    if os.path.isfile(smart_str(os.path.join(MEDIA_ROOT, value))):
        path, filename = os.path.split(value)
        filename, ext = os.path.splitext(filename)
        # check if this file is a version of another file
        # to return filename_<version>.ext instead of filename_<version>_<version>.ext
        tmp = filename.split("_")
        if tmp[len(tmp) - 1] in ADMIN_VERSIONS:
            # it seems like the "original" is actually a version of another original,
            # so we strip the suffix (aka. version_prefix)
            new_filename = filename.replace("_" + tmp[len(tmp) - 1], "")
            # check if the version exists when we use the new_filename
            if os.path.isfile(smart_str(os.path.join(MEDIA_ROOT, path, new_filename + "_" + version_prefix + ext))):
                # our "original" filename seems to be a filename_<version> construct,
                # so we replace it with the new_filename
                filename = new_filename
        # if a VERSIONS_BASEDIR is set we need to strip it from the path
        # or we get a <VERSIONS_BASEDIR>/<VERSIONS_BASEDIR>/... construct
        if VERSIONS_BASEDIR != "":
            path = path.replace(VERSIONS_BASEDIR + "/", "")
        version_filename = filename + "_" + version_prefix + ext
        return os.path.join(VERSIONS_BASEDIR, path, version_filename)
    else:
        return None
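# Hedged example (settings assumed for illustration, not taken from the source):
# with MEDIA_ROOT = "/srv/media", VERSIONS_BASEDIR = "_versions" and
# ADMIN_VERSIONS = ["thumbnail"], a call like
#     get_version_path("uploads/photo.jpg", "thumbnail")
# would return "_versions/uploads/photo_thumbnail.jpg" as long as
# /srv/media/uploads/photo.jpg exists on disk; otherwise it returns None.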
def print_entity_list():
    with open(ENTITYJSONFILE) as f:
        j = json.load(f)
    for i, k in enumerate(j["targets"]):
        for alias in k["alias"]:
            row = u"%s|%s|%s|%s" % (k["target_id"], k["group"], k["entity_type"], alias)
            print smart_str(row)
def signin():
    user = MySQLdb.escape_string(smart_str(request.form['Username']))
    password = MySQLdb.escape_string(smart_str(request.form['Password']))
    if sign_in(user, password):
        posts = fget_post_all(user)
        return render_template("home.html", posts=posts, Username=user)
    return render_template("main-page.html")
def RenderContents(self, as_rows=True):
    from django.db import connection, transaction
    from django.utils.encoding import smart_str
    cursor = connection.cursor()
    cursor.execute(self.report_query)
    query_set = today_var1 = cursor.fetchall()
    self.results = query_set
    formatted_row = []
    formatted_set = []
    row_count = 0
    for row in query_set:
        row_count += 1
        for_index = 0
        for col in row:
            try:
                formatter = self.spec[for_index][2]
                if as_rows:
                    formatted_row.append(smart_str(getattr(self, formatter)(col, self.organization)))
                else:
                    formatted_set.append(smart_str(getattr(self, formatter)(col, self.organization)))
                for_index += 1
            except:
                pass
        if as_rows:
            formatted_set.append(formatted_row)
            formatted_row = []
    if as_rows:
        return formatted_set
    else:
        return formatted_set, row_count
def fun():
    user = MySQLdb.escape_string(smart_str(request.form['user']))
    unf = MySQLdb.escape_string(smart_str(request.form['unf']))
    if unfollow(user, unf):
        posts = fget_post_all(user)
        return render_template("home.html", posts=posts, Username=user)
    return render_template("main-page.html")
def export_csv_vol(modeladmin, request, queryset):
    # instead of text/html, we render the response as a text/csv file
    response = HttpResponse(mimetype='text/csv')
    response['Content-Disposition'] = 'attachment; filename="volunteer.csv"'
    writer = csv.writer(response, csv.excel)
    # write a BOM so the encoding is recognised as utf8 and Excel can open the file properly
    response.write(u'\ufeff'.encode('utf8'))
    # header row: the fields that will be exported
    writer.writerow([
        smart_str(u"Email"),
        smart_str(u"First Name"),
        smart_str(u"Last name"),
        smart_str(u"Start Date"),
        smart_str(u"Active"),
    ])
    # write one row per object in the queryset
    for obj in queryset:
        s = time.strptime(str(obj.start_date.month) + ' ' + str(obj.start_date.day) + ' ' + str(obj.start_date.year), "%m %d %Y")
        s = time.strftime("%m/%d/%Y", s)
        writer.writerow([
            smart_str(obj.email),
            smart_str(obj.first_name),
            smart_str(obj.last_name),
            smart_str(s),
            smart_str(obj.is_active),
        ])
    return response
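# Hedged usage note (the admin class name is hypothetical; the source does not
# show where the action is registered): an admin action with this
# (modeladmin, request, queryset) signature is normally wired up on the
# relevant ModelAdmin, for example:
#
#     class VolunteerAdmin(admin.ModelAdmin):
#         actions = [export_csv_vol]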
def fpost():
    post = MySQLdb.escape_string(smart_str(request.form['post_text']))
    user = MySQLdb.escape_string(smart_str(request.form['user']))
    lv = MySQLdb.escape_string(smart_str(request.form['lvalue']))
    la = MySQLdb.escape_string(smart_str(request.form['ladr']))
    pic = request.files['file']
    if pic:
        ur = secure_filename(pic.filename)
        if '.' not in ur:
            ur = "." + ur
        if len(get_post_all(user)) > 0:
            ur = str(get_post_all(user)[-1][0] + 1) + ur
        else:
            ur = "1" + ur
        pic.save(os.path.join(app.config['UPLOAD_FOLDER'], ur))
        ur = "pics/" + ur
    else:
        ur = "__empty__"
    if posting(user, MySQLdb.escape_string(post), ur):
        if la:
            if la[:7] != "http://":
                la = "http://" + la
            pi = int(get_post_all(user)[-1][0])
            if lv:
                put_link(pi, la, lv)
            else:
                put_link(pi, la)
        session['user'] = user
    return redirect(url_for("hom"))
def serialize_model(self, instance):
    """
    Given a model instance or dict, serialize it to a dict.
    """
    data = {}
    fields = self.get_fields(instance)

    # serialize each required field
    for fname in fields:
        try:
            if hasattr(self, smart_str(fname)):
                # check first for a method 'fname' on self
                meth = getattr(self, fname)
                if inspect.ismethod(meth) and len(inspect.getargspec(meth)[0]) == 2:
                    obj = meth(instance)
            elif hasattr(instance, '__contains__') and fname in instance:
                # check for a key 'fname' on the instance
                obj = instance[fname]
            elif hasattr(instance, smart_str(fname)):
                # finally check for an attribute 'fname' on the instance
                obj = getattr(instance, fname)
            else:
                continue

            key = self.serialize_key(fname)
            val = self.serialize_val(fname, obj)
            data[key] = val
        except _SkipField:
            pass

    return data
def build_message(self, contact):
    """
    Build the email as a multipart message containing
    a multipart alternative for text (plain, HTML) plus
    all the attached files.
    """
    content_html = self.build_email_content(contact)
    content_text = html2text(content_html)

    message = MIMEMultipart()
    message['Subject'] = self.build_title_content(contact)
    message['From'] = smart_str(self.newsletter.header_sender)
    message['Reply-to'] = smart_str(self.newsletter.header_reply)
    message['To'] = contact.mail_format()

    message_alt = MIMEMultipart('alternative')
    message_alt.attach(MIMEText(smart_str(content_text), 'plain', 'UTF-8'))
    message_alt.attach(MIMEText(smart_str(content_html), 'html', 'UTF-8'))
    message.attach(message_alt)

    for attachment in self.attachments:
        message.attach(attachment)

    for header, value in self.newsletter.server.custom_headers.items():
        message[header] = value

    return message
def putRow(self, cluster, tableName, row, data):
    client = self.connectCluster(cluster)
    mutations = []
    Mutation = get_thrift_type('Mutation')
    for column in data.keys():
        mutations.append(Mutation(column=smart_str(column), value=smart_str(data[column])))
    # must use str for API, does thrift coerce by itself?
    return client.mutateRow(tableName, smart_str(row), mutations, None)
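# Hedged usage sketch (cluster, table, row key and column names are made up):
#     self.putRow('Cluster1', 'events', 'row-001',
#                 {'info:user': 'alice', 'info:action': 'login'})
# Each column name and value is passed through smart_str, so the Mutation
# objects hand byte strings to the Thrift API.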
def get_hexdigest(algorithm, salt, raw_password):
    """
    Returns a string of the hexdigest of the given plaintext password and salt
    using the given algorithm ('md5', 'sha1' or 'crypt').
    """
    raw_password, salt = smart_str(raw_password), smart_str(salt)
    if algorithm == 'crypt':
        try:
            import crypt
        except ImportError:
            raise ValueError('"crypt" password algorithm not supported in this environment')
        return crypt.crypt(raw_password, salt)
    # The rest of the supported algorithms are supported by hashlib, but
    # hashlib is only available in Python 2.5.
    try:
        import hashlib
    except ImportError:
        if algorithm == 'md5':
            import md5
            return md5.new(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            import sha
            return sha.new(salt + raw_password).hexdigest()
    else:
        if algorithm == 'md5':
            return hashlib.md5(salt + raw_password).hexdigest()
        elif algorithm == 'sha1':
            return hashlib.sha1(salt + raw_password).hexdigest()
    raise ValueError("Got unknown password algorithm type in password.")
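# Hedged sketch, assuming passwords are stored in Django's historical
# "algorithm$salt$hexdigest" format (the stored format is not shown in this
# module); it illustrates how get_hexdigest could be used to verify a password.
def _check_password_sketch(raw_password, encoded):
    algorithm, salt, stored_hash = encoded.split('$')
    return get_hexdigest(algorithm, salt, raw_password) == stored_hash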
def get_revisions(title, csv_writer, lang, textcleaner, startid=None, prev_text=""):
    api_base = 'http://%s.wikipedia.org/w/api.php' % lang
    options = {}
    options.update({
        'action': 'query',
        'prop': 'revisions',
        'rvlimit': 500,
        'titles': title,
        'rvprop': 'ids|timestamp|content',
        'rvdir': 'newer',
        'format': 'json'
    })
    if startid is not None:
        options.update({'rvstartid': startid})
    url = api_base + '?' + urllib.urlencode(options)
    logging.info(url)
    result = simplejson.load(urllib.urlopen(url))
    pages = result["query"]["pages"]
    for page in pages:
        revs = pages[page]["revisions"]
        for r in revs:
            text_cleaned = textcleaner.clean_all(r["*"])
            text = smart_str(_diff_text(prev_text, text_cleaned)[0])
            csv_writer.writerow([r["timestamp"], lang, smart_str(title), "", text])
            prev_text = text_cleaned
    try:
        cont = result['query-continue']['revisions']['rvstartid']
        logging.info("Continue to %d", cont)
        # continue fetching from the next revision id, reusing the same textcleaner
        get_revisions(title, csv_writer, lang, textcleaner, cont, prev_text)
    except KeyError:
        logging.info("Finished!")
def render(self, context):
    try:
        source = self.src.resolve(context)
    except VariableDoesNotExist:
        return None
    if self.version_prefix:
        version_prefix = self.version_prefix
    else:
        try:
            version_prefix = self.version_prefix_var.resolve(context)
        except VariableDoesNotExist:
            return None
    try:
        source = force_unicode(source)
        version_path = get_version_path(url_to_path(source), version_prefix)
        if not os.path.isfile(smart_str(os.path.join(MEDIA_ROOT, version_path))):
            # create version
            version_path = version_generator(url_to_path(source), version_prefix)
        elif os.path.getmtime(smart_str(os.path.join(MEDIA_ROOT, url_to_path(source)))) > os.path.getmtime(smart_str(os.path.join(MEDIA_ROOT, version_path))):
            # recreate version if the original image was updated
            version_path = version_generator(url_to_path(source), version_prefix, force=True)
        context[self.var_name] = FileObject(version_path)
    except:
        context[self.var_name] = ""
    return ''
def _upload_file(request):
    """
    Upload file to the server.
    """
    from django.core.files.move import file_move_safe

    if request.method == 'POST':
        folder = request.POST.get('folder')
        fb_uploadurl_re = re.compile(r'^.*(%s)' % reverse("fb_upload"))
        folder = fb_uploadurl_re.sub('', folder)
        abs_path = _check_access(request, folder)
        if request.FILES:
            filedata = request.FILES['Filedata']
            filedata.name = convert_filename(filedata.name)
            _check_access(request, abs_path, filedata.name)
            # PRE UPLOAD SIGNAL
            filebrowser_pre_upload.send(sender=request, path=request.POST.get('folder'), file=filedata)
            # HANDLE UPLOAD
            uploadedfile = handle_file_upload(abs_path, filedata)
            # MOVE UPLOADED FILE
            # if file already exists
            if os.path.isfile(smart_str(os.path.join(fb_settings.MEDIA_ROOT, fb_settings.DIRECTORY, folder, filedata.name))):
                old_file = smart_str(os.path.join(abs_path, filedata.name))
                new_file = smart_str(os.path.join(abs_path, uploadedfile))
                file_move_safe(new_file, old_file)
            # POST UPLOAD SIGNAL
            filebrowser_post_upload.send(sender=request, path=request.POST.get('folder'), file=FileObject(smart_str(os.path.join(fb_settings.DIRECTORY, folder, filedata.name))))
        return HttpResponse('True')
def render(self, context):
    link = '#'
    try:
        path = context['request'].META['PATH_INFO']
        view, args, kwargs = resolve(path)
        filter_value = self.filter_value.resolve(context, True)
        if filter_value:
            filter_name = smart_str(self.filter_name)
            filter_value = smart_str(filter_value)
            kwargs[filter_name] = filter_value
            # These two don't make sense together, so drop the conflicting kwarg
            if filter_name == 'server' and 'hostname' in kwargs:
                del kwargs['hostname']
            elif filter_name == 'hostname' and 'server' in kwargs:
                del kwargs['server']
            try:
                link = reverse(view, args=args, kwargs=kwargs)
            except NoReverseMatch:
                link = reverse(self.fallback_view, args=None, kwargs={filter_name: filter_value})
            qs = context['request'].GET.urlencode()
            if qs:
                link += "?" + qs
    except NoReverseMatch:
        rm = sys.exc_info()[1]
        raise rm
    except (Resolver404, ValueError):
        pass
    return link
def get_hexdigest(algorithm, salt, raw_password):
    raw_password, salt = smart_str(raw_password), smart_str(salt)
    if algorithm == 'md5':
        return md5_constructor(salt + raw_password).hexdigest()
    elif algorithm == 'sha1':
        return sha_constructor(salt + raw_password).hexdigest()
    raise ValueError('Got unknown password algorithm type in password')
def moderate(self, comment, content_object, request):
    """Need to pass Akismet test"""
    if not AKISMET_COMMENT:
        return False

    try:
        from akismet import Akismet
        from akismet import APIKeyError
    except ImportError:
        return False

    akismet = Akismet(key=AKISMET_API_KEY,
                      blog_url='%s://%s/' % (PROTOCOL, Site.objects.get_current().domain))

    if akismet.verify_key():
        akismet_data = {
            'user_ip': request.META.get('REMOTE_ADDR', ''),
            'user_agent': request.META.get('HTTP_USER_AGENT', ''),
            'referrer': request.META.get('HTTP_REFERER', 'unknown'),
            'permalink': content_object.get_absolute_url(),
            'comment_type': 'comment',
            'comment_author': smart_str(comment.userinfo.get('name', '')),
            'comment_author_email': smart_str(comment.userinfo.get('email', '')),
            'comment_author_url': smart_str(comment.userinfo.get('url', '')),
        }
        is_spam = akismet.comment_check(smart_str(comment.comment),
                                        data=akismet_data, build_data=True)
        if is_spam:
            comment.save()
            user = comment.content_object.authors.all()[0]
            comment.flags.create(user=user, flag='spam')
        return is_spam
    raise APIKeyError("Your Akismet API key is invalid.")