def form_valid(self, form):
    """Handle a valid gallery form submission.

    Two buttons post to this view:

    * ``like`` -- toggles a ``Like`` for (image, user): creates one, or
      deletes the existing one when the user had already liked the image.
    * ``comment`` -- saves the comment and, when the image owner has an
      email address and notifications enabled, emails them a notice.

    Finally appends an anchor for the image to ``success_url`` and
    delegates to the parent ``form_valid``.
    """
    image = form.cleaned_data.get('image')
    user = self.request.user
    button = self.request.POST.get('button')
    if button == 'like':
        # get_or_create already returns the existing row, so the original
        # second Like.objects.get(...) query (and its swallowed
        # ObjectDoesNotExist) was redundant: if the like was not newly
        # created, the user is un-liking and we delete the row we got back.
        like, created = Like.objects.get_or_create(image=image, user=user)
        if not created:
            like.delete()
    elif button == 'comment' and form.cleaned_data['comment']:
        saved = form.save(commit=False)
        saved.user = user
        saved.save()
        # Notify the image owner, but never about their own comment and
        # only when they have an address and opted in (member.notify).
        if get_attribute(image, 'user.email') and image.user != user \
                and get_attribute(image, 'user.member.notify'):
            EmailMessage(
                subject='New picture comment',
                body=render_to_string(
                    'gallery/notify.html', {
                        'image': image,
                        'from_user': user,
                        'domain': get_current_site(self.request).domain,
                    }),
                to=[image.user.email]).send()
    # Jump straight to the image on the success page.
    self.success_url += f'/#{image.id}'
    return super().form_valid(form)
def _read_manifest(self):
    """Internal helper: parse the COMBINE Archive manifest.

    Reads ``MANIFEST_LOCATION`` from the zip, validates the omex root
    element, and registers an ``ArchiveEntry`` for every ``content``
    element in ``self.entries``. Returns ``False`` when the archive has
    no manifest (an empty/new archive).
    """
    try:
        with self._zip.open(self.MANIFEST_LOCATION) as mf:
            root = ElementTree.fromstring(mf.read())
    except KeyError:
        # no manifest present in the zip -> empty/new archive
        return False
    except ElementTree.ParseError as e:
        raise exceptions.CombineArchiveException(
            'Cannot parse xml manifest. {}'.format(e.msg))

    # the root element must be the omex manifest in the expected namespace
    if root.tag != utils.extend_tag_name(_XML_ROOT_ELEM, _XML_NS):
        raise exceptions.CombineArchiveException(
            'manifest has no valid omex root element')

    # register every <content> element as an archive entry
    for content in root.findall(_XML_CONTENT_TAG, _XML_NS):
        try:
            raw_location = utils.get_attribute(
                content, _XML_CONTENT_LOCATION, _XML_NS)
            fmt = utils.check_format(
                utils.get_attribute(content, _XML_CONTENT_FORMAT, _XML_NS),
                convert=False)
            # master flag may be absent or a string; normalize to bool
            is_master = content.attrib.get(
                _XML_CONTENT_MASTER, False) in ('True', 'true', True)
        except KeyError:
            raise exceptions.CombineArchiveException(
                'location and format field are required. Corrupt manifest.xml')

        location = utils.clean_pathname(raw_location)

        # unless the entry refers to the archive itself, the file must
        # actually exist inside the zip
        zipinfo = None
        if location not in self.ARCHIVE_REFERENCE:
            try:
                zipinfo = self._zip.getinfo(location)
            except KeyError:
                raise exceptions.CombineArchiveException(
                    '{location} is specified by the manifest, but not contained by the ZIP file'
                    .format(location=location))

        self.entries[location] = ArchiveEntry(
            location, format=fmt, master=is_master, archive=self,
            zipinfo=zipinfo)
def _read_metadata(self):
    """Parse every RDF metadata file and attach its descriptions.

    For each metadata-format entry, parses the XML and walks every
    ``rdf:Description``; the ``about`` reference selects the target
    (the archive itself or a normal entry), which receives the parsed
    description via ``add_description``.
    """
    for meta_file in self.filter_format(_XML_CONTENT_METADATA_TYPE):
        try:
            tree = ElementTree.fromstring(meta_file.read())
        except ElementTree.ParseError as e:
            raise exceptions.CombineArchiveException(
                'Cannot parse xml metadata {file}. {msg}'.format(
                    file=meta_file.location, msg=e.msg))

        # each rdf:Description element carries one metadata record
        for description in tree.findall(
                metadata.Namespace.rdf_terms.description, _XML_NS):
            # the about attribute is mandatory; only get_attribute can
            # raise KeyError here
            try:
                about_attr = utils.get_attribute(
                    description, metadata.Namespace.rdf_terms.about, _XML_NS)
            except KeyError:
                raise exceptions.CombineArchiveException(
                    'A metadata description tag has to have an about field')
            parsed = urlparse(about_attr)
            path, fragment = parsed.path, parsed.fragment

            # target is the archive itself (root reference) or an entry
            target = self if path in self.ARCHIVE_REFERENCE \
                else self.get_entry(path)

            # prefer the OMEX-specific representation; fall back to the
            # generic one when the element is rejected
            try:
                record = metadata.OmexMetaDataObject(xml_element=description)
            except ValueError:
                record = metadata.DefaultMetaDataObject(
                    xml_element=description)
            target.add_description(record, fragment=fragment)
def run_method(self, vm, method, method_descriptor): native_method = None # handle native methods if (method.access_flags & ACC_NATIVE) != 0: native_method = Class.fetch_native_method(self.name, method) code = get_attribute(method, 'Code') # may contain an instance argument (not STATIC is_static = method.access_flags & ACC_STATIC != 0 num_args = len(method.parameters) + (0 if is_static else 1) arguments = [vm.frame_stack[-1].pop() for i in xrange(num_args)][::-1] for argument in arguments: assert argument is null or isinstance(argument, (int, long, ClassInstance, float)), 'not valid type '+str(type(argument)) if not is_static: instance = arguments[0] assert instance is not null, '%s is null' % str(arguments[0]) if method.name in instance._klass.method_overrides: native_method = instance._klass.method_overrides[method.name] print 'adding method %s.%s to stack' % (self.name, method.name) print 'with arguments %s' % repr(arguments) frame = Frame( parameters=arguments, max_stack=code.max_stack, max_locals=code.max_locals, code=code, method=method, native_method=native_method, klass=self) vm.frame_stack.append(frame)
def run_method(self, vm, method, method_descriptor): native_method = None # handle native methods if (method.access_flags & ACC_NATIVE) != 0: native_method = Class.fetch_native_method(self.name, method) code = get_attribute(method, 'Code') # may contain an instance argument (not STATIC is_static = method.access_flags & ACC_STATIC != 0 num_args = len(method.parameters) + (0 if is_static else 1) arguments = [vm.frame_stack[-1].pop() for i in xrange(num_args)][::-1] for argument in arguments: assert argument is null or isinstance( argument, (int, long, ClassInstance, float)), 'not valid type ' + str(type(argument)) if not is_static: instance = arguments[0] assert instance is not null, '%s is null' % str(arguments[0]) if method.name in instance._klass.method_overrides: native_method = instance._klass.method_overrides[method.name] print 'adding method %s.%s to stack' % (self.name, method.name) print 'with arguments %s' % repr(arguments) frame = Frame(parameters=arguments, max_stack=code.max_stack, max_locals=code.max_locals, code=code, method=method, native_method=native_method, klass=self) vm.frame_stack.append(frame)
def post(self, request):
    """Handle the settings form submission.

    Flow: re-authenticate with the current password, then either delete
    the profile, or update the notification flag and the remaining
    settings (username, email, password). Validation failures re-render
    the settings page with form errors.
    """
    # Check password first -- every settings change requires the
    # current password
    password = request.POST.get('password')
    if not request.user.check_password(password):
        messages.warning(request, "Invalid password")
        return self.get(request)
    # Delete profile; log out before deleting so the session no longer
    # references the user being removed
    if request.POST.get('settings') == 'delete':
        user = request.user
        logout(request)
        user.delete()
        messages.success(request, "Your profile has been deleted")
        return redirect(SigninView.success_url)
    # Update notification setting, persisting only on an actual change
    notify_form = NotifyForm(request.POST)
    notify = get_attribute(request, 'user.member.notify')
    if notify_form.is_valid():
        notify_setting = notify_form.cleaned_data.get('notify')
        if notify is not None and notify != notify_setting:
            notify = notify_setting
            request.user.member.notify = notify
            request.user.member.save()
    # Update all other settings. An unchanged username is blanked out
    # before validation -- presumably so a uniqueness check does not
    # reject the user's own current name; TODO confirm against
    # SettingsForm's validators.
    if request.POST.get('username') == request.user.username:
        POST = request.POST.copy()
        POST['username'] = ''
        settings_form = SettingsForm(POST)
    else:
        settings_form = SettingsForm(request.POST)
    if not settings_form.is_valid():
        return render(request, self.template_name, {
            'settings_form': settings_form,
            'notify': notify
        })
    # Password change is validated separately and surfaces its errors
    # on the new_password field
    try:
        self.set_new_password(request, settings_form)
    except ValidationError as errors:
        settings_form.add_error('new_password', errors)
        return render(request, self.template_name, {
            'settings_form': settings_form,
            'notify': notify
        })
    # Only overwrite fields the user actually filled in
    for attribute in ('username', 'email'):
        new_value = settings_form.cleaned_data.get(attribute)
        if new_value:
            setattr(request.user, attribute, new_value)
    request.user.save()
    messages.success(request, "You've successfully updated your profile")
    return render(request, self.template_name, {
        'settings_form': settings_form,
        'notify': notify
    })
def _read_manifest(self):
    """Internal function. Reads the manifest file of a COMBINE Archive.

    Parses MANIFEST_LOCATION from the zip, validates the omex root
    element, and registers an ArchiveEntry in self.entries for every
    content element. Returns False when no manifest exists (an
    empty/new archive); raises CombineArchiveException on a corrupt
    manifest or a listed file missing from the zip.
    """
    try:
        with self._zip.open(self.MANIFEST_LOCATION) as manifest_file:
            manifest = ElementTree.fromstring(manifest_file.read())
    except KeyError:
        # manifest does not exist, probably an empty/new archive
        return False
    except ElementTree.ParseError as e:
        raise exceptions.CombineArchiveException('Cannot parse xml manifest. {}'.format(e.msg))
    # check for correct root element and namespace
    if manifest.tag != utils.extend_tag_name(_XML_ROOT_ELEM, _XML_NS):
        raise exceptions.CombineArchiveException('manifest has no valid omex root element')
    # check entries
    for entry in manifest.findall(_XML_CONTENT_TAG, _XML_NS):
        try:
            location = utils.get_attribute(entry, _XML_CONTENT_LOCATION, _XML_NS)
            entry_format = utils.check_format(utils.get_attribute(entry, _XML_CONTENT_FORMAT, _XML_NS), convert=False)
            # master flag may be absent or a string; normalize to bool
            master = True if entry.attrib.get(_XML_CONTENT_MASTER, False) in ('True', 'true', True) else False
        except KeyError:
            raise exceptions.CombineArchiveException('location and format field are required. Corrupt manifest.xml')
        # clean location
        location = utils.clean_pathname(location)
        # check if file is in zip, if it's not the root element
        zipinfo = None
        if location not in self.ARCHIVE_REFERENCE:
            try:
                zipinfo = self._zip.getinfo(location)
            except KeyError:
                raise exceptions.CombineArchiveException(
                    '{location} is specified by the manifest, but not contained by the ZIP file'.format(location=location))
        archive_entry = ArchiveEntry(location, format=entry_format, master=master, archive=self, zipinfo=zipinfo)
        self.entries[location] = archive_entry
def _read_metadata(self):
    """Internal helper: read all RDF metadata files of the archive.

    For each metadata-format entry, parses the XML and attaches every
    rdf:Description to the object its rdf:about reference targets --
    either the archive itself or a normal entry.
    """
    # go over all possible metadata files
    for meta_file in self.filter_format(_XML_CONTENT_METADATA_TYPE):
        try:
            # parse the xml
            meta = ElementTree.fromstring(meta_file.read())
        except ElementTree.ParseError as e:
            raise exceptions.CombineArchiveException(
                'Cannot parse xml metadata {file}. {msg}'.format(
                    file=meta_file.location, msg=e.msg))
        # find every rdf:Description
        for description in meta.findall(
                metadata.Namespace.rdf_terms.description, _XML_NS):
            try:
                # the about reference may carry a fragment (path#frag);
                # split it into the target path and the fragment id
                about_url = urlparse(
                    utils.get_attribute(description,
                                        metadata.Namespace.rdf_terms.about,
                                        _XML_NS))
                about_str = about_url.path
                fragment_str = about_url.fragment
            except KeyError:
                raise exceptions.CombineArchiveException(
                    'A metadata description tag has to have an about field'
                )
            if about_str in self.ARCHIVE_REFERENCE:
                # meta data is about the archive (root element)
                about = self
            else:
                # meta data is about normal file
                about = self.get_entry(about_str)
            # start parsing; fall back to the generic representation when
            # the OMEX-specific parser rejects the element
            try:
                data = metadata.OmexMetaDataObject(xml_element=description)
            except ValueError as e:
                data = metadata.DefaultMetaDataObject(
                    xml_element=description)
            about.add_description(data, fragment=fragment_str)
def handle_exception(self):
    """Unwind the frame stack looking for a handler for the raised exception.

    Pops frames until one has an exception-table entry that covers the
    current pc and catches the raised class; that frame's pc is set to
    the handler and the exception object is pushed for it. Exits the
    process when no frame handles the exception.
    """
    frame = self.frame_stack[-1]
    raised_exception = frame.raised_exception
    if raised_exception is None:
        return
    # unroll the stack to handle this exception
    found = False
    while self.frame_stack:
        frame = self.frame_stack[-1]
        try:
            exceptions = get_attribute(frame.method, 'CodeAttribute').exceptions
        except NoSuchAttributeError:
            # method has no code/exception table -> nothing can catch here
            exceptions = []
        for start_pc, end_pc, jump_pc, thrown_class in exceptions:
            # an entry only applies when pc lies in its protected range
            if start_pc <= frame.pc < end_pc:
                caught = False
                if thrown_class == 0:
                    # class index 0 is a catch-any entry (JVM exception table)
                    caught = True
                else:
                    klass_name = frame.klass.constant_pool.get_class(thrown_class)
                    resolved_thrown_class = self.load_class(klass_name)
                    caught = resolved_thrown_class.is_subclass(raised_exception)
                if caught:
                    # hand the exception to the handler and resume there
                    frame.push(raised_exception)
                    frame.raised_exception = None
                    frame.pc = jump_pc
                    found = True
                    break
        if found:
            break
        self.frame_stack.pop()
    if not found:
        print 'Unable to handle exception %s' % raised_exception
        #print raised_exception.stacktrace
        sys.exit(1)
    print self.frame_stack
def get_values(self, points, calculationtype=None, starttime='*-1d',
               endtime='*', boundary=None, boundarytype=None,
               maxcount='1000', desiredunits=None, interval=None,
               intervals='24', retrievalmode='Auto', summarytype='Total',
               calculationbasis='TimeWeighted', timetype='Auto',
               summaryduration=None, sampletype='ExpressionRecordedValues',
               sampleinterval=None, time=None, filterexpression=None,
               includefilteredvalues=False, sortorder='Ascending',
               append=False, overwrite=False):
    """Fetch values for one or more PI points from the PI Web API.

    For each point, builds the query payload matching `calculationtype`
    (current, interpolated, interpolatedattimes, recorded,
    recordedattime, plot, summary, end), issues a GET against the
    point's endpoint, converts the JSON into value objects, and stores
    them back onto the point attribute named by get_attribute(calctype)
    according to the `overwrite` / `append` flags. Returns `points`
    (mutated in place).
    """
    # starttime is Time String
    # endtime is Time String
    # interval is AFTimeSpan
    # desiredUnits is a uom, cannot be specified for PI points
    # filterexpression is filtering like * or SINU*
    # includefilteredvalues bool: Specify 'true' to indicate that values which fail the filter criteria are present in the returned data at the times where they occurred with a value set to a 'Filtered' enumeration value with bad status. Repeated consecutive failures are omitted.
    # sortorder default is 'Ascending'
    # summaryDuration The duration of each summary interval. If specified in hours, minutes, seconds, or milliseconds, the summary durations will be evenly spaced UTC time intervals. Longer interval types are interpreted using wall clock rules and are time zone dependent.
    # TODO: add starttime parameter
    # TODO: add endtime parameter
    # TODO: add boundary parameter
    # TODO: add interval parameter
    calctype = calculationtype.lower()
    # 'current' and 'end' yield exactly one value per point
    is_single_value = True if calctype == 'current' or calctype == 'end' \
        else False
    log.debug('Calculation type: %s, Single value: %s', calctype,
              is_single_value)
    for point in iterfy(points):
        log.debug('Retrieving %s data for %s...', calctype, point.name)
        # build the query-string payload for this calculation type
        if calctype == 'current':
            payload = {'time': time}
        elif calctype == 'interpolated':
            payload = {'startTime': starttime, 'endTime': endtime,
                       'interval': interval,
                       'filterExpression': filterexpression,
                       'includeFilteredValues': includefilteredvalues}
        elif calctype == 'interpolatedattimes':
            payload = {'time': time,
                       'filterExpression': filterexpression,
                       'includeFilteredValues': includefilteredvalues,
                       'sortOrder': sortorder}
        elif calctype == 'recorded':
            payload = {'startTime': starttime, 'endTime': endtime,
                       'boundaryType': boundarytype,
                       'filterExpression': filterexpression,
                       'includeFilteredValues': includefilteredvalues,
                       'maxCount': maxcount}
        elif calctype == 'recordedattime':
            payload = {'time': time, 'retrievalMode': retrievalmode}
        elif calctype == 'plot':
            payload = {'startTime': starttime, 'endTime': endtime,
                       'intervals': intervals}
        elif calctype == 'summary':
            payload = {'startTime': starttime, 'endTime': endtime,
                       'summaryType': summarytype,
                       'calculationBasis': calculationbasis,
                       'timeType': timetype,
                       'summaryDuration': summaryduration,
                       'sampleType': sampletype,
                       'sampleInterval': sampleinterval,
                       'filterExpression': filterexpression}
        elif calctype == 'end':
            payload = {}
        else:
            # NOTE(review): for an unknown calctype, `payload` is never
            # assigned -- the session.get below would raise NameError (or
            # silently reuse the previous loop iteration's payload).
            log.debug('This %s request has no URL parameters', calctype)
        endpoint = get_endpoint(self.url, point, calctype)
        # TODO: add queryParamater generator function here?
        try:
            log.debug('Instantiating %s request for PI point %s to '
                      'endpoint %s with the following parameters: %s',
                      calctype, point.name, endpoint, payload)
            r = self.session.get(endpoint, params=payload)
            if r.status_code != requests.codes.ok:
                r.raise_for_status()
        except OSIsoftPyException as e:
            # NOTE(review): if this fires, `r` is unbound and r.json()
            # below raises NameError. Also the implicit concatenation
            # 'recorded values' 'from %s' is missing a space, and the
            # format string has three placeholders but only two args
            # (endpoint, point.name) -- the log call itself would error.
            log.error('Exception while retrieving recorded values'
                      'from %s for %s. Raw JSON: %s', endpoint, point.name,
                      exc_info=True)
        data = r.json()
        log.debug('HTTP %s - Instantiating OSIsoftPy.Values()',
                  r.status_code)
        log.debug('Staging PI point value for '
                  'instantiation...')
        try:
            new_values = get_point_values(point, calctype, data)
            log.debug('%s %s value(s) were instantiated for %s.',
                      get_count(new_values), calctype, point.name)
        except OSIsoftPyException as e:
            # NOTE(review): on failure `new_values` stays unbound and the
            # save branches below raise NameError.
            log.error('Exception while instantiating PI Point value(s)'
                      'for %s. Raw JSON: %s', point.name, data,
                      exc_info=True)
        # collect the values currently stored on the point so append mode
        # can extend them
        current_values = TypedList(validtypes=Value)
        if is_single_value:
            try:
                value = getattr(point, get_attribute(calctype))
                log.debug('Storing %s value.', calctype)
                current_values.append(value)
            except TypeError as e:
                log.warning('TypeError encountered - the attribute %s is '
                            'empty for %s, which will raise an '
                            'exception when trying to iterate.',
                            get_attribute(calctype), point.name,
                            exc_info=False)
        else:
            try:
                for value in getattr(point, get_attribute(calctype)):
                    log.debug(
                        'Storing %s value for PI point %s, attribute: %s',
                        calctype, point.name, get_attribute(calctype))
                    current_values.append(value)
            except TypeError as e:
                log.warning('TypeError encountered - the attribute %s is '
                            'empty for %s, which will raise an '
                            'exception when trying to iterate.',
                            get_attribute(calctype), point.name,
                            exc_info=False)
        log.debug('PI point %s currently has %s %s values.', point.name,
                  get_count(current_values), calctype)
        # persist the freshly fetched values according to the flags
        if is_single_value and overwrite:
            log.debug('Single point value - overwriting existing %s '
                      'value, Single value: %s.', calctype, is_single_value)
            setattr(point, get_attribute(calctype), new_values[0])
        elif is_single_value and append:
            # a single value cannot be appended; treated as overwrite
            log.debug('Single point value - append is true but cannot '
                      'append...overwriting existing %s '
                      'value, Single value: %s.', calctype, is_single_value)
            setattr(point, get_attribute(calctype), new_values[0])
        elif not is_single_value and overwrite:
            log.debug('Multiple point values - overwriting %s existing '
                      '%s values, Single value: %s.',
                      get_count(current_values), calctype, is_single_value)
            setattr(point, get_attribute(calctype), new_values)
        elif not is_single_value and append:
            for new_value in new_values:
                current_values.append(new_value)
            setattr(point, get_attribute(calctype), current_values)
        else:
            # TODO: allow both to be false if no data exists.
            log.error('Error saving %s new %s point value(s) for PI '
                      'point %s. Single value: %s, Overwrite: %s, Append: '
                      '%s.', get_count(new_values), calctype, point.name,
                      is_single_value, overwrite, append)
    return points
def get(self, request):
    """Render the settings page with a blank form and the notify flag."""
    return render(request, self.template_name, {
        'settings_form': SettingsForm(),
        'notify': get_attribute(request, 'user.member.notify'),
    })
else: hdfs_file_path = "/user/lsde02/data/*/*.gz" forced_partitions = 1500 hdfs_results_path = "/user/lsde02/results/" start_time = time.strftime("%Y-%m-%d-%H-%M-%S") print "Started processing: %s" % hdfs_file_path sc = SparkContext() context = sc.textFile(hdfs_file_path, forced_partitions) stations = context.flatMap( lambda x: [utils.extract(record) for record in x.splitlines()]) #stations = stations.filter(lambda x: 'fixed-weather-station' in x[1] or ) # Do computations on month level month_data = stations.map(lambda x:((x[0][0], x[0][1], x[0][3]), (utils.get_attribute(x[1], 'temp'), utils.get_attribute(x[1], 'windspeed'), \ utils.get_attribute(x[1], 'sky-condition'), utils.get_attribute(x[1], 'visibility'), utils.get_attribute(x[1], 'wind-direction'), \ utils.get_attribute(x[1], 'latitude'), utils.get_attribute(x[1], 'longitude')))) month_data = month_data.combineByKey(lambda value: (value[0] if value[0] != None else 0, 1 if value[0] != None else 0,\ value[1] if value[1] != None else 0, 1 if value[1] != None else 0, \ value[2] if value[2] != None else 0, 1 if value[2] != None else 0, \ value[3] if value[3] != None else 0, 1 if value[3] != None else 0, \ math.sin(value[4]*math.pi/180.0) if value[4] != None else 0, \ math.cos(value[4]*math.pi/180.0) if value[4] != None else 0, \ value[0]*value[0] if value[0] != None else 0, \ value[1]*value[1] if value[1] != None else 0, \ value[2]*value[2] if value[2] != None else 0, \ value[3]*value[3] if value[3] != None else 0, \ value[0] if value[0] != None else sys.maxint, \ value[1] if value[1] != None else sys.maxint, \ value[2] if value[2] != None else sys.maxint, \
forced_partitions = (int(sys.argv[2])+1-int(sys.argv[1]))*12 else: hdfs_file_path = "/user/lsde02/data/*/*.gz" forced_partitions = 1500 hdfs_results_path = "/user/lsde02/results/" start_time = time.strftime("%Y-%m-%d-%H-%M-%S") print "Started processing: %s" % hdfs_file_path sc = SparkContext() context = sc.textFile(hdfs_file_path, forced_partitions) stations = context.flatMap(lambda x: [utils.extract(record) for record in x.splitlines()]) #stations = stations.filter(lambda x: 'fixed-weather-station' in x[1] or ) # Do computations on month level month_data = stations.map(lambda x:((x[0][0], x[0][1], x[0][3]), (utils.get_attribute(x[1], 'temp'), utils.get_attribute(x[1], 'windspeed'), \ utils.get_attribute(x[1], 'sky-condition'), utils.get_attribute(x[1], 'visibility'), utils.get_attribute(x[1], 'wind-direction'), \ utils.get_attribute(x[1], 'latitude'), utils.get_attribute(x[1], 'longitude')))) month_data = month_data.combineByKey(lambda value: (value[0] if value[0] != None else 0, 1 if value[0] != None else 0,\ value[1] if value[1] != None else 0, 1 if value[1] != None else 0, \ value[2] if value[2] != None else 0, 1 if value[2] != None else 0, \ value[3] if value[3] != None else 0, 1 if value[3] != None else 0, \ math.sin(value[4]*math.pi/180.0) if value[4] != None else 0, \ math.cos(value[4]*math.pi/180.0) if value[4] != None else 0, \ value[0]*value[0] if value[0] != None else 0, \ value[1]*value[1] if value[1] != None else 0, \ value[2]*value[2] if value[2] != None else 0, \ value[3]*value[3] if value[3] != None else 0, \ value[0] if value[0] != None else sys.maxint, \ value[1] if value[1] != None else sys.maxint, \ value[2] if value[2] != None else sys.maxint, \