def handle(self, *args, **kwargs):
    """Generate width-limited thumbnails for every stored screenshot.

    For each Screenshot, open the source image from the static storage,
    convert it to RGB if needed, thumbnail it, and replace any previously
    saved thumbnail in the static storage.
    """
    for screenshot in models.Screenshot.load():
        print(screenshot.name)  # progress output (was a Py2 print statement)
        img = Image.open(staticfiles_storage.path(screenshot.path))

        # Ensure we're in RGB before saving to formats that need it.
        if img.mode not in ('L', 'RGB'):
            img = img.convert('RGB')

        # Magic!
        # The thumbnail is only limited by width, so use
        # a larger-than-needed height.
        img.thumbnail((200 * screenshot.screens, 1000), Image.ANTIALIAS)

        # Save the thumbnail to a tmpfile.  Derive the PIL format name
        # from the mime type, e.g. "image/png" -> "png".
        # (renamed `file`/`type` locals: they shadowed builtins)
        fd, tmp = tempfile.mkstemp()
        img_format = mimetypes.guess_type(screenshot.name)[0].split('/')[1]
        with os.fdopen(fd, 'w+b') as tmp_file:
            img.save(tmp_file, img_format)

        # Nuke previous version if it exists
        if staticfiles_storage.exists(screenshot.thumbnail):
            staticfiles_storage.delete(screenshot.thumbnail)

        # Save thumbnail to static dir; always remove the temp file,
        # even if the storage save fails (the original leaked it).
        try:
            with open(tmp, 'rb') as fh:
                staticfiles_storage.save(screenshot.thumbnail, File(fh))
        finally:
            os.unlink(tmp)
def testCollectStaticStandaloneBuildProfile(self):
    """collectstatic with a standalone build profile produces main-built.js."""
    profile_src = os.path.join(self.test_resources_dir,
                               self.test_standalone_build_profile)
    profile_dst = os.path.join(WORKING_DIR, "js", "main.build.js")
    shutil.copyfile(profile_src, profile_dst)
    with self.settings(REQUIRE_ENVIRONMENT=self.require_environment):
        call_command("collectstatic", interactive=False, verbosity=0)
    built = staticfiles_storage.path("js/main-built.js")
    self.assertTrue(os.path.exists(built))
def _render(self, path, val, caller):
    """Attach the referenced file to the e-mail object and return its cid: URI."""
    # Map the tag name to the storage that can resolve it; any other
    # value is treated as an already-absolute path.
    storages = {
        'email_embedded_media': default_storage,
        'email_embedded_static': staticfiles_storage,
    }
    storage = storages.get(val)
    fullpath = storage.path(path) if storage is not None else path
    attach = self.environment.email_object_instance.attach_related_file
    return 'cid:' + attach(fullpath)
def full_path(path):
    """Return an absolute filesystem path for *path*.

    Prefers the staticfiles storage; on installs without
    django.contrib.staticfiles, falls back to MEDIA_ROOT.
    """
    try:
        from django.contrib.staticfiles.storage import staticfiles_storage
    except ImportError:
        import os
        from django.conf import settings
        return os.path.join(settings.MEDIA_ROOT, path)
    return staticfiles_storage.path(path)
def find(self, path, all=False):
    """Resolve *path* through the static storage when output is enabled.

    Mirrors Django's finder API: a list when *all* is true, a single
    absolute path otherwise, and [] when the path cannot be resolved.
    """
    resolvable = (not env.is_collecting and env.output
                  and staticfiles_storage.exists(path))
    if not resolvable:
        return []
    absolute_path = staticfiles_storage.path(path)
    return [absolute_path] if all else absolute_path
def testCollectStaticNoBuildProfile(self):
    # collectstatic must build main-built.js while copying files that
    # are outside the build profile (dontcompress.js) verbatim.
    shutil.copyfile(
        os.path.join(self.test_resources_dir, self.test_standalone_build_profile),
        os.path.join(WORKING_DIR, "js", "main.build.js"),
    )
    contents = """ function test(){ // dont uglify this }; """
    # Drop a file that no build profile covers; it must survive untouched.
    with open(os.path.join(WORKING_DIR, 'dontcompress.js'), 'w') as f:
        f.write(contents)
    with self.settings(REQUIRE_ENVIRONMENT=self.require_environment):
        call_command("collectstatic", interactive=False, verbosity=0)
    self.assertTrue(os.path.exists(staticfiles_storage.path("js/main-built.js")))
    # The uncovered file must be byte-identical after collection.
    with open(staticfiles_storage.path('dontcompress.js')) as f:
        self.assertEqual(f.read(), contents)
def get_file_from_storage(self, filename):
    """Locate *filename* in the static storage.

    Returns (resolved_name, storage) on success, or (None, None) when
    the file does not exist.  Remote storages raise NotImplementedError
    from .path(); in that case the storage-relative name is kept.
    """
    try:
        filename = staticfiles_storage.path(filename)
    except NotImplementedError:
        # remote storages don't implement path
        pass
    if not staticfiles_storage.exists(filename):
        return None, None
    return filename, staticfiles_storage
def _test_compiler(self, compiler_cls_str, infile, expected):
    # Resolve the dotted-path compiler class and run it against the
    # static files storage, comparing its output with a known-good file.
    compiler_cls = to_class(compiler_cls_str)
    compiler = compiler_cls(verbose=False, storage=staticfiles_storage)
    infile_path = staticfiles_storage.path(infile)
    outfile_path = compiler.output_path(infile_path, compiler.output_extension)
    # force=True so a stale previous output can't make the test pass.
    compiler.compile_file(_(infile_path), _(outfile_path), force=True)
    with open(outfile_path) as f:
        result = f.read()
    with staticfiles_storage.open(expected) as f:
        expected = f.read()
    # Compare as bytes -- presumably storage.open() yields bytes while
    # open() yields text; smart_bytes normalises the compiled side.
    self.assertEqual(smart_bytes(result), expected)
def email_embedded_static(context, path):
    """Template tag for EmailMultiRelated: embed a static image inline.

    When the rendering context carries an EmailMultiRelated instance the
    static file is attached to the message and a ``cid:`` reference is
    returned; otherwise the plain static URL is returned.

    Example:
        <img src="{% email_inline_static "django/email/header.png" %}"
             width="780" height="11" alt=""/>
    """
    email = context.get('emailmultirelated_object')
    if not isinstance(email, EmailMultiRelated):
        return staticfiles_storage.url(path)
    attachment_id = email.attach_related_file(staticfiles_storage.path(path))
    return 'cid:' + attachment_id
def render(self, context):
    """Render the wrapped nodelist and inline the CSS files referenced
    by this tag's filter expressions."""
    html = self.nodelist.render(context)
    stylesheets = []
    for expression in self.filter_expressions:
        path = expression.resolve(context, True)
        if path is None:
            continue
        expanded_path = staticfiles_storage.path(smart_unicode(path))
        with open(expanded_path) as css_file:
            stylesheets.append(css_file.read())
    engine = conf.get_engine()(html=html, css=''.join(stylesheets))
    return engine.render()
def render(self, context):
    """Render the wrapped nodelist and inline referenced CSS, resolving
    files through the finders in DEBUG (or when configured)."""
    html = self.nodelist.render(context)
    css_parts = []
    use_finders = settings.DEBUG or getattr(settings, 'INLINECSS_USE_FINDERS', False)
    for expression in self.filter_expressions:
        path = expression.resolve(context, True)
        if path is None:
            continue
        path = smart_unicode(path)
        # In development the file may not be collected yet, so locate it
        # through the staticfiles finders instead of the storage.
        if use_finders:
            expanded_path = finders.find(path)
        else:
            expanded_path = staticfiles_storage.path(path)
        with open(expanded_path) as css_file:
            css_parts.append(css_file.read())
    engine = conf.get_engine()(html=html, css=''.join(css_parts))
    return engine.render()
def send_html_email(subject, html_content, sender, to_addresses, images):
    """Send a multipart e-mail with an HTML body and inline images.

    Parameters:
        subject, sender, to_addresses: standard e-mail envelope fields.
        html_content: HTML body; a plain-text alternative is derived
            from it automatically via html2text.
        images: iterable of static-file paths to embed; each image's
            basename becomes its Content-ID for ``cid:`` references.
    """
    text_content = html2text.html2text(html_content)
    msg = EmailMultiAlternatives(subject, text_content, sender, to_addresses)
    msg.attach_alternative(html_content, "text/html")
    msg.mixed_subtype = 'related'
    for f in images:
        # Context manager so the handle is released even if MIMEImage
        # construction fails (the original leaked it on error).
        with open(staticfiles_storage.path(f), 'rb') as fp:
            msg_img = MIMEImage(fp.read())
        name = os.path.basename(f)
        msg_img.add_header('Content-ID', '<{}>'.format(name))
        msg_img.add_header('Content-Disposition', 'attachment', filename=name)
        msg.attach(msg_img)
    msg.send()
def get_file_contents(path):
    '''
    Locate the specified static file using Django's staticfiles finders,
    raising an exception if appropriate. Read in and return the file contents.
    '''
    if settings.DEBUG:
        # finders.find() returns None when nothing matches.
        expanded_path = finders.find(path)
    else:
        expanded_path = staticfiles_storage.path(path)
    # Treat "finder returned None" the same as "file missing": the
    # original passed None straight to exists(), raising TypeError
    # instead of the configured not-found behaviour.
    if expanded_path is None or not exists(expanded_path):
        if SIMPLEINLINER_RAISE_EXCEPTIONS:
            raise SimpleInlinerException("The supplied static file path, "
                                         "'{0}', could not be found.".format(
                                             path
                                         ))
        return ''
    with open(expanded_path) as static_file:
        contents = static_file.read()
    return contents
def staticintegrity(context, name):
    """Hash a local static file for subresource integrity"""
    if settings.DEBUG:
        # In DEBUG, static files are scattered around and need to be found.
        path = finders.find(name)
    else:
        # Otherwise, we can just look in the static root; map the name
        # through hashed_files when a manifest storage is in use.
        if hasattr(staticfiles_storage, 'hashed_files'):
            name = staticfiles_storage.hashed_files.get(name, name)
        path = staticfiles_storage.path(name)
    cache_key = 'staticintegrity-%s' % path
    digest = cache.get(cache_key)
    if digest is None:
        with open(path, 'rb') as content_file:
            digest = _hash_content(content_file.read())
        cache.set(cache_key, digest, STATICINTEGRITY_CACHE_TTL)
    return digest
def render(self, context):
    """Render the wrapped nodelist and inline the CSS referenced by this
    tag's filter expressions.

    In DEBUG mode files are resolved through the staticfiles finders,
    otherwise through the static storage.  Remote storages whose
    .path() raises NotImplementedError are read via storage.open().
    """
    rendered_contents = self.nodelist.render(context)
    css = ""
    for expression in self.filter_expressions:
        path = expression.resolve(context, True)
        if path is not None:
            path = smart_unicode(path)
            try:
                if settings.DEBUG:
                    expanded_path = finders.find(path)
                else:
                    expanded_path = staticfiles_storage.path(path)
                with open(expanded_path) as css_file:
                    css = "".join((css, css_file.read()))
            except NotImplementedError:
                # Remote storage: read through the storage API.  Close the
                # handle explicitly -- the original leaked it.
                with staticfiles_storage.open(path) as remote_file:
                    css = "".join((css, remote_file.read()))
    engine = conf.get_engine()(html=rendered_contents, css=css)
    return engine.render()
def get_data(self):
    # Concatenate the contents of every static file referenced by this
    # collection into a single string.
    out = StringIO()
    for x in self.media:
        # Every URL must live under the static storage's base_url;
        # anything else cannot be mapped back to a local file.
        if not x.startswith(staticfiles_storage.base_url):
            raise CollectionException("Collection contains a non static file.")
        # Strip the base URL to recover the storage-relative path.
        path = x[len(staticfiles_storage.base_url) :]
        filename = staticfiles_storage.path(path)
        with open(filename, "rb") as fp:
            data = force_str(fp.read())
        if self.type == "link" and self.attrs.get("type") == "text/css":
            # Presumably rewrites relative url(...) references so they
            # stay valid after concatenation -- TODO confirm against
            # UrlsNormalizer's implementation.
            data = UrlsNormalizer().normalize(data, os.path.dirname(path))
        elif self.type == "script":
            # Wrap each script in an IIFE to isolate its top-level scope.
            data = "(function() {\n%s\n})();" % data
        out.write(data)
        out.write("\n")
    return out.getvalue()
def load_staticfile(name, postprocessor=None, fail_silently=False):
    """Load a static file's text content, with process-level caching.

    Results are memoised on the function object (only outside DEBUG so
    edits show up during development).  The cache key includes the
    postprocessor's module/name so different postprocessors don't collide.
    """
    if not hasattr(load_staticfile, '_cache'):
        load_staticfile._cache = {}
    if postprocessor:
        cache_key = '{0}:{1}.{2}'.format(
            name, postprocessor.__module__, postprocessor.__name__)
    else:
        cache_key = name
    if cache_key in load_staticfile._cache:
        return load_staticfile._cache[cache_key]
    if settings.DEBUG:
        # Dont access file via staticfile storage in debug mode. Not available
        # without collectstatic management command.
        path = find(name)
    elif staticfiles_storage.exists(name):
        # get path if target file exists.
        path = staticfiles_storage.path(name)
    else:
        path = None
    if not path:
        if not fail_silently:
            raise ValueError('Staticfile not found for inlining: {0}'.format(name))
        return ''
    with open(path, 'r') as staticfile:
        content = staticfile.read()
    if postprocessor:
        # Postprocessor may transform the content (receives name + path
        # for context).
        content = postprocessor(name, path, content)
    if not settings.DEBUG:
        load_staticfile._cache[cache_key] = content
    return content
def load_geojson_for_level(self, level):
    """Return (filename, parsed_geojson) for *level*, or (None, None).

    Falls back to the '' entry of geometry_files when the level has no
    dedicated file, and forces a .geojson extension before resolving
    through the static storage.
    """
    fname = self.geometry_files.get(level, self.geometry_files.get(''))
    if not fname:
        # we have to have geojson
        return None, None
    base, ext = os.path.splitext(fname)
    if ext != '.geojson':
        fname = base + '.geojson'
    fname = staticfiles_storage.path(fname)
    # try load it
    try:
        with open(fname, 'r') as f:
            return fname, json.load(f)
    except IOError as e:
        if e.errno != 2:  # anything other than ENOENT is unexpected
            raise e
        log.warn("Couldn't open geometry file %s -- no geometry will be available for level %s" % (fname, level))
    return None, None
def render_individual_css(self, package, paths, **kwargs):
    """Inline each CSS file in *paths* into a single <style> tag.

    URLs inside each stylesheet are rewritten via expand_urls() so they
    remain valid once the CSS is embedded in the page.
    """
    chunks = []
    for path in paths:
        # Read via a context manager -- the original leaked one open
        # file handle per stylesheet.
        with open(staticfiles_storage.path(path), 'r') as css_file:
            chunks.append(self.expand_urls(path, css_file.read()))
    html = ''.join(chunks)
    return mark_safe('<style type="text/css">' + html + '</style>')
def frame(request, slug, status):
    """Render an embeddable frame listing an event's teams.

    *status* selects which team statuses are shown: the school-final /
    final labels map to fixed status sets, while any other label includes
    every status down the ladder until (and including) the matching one.
    """
    event = get_object_or_404(Event, slug=slug)
    statuses = []
    if status == TeamStatus.labels[TeamStatus.SCHOOL_FINAL]:
        statuses.append(TeamStatus.SCHOOL_FINAL)
    elif status == TeamStatus.labels[TeamStatus.FINAL]:
        statuses.append(TeamStatus.INVITED)
        statuses.append(TeamStatus.FINAL)
    else:
        # Walk the ladder from strongest to weakest, stopping once the
        # requested status has been included.
        for s in (
            TeamStatus.INVITED,
            TeamStatus.FINAL,
            TeamStatus.SCHOOL_FINAL,
            TeamStatus.BSU_SEMIFINAL,
            TeamStatus.SCHOOL_SEMIFINAL,
            TeamStatus.SEMIFINAL,
            TeamStatus.QUARTERFINAL,
            TeamStatus.QUALIFICATION,
            TeamStatus.EDITING,
            TeamStatus.DISQUALIFIED,
        ):
            statuses.append(s)
            if TeamStatus.labels[s] == status:
                break
    codeforces_resource = Resource.objects.get(host='codeforces.com')
    teams = Team.objects.filter(event=event, status__in=statuses).order_by('-created')
    # Prefetch/select everything the template touches to avoid N+1 queries;
    # codeforces accounts are restricted to that one resource.
    teams = teams.prefetch_related(
        'participants__coder__user',
        'participants__organization',
        Prefetch(
            'participants__coder__account_set',
            queryset=Account.objects.filter(resource=codeforces_resource),
        ),
        'participants__coder__account_set__resource',
    )
    teams = teams.select_related(
        'author__coder__user',
        'author__organization',
        'coach__coder__user',
        'coach__organization',
    )
    countries = Counter(t.country for t in teams)
    # Inline base.css so the frame carries its styling when embedded.
    base_css_path = staticfiles_storage.path('css/base.css')
    with open(base_css_path, 'r') as fo:
        base_css = fo.read()
    return render(
        request,
        'frame-team.html',
        {
            'teams': teams,
            'countries': countries.most_common(),
            'team_status': TeamStatus,
            'base_css': base_css,
            'codeforces_resource': codeforces_resource,
        },
    )
def pred(patients):
    """Predict heart-disease risk for *patients* using the pre-trained
    random-tree model shipped in static files.

    Categorical attributes are re-encoded to the numeric scheme the
    model expects before prediction; the raw model output is returned.
    """
    # Pull and convert the raw attributes first (same order as before,
    # so conversion errors surface identically).
    age = int(patients.age)
    sex = patients.gender
    cp = patients.cp
    trestbps = int(patients.trestbps)
    chol = int(patients.chol)
    fbs = patients.fbs
    restecg = patients.restecg
    thalach = int(patients.thalach)
    exang = int(patients.exang)
    oldpeak = float(patients.oldpeak)
    slope = patients.slope
    ca = int(patients.ca)
    thal = patients.thal

    # Encoding tables for the categorical features; the second .get()
    # argument is the fallback code for any other value.
    cp_codes = {"Typical Angina": 1, "Atypical Angina": 2, "Non-anginal pain": 3}
    restecg_codes = {"Normal": 0, "having ST-T wave abnormality": 1}
    slope_codes = {"Upsloping": 1, "flat": 2}
    thal_codes = {"Normal": 3, "Fixed Defect": 6}

    ndata = [
        age,
        1 if sex == "Male" else 0,
        cp_codes.get(cp, 4),
        trestbps,
        chol,
        1 if fbs == "greater than 120" else 0,
        restecg_codes.get(restecg, 2),
        thalach,
        exang,
        oldpeak,
        slope_codes.get(slope, 3),
        ca,
        thal_codes.get(thal, 7),
    ]
    print(ndata)
    tdata = np.matrix(ndata, dtype=np.float32)
    print(tdata)
    url = staticfiles_storage.path('model/rtree.xml')
    rtree = cv.ml.RTrees_load(url)
    res = rtree.predict(tdata)[1]
    print(res[0])
    return res[0]
def teacher_print_reminder_cards(request, access_code):
    """Render a printable A4 PDF of login reminder cards for a class.

    Lays out a 2x4 grid of cards per page, each showing a student's
    name, password and the class access code, decorated with rotating
    character images.
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'filename="student_reminder_cards.pdf"'
    p = canvas.Canvas(response, pagesize=A4)

    # Define constants that determine the look of the cards
    PAGE_WIDTH, PAGE_HEIGHT = A4
    PAGE_MARGIN = PAGE_WIDTH / 32
    INTER_CARD_MARGIN = PAGE_WIDTH / 64
    CARD_PADDING = PAGE_WIDTH / 48
    NUM_X = 2
    NUM_Y = 4
    CARD_WIDTH = (PAGE_WIDTH - PAGE_MARGIN * 2 - INTER_CARD_MARGIN * (NUM_X - 1)) / NUM_X
    CARD_HEIGHT = (PAGE_HEIGHT - PAGE_MARGIN * 2 - INTER_CARD_MARGIN * (NUM_Y - 1)) / NUM_Y
    HEADER_HEIGHT = CARD_HEIGHT * 0.16
    FOOTER_HEIGHT = CARD_HEIGHT * 0.1
    CARD_INNER_WIDTH = CARD_WIDTH - CARD_PADDING * 2
    CARD_INNER_HEIGHT = CARD_HEIGHT - CARD_PADDING * 2 - HEADER_HEIGHT - FOOTER_HEIGHT
    CARD_IMAGE_WIDTH = CARD_INNER_WIDTH * 0.25
    CORNER_RADIUS = CARD_WIDTH / 32

    # Setup various character images to cycle round
    CHARACTER_FILES = [
        "portal/img/dee.png", "portal/img/kirsty.png", "portal/img/wes.png",
        "portal/img/nigel.png", "portal/img/phil.png"
    ]
    CHARACTERS = []
    for character_file in CHARACTER_FILES:
        character_image = ImageReader(staticfiles_storage.path(character_file))
        # Scale to the card image width, then clamp to the inner height,
        # preserving the image's aspect ratio in both directions.
        character_height = CARD_INNER_HEIGHT
        character_width = CARD_IMAGE_WIDTH
        character_height = character_width * character_image.getSize(
        )[1] / character_image.getSize()[0]
        if character_height > CARD_INNER_HEIGHT:
            character_height = CARD_INNER_HEIGHT
            character_width = character_height * character_image.getSize(
            )[0] / character_image.getSize()[1]
        character = {
            'image': character_image,
            'height': character_height,
            'width': character_width
        }
        CHARACTERS.append(character)

    klass = get_object_or_404(Class, access_code=access_code)

    # Check auth
    if klass.teacher.new_user != request.user:
        raise Http404

    COLUMN_WIDTH = (CARD_INNER_WIDTH - CARD_IMAGE_WIDTH) * 0.45

    # Work out the data we're going to display, use data from the query string
    # if given, else display everyone in the class without passwords
    student_data = []
    student_data = get_student_data(request, klass, student_data)

    # Now draw everything
    x = 0
    y = 0

    def drawParagraph(text, position):
        # Shrink the font size until the paragraph fits the card column
        # (height <= 48), then draw it at the given vertical position.
        style = ParagraphStyle('test')
        style.font = 'Helvetica-Bold'
        font_size = 16
        while font_size > 0:
            style.fontSize = font_size
            style.leading = font_size
            para = Paragraph(text, style)
            (para_width, para_height) = para.wrap(
                CARD_INNER_WIDTH - COLUMN_WIDTH - CARD_IMAGE_WIDTH,
                CARD_INNER_HEIGHT)
            if para_height <= 48:
                para.drawOn(
                    p, inner_left + COLUMN_WIDTH,
                    inner_bottom + CARD_INNER_HEIGHT * position + 8 - para_height / 2)
                return
            font_size -= 1

    current_student_count = 0
    for student in student_data:
        character_index = current_student_count % len(CHARACTERS)
        # Card geometry for the current (x, y) grid slot.
        left = PAGE_MARGIN + x * CARD_WIDTH + x * INTER_CARD_MARGIN
        bottom = PAGE_HEIGHT - PAGE_MARGIN - (
            y + 1) * CARD_HEIGHT - y * INTER_CARD_MARGIN
        inner_left = left + CARD_PADDING
        inner_bottom = bottom + CARD_PADDING + FOOTER_HEIGHT
        header_bottom = bottom + CARD_HEIGHT - HEADER_HEIGHT
        footer_bottom = bottom

        # header rect
        p.setFillColorRGB(0.0, 0.027, 0.172)
        p.setStrokeColorRGB(0.0, 0.027, 0.172)
        p.roundRect(left, header_bottom, CARD_WIDTH, HEADER_HEIGHT,
                    CORNER_RADIUS, fill=1)
        p.rect(left, header_bottom, CARD_WIDTH, HEADER_HEIGHT / 2, fill=1)

        # footer rect
        p.roundRect(left, bottom, CARD_WIDTH, FOOTER_HEIGHT, CORNER_RADIUS,
                    fill=1)
        p.rect(left, bottom + FOOTER_HEIGHT / 2, CARD_WIDTH,
               FOOTER_HEIGHT / 2, fill=1)

        # outer box
        p.setStrokeColor(black)
        p.roundRect(left, bottom, CARD_WIDTH, CARD_HEIGHT, CORNER_RADIUS)

        # header text
        p.setFillColor(white)
        p.setFont('Helvetica', 18)
        p.drawCentredString(inner_left + CARD_INNER_WIDTH / 2,
                            header_bottom + HEADER_HEIGHT * 0.35,
                            '[ code ] for { life }')

        # footer text
        p.setFont('Helvetica', 10)
        p.drawCentredString(inner_left + CARD_INNER_WIDTH / 2,
                            footer_bottom + FOOTER_HEIGHT * 0.32,
                            settings.CODEFORLIFE_WEBSITE)

        # left hand side writing
        p.setFillColor(black)
        p.setFont('Helvetica', 12)
        # NOTE(review): the source here is corrupted -- "******" replaces
        # the original tokens (it looks like two drawString calls for the
        # 'Password:' and 'Class Code:' labels were censored).  Recover
        # the original statements from version control before shipping.
        p.drawString(inner_left, inner_bottom +
                     CARD_INNER_HEIGHT * 0.12, 'Password:'******'Class Code:')
        p.drawString(inner_left, inner_bottom + CARD_INNER_HEIGHT * 0.78,
                     'Name:')

        # right hand side writing
        drawParagraph(student['password'], 0.10)
        drawParagraph(klass.access_code, 0.43)
        drawParagraph(student['name'], 0.76)

        # character image
        character = CHARACTERS[character_index]
        p.drawImage(character['image'],
                    inner_left + CARD_INNER_WIDTH - character['width'],
                    inner_bottom, character['width'], character['height'],
                    mask='auto')

        # Advance to the next grid slot; compute_show_page handles page
        # breaks (NOTE(review): y is never advanced here -- presumably
        # compute_show_page manages it; confirm against its definition).
        x = (x + 1) % NUM_X
        compute_show_page(p, x, y, NUM_Y)
        current_student_count += 1
    compute_show_page(p, x, y, NUM_Y)
    p.save()
    return response
def file(self):
    """Open this record's static image and wrap it as an ImageFile.

    The caller owns (and is responsible for closing) the returned
    file handle.
    """
    absolute_path = staticfiles_storage.path(self.image)
    return ImageFile(open(absolute_path))
def passMap(self, where_u_r):
    """Build a folium crime/CCTV/police map centred on *where_u_r*.

    Geocodes the user's address, plots per-region crime statistics,
    CCTV positions with coverage circles, police stations and distance
    lines to nearby stations, then returns the rendered map HTML.
    """
    # Load the bundled CSV datasets from static files.
    static_crime_path = staticfiles_storage.path('mapPage/csv/crime.csv')
    static_cctv_path = staticfiles_storage.path('mapPage/csv/cctv.csv')
    static_police_path = staticfiles_storage.path('mapPage/csv/police.csv')
    a = pd.read_csv(static_crime_path, thousands=',', encoding='utf-8')
    b = pd.read_csv(static_cctv_path, thousands=',', encoding='utf-8')
    police = pd.read_csv(static_police_path, thousands=',', encoding='utf-8')
    # NOTE(review): these .head() calls discard their result -- dead code.
    a.head()
    b.head()
    police.head()
    # Geocode the user's location; R2_p is the (lat, lon) pair.
    temp_user = self.search_map(where_u_r)
    temp_user = json.loads(temp_user)
    temp_user = temp_user['addresses'][0]
    print(temp_user)
    R2_p = (float(temp_user['y']), float(temp_user['x']))
    print(R2_p)
    # Iterating a DataFrame yields its column names; the crime table's
    # columns double as region names here.
    region = []
    for value in a:
        region.append(value)
    # del region[0:1]
    x = []
    y = []
    z = []
    cc = []
    tv = []
    lst = []
    lst2 = []
    # Geocode each region to place the crime markers.
    for value in region:
        temp_map = self.search_map(value)
        temp_map = json.loads(temp_map)
        temp_map = temp_map['addresses'][0]
        x.append(float(temp_map['x']))
        y.append(float(temp_map['y']))
        z.append(temp_map['roadAddress'])
    m = folium.Map(location=(R2_p), tiles='cartodbpositron', zoom_start=16)
    # Toggleable layers for each marker family.
    fg_1 = folium.FeatureGroup(name='CCTV Location').add_to(m)
    fg_2 = folium.FeatureGroup(name='Crime List').add_to(m)
    fg_3 = folium.FeatureGroup(name='Police Office').add_to(m)
    fg_4 = folium.FeatureGroup(name='Distance to Police Office').add_to(m)
    # Crime markers with an HTML table popup of the region's statistics.
    for i in range(len(x)):
        classes = ('table table-striped table-hover'
                   'taalbe-condensed table-responsive')
        popup = a.iloc[[0, 1, 2, 3, 4, 5, 6, 7], [0, i + 1]].to_html(classes=classes)
        folium.Marker([y[i], x[i]], popup=popup,
                      icon=folium.Icon(color='blue')).add_to(fg_1)
    # CCTV coordinates from columns 2/3; 373 rows are assumed -- TODO
    # confirm this matches the CSV instead of hard-coding the count.
    for i in range(373):
        cc.append(b.iloc[i, 2])
        tv.append(b.iloc[i, 3])
    radi = 30
    radi_user = 15
    rotating_degree = 45
    ar = 0
    r = 50
    R = 30
    for i in range(373):
        c = cc[i], tv[i]
        # Distance from user to this CCTV (scaled); keep for the
        # nearest-CCTV search below.
        d = self.distance(c, (R2_p))
        d = d * 100
        tmp_array = [cc[i], tv[i], d]
        lst.append(tmp_array)
        offsets = self.calc_offsets(radi, c[1])
        coordinates = [
            self.coordinate_after_rotation(c, e, offsets)
            for e in range(0, 360 + 1, rotating_degree)
        ]
        # NOTE(review): this rebinds `b` (the cctv DataFrame) to a list
        # and shadows the loop variable `i`; both work only because
        # neither original value is needed afterwards -- fragile.
        b = []
        for i in coordinates:
            b.append(i)
        static_cctv_img = staticfiles_storage.path(
            'mapPage/image/cctv.png')
        icon1 = folium.features.CustomIcon(static_cctv_img,
                                           icon_size=(17, 17))
        folium.Marker(c, icon=icon1, popup=c).add_to(fg_2)
        folium.Circle(location=c, radius=25, fill=True, color='red',
                      tooltip='Polygon').add_to(fg_2)
    # Marker + circle for the user's own position.
    static_user_img = staticfiles_storage.path('mapPage/image/user.png')
    # NOTE(review): trailing comma makes `icon` a 1-tuple, and it is
    # never used -- the Marker below builds its own CustomIcon.
    icon = folium.features.CustomIcon(static_user_img, icon_size=(15, 15)),
    folium.Marker((R2_p),
                  icon=folium.features.CustomIcon(static_user_img,
                                                  icon_size=(15, 15)),
                  popup=(R2_p)).add_to(fg_2)
    folium.Circle(location=(R2_p), radius=25, fill=True,
                  popup=(R2_p)).add_to(fg_2)
    # Nearest CCTV to the user; pulse is its (scaled) distance.
    crros_over = min(lst, key=lambda item: item[2])
    print(crros_over)
    R1_p = (crros_over[0], crros_over[1])
    pulse = (crros_over[2])
    if pulse < 5:
        print("cctv 반경 내에 위치합니다.")
    else:
        print("cctv 반경에서 벗어났습니다.")
    # Police station coordinates; 2264 rows assumed -- TODO confirm.
    police_we = []
    police_gang = []
    for i in range(2264):
        police_we.append(police.iloc[i, 4])
        police_gang.append(police.iloc[i, 3])
    for i in range(2264):
        police_loc = (police_we[i], police_gang[i])
        police_d = self.distance(police_loc, (R2_p))
        police_d = police_d * 1000
        police_array = [police_we[i], police_gang[i], police_d]
        lst2.append(police_array)
        static_police_img = staticfiles_storage.path(
            'mapPage/image/police.png')
        icon2 = folium.features.CustomIcon(static_police_img,
                                           icon_size=(20, 20))
        folium.Marker(police_loc, icon=icon2, popup=police_loc).add_to(fg_3)
    # Distance lines from the user to every station within 1km.
    for i in lst2:
        if i[2] < 1000:
            test_lat_lon = i[0:2]
            locations = (R2_p)
            data = []
            data.append(locations)
            data.append(test_lat_lon)
            print(data)
            popup3 = str(round(i[2])) + 'm'
            folium.plugins.PolyLineOffset(data, popup=popup3, color="black",
                                          opacity=1, offset=-5,
                                          dash_array="5,10").add_to(fg_4)
    folium.LayerControl(collapsed=False).add_to(m)
    return m._repr_html_()
def linear_regression_play(request):
    """Interactive simple-linear-regression demo view.

    Fits salary-vs-experience data with a train/test split whose random
    state comes from the POST (or is freshly randomised), optionally
    predicts for a user-supplied value, and renders three base64-encoded
    PNG plots (all data / training fit / test fit).
    """
    file = staticfiles_storage.path('xlsx/Salary_Data.csv')
    dataset = pandas.read_csv(file)
    # dataset = pandas.read_csv('./static/xlsx/Salary_Data.csv')
    X = dataset.iloc[:, :-1].values
    y = dataset.iloc[:, 1].values
    # Reuse the split seed submitted with the form so replots are stable.
    if request.POST:
        try:
            curr_rdm = int(request.POST['curr_rdm'])
        except:
            # NOTE(review): if parsing fails, curr_rdm stays unbound and
            # train_test_split below raises NameError -- confirm intent.
            pass
    else:
        curr_rdm = random.randint(0, 9999)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size = 1/3, random_state = curr_rdm)
    regressor = LinearRegression()
    regressor.fit(X_train, y_train)
    y_pred = regressor.predict(X_test)
    # Optional prediction for a user-entered experience value.
    user_values = []
    if request.POST:
        try:
            user_number = float(request.POST['user_number'])
            user_pred = regressor.predict([[user_number,]])
            user_values = (user_number, user_pred)
        except:
            pass
    # Tables of (experience, salary) pairs for the template.
    all_values = []
    for i in range(X.__len__()):
        all_values += [(X[i, 0], int(y[i]))]
    train_values = []
    for i in range(X_train.__len__()):
        train_values += [(X_train[i, 0], int(y_train[i]))]
    train_values = sorted(train_values)
    test_values = []
    for i in range(X_test.__len__()):
        test_values += [(X_test[i, 0], (int(y_test[i]), int(y_pred[i])))]
    test_values = sorted(test_values)
    # Visualising All Data
    matplotlib.pyplot.clf()
    matplotlib.pyplot.scatter(X, y, c=numpy.random.rand(X.__len__(),))
    matplotlib.pyplot.xlabel('Years of Experience')
    matplotlib.pyplot.ylabel('Salary')
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_all = base64.b64encode(buf.read()).decode()
    buf.close()
    # Visualising the Training set results
    matplotlib.pyplot.clf()
    matplotlib.pyplot.scatter(X_train, y_train,
                              c=numpy.random.rand(X_train.__len__(),))
    matplotlib.pyplot.plot(X_train, regressor.predict(X_train),
                           color = 'blue', alpha=0.6)
    matplotlib.pyplot.xlabel('Years of Experience')
    matplotlib.pyplot.ylabel('Salary')
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_train = base64.b64encode(buf.read()).decode()
    buf.close()
    # Visualising the Test set results
    matplotlib.pyplot.clf()
    matplotlib.pyplot.scatter(X_test, y_test,
                              c=numpy.random.rand(X_test.__len__(),))
    if user_values:
        try:
            matplotlib.pyplot.scatter(user_number, user_pred,
                                      color = 'red', label = 'User')
        except:
            pass
    matplotlib.pyplot.plot(X_train, regressor.predict(X_train),
                           color = 'blue', alpha=0.6)
    matplotlib.pyplot.xlabel('Years of Experience')
    matplotlib.pyplot.ylabel('Salary')
    matplotlib.pyplot.legend()
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_test = base64.b64encode(buf.read()).decode()
    buf.close()
    context = {
        'all_values': all_values,
        'train_values': train_values,
        'test_values': test_values,
        'user_values': user_values,
        'curr_rdm': curr_rdm,
        'b64_train': b64_train,
        'b64_test': b64_test,
        'b64_all': b64_all
    }
    return render(request, 'linear_regression/linear_regression_play.html', context)
def __init__(self, contacts: List[InterestPerContract]):
    """Build a two-column landscape-A4 thank-you letter PDF in memory.

    One framed letter per contact, assembled from configurable text
    snippets; the finished document is left in ``self.buffer``.
    """
    snippets = get_custom_texts()
    self.buffer = io.BytesIO()
    story = []
    # Base styles: a Times heading and a spaced Times body style.
    styles = getSampleStyleSheet()
    styleH = styles['Heading3']
    styleH.fontName = 'Times-BoldItalic'
    styleH.fontSize = 14
    styleN = styles['Normal']
    styleN.spaceAfter = 0.3 * cm
    styleN.fontName = 'Times-Italic'
    styleN.leading = 14
    styleN.fontSize = 12
    doc = BaseDocTemplate(self.buffer, pagesize=landscape(A4))
    doc.leftMargin = 1 * cm
    doc.rightMargin = 1 * cm
    doc.topMargin = 1 * cm
    doc.bottomMargin = 1 * cm
    # Two side-by-side frames per page (one letter per frame).
    frameWidth = doc.width / 2
    leftRightSpace = 3 * cm
    frameHeight = doc.height + 3 * cm
    frames = []
    for frame in range(2):
        leftMargin = doc.leftMargin + frame * (frameWidth + leftRightSpace)
        column = Frame(leftMargin, doc.bottomMargin, frameWidth, frameHeight)
        frames.append(column)
    template = PageTemplate(frames=frames)
    doc.addPageTemplates(template)
    for contact in contacts:
        frame_floatables = []
        # Header row: personalised greeting on the left, logo on the right.
        img = get_image(staticfiles_storage.path('custom/logo.png'),
                        width=self.LOGO_WIDTH)
        table_style = TableStyle([
            ('ALIGN', (0, 0), (0, 0), 'LEFT'),
            ('VALIGN', (0, 0), (0, 0), 'BOTTOM'),
            ('ALIGN', (0, 1), (0, 1), 'RIGHT'),
            ('VALIGN', (0, 1), (0, 1), 'TOP'),
            ('LEFTPADDING', (0, 0), (-1, -1), 0),
            ('RIGHTPADDING', (0, 0), (-1, -1), 0),
        ])
        frame_floatables.append(
            Table([[
                Paragraph(f"Hallo {contact.first_name},", styleH),
                img,
            ]], style=table_style, colWidths='*'))
        frame_floatables.append(Spacer(1, 0.4 * cm))
        # Letter body paragraphs from the configured snippets.
        frame_floatables.append(
            Paragraph(snippets["thanks_what_happened"], styleN))
        frame_floatables.append(Paragraph(snippets["next_year"], styleN))
        frame_floatables.append(Paragraph(snippets["invitation"], styleN))
        frame_floatables.append(Paragraph(snippets["wish"], styleN))
        frame_floatables.append(Spacer(1, 0.5 * cm))
        # Footer row: picture next to the greetings snippet.
        img = get_image(staticfiles_storage.path('custom/image.png'),
                        width=self.IMG_WIDTH)
        table_style = TableStyle([
            ('ALIGN', (0, 0), (-1, -1), 'LEFT'),
            ('VALIGN', (0, 0), (-1, -1), 'TOP'),
            ('LEFTPADDING', (0, 0), (-1, -1), 0),
            ('RIGHTPADDING', (0, 0), (-1, -1), 0),
        ])
        frame_floatables.append(
            Table([[
                img,
                Paragraph(snippets["greetings"], styleN),
            ]], style=table_style, colWidths='*'))
        # Keep each letter inside a single frame.
        story.append(KeepTogether(frame_floatables))
    doc.build(story)
    self.buffer.seek(0)
def testCollectStaticStandalone(self):
    """collectstatic in the standalone environment emits main-built.js."""
    with self.settings(REQUIRE_ENVIRONMENT=self.require_environment):
        call_command("collectstatic", interactive=False, verbosity=0)
    built_path = staticfiles_storage.path("js/main-built.js")
    self.assertTrue(os.path.exists(built_path))
def predict(request):
    """Breast-cancer prediction view.

    Reads the 30 numeric features exp1..exp30 from the query string,
    trains a logistic-regression model on the bundled dataset, and
    re-renders the form with the prediction and the submitted values.

    Raises KeyError/ValueError (-> 500) when a parameter is missing or
    non-numeric, matching the original behaviour.
    """
    # Gather exp1..exp30 in order (replaces 30 copy-pasted statements).
    features = [float(request.GET['exp{}'.format(i)]) for i in range(1, 31)]

    rawdata = staticfiles_storage.path('cancer_dataset.csv')
    dataset = pd.read_csv(rawdata)
    # Columns 2..31 are the features; column 1 is the diagnosis label.
    X = dataset.iloc[0:, 2:32].values
    y = dataset.iloc[0:, 1].values
    X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                        test_size=0.8,
                                                        random_state=42)
    model = LogisticRegression()
    model.fit(X_train, y_train)

    yet_to_predict = np.array([features])
    y_pred = model.predict(yet_to_predict)

    # NOTE(review): computed but never passed to the template in the
    # original -- kept for parity; consider adding it to the context.
    accuracy = int(model.score(X_test, y_test) * 100)

    # Echo every submitted value back into the template context.
    context = {"predicted": y_pred[0]}
    context.update({'exp{}'.format(i): value
                    for i, value in enumerate(features, start=1)})
    return render(request, 'index.html', context)
def knn_play(request):
    """Interactive k-nearest-neighbours classification demo view.

    Trains a 3-NN classifier on the Social Network Ads dataset
    (age/income -> purchased), optionally classifies a user-supplied
    point, and renders decision-boundary plots as base64 PNGs.
    """
    # Importing the dataset
    file = staticfiles_storage.path('xlsx/Social_Network_Ads.csv')
    dataset = pandas.read_csv(file)
    # dataset = pandas.read_csv('./static/xlsx/Social_Network_Ads.csv')
    X = dataset.iloc[:, [2, 3]].values
    y = dataset.iloc[:, 4].values
    # Setting split random state
    if request.POST:
        try:
            curr_rdm = int(request.POST['curr_rdm'])
        except:
            # NOTE(review): if parsing fails, curr_rdm stays unbound and
            # train_test_split below raises NameError -- confirm intent.
            pass
    else:
        curr_rdm = random.randint(0, 9999)
    # Splitting the dataset into the Training set and Test set
    X_train, X_test, y_train, y_test = sklearn.model_selection.train_test_split(
        X, y, test_size=0.25, random_state=curr_rdm)
    # Feature Scaling
    sc = sklearn.preprocessing.StandardScaler()
    X_train = sc.fit_transform(X_train)
    X_test = sc.transform(X_test)
    # Fitting classifier to the Training set
    classifier = sklearn.neighbors.KNeighborsClassifier(n_neighbors=3,
                                                        metric='minkowski',
                                                        p=2)
    classifier.fit(X_train, y_train)
    # Predicting the Test set results
    y_pred = classifier.predict(X_test)
    # Making the Confusion Matrix
    # cm = sklearn.metrics.confusion_matrix(y_test, y_pred)
    # Assigning the output variables
    user_values = []
    if request.POST:
        try:
            user_age = int(request.POST['user_age'])
            user_income = int(request.POST['user_income'].replace(',', ''))
            user_pred = classifier.predict(
                sc.transform([[user_age, user_income]]))
            if user_pred == 0:
                user_pred = 'No'
            else:
                user_pred = 'Yes'
            user_values = (user_age, user_income, user_pred)
        except:
            pass
    all_values = []
    for i in range(X.__len__()):
        all_values += [[X[i, 0], X[i, 1], 'No' if y[i] == 0 else 'Yes']]
    # Undo the scaling so the tables show real ages/incomes.
    original_X_train = sc.inverse_transform(X_train)
    train_values = []
    for i in range(X_train.__len__()):
        train_values += [[
            int(original_X_train[i, 0]),
            int(original_X_train[i, 1]),
            'No' if y_train[i] == 0 else 'Yes'
        ]]
    original_X_test = sc.inverse_transform(X_test)
    test_values = []
    for i in range(X_test.__len__()):
        test_values += [[
            int(original_X_test[i, 0]),
            int(original_X_test[i, 1]),
            'No' if y_test[i] == 0 else 'Yes',
            'No' if y_pred[i] == 0 else 'Yes'
        ]]
    # Visualising All Data
    matplotlib.pyplot.clf()
    X_set, y_set = X, y
    for i, j in enumerate(numpy.unique(y_set)):
        matplotlib.pyplot.scatter(X_set[y_set == j, 0],
                                  X_set[y_set == j, 1],
                                  c=matplotlib.colors.ListedColormap(
                                      ('red', 'green'))(i),
                                  label='Yes' if j == 1 else 'No')
    matplotlib.pyplot.xlabel('Age')
    matplotlib.pyplot.ylabel('Income')
    matplotlib.pyplot.legend()
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_all = base64.b64encode(buf.read()).decode()
    buf.close()
    # Visualising the Training set results
    matplotlib.pyplot.clf()
    X_set, y_set = X_train, y_train
    # Dense grid over the feature plane for the decision-boundary fill.
    X1, X2 = numpy.meshgrid(
        numpy.arange(start=X_set[:, 0].min() - 1,
                     stop=X_set[:, 0].max() + 1,
                     step=0.01),
        numpy.arange(start=X_set[:, 1].min() - 1,
                     stop=X_set[:, 1].max() + 1,
                     step=0.01))
    matplotlib.pyplot.contourf(
        X1, X2,
        classifier.predict(numpy.array([X1.ravel(),
                                        X2.ravel()]).T).reshape(X1.shape),
        alpha=0.4,
        cmap=matplotlib.colors.ListedColormap(('red', 'green')))
    matplotlib.pyplot.xlim(X1.min(), X1.max())
    matplotlib.pyplot.ylim(X2.min(), X2.max())
    for i, j in enumerate(numpy.unique(y_set)):
        matplotlib.pyplot.scatter(X_set[y_set == j, 0],
                                  X_set[y_set == j, 1],
                                  c=matplotlib.colors.ListedColormap(
                                      ('red', 'green'))(i),
                                  label='Yes' if j == 1 else 'No')
    matplotlib.pyplot.xlabel('Age (scaled)')
    matplotlib.pyplot.ylabel('Income (scaled)')
    matplotlib.pyplot.legend()
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_train = base64.b64encode(buf.read()).decode()
    buf.close()
    # Visualising the Test set results
    matplotlib.pyplot.clf()
    X_set, y_set = X_test, y_test
    X1, X2 = numpy.meshgrid(
        numpy.arange(start=X_set[:, 0].min() - 1,
                     stop=X_set[:, 0].max() + 1,
                     step=0.01),
        numpy.arange(start=X_set[:, 1].min() - 1,
                     stop=X_set[:, 1].max() + 1,
                     step=0.01))
    matplotlib.pyplot.contourf(
        X1, X2,
        classifier.predict(numpy.array([X1.ravel(),
                                        X2.ravel()]).T).reshape(X1.shape),
        alpha=0.4,
        cmap=matplotlib.colors.ListedColormap(('red', 'green')))
    matplotlib.pyplot.xlim(X1.min(), X1.max())
    matplotlib.pyplot.ylim(X2.min(), X2.max())
    for i, j in enumerate(numpy.unique(y_set)):
        matplotlib.pyplot.scatter(X_set[y_set == j, 0],
                                  X_set[y_set == j, 1],
                                  c=matplotlib.colors.ListedColormap(
                                      ('red', 'green'))(i),
                                  label='Yes' if j == 1 else 'No')
    if user_values:
        try:
            matplotlib.pyplot.scatter(
                sc.transform([[user_age, user_income]])[0][0],
                sc.transform([[user_age, user_income]])[0][1],
                color='yellow', label='User')
        except:
            pass
    matplotlib.pyplot.xlabel('Age (scaled)')
    matplotlib.pyplot.ylabel('Income (scaled)')
    matplotlib.pyplot.legend()
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_test = base64.b64encode(buf.read()).decode()
    buf.close()
    context = {
        'all_values': all_values,
        'train_values': train_values,
        'test_values': test_values,
        'user_values': user_values,
        'curr_rdm': curr_rdm,
        'b64_train': b64_train,
        'b64_test': b64_test,
        'b64_all': b64_all
    }
    return render(request, 'knn/knn_play.html', context)
def random_forest_play(request):
    """Interactive random-forest regression demo over the salary dataset.

    Trains a RandomForestRegressor on a train/test split seeded by
    ``curr_rdm`` (taken from POST or freshly randomised), optionally
    predicts for a user-supplied value, and renders three base64-encoded
    PNG plots (all data / train / test) into the template context.
    """
    # Resolve the CSV shipped with the static files and load it.
    file = staticfiles_storage.path('xlsx/Salary_Data.csv')
    dataset = pandas.read_csv(file)
    X = dataset.iloc[:, :-1].values  # feature column(s): years of experience
    y = dataset.iloc[:, 1].values    # target column: salary
    # Seed for the split: reuse the one posted back by the form, else random.
    # NOTE(review): if POST is present but 'curr_rdm' is missing/non-numeric,
    # the bare except leaves curr_rdm undefined and train_test_split below
    # raises NameError — verify the form always posts curr_rdm.
    if request.POST:
        try:
            curr_rdm = int(request.POST['curr_rdm'])
        except:
            pass
    else:
        curr_rdm = random.randint(0, 9999)
    X_train, X_test, y_train, y_test = sklearn.model_selection.train_test_split(
        X, y, test_size=1 / 3, random_state=curr_rdm)
    regressor = sklearn.ensemble.RandomForestRegressor(n_estimators=10,
                                                       random_state=0)
    regressor.fit(X_train, y_train)
    y_pred = regressor.predict(X_test)
    # Optional single prediction for a user-entered number; silently ignored
    # on bad input so the page still renders.
    user_values = []
    if request.POST:
        try:
            user_number = float(request.POST['user_number'])
            user_pred = regressor.predict([[user_number]])
            user_values = [user_number, user_pred]
        except:
            pass
    # Tabular values for the template: (experience, salary) pairs.
    all_values = []
    for i in range(X.__len__()):
        all_values += [(X[i, 0], int(y[i]))]
    train_values = []
    for i in range(X_train.__len__()):
        train_values += [(X_train[i, 0], int(y_train[i]))]
    train_values = sorted(train_values)
    # Test rows pair the actual salary with the model's prediction.
    test_values = []
    for i in range(X_test.__len__()):
        test_values += [(X_test[i, 0], (int(y_test[i]), int(y_pred[i])))]
    test_values = sorted(test_values)
    # --- Visualising All Data ---
    matplotlib.pyplot.clf()
    X_grid = numpy.arange(min(X), max(X), 0.1)  # NOTE(review): unused here
    X_grid = X_grid.reshape((len(X_grid), 1))
    matplotlib.pyplot.scatter(X, y, c=numpy.random.rand(X.__len__(), ))
    matplotlib.pyplot.xlabel('Years of Experience')
    matplotlib.pyplot.ylabel('Salary')
    matplotlib.pyplot.tight_layout()
    # Serialise the current figure to base64 for inline <img> embedding.
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_all = base64.b64encode(buf.read()).decode()
    buf.close()
    # --- Visualising the train results (fine grid for the step curve) ---
    matplotlib.pyplot.clf()
    X_grid = numpy.arange(min(X_train), max(X_train), 0.01)
    X_grid = X_grid.reshape((len(X_grid), 1))
    matplotlib.pyplot.scatter(X_train, y_train,
                              c=numpy.random.rand(X_train.__len__(), ))
    matplotlib.pyplot.plot(X_grid, regressor.predict(X_grid), color='blue',
                           alpha=0.6)
    matplotlib.pyplot.xlabel('Years of Experience')
    matplotlib.pyplot.ylabel('Salary')
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_train = base64.b64encode(buf.read()).decode()
    buf.close()
    # --- Visualising the test results (plus the user's point, if any) ---
    matplotlib.pyplot.clf()
    X_grid = numpy.arange(min(X_train), max(X_train), 0.01)
    X_grid = X_grid.reshape((len(X_grid), 1))
    matplotlib.pyplot.scatter(X_test, y_test,
                              c=numpy.random.rand(X_test.__len__(), ))
    if user_values:
        matplotlib.pyplot.scatter(user_number,
                                  regressor.predict([[user_number]]),
                                  color='red',
                                  label='User')
    matplotlib.pyplot.plot(X_grid, regressor.predict(X_grid), color='blue',
                           alpha=0.6)
    matplotlib.pyplot.xlabel('Years of Experience')
    matplotlib.pyplot.ylabel('Salary')
    matplotlib.pyplot.legend()
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_test = base64.b64encode(buf.read()).decode()
    buf.close()
    context = {
        'all_values': all_values,
        'train_values': train_values,
        'test_values': test_values,
        'user_values': user_values,
        'curr_rdm': curr_rdm,
        'b64_train': b64_train,
        'b64_test': b64_test,
        'b64_all': b64_all
    }
    return render(request, 'random_forest/random_forest_play.html', context)
def svr_play(request):
    """Interactive SVR (RBF kernel) regression demo over the salary dataset.

    Standard-scales both X and y, trains an SVR on a seeded split,
    optionally predicts for a user-entered value, and renders three
    base64-encoded PNG plots (all data / train / test) into the context.
    """
    file = staticfiles_storage.path('xlsx/Salary_Data.csv')
    dataset = pandas.read_csv(file)
    X = dataset.iloc[:, :-1].values
    y = dataset.iloc[:, 1].values
    # Keep unscaled copies for display in the "all data" plot and tables.
    X_original = dataset.iloc[:, :-1].values
    y_original = dataset.iloc[:, 1].values
    # SVR is scale-sensitive, so both features and target are standardised.
    sc_X = sklearn.preprocessing.StandardScaler()
    sc_y = sklearn.preprocessing.StandardScaler()
    X = sc_X.fit_transform(X)
    y = numpy.ravel(sc_y.fit_transform(y.reshape(-1, 1)))
    # NOTE(review): same pattern as the sibling views — a POST with a bad
    # 'curr_rdm' leaves curr_rdm undefined (bare except swallows the error).
    if request.POST:
        try:
            curr_rdm = int(request.POST['curr_rdm'])
        except:
            pass
    else:
        curr_rdm = random.randint(0, 9999)
    X_train, X_test, y_train, y_test = sklearn.model_selection.train_test_split(
        X, y, test_size=1 / 3, random_state=curr_rdm)
    regressor = sklearn.svm.SVR(kernel='rbf')
    regressor.fit(X_train, y_train)
    # NOTE(review): X_test is already scaled, so sc_X.transform(X_test)
    # scales it a second time — predictions here look double-transformed;
    # confirm against the original notebook this was ported from.
    y_pred = regressor.predict(sc_X.transform(X_test))
    user_values = []
    if request.POST:
        try:
            # NOTE(review): user_number stays a str here; it is fed into
            # numpy.array([[user_number]]) below — verify sklearn accepts it.
            user_number = request.POST['user_number']
            scaled_user_number = sc_X.transform(numpy.array([[user_number]]))
            user_pred = sc_y.inverse_transform(
                regressor.predict(sc_X.transform(numpy.array([[user_number]
                                                              ]))))
            user_values = [user_number, user_pred]
        except:
            pass
    # Tabular values for the template, mapped back to original units.
    all_values = []
    for i in range(X.__len__()):
        all_values += [(X_original[i, 0], int(y_original[i]))]
    train_values = []
    for i in range(X_train.__len__()):
        train_values += [(round(float(sc_X.inverse_transform(X_train[i])), 2),
                          sc_y.inverse_transform([y_train[i]]))]
    train_values = sorted(train_values)
    test_values = []
    for i in range(X_test.__len__()):
        test_values += [(round(float(sc_X.inverse_transform(X_test[i])), 2),
                         (sc_y.inverse_transform([y_test[i]]),
                          sc_y.inverse_transform([y_pred[i]])))]
    test_values = sorted(test_values)
    # --- Visualising All Data (original, unscaled units) ---
    matplotlib.pyplot.clf()
    X_grid = numpy.arange(min(X), max(X), 0.1)  # NOTE(review): unused here
    X_grid = X_grid.reshape((len(X_grid), 1))
    matplotlib.pyplot.scatter(X_original, y_original,
                              c=numpy.random.rand(X.__len__(), ))
    matplotlib.pyplot.xlabel('Years of Experience')
    matplotlib.pyplot.ylabel('Salary')
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_all = base64.b64encode(buf.read()).decode()
    buf.close()
    # --- Visualising the train results (scaled units) ---
    matplotlib.pyplot.clf()
    X_grid = numpy.arange(min(X_train), max(X_train), 0.01)
    X_grid = X_grid.reshape((len(X_grid), 1))
    matplotlib.pyplot.scatter(X_train, y_train,
                              c=numpy.random.rand(X_train.__len__(), ))
    matplotlib.pyplot.plot(X_grid, regressor.predict(X_grid), color='blue',
                           alpha=0.6)
    matplotlib.pyplot.xlabel('Years of Experience (scaled)')
    matplotlib.pyplot.ylabel('Salary (scaled)')
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_train = base64.b64encode(buf.read()).decode()
    buf.close()
    # --- Visualising the test results (plus the user's point, if any) ---
    matplotlib.pyplot.clf()
    X_grid = numpy.arange(min(X_train), max(X_train), 0.01)
    X_grid = X_grid.reshape((len(X_grid), 1))
    matplotlib.pyplot.scatter(X_test, y_test,
                              c=numpy.random.rand(X_test.__len__(), ))
    if user_values:
        matplotlib.pyplot.scatter(
            scaled_user_number,
            regressor.predict(sc_X.transform(numpy.array([[user_number]]))),
            color='red',
            label='User')
    matplotlib.pyplot.plot(X_grid, regressor.predict(X_grid), color='blue',
                           alpha=0.6)
    matplotlib.pyplot.xlabel('Years of Experience (scaled)')
    matplotlib.pyplot.ylabel('Salary (scaled)')
    matplotlib.pyplot.legend()
    matplotlib.pyplot.tight_layout()
    buf = io.BytesIO()
    matplotlib.pyplot.savefig(buf, format='png')
    buf.seek(0)
    b64_test = base64.b64encode(buf.read()).decode()
    buf.close()
    context = {
        'all_values': all_values,
        'train_values': train_values,
        'test_values': test_values,
        'user_values': user_values,
        'curr_rdm': curr_rdm,
        'b64_train': b64_train,
        'b64_test': b64_test,
        'b64_all': b64_all
    }
    return render(request, 'svr/svr_play.html', context)
def city_site(request): url = 'https://api.openweathermap.org/data/2.5/weather?q={}&units=metric&appid=8cc2f0a4630ad36fc89c8ab644ef1e28' url2 = 'http://api.openweathermap.org/data/2.5/forecast?id={}&units=metric&appid=8cc2f0a4630ad36fc89c8ab644ef1e28' url3 = 'https://api.openweathermap.org/data/2.5//group?id={}&units=metric&appid=8cc2f0a4630ad36fc89c8ab644ef1e28' if request.method == 'POST': form = CityForm(request.POST) if form.is_valid(): form.save() return redirect('city_site') else: try: city = CityField.objects.all().last() r = requests.get(url.format(city)).json() city2 = r['id'] r2 = requests.get(url2.format(city2)).json() time_date_forecast = [] time_date_forecast_done = [] for i in range(8): time_date_forecast.append( datetime.utcfromtimestamp(r2['list'][i]['dt'] + r2['city']['timezone']).strftime( '%H:%M:%S, %d.%m.%Y')) for x in time_date_forecast: time_date_forecast_done.append(x.split(' ')) list_of_cities = [] with open(staticfiles_storage.path('city.list.json')) as f: data = json.load(f) for cit_l in range(len(data)): list_of_cities.append(data[cit_l]['id']) r_li = random.sample(list_of_cities, k=5) r3 = requests.get( url3.format(r_li).replace(' ', '').replace('[', '').replace(']', '')).json() city_weather = { 'country': r['sys']['country'], 'city': r['name'], 'temperature': round(float(r['main']['temp'])), 'weather': r['weather'][0]['description'], 'icon': r['weather'][0]['icon'], 'sunrise': datetime.utcfromtimestamp(r['sys']['sunrise'] + r['timezone']).strftime('%H:%M:%S'), 'sunset': datetime.utcfromtimestamp(r['sys']['sunset'] + r['timezone']).strftime('%H:%M:%S'), 'pressure': int(r['main']['pressure']), 'humidity': int(r['main']['humidity']), 'wind': round(float(r['wind']['speed']), 1), 'for_time_date': time_date_forecast_done, 'for_list': r2['list'], 'r_city': r3['list'], } context = {'city_weather': city_weather} return render(request, 'weather/details.html', context) except KeyError: return redirect('main_site')
def page_cv_word(request, collaborateurs_id):
    """Generate a consultant's CV as a .docx download.

    Loads a docx template from static files, fills it with the
    collaborator's profile (skills, clients, missions, ...) via
    docxtpl, and streams the rendered document back as an attachment.
    """
    collab = get_object_or_404(collaborateurs, pk=collaborateurs_id)
    # Missions ordered newest-first for the CV.
    mission_du_collab = experiences.objects.filter(
        collaborateurMission=collaborateurs_id).order_by('-dateDebut')
    fichier_template = staticfiles_storage.path(
        'collab/Thémis-conseil-DC_TEMPLATE.docx')
    doc = DocxTemplate(fichier_template)
    today = datetime.date.today()
    context = {}
    nom = collab.nomCollaborateur
    prenom = collab.prenomCollaborateur
    # Output file name: "<last>-<first>-<date>.docx".
    nom_sortie = nom + "-" + prenom + "-" + str(today) + ".docx"
    titre = collab.titreCollaborateur
    # Compute years of professional experience.
    if isinstance(collab.dateDebutExpPro, datetime.datetime):
        dateExpeDebutAnne = collab.dateDebutExpPro.year
    else:
        # No start date recorded: fall back to the current year (0 years).
        dateExpeDebutAnne = datetime.date.today().year
    anneeActuelle = datetime.date.today().year
    differenceExpe = anneeActuelle - dateExpeDebutAnne
    nbAnneeExpe = differenceExpe
    texte_introductif = generateRichText(doc, collab.texteIntroductifCv,
                                         "DC_Text_Intro")
    # Collect key skills.
    competencesDuCollab = collab.listeCompetencesCles.all()
    competences = []
    for compe in competencesDuCollab:
        competences.append(compe.nomCompetence)
    # Collect intervention levels.
    NivInterven = collab.niveauxIntervention.all()
    interventions = []
    for inter in NivInterven:
        interventions.append(inter.libelle)
    # Collect main clients.
    clientsPrincipaux = collab.clientPrincipaux.all()
    clients = []
    for client in clientsPrincipaux:
        clients.append(client.nomClient)
    # Collect sector expertise.
    SecteurConsult = collab.expertiseSectorielle.all()
    secteurs = []
    for secteur in SecteurConsult:
        secteurs.append(secteur.nom)
    # Collect tools.
    OutilsConsult = collab.outilsCollaborateur.all()
    outils = []
    for outil in OutilsConsult:
        outils.append(outil.nomOutil)
    # Collect languages.
    LanguesConsult = collab.langues.all()
    langues = []
    for langue in LanguesConsult:
        langues.append(langue)
    # Collect methodologies.
    MethodoConsult = collab.methodologie.all()
    methodologies = []
    for methodo in MethodoConsult:
        methodologies.append(methodo.nom)
    # Collect education/training entries.
    FormationConsult = collab.formation.all()
    formations = []
    for forma in FormationConsult:
        formations.append(forma.formation)
    # Collect missions (everything must be pre-computed here because Word
    # templates cannot use Django templatetags).
    missions = []
    for miss in mission_du_collab:
        data = {}
        data["nomMission"] = miss.nomMission
        data["Client"] = miss.client.nomClient
        data["Domaine"] = miss.client.domaineClient
        data["Service"] = miss.service
        data["dateDebut"] = miss.dateDebut
        # Mission duration in months; an open-ended mission runs to today.
        dateFin = miss.dateFin
        if dateFin is None:
            fin = datetime.date.today()
            debut = miss.dateDebut
            dureeMission = (fin.year - debut.year) * 12 + (fin.month -
                                                           debut.month)
        else:
            fin = miss.dateFin
            debut = miss.dateDebut
            dureeMission = (fin.year - debut.year) * 12 + (fin.month -
                                                           debut.month)
        data["dureeMission"] = dureeMission
        data["contexteMission"] = generateRichText(doc,
                                                   miss.resumeIntervention,
                                                   "DC_Intervention_Contexte")
        data["descriptif"] = generateRichText(doc, miss.descriptifMission,
                                              "DC_Intervention_Desc")
        data["environnement"] = generateRichText(doc,
                                                 miss.environnementMission,
                                                 "DC_Intervention_Env")
        missions.append(data)
    # Populate the template context.
    context["nom"] = nom
    context["prenom"] = prenom
    context["titre"] = titre
    context["trigramme"] = collab.trigramme
    context["nbAnneeExpe"] = nbAnneeExpe
    context["text_intro"] = texte_introductif
    context["expeSigni1"] = collab.expSignificative1
    context["expeSigni2"] = collab.expSignificative2
    context["expeSigni3"] = collab.expSignificative3
    context["expeSigni4"] = collab.expSignificative4
    context["expeSigni5"] = collab.expSignificative5
    context["competences"] = competences
    context["Interventions"] = interventions
    context["clients"] = clients
    context["secteurs"] = secteurs
    context["outils"] = outils
    context["langues"] = langues
    context["methodologies"] = methodologies
    context["formations"] = formations
    context["missions"] = missions
    context["parcours"] = generateRichText(doc, collab.parcours,
                                           "DC_Parcours")
    doc.render(context)
    # Render into memory and stream as a .docx attachment.
    doc_io = io.BytesIO()
    doc.save(doc_io)
    doc_io.seek(0)
    response = HttpResponse(doc_io.read())
    response["Content-Disposition"] = "attachment; filename=" + nom_sortie
    response[
        "Content-Type"] = "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
    return response
def send_eventauthorisation_success_email(instance):
    """Email confirmation PDFs to the client and notify the MIC.

    Builds the event confirmation PDF (RML -> PDF, plus the terms of hire
    fetched from settings.TERMS_OF_HIRE_URL), emails it to the client with
    an HTML alternative, notifies the MIC (or the fallback address), and
    finally marks the event as BOOKED.
    """
    # Generate PDF first to prevent context conflicts
    context = {
        'object': instance.event,
        'fonts': {
            'opensans': {
                'regular': 'RIGS/static/fonts/OPENSANS-REGULAR.TTF',
                'bold': 'RIGS/static/fonts/OPENSANS-BOLD.TTF',
            }
        },
        'receipt': True,
        'current_user': False,
    }
    template = get_template('RIGS/event_print.xml')
    merger = PdfFileMerger()
    rml = template.render(context)
    buffer = rml2pdf.parseString(rml)
    merger.append(PdfFileReader(buffer))
    buffer.close()
    # Append the current terms of hire behind the confirmation.
    # NOTE(review): the urlopen response is never closed — consider a
    # with-block; also a network failure here raises out of this function.
    terms = urllib.request.urlopen(settings.TERMS_OF_HIRE_URL)
    merger.append(BytesIO(terms.read()))
    merged = BytesIO()
    merger.write(merged)
    # Produce email content. Address the recipient by name only when the
    # destination address matches the person/organisation on the event.
    context = {
        'object': instance,
    }
    if instance.event.person is not None and instance.email == instance.event.person.email:
        context['to_name'] = instance.event.person.name
    elif instance.event.organisation is not None and instance.email == instance.event.organisation.email:
        context['to_name'] = instance.event.organisation.name
    subject = "N%05d | %s - Event Authorised" % (instance.event.pk,
                                                 instance.event.name)
    client_email = EmailMultiAlternatives(
        subject,
        get_template("RIGS/eventauthorisation_client_success.txt").render(
            context),
        to=[instance.email],
        reply_to=[settings.AUTHORISATION_NOTIFICATION_ADDRESS],
    )
    # Inline the stylesheet so the HTML renders in mail clients.
    css = staticfiles_storage.path('css/email.css')
    html = Premailer(get_template(
        "RIGS/eventauthorisation_client_success.html").render(context),
                     external_styles=css).transform()
    client_email.attach_alternative(html, 'text/html')
    # Strip characters that are unsafe in an attachment filename.
    escapedEventName = re.sub('[^a-zA-Z0-9 \n\.]', '', instance.event.name)
    client_email.attach(
        'N%05d - %s - CONFIRMATION.pdf' % (instance.event.pk,
                                           escapedEventName),
        merged.getvalue(), 'application/pdf')
    # MIC notification falls back to the shared address when unset.
    if instance.event.mic:
        mic_email_address = instance.event.mic.email
    else:
        mic_email_address = settings.AUTHORISATION_NOTIFICATION_ADDRESS
    mic_email = EmailMessage(
        subject,
        get_template("RIGS/eventauthorisation_mic_success.txt").render(
            context),
        to=[mic_email_address])
    # Now we have both emails successfully generated, send them out
    client_email.send(fail_silently=True)
    mic_email.send(fail_silently=True)
    # Set event to booked now that it's authorised
    instance.event.status = models.Event.BOOKED
    instance.event.save()
def read_file(file_path): file = staticfiles_storage.path(file_path) with open(file) as f: rows = list(csv.reader(f)) return rows
def pandas_table(request): csv_file = staticfiles_storage.path('csv/norway_new_car_sales_by_make.csv') df = pd.read_csv(csv_file) car_makers = df['Make'].unique() return render(request, 'car_makers.html', {'car_makers': car_makers})
def predict(request): pw = str(request.GET['pw']) if pw == 'Yes': pwv = 1 else: pwv = 2 fc = str(request.GET['fc']) if fc == 'Yes': fcv = 1 else: fcv = 2 fi = str(request.GET['fi']) if fi == 'Yes': fiv = 1 else: fiv = 2 jc = str(request.GET['jc']) if jc == 'Yes': jcv = 1 else: jcv = 2 hi = str(request.GET['hi']) if hi == 'Yes': hiv = 1 else: hiv = 2 #Reading the dataframe rawdata = staticfiles_storage.path('pep_ds.csv') ds = pd.read_csv(rawdata) # Separating features and target x = ds.iloc[:, 1:6] y = ds.iloc[:, 6] # Split dataframe into train and test data x_train, x_test, y_train, y_test = train_test_split(x, y, train_size=0.8, random_state=368) # Creating the AI model rf = RandomForestClassifier() rf.fit(x_train, y_train) #Prediction #yet_to_predict = np.array([[exp]]) #y_pred = regressor.predict(yet_to_predict) result = np.array([[pwv, fcv, fiv, jcv, hiv]]) y_pred = rf.predict(result) #rf.predict([[0,1,1,0,1]]) #accuracy = regressor.score(X_test, y_test) #accuracy = accuracy*100 #accuracy = int(accuracy) return render(request, 'index.html', {"predicted": y_pred})
def _header(self, data): header = [] img = get_image(staticfiles_storage.path('custom/logo.png'), width=self.LOGO_WIDTH) table_style = TableStyle([ *self.base_table_style, ('ALIGN', (0, 0), (-1, -1), 'RIGHT'), ('VALIGN', (0, 0), (-1, -1), 'TOP'), ]) header.append(Table([[ img, ]], style=table_style, colWidths='*')) table_style = TableStyle([ *self.base_table_style, ('ALIGN', (0, 0), (0, -1), 'LEFT'), ('VALIGN', (0, 0), (0, -1), 'BOTTOM'), ('ALIGN', (1, 0), (1, -1), 'RIGHT'), ('VALIGN', (1, 0), (1, -1), 'TOP'), ]) left_table_style = TableStyle([ *self.base_table_style, ('LEFTPADDING', (0, 0), (-1, -1), 20), ]) right_table_style = TableStyle([ *self.base_table_style, ]) address_lines = data.contract.contact.address.split(',') left_column = Table([ [Spacer(1, 1.7 * cm)], [ Paragraph( f"{self.snippets['gmbh_name']} - {self.snippets['street_no']} - {self.snippets['zipcode']} {self.snippets['city']}", self.styleSS) ], [Spacer(1, 0.5 * cm)], [Paragraph(data.contract.contact.name, self.styleN)], [Paragraph(address_lines[0], self.styleN)], [Spacer(1, 0.3 * cm)], [Paragraph(address_lines[1], self.styleN)], ], style=left_table_style, colWidths='*') right_column = Table([ [Paragraph("<i>Projekt im Mietshäuser Syndikat</i>", self.styleL)], [Spacer(1, 0.3 * cm)], [ Paragraph( (f"{self.snippets['street_no']}<br/>" f"{self.snippets['zipcode']} {self.snippets['city']}"), self.styleL) ], [Spacer(1, 0.3 * cm)], [ Paragraph( f"e-mail: {self.snippets['email']}<br/>{self.snippets['web']}", self.styleL) ], ], style=right_table_style, colWidths='*') date = Table([[ Paragraph(f"{self.snippets['city']}, {self.today}", self.styleNR) ]], style=TableStyle([ ('ALIGN', (0, 0), (0, 0), 'RIGHT'), ('RIGHTPADDING', (0, 0), (-1, -1), 0), ]), colWidths=['*']) header.append( Table([[left_column, right_column]], style=table_style, colWidths=[13.4 * cm, 4.2 * cm])) header.append(Spacer(1, 1.5 * cm)) header.append(date) return header
HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound, HttpResponseServerError, HttpResponse, ) from django.contrib.staticfiles.storage import staticfiles_storage import csv from django.template import Context, Engine, TemplateDoesNotExist, loader from django.views.decorators.csrf import requires_csrf_token from django.views.decorators.http import require_GET # CSV 読み込み a = {} i = 0 file = staticfiles_storage.path('data/youtuber.csv') csvfile = open(file) for row in csv.reader(csvfile): key_name = "data" + str(i) a[key_name] = row i += 1 channelIDs = a['data0'] titles = a['data1'] subscribers = a['data2'] descriptions = a['data3'] videoIDs = a['data4'] video_img = a['data5'] read_items_range = list(range(0, len(channelIDs), 1)) readed_video_number = len(channelIDs)
from django.contrib.staticfiles.storage import staticfiles_storage from django.http import Http404 from django.shortcuts import render import json with open(staticfiles_storage.path('data/language_version.json'), 'r') as json_file: data = json.load(json_file) # Create your views here. def index(request, lang="en"): try: context = data[lang] except KeyError as err: raise Http404("Language not implemented yet") return render(request, 'summary/index.html', context)
def path(self): return Path(staticfiles_storage.path(self.image))
def teacher_print_reminder_cards(request, access_code):
    """Render a printable PDF of login reminder cards for a class.

    Lays out 2x4 cards per A4 page, each showing the student's name,
    password, the class access code, and a rotating character image.
    Student data comes from a POSTed JSON payload, or defaults to every
    student in the class with a masked password.
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'filename="student_reminder_cards.pdf"'

    p = canvas.Canvas(response, pagesize=A4)

    # Define constants that determine the look of the cards
    PAGE_WIDTH, PAGE_HEIGHT = A4
    PAGE_MARGIN = PAGE_WIDTH / 32
    INTER_CARD_MARGIN = PAGE_WIDTH / 64
    CARD_PADDING = PAGE_WIDTH / 48
    NUM_X = 2
    NUM_Y = 4

    CARD_WIDTH = (PAGE_WIDTH - PAGE_MARGIN * 2 - INTER_CARD_MARGIN * (NUM_X - 1)) / NUM_X
    CARD_HEIGHT = (PAGE_HEIGHT - PAGE_MARGIN * 2 - INTER_CARD_MARGIN * (NUM_Y - 1)) / NUM_Y

    HEADER_HEIGHT = CARD_HEIGHT * 0.16
    FOOTER_HEIGHT = CARD_HEIGHT * 0.1

    CARD_INNER_WIDTH = CARD_WIDTH - CARD_PADDING * 2
    CARD_INNER_HEIGHT = CARD_HEIGHT - CARD_PADDING * 2 - HEADER_HEIGHT - FOOTER_HEIGHT
    CARD_IMAGE_WIDTH = CARD_INNER_WIDTH * 0.25

    CORNER_RADIUS = CARD_WIDTH / 32

    # Setup various character images to cycle round
    CHARACTER_FILES = ["portal/img/dee_large.png", "portal/img/kirsty_large.png",
                       "portal/img/wes_large.png", "portal/img/nigel_large.png",
                       "portal/img/phil_large.png"]
    CHARACTERS = []

    for character_file in CHARACTER_FILES:
        character_image = ImageReader(staticfiles_storage.path(character_file))
        # Scale to the card's image column, capped at the inner height.
        character_height = CARD_INNER_HEIGHT
        character_width = CARD_IMAGE_WIDTH
        character_height = character_width * character_image.getSize()[1] / character_image.getSize()[0]
        if character_height > CARD_INNER_HEIGHT:
            character_height = CARD_INNER_HEIGHT
            character_width = character_height * character_image.getSize()[0] / character_image.getSize()[1]
        character = {
            'image': character_image,
            'height': character_height,
            'width': character_width
        }
        CHARACTERS.append(character)

    klass = Class.objects.get(access_code=access_code)

    COLUMN_WIDTH = (CARD_INNER_WIDTH - CARD_IMAGE_WIDTH) * 0.45

    # Work out the data we're going to display, use data from the query string
    # if given, else display everyone in the class without passwords
    student_data = []
    if request.method == 'POST':
        student_data = json.loads(request.POST.get('data', '[]'))
    else:
        students = Student.objects.filter(class_field=klass)
        for student in students:
            student_data.append({
                'name': student.user.user.first_name,
                'password': '******',
            })

    # Now draw everything
    x = 0
    y = 0

    def drawParagraph(text, position):
        # Shrink the font until the wrapped text fits the card column.
        style = ParagraphStyle('test')
        style.font = 'Helvetica-Bold'
        font_size = 16
        while font_size > 0:
            style.fontSize = font_size
            style.leading = font_size
            para = Paragraph(text, style)
            (para_width, para_height) = para.wrap(
                CARD_INNER_WIDTH - COLUMN_WIDTH - CARD_IMAGE_WIDTH, CARD_INNER_HEIGHT)
            if para_height <= 48:
                para.drawOn(p, inner_left + COLUMN_WIDTH,
                            inner_bottom + CARD_INNER_HEIGHT * position + 8 - para_height / 2)
                return
            font_size -= 1

    current_student_count = 0
    for student in student_data:
        character_index = current_student_count % len(CHARACTERS)

        left = PAGE_MARGIN + x * CARD_WIDTH + x * INTER_CARD_MARGIN
        bottom = PAGE_HEIGHT - PAGE_MARGIN - (y + 1) * CARD_HEIGHT - y * INTER_CARD_MARGIN

        inner_left = left + CARD_PADDING
        inner_bottom = bottom + CARD_PADDING + FOOTER_HEIGHT

        header_bottom = bottom + CARD_HEIGHT - HEADER_HEIGHT
        footer_bottom = bottom

        # header rect
        p.setFillColorRGB(0.0, 0.027, 0.172)
        p.setStrokeColorRGB(0.0, 0.027, 0.172)
        p.roundRect(left, header_bottom, CARD_WIDTH, HEADER_HEIGHT, CORNER_RADIUS, fill=1)
        p.rect(left, header_bottom, CARD_WIDTH, HEADER_HEIGHT / 2, fill=1)

        # footer rect
        p.roundRect(left, bottom, CARD_WIDTH, FOOTER_HEIGHT, CORNER_RADIUS, fill=1)
        p.rect(left, bottom + FOOTER_HEIGHT / 2, CARD_WIDTH, FOOTER_HEIGHT / 2, fill=1)

        # outer box
        p.setStrokeColor(black)
        p.roundRect(left, bottom, CARD_WIDTH, CARD_HEIGHT, CORNER_RADIUS)

        # header text
        p.setFillColor(white)
        p.setFont('Helvetica', 18)
        p.drawCentredString(inner_left + CARD_INNER_WIDTH / 2,
                            header_bottom + HEADER_HEIGHT * 0.35, '[ code ] for { life }')

        # footer text
        p.setFont('Helvetica', 10)
        p.drawCentredString(inner_left + CARD_INNER_WIDTH / 2,
                            footer_bottom + FOOTER_HEIGHT * 0.32, settings.CODEFORLIFE_WEBSITE)

        # left hand side writing
        p.setFillColor(black)
        p.setFont('Helvetica', 12)
        p.drawString(inner_left, inner_bottom + CARD_INNER_HEIGHT * 0.12, 'Password:')
        # NOTE(review): the 'Class Code:' label call was corrupted in the
        # checked-in source (secret-scrubbing residue). Reconstructed here at
        # 0.45, matching the 0.43 paragraph position used for the value below
        # and the 0.12/0.78 offsets of the sibling labels — confirm layout.
        p.drawString(inner_left, inner_bottom + CARD_INNER_HEIGHT * 0.45, 'Class Code:')
        p.drawString(inner_left, inner_bottom + CARD_INNER_HEIGHT * 0.78, 'Name:')

        # right hand side writing
        drawParagraph(student['password'], 0.10)
        drawParagraph(klass.access_code, 0.43)
        drawParagraph(student['name'], 0.76)

        # character image
        character = CHARACTERS[character_index]
        p.drawImage(character['image'],
                    inner_left + CARD_INNER_WIDTH - character['width'],
                    inner_bottom, character['width'], character['height'], mask='auto')

        # Advance the 2x4 grid; start a new page when it wraps.
        x = (x + 1) % NUM_X
        if x == 0:
            y = (y + 1) % NUM_Y
            if y == 0:
                p.showPage()
        current_student_count += 1

    # Flush a partially-filled final page.
    if x != 0 or y != 0:
        p.showPage()

    p.save()
    return response
def teacher_print_reminder_cards(request, access_code):
    """Render a printable PDF of login reminder cards for a class.

    Lays out 2x4 cards per A4 page with the logo, the student's name and
    password, the class access code, and a rotating character image. Only
    the class's own teacher may access this view (404 otherwise).
    """
    response = HttpResponse(content_type="application/pdf")
    response["Content-Disposition"] = 'filename="student_reminder_cards.pdf"'

    p = canvas.Canvas(response, pagesize=A4)

    # Define constants that determine the look of the cards
    PAGE_WIDTH, PAGE_HEIGHT = A4
    PAGE_MARGIN = PAGE_WIDTH / 32
    INTER_CARD_MARGIN = PAGE_WIDTH / 64
    CARD_PADDING = PAGE_WIDTH / 48
    NUM_X = 2
    NUM_Y = 4

    CARD_WIDTH = (
        PAGE_WIDTH - PAGE_MARGIN * 2 - INTER_CARD_MARGIN * (NUM_X - 1)
    ) / NUM_X
    CARD_HEIGHT = (
        PAGE_HEIGHT - PAGE_MARGIN * 2 - INTER_CARD_MARGIN * (NUM_Y - 1)
    ) / NUM_Y

    HEADER_HEIGHT = CARD_HEIGHT * 0.16
    FOOTER_HEIGHT = CARD_HEIGHT * 0.1

    CARD_INNER_WIDTH = CARD_WIDTH - CARD_PADDING * 2
    CARD_INNER_HEIGHT = CARD_HEIGHT - CARD_PADDING * 2 - HEADER_HEIGHT - FOOTER_HEIGHT
    CARD_IMAGE_WIDTH = CARD_INNER_WIDTH * 0.25

    CORNER_RADIUS = CARD_WIDTH / 32

    # Setup various character images to cycle round
    CHARACTER_FILES = [
        "portal/img/dee.png",
        "portal/img/kirsty.png",
        "portal/img/wes.png",
        "portal/img/nigel.png",
        "portal/img/phil.png",
    ]
    CHARACTERS = []

    logo_image = ImageReader(
        staticfiles_storage.path("portal/img/logo_c4l_reminder_card.png")
    )

    for character_file in CHARACTER_FILES:
        character_image = ImageReader(staticfiles_storage.path(character_file))
        # Scale to the card's image column, capped at the inner height.
        character_height = CARD_INNER_HEIGHT
        character_width = CARD_IMAGE_WIDTH
        character_height = (
            character_width * character_image.getSize()[1] / character_image.getSize()[0]
        )
        if character_height > CARD_INNER_HEIGHT:
            character_height = CARD_INNER_HEIGHT
            character_width = (
                character_height
                * character_image.getSize()[0]
                / character_image.getSize()[1]
            )
        character = {
            "image": character_image,
            "height": character_height,
            "width": character_width,
        }
        CHARACTERS.append(character)

    klass = get_object_or_404(Class, access_code=access_code)

    # Check auth
    if klass.teacher.new_user != request.user:
        raise Http404

    COLUMN_WIDTH = (CARD_INNER_WIDTH - CARD_IMAGE_WIDTH) * 0.45

    # Work out the data we're going to display, use data from the query string
    # if given, else display everyone in the class without passwords
    student_data = []
    student_data = get_student_data(request, klass, student_data)

    # Now draw everything
    x = 0
    y = 0

    def drawParagraph(text, position):
        # Shrink the font until the wrapped text fits the card column.
        style = ParagraphStyle("test")
        style.font = "Helvetica-Bold"
        font_size = 16
        while font_size > 0:
            style.fontSize = font_size
            style.leading = font_size
            para = Paragraph(text, style)
            (para_width, para_height) = para.wrap(
                CARD_INNER_WIDTH - COLUMN_WIDTH - CARD_IMAGE_WIDTH, CARD_INNER_HEIGHT
            )
            if para_height <= 48:
                para.drawOn(
                    p,
                    inner_left + COLUMN_WIDTH,
                    inner_bottom + CARD_INNER_HEIGHT * position + 8 - para_height / 2,
                )
                return
            font_size -= 1

    current_student_count = 0
    for student in student_data:
        character_index = current_student_count % len(CHARACTERS)

        left = PAGE_MARGIN + x * CARD_WIDTH + x * INTER_CARD_MARGIN
        bottom = (
            PAGE_HEIGHT - PAGE_MARGIN - (y + 1) * CARD_HEIGHT - y * INTER_CARD_MARGIN
        )

        inner_left = left + CARD_PADDING
        inner_bottom = bottom + CARD_PADDING + FOOTER_HEIGHT

        header_bottom = bottom + CARD_HEIGHT - HEADER_HEIGHT
        footer_bottom = bottom

        # header rect
        p.setFillColorRGB(0.0, 0.027, 0.172)
        p.setStrokeColorRGB(0.0, 0.027, 0.172)
        p.roundRect(
            left, header_bottom, CARD_WIDTH, HEADER_HEIGHT, CORNER_RADIUS, fill=1
        )
        p.rect(left, header_bottom, CARD_WIDTH, HEADER_HEIGHT / 2, fill=1)

        # footer rect
        p.roundRect(left, bottom, CARD_WIDTH, FOOTER_HEIGHT, CORNER_RADIUS, fill=1)
        p.rect(left, bottom + FOOTER_HEIGHT / 2, CARD_WIDTH, FOOTER_HEIGHT / 2, fill=1)

        # outer box
        p.setStrokeColor(black)
        p.roundRect(left, bottom, CARD_WIDTH, CARD_HEIGHT, CORNER_RADIUS)

        # header image
        p.drawImage(
            logo_image,
            inner_left,
            header_bottom + 5,
            CARD_INNER_WIDTH,
            HEADER_HEIGHT * 0.6,
        )

        # footer text
        p.setFont("Helvetica", 10)
        p.drawCentredString(
            inner_left + CARD_INNER_WIDTH / 2,
            footer_bottom + FOOTER_HEIGHT * 0.32,
            settings.CODEFORLIFE_WEBSITE,
        )

        # left hand side writing
        p.setFillColor(black)
        p.setFont("Helvetica", 12)
        p.drawString(inner_left, inner_bottom + CARD_INNER_HEIGHT * 0.12, "Password:")
        # NOTE(review): the 'Class Code:' label call was corrupted in the
        # checked-in source (secret-scrubbing residue). Reconstructed here at
        # 0.45, matching the 0.43 paragraph position used for the value below
        # and the 0.12/0.78 offsets of the sibling labels — confirm layout.
        p.drawString(inner_left, inner_bottom + CARD_INNER_HEIGHT * 0.45, "Class Code:")
        p.drawString(inner_left, inner_bottom + CARD_INNER_HEIGHT * 0.78, "Name:")

        # right hand side writing
        drawParagraph(student["password"], 0.10)
        drawParagraph(klass.access_code, 0.43)
        drawParagraph(student["name"], 0.76)

        # character image
        character = CHARACTERS[character_index]
        p.drawImage(
            character["image"],
            inner_left + CARD_INNER_WIDTH - character["width"],
            inner_bottom,
            character["width"],
            character["height"],
            mask="auto",
        )

        # Advance the 2x4 grid; helpers handle page breaks.
        x = (x + 1) % NUM_X
        y = compute_show_page_character(p, x, y, NUM_Y)

        current_student_count += 1

    compute_show_page_end(p, x, y)

    p.save()
    return response
def testCollectStaticStandalone(self): with self.settings(REQUIRE_ENVIRONMENT=self.require_environment): call_command("collectstatic", interactive=False, verbosity=0) self.assertTrue(os.path.exists(staticfiles_storage.path("js/main-built.js")))
def find(self, path, all=False): full_path = staticfiles_storage.path(path) if staticfiles_storage.exists(full_path): return [full_path] if all else full_path return []
def load(self, path): """ Retrieve CSS contents by local file system """ expanded_path = staticfiles_storage.path(path) return load_css_by_path(expanded_path)
def handle_noargs(self, verbosity=1, relative_depth=5, clear=False,
                  dry_run=False, static_root=None, **options):
    """Symlink every file found by the staticfiles finders into STATIC_ROOT.

    verbosity      -- int-like; messages at or below this level are printed.
    relative_depth -- how many leading '..' components a relative link may
                      have before falling back to an absolute path
                      (< 0 => always relative, 0/falsy => always absolute).
    clear          -- remove an existing destination entry before re-linking.
    dry_run        -- log what would be done without touching the filesystem.
    static_root    -- destination directory; defaults to the staticfiles
                      storage root.
    """
    verbosity = int(verbosity)
    # Resolve the destination root once; realpath so relative links are
    # computed against the physical location, not through symlinks.
    static_root = os.path.realpath(
        static_root if static_root else staticfiles_storage.path(''))

    def log(msg, level=2):
        '''Utility to write log message at appropriate log level.'''
        if verbosity >= level:
            self.stdout.write(msg)

    def make_path(path, start):
        '''Build a relative or absolute path.

        If relative_depth < 0, always return a relative path.  If the
        number of .. components at the head of the relative path is
        greater than relative_depth or relative_depth evaluates to
        False (== 0), return path unchanged.  Otherwise, return a
        relative path.

        The arguments are the same as for os.path.relpath.
        '''
        if not relative_depth:
            return path
        rel_path = os.path.relpath(path, start)
        if relative_depth < 0:
            return rel_path
        # Count the leading '..' components; stop at the first non-'..'.
        count = 0
        for name in rel_path.split(os.sep, relative_depth + 1):
            if name != '..':
                break
            count += 1
        return path if count > relative_depth else rel_path

    def link(names, storage, dirs=False):
        '''Link each name from its storage to the static root.

        names is a list of file or directory names, storage is the
        storage for the source file, and dirs indicates whether or not
        names are all directories (only important on Windows).
        '''
        # Prefixed storages (e.g. an app namespace) land in a matching
        # subdirectory of the static root, created on demand.
        if storage.prefix:
            dst_dir = os.path.join(static_root, storage.prefix)
            if not os.path.exists(dst_dir):
                log('creating directory {}'.format(dst_dir))
                if not dry_run:
                    os.mkdir(dst_dir)
        else:
            dst_dir = static_root
        for name in names:
            dst_path = os.path.join(dst_dir, name)
            # lexists (not exists) so dangling symlinks are also detected.
            if os.path.lexists(dst_path):
                if clear:
                    log('removing {}'.format(dst_path))
                    if not dry_run:
                        os.unlink(dst_path)
                else:
                    log('exists: {}'.format(dst_path), level=1)
                    continue
            # Possibly shorten the source to a relative path (see make_path).
            src_path = make_path(storage.path(name), os.path.dirname(dst_path))
            log('linking {} to {}'.format(src_path, dst_path))
            if not dry_run:
                # target_is_directory is required for directory links on
                # Windows; it is ignored elsewhere.
                os.symlink(src_path, dst_path, target_is_directory=dirs)

    # Iterate through all the static storage and link to static root.
    for finder in finders.get_finders():
        for storage in finder.storages.values():
            dirs, files = storage.listdir('')
            link(dirs, storage, True)
            link(files, storage, False)
from .forms import img import os import numpy as np import torch import torchvision import matplotlib.pyplot as plt import seaborn as sns import time images_id = [] error_prediction = [] #from .model import * import torch from django.conf import settings from django.contrib.staticfiles.storage import staticfiles_storage url = staticfiles_storage.path('vgg_model2') vgg16 = torch.load(url) classes = ['buildings', 'forest', 'glacier', 'mountain', 'sea', 'street'] #print(model1) img_with_labels = {} import shutil def delete(): folders = [ 'media/imagesrec/images', 'media/imagesrec/train/forest', 'media/imagesrec/train/buildings', 'media/imagesrec/train/glacier', 'media/imagesrec/train/sea', 'media/imagesrec/train/street', 'media/imagesrec/train/mountain' ] for folder in folders:
def prediction(self, inputs):
    """Return model predictions for *inputs* using the pickled Boston model."""
    # Unpickle the persisted linear model shipped with the static files,
    # then score the supplied inputs with it.
    with open(staticfiles_storage.path('boston.pkl'), 'rb') as model_file:
        model = pickle.load(model_file)
    return model.predict(inputs)
def render_individual_css(self, _, paths, **kwargs):
    """Inline the contents of each CSS file in *paths* into one <style> tag."""
    sheets = []
    for css_path in paths:
        # Resolve to the on-disk file and read it as UTF-8 text.
        full_path = staticfiles_storage.path(css_path)
        with codecs.open(full_path, 'r', 'utf-8') as css_file:
            sheets.append(css_file.read())
    return mark_safe('<style>' + '\n'.join(sheets) + '</style>')
def reportPDF(request, report):
    """
    View to generate report PDF with supplements

    Args:
        request (HttpRequest): request to view page
        report (str): primary key of :class:`~makeReports.models.basic_models.Report`

    Returns:
        HttpResponse : the PDF

    Notes:
        A function instead of class due to limitations of class based views
    """
    # first get report or return 404 error
    report = get_object_or_404(Report, pk=report)
    # get templates for each of the sections (sec 1 and 2 together since sec 1
    # doesn't have supplements)
    sec1and2 = get_template('makeReports/DisplayReport/PDFsub/pdf1and2.html')
    sec3 = get_template('makeReports/DisplayReport/PDFsub/pdf3.html')
    sec4 = get_template('makeReports/DisplayReport/PDFsub/pdf4.html')
    # build the context needed for the report
    context = {'rpt': report, 'report': report}
    # SimpleNamespace lets report be accessed via dot-notation in section#Context
    s = SimpleNamespace(**context)
    context = section1Context(s, context)
    context = section2Context(s, context)
    # render HTML string for section 1 and 2
    p1and2 = sec1and2.render(context).encode()
    # reset context for section 3
    context = {'rpt': report, 'report': report}
    context = section3Context(s, context)
    # render HTML string for section 3
    p3 = sec3.render(context).encode()
    # reset context
    context = {'rpt': report, 'report': report}
    context = section4Context(s, context)
    # render HTML string for section 4
    p4 = sec4.render(context).encode()
    # get all supplements (PDFs) that go with the report
    assessSups = AssessmentSupplement.objects.filter(
        assessmentversion__report=report)
    dataSups = DataAdditionalInformation.objects.filter(report=report)
    repSups = ReportSupplement.objects.filter(report=report)
    # get the HTML of all sections
    html1and2 = HTML(string=p1and2)
    html3 = HTML(string=p3)
    html4 = HTML(string=p4)
    # the report stylesheet is shared by all three sections; build it once
    report_css = CSS(staticfiles_storage.path('css/report.css'))
    # set-up temporary files to write pdfs for each section
    f1and2 = tempfile.TemporaryFile()
    f3 = tempfile.TemporaryFile()
    f4 = tempfile.TemporaryFile()
    # BUGFIX: the temp files and the merger were previously never closed,
    # leaking file descriptors until garbage collection; ensure cleanup.
    try:
        # write to those temporary files from the HTML generated
        html1and2.write_pdf(target=f1and2, stylesheets=[report_css])
        html3.write_pdf(target=f3,
                        stylesheets=[
                            report_css,
                            CSS(staticfiles_storage.path('css/shelves.css'))
                        ])
        html4.write_pdf(target=f4, stylesheets=[report_css])
        # set-up a merger to merge all PDFs together
        merged = PdfFileMerger()
        try:
            # start with section 1 and 2, then append assessment supplements
            merged.append(f1and2)
            merged = addSupplements(assessSups, merged)
            # add section 3
            merged.append(f3)
            # append data supplements
            merged = addSupplements(dataSups, merged)
            # append section 4
            merged.append(f4)
            # add report supplements
            merged = addSupplements(repSups, merged)
            # write the merged pdf to the HTTP Response
            http_response = HttpResponse(content_type="application/pdf")
            merged.write(http_response)
            return http_response
        finally:
            merged.close()
    finally:
        f1and2.close()
        f3.close()
        f4.close()
def create_plots():
    '''
    Perform the two data pulls from CDEC.  Creates the following dataframes:
    df: CDEC daily precip data for current water year.
    df_all_years: Monthly precip data for the last 100 years.
    OUTPUT: Two html files and two static images.
    :return:
    '''
    cur_dir = os.getcwd()
    wy = datetime.today().year  # Water Year
    month_num = datetime.today().month + 2  # Current month number
    # The "AS-OF" date. Date of data pull
    data_pull_date = (datetime.today() - timedelta(days=1)).strftime("%b %d")
    # A date range spanning every day of the entire water year.
    idx = pd.date_range(f'10.01.{wy-1}', f'9.30.{wy}')
    if datetime.today().month >= 10:
        # The water year will be year + 1 if it's Oct, Nov, or Dec
        wy = wy + 1
        month_num = month_num - 9
    try:
        # NOTE(review): the live CDEC HTTP pulls below are disabled; data now
        # comes from the bundled Daily_Output.xlsx workbook instead, so the
        # except clause at the bottom can no longer fire from this code path.
        # # Daily Data Pull
        # response = requests.get(
        #     url="https://cdec.water.ca.gov/dynamicapp/QueryDaily?s=LSP",
        #     params={"end": f"{wy}-10-1",
        #             "span": "1year"
        #             },
        # )
        # print('Response HTTP Status Code: {status_code}'.format(status_code=response.status_code))
        #
        # # Historical Data Pull
        # all_years = requests.get (
        #     url="https://cdec.water.ca.gov/dynamicapp/QueryWY",
        #     params={"Stations": "LSP",
        #             "span": "100+years",
        #             "SensorNums":2
        #             },
        # )
        # print('Response HTTP Status Code: {status_code}'.format(status_code=all_years.status_code))
        #
        # # Current data to-date
        # df = pd.read_html(response.content)[0]
        # For some reason, the dataframe extends to 10-01 of the following
        # water year instead of 9-30, so we need to remove the last row
        # df = df[:-1]

        # Per-elevation-band data for the two basins, from the bundled workbook.
        df_hh = pd.read_excel(
            staticfiles_storage.path("data/Daily_Output.xlsx"),
            sheet_name='Hell_Hole_elv_bands',
            header=1,
            engine='openpyxl')
        df_fm = pd.read_excel(
            staticfiles_storage.path("data/Daily_Output.xlsx"),
            sheet_name='French_Meadows_elv_bands',
            header=1,
            engine='openpyxl')
        df_fm['Date'] = pd.to_datetime(df_fm['Date'])
        df_hh['Date'] = pd.to_datetime(df_hh['Date'])
        # Combine both basins on Date; the merge produces suffixed _x/_y
        # columns which are summed back into one column per band below.
        df = pd.merge(df_fm, df_hh, on="Date", how='outer')
        for column in df_hh:
            if column != 'Date':
                df[column] = df[f"{column}_x"] + df[f"{column}_y"]
                df.drop([f"{column}_x", f"{column}_y"], axis=1, inplace=True)
        df['Date'] = pd.to_datetime(df['Date'])
        df.set_index('Date', inplace=True)
        # Day-of-year averages, projected onto dummy year 2021 so all years
        # align; leap day (Feb 29) is dropped for the same reason.
        df_ave = df.groupby(df.index.strftime('2021-%m-%d')).mean()
        df_ave = df_ave[df_ave.index != "2021-02-29"]
        df_ave.index = pd.to_datetime(df_ave.index)
        df_dates = pd.DataFrame(
            pd.date_range('10-1-2020', '9-30-2021', freq='D'))
        df_dates.index = df_dates[df_dates.columns[0]]
        df_dates.columns = ["Date"]
        # Re-align the averages onto a full Oct-Sep water-year axis by
        # matching (month, day) pairs.
        df_ave = pd.merge(df_dates,
                          df_ave,
                          left_on=[df_dates.index.month, df_dates.index.day],
                          right_on=[df_ave.index.month, df_ave.index.day],
                          how='outer')
        df_ave.index = pd.to_datetime(df_ave["Date"])
        # df_ave['Date'] = df_ave.apply(lambda x: x["Date"].dt.year.replace('2021', '2020') if x['Date'].dt.month > 9 else x["Date"], axis=1)
        # df_ave.loc[df_ave['Date'].dt.month > 9, df_ave['Date'].dt.year] = 2020

        # Line chart info.  Base trace: 15-year average for the 7.0-7.5k band.
        lineChart = (go.Figure(data=[
            go.Scatter(x=(df_ave.index),
                       y=df_ave['7.0-7.5'],
                       name="15 Year Average",
                       hovertext=df_ave['7.0-7.5'],
                       hovertemplate="Average: %{y:.1f}\"<extra></extra>",
                       fill='tozeroy',
                       line=dict(width=0.5, color='rgb(111, 231, 219)'),
                       marker_line_width=2,
                       texttemplate='%{hovertext}',
                       textposition="top center",
                       textfont=dict(
                           family="Arial", size=24, color="rgba(0, 0, 0)"))
        ]))
        df['Date'] = df.index
        # Add one trace per elevation band for the selected water year(s).
        for yr in range(2021, 2022):
            # NOTE(review): `visible` is assigned but never used; the traces
            # below hard-code visible="legendonly".
            visible = False
            if yr == 2004 or yr == 2021:
                # 2004 and 2021 are partial years with special date windows.
                if yr == 2004:
                    df_yr = df[(df['Date'] >= f"2004-1-1")
                               & (df['Date'] <= f'{yr}-9-30')]
                if yr == 2021:
                    df_yr = df[(df['Date'] >= f"2020-10-1")
                               & (df['Date'] <= f'{yr}-4-20')]
                    visible = True
            else:
                # Standard Oct 1 - Sep 30 water-year window.
                df_yr = df[(df['Date'] >= f"{yr-1}-10-1")
                           & (df['Date'] <= f'{yr}-9-30')]

            # Last Year trace
            for column in df_yr.columns:
                if column != 'Date':
                    lineChart.add_trace(
                        go.Scatter(
                            x=df_ave["Date"],
                            y=df_yr[column],
                            mode='lines',
                            hovertemplate="AF at Elev: %{y}\"<extra></extra>",
                            name=f'WY {yr} between {column} ft',
                            visible="legendonly"),
                    )

        # Update all aspects of the chart
        lineChart.update_layout(
            paper_bgcolor='rgb(255,255,255)',
            plot_bgcolor="rgb(255,255,255)",
            title=f"Total PCWA Basin SWE By Elevation Band {wy}",
            showlegend=True,
            hovermode="x unified",
            legend=dict(yanchor="top", y=0.99, xanchor="left", x=0),
            font=dict(size=24, color="black"))

        # wfolder_path = pathlib.Path('G:/', 'Energy Marketing', 'Weather', 'Programs', 'Lake_Spaulding')
        wfolder_path = os.getcwd()

        # external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
        # app = dash.Dash(__name__, external_stylesheets=external_stylesheets)

        # Dash layout: water-year multi-select dropdown, the graph itself,
        # and an elevation-band slider.
        app.layout = html.Div([
            html.H4("Choose one or more years"),
            dcc.Dropdown(id='wy_dropdown',
                         options=[
                             {'label': 'WY 2021', 'value': '2021'},
                             {'label': 'WY 2020', 'value': '2020'},
                             {'label': 'WY 2019', 'value': '2019'},
                             {'label': 'WY 2018', 'value': '2018'},
                             {'label': 'WY 2017', 'value': '2017'},
                             {'label': 'WY 2016', 'value': '2016'},
                             {'label': 'WY 2015', 'value': '2015'},
                             {'label': 'WY 2014', 'value': '2014'},
                             {'label': 'WY 2013', 'value': '2013'},
                             {'label': 'WY 2012', 'value': '2012'},
                             {'label': 'WY 2011', 'value': '2011'},
                             {'label': 'WY 2010', 'value': '2010'},
                             {'label': 'WY 2009', 'value': '2009'},
                             {'label': 'WY 2008', 'value': '2008'},
                             {'label': 'WY 2007', 'value': '2007'},
                             {'label': 'WY 2006', 'value': '2006'},
                             {'label': 'WY 2005', 'value': '2005'},
                             {'label': 'WY 2004', 'value': '2004'},
                         ],
                         value=['2021'],
                         multi=True),
            html.Div(children=[
                dcc.Graph(figure=lineChart, id='wy_graph'),
            ],
                     id='wy-output-container'),
            html.Div(
                id='slider-container',
                children=[
                    html.H4("Slide To Choose Elevation Band"),
                    # Slider value is the band floor in feet; labels show the
                    # band range.  NOTE(review): the 6500 label reads
                    # '6.5-6k ft' in the original — looks like a typo for
                    # '6.5-7k ft', preserved here unchanged.
                    dcc.Slider(id='wy_slider',
                               min=3000,
                               max=7500,
                               step=500,
                               value=5000,
                               marks={
                                   3000: {'label': '3-3.5k ft', 'style': {'color': '#77b0b1'}},
                                   3500: {'label': '3.5-4k ft', 'style': {'color': '#77b0b1'}},
                                   4000: {'label': '4-4.5k ft', 'style': {'color': '#77b0b1'}},
                                   4500: {'label': '4.5-5k ft', 'style': {'color': '#77b0b1'}},
                                   5000: {'label': '5-5.5k ft', 'style': {'color': '#77b0b1'}},
                                   5500: {'label': '5.5-6k ft', 'style': {'color': '#77b0b1'}},
                                   6000: {'label': '6-6.5k ft', 'style': {'color': '#77b0b1'}},
                                   6500: {'label': '6.5-6k ft', 'style': {'color': '#77b0b1'}},
                                   7000: {'label': '7-7.5k ft', 'style': {'color': '#77b0b1'}},
                                   7500: {'label': '7.5-8k ft', 'style': {'color': '#77b0b1'}},
                               }),
                ]),
        ])

        @app.callback(dash.dependencies.Output('wy_graph', 'figure'), [
            dash.dependencies.Input('wy_slider', 'value'),
            dash.dependencies.Input('wy_graph', 'figure'),
            dash.dependencies.Input('wy_dropdown', 'value')
        ])
        def update_output(elev_slider, figure, wy):
            '''Rebuild the figure for the selected elevation band and years.

            elev_slider -- slider value (band floor in feet).
            figure      -- current figure (input only; not read here).
            wy          -- list of selected water-year strings.
            '''
            # Map slider value to the dataframe column label for that band.
            elev = dict({
                '3000': '3.0-3.5',
                '3500': '3.5-4.0',
                '4000': '4.0-4.5',
                '4500': '4.5-5.0',
                '5000': '5.0-5.5',
                '5500': '5.5-6.0',
                '6000': '6.0-6.5',
                '6500': '6.5-7.0',
                '7000': '7.0-7.5',
                '7500': '7.5-8.0'
            })
            elev_col = elev[str(elev_slider)]
            # Base trace: 15-year average for the chosen band.
            lineChart = (go.Figure(data=[
                go.Scatter(x=df_ave.index,
                           y=df_ave[elev_col],
                           name="15 Year Average",
                           hovertext=df_ave[elev_col],
                           hovertemplate="Average: %{y:.1f}\"<extra></extra>",
                           fill='tozeroy',
                           line=dict(width=0.5, color='rgb(111, 231, 219)'),
                           marker_line_width=2,
                           texttemplate='%{hovertext}',
                           textposition="top center",
                           textfont=dict(
                               family="Arial", size=24, color="rgba(0, 0, 0)"))
            ]))
            # One trace per selected water year.
            for yr in wy:
                yr = int(yr)
                if yr == 2004 or yr == 2021:
                    # Partial years with special date windows (see above).
                    if yr == 2004:
                        df_yr = df[(df['Date'] >= f"2004-1-1")
                                   & (df['Date'] <= f'{yr}-9-30')]
                    if yr == 2021:
                        df_yr = df[(df['Date'] >= f"2020-10-1")
                                   & (df['Date'] <= f'{yr}-4-20')]
                else:
                    df_yr = df[(df['Date'] >= f"{yr-1}-10-1")
                               & (df['Date'] <= f'{yr}-9-30')]
                lineChart.add_trace(
                    go.Scatter(x=df_ave["Date"],
                               y=df_yr[elev_col],
                               mode='lines',
                               hovertemplate="Total AF: %{y}<extra></extra>",
                               name=f'WY {yr} between {elev_col} ft'),
                )
            lineChart.update_layout(
                title=dict(
                    text=
                    f'<b>Total Acre Feet within {elev_col} ft elevation band</b>',
                    x=0.5,
                    font=dict(family="Arial", size=20, color='#000000')),
                height=500,
                yaxis_title="Total Acre Feet",
            )
            return lineChart

        # app.run_server(debug=True, use_reloader=False)  # Turn off reloader if inside Jupyter
    except requests.exceptions.RequestException:
        print('HTTP Request failed')
        return None
def path(self, name):
    """Resolve *name* to a local filesystem path via the static files storage.

    Falls back to returning *name* unchanged when the configured storage
    cannot provide local paths.
    """
    try:
        resolved = staticfiles_storage.path(name)
    except NotImplementedError:
        # Storages without a local filesystem (e.g. remote backends)
        # raise NotImplementedError from path().
        return name
    return resolved
from django.contrib.auth import get_user_model from django.contrib.staticfiles.storage import staticfiles_storage from django.test import tag, TestCase from openpyxl import load_workbook from main.importer.experiment_desc import CombinatorialCreationImporter from main.importer.experiment_desc.constants import (STRAIN_NAME_ELT, REPLICATE_ELT, ELEMENTS_SECTION, ABBREVIATIONS_SECTION, BASE_NAME_ELT) from main.importer.experiment_desc.parsers import ExperimentDescFileParser, JsonInputParser from main.models import (CarbonSource, MetadataType, Protocol, Strain, Study) User = get_user_model() simple_experiment_def_xlsx = staticfiles_storage.path( 'main/example/sample_experiment_description.xlsx') class CombinatorialCreationTests(TestCase): """ Defines automated integration tests for most of the supporting back-end code for experiment description file upload and combinatorial line creation (processes are very similar/based on the same code) """ @classmethod def setUpTestData(cls): cls.system_user = User.objects.get(username='******') Protocol.objects.get_or_create(name='Proteomics', owned_by=cls.system_user) Protocol.objects.get_or_create(name='Metabolomics', owned_by=cls.system_user)