def get_case_content_wordcloud_data():
    # Concatenate the title and body of every case's first history entry.
    content = ''
    for case in Case.objects.all():
        content += case.first_history.content
        content += case.first_history.title

    # Use the project's custom jieba dictionary and stop-word list.
    jieba.set_dictionary(str(settings.ROOT_DIR('static/jieba/dict.txt')))
    stop = []
    with open(str(settings.ROOT_DIR('static/jieba/stop.txt')), 'r', encoding='UTF-8') as file:
        for line in file:
            stop.append(line.strip())

    # Strip whitespace, punctuation (ASCII and full-width) and digits before segmentation.
    pattern = re.compile(
        r'[\s+\.\!\/_,$%^*(+"\']+|[+——!,。?“”、~@#¥%……&*()(\d+)]+')
    content = pattern.sub('', content)

    # Count two-character words and longer words separately so that longer words
    # can be weighted more heavily in the cloud.
    words_2 = [
        word for word in jieba.cut_for_search(content)
        if len(word) == 2 and word not in stop
    ]
    counter_2 = Counter(words_2)
    words_3 = [
        word for word in jieba.cut_for_search(content)
        if len(word) > 2 and word not in stop
    ]
    counter_3 = Counter(words_3)

    data = [{'name': word, 'weight': weight * 1}
            for word, weight in counter_2.most_common(20)] + \
           [{'name': word, 'weight': weight * 1.5}
            for word, weight in counter_3.most_common(30)]
    return data
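# A minimal usage sketch (this view name and the JSON shape are assumptions, not part of
# the original code): exposing the word-cloud data to a front-end chart as JSON.
from django.http import JsonResponse

def case_wordcloud_json(request):
    # Each item is {'name': <word>, 'weight': <score>}, ready for a word-cloud series.
    return JsonResponse({'series': get_case_content_wordcloud_data()})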
def _build_docs(project, version, project_url, project_filename, releases):
    conf_template = get_template('sphinx/conf.py.tmpl')
    index_template = get_template('sphinx/index.rst.tmpl')

    with tempfile.TemporaryDirectory() as tmp_dir:
        directory_name = "{name}-{version}".format(name=project, version=version)
        filename = os.path.join(tmp_dir, project_filename)
        extract_dir = os.path.join(tmp_dir, directory_name)

        # Download the release archive and unpack it into the temporary directory.
        urllib.request.urlretrieve(project_url, filename=filename)
        with zipfile.ZipFile(filename, "r") as zip_ref:
            zip_ref.extractall(extract_dir)
        print('File now in %s' % extract_dir)

        # Treat every top-level directory containing an __init__.py as a package
        # that autoapi should document; plain files (setup.py, etc.) are skipped.
        autoapi_dirs = []
        for possible_project in os.listdir(extract_dir):
            candidate = os.path.join(extract_dir, possible_project)
            if os.path.isdir(candidate) and '__init__.py' in os.listdir(candidate):
                autoapi_dirs.append(candidate)
        print('Autoapi now in %s' % autoapi_dirs)

        # Render conf.py and index.rst from the project templates.
        conf_filename = os.path.join(extract_dir, 'conf.py')
        with open(conf_filename, 'w+') as conf_file:
            conf_file.write(conf_template.render(dict(
                autoapi_dirs=json.dumps(autoapi_dirs),
                project=project,
                version=version,
                releases=releases,
                output_directory=settings.JSON_DIR(),
                python_path=settings.ROOT_DIR(),
            )))
        print('Conf File now in %s' % conf_filename)

        index_filename = os.path.join(extract_dir, 'index.rst')
        with open(index_filename, 'w+') as index_file:
            index_file.write(index_template.render(dict(
                project=project,
                version=version,
            )))
        print('Index File now in %s' % index_filename)

        outdir = settings.DOCS_DIR.path(directory_name)
        if not os.path.exists(outdir.root):
            os.makedirs(outdir.root)

        print('Running Sphinx')
        sphinx_command = (
            'sphinx-build -b html '
            '-d {root}_build/{name}-doctrees {root} {outdir}'.format(
                outdir=outdir.root, root=extract_dir, name=directory_name))
        print(sphinx_command)
        os.system(sphinx_command)
def get_source_commit():
    logger.info('ROOT_DIR: {0}, APP_DIR: {1}'.format(settings.ROOT_DIR, settings.APPS_DIR))
    source_commit_file = os.path.join(settings.ROOT_DIR(), 'git_commits.json')
    try:
        with open(source_commit_file) as commit_file:
            return json.load(commit_file)
    except OSError:
        # If the file is missing (e.g. in local development), fall back to an empty mapping.
        return dict()
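# A minimal sketch of how git_commits.json could be produced at build/deploy time.
# The key name and the use of `git rev-parse HEAD` are assumptions for illustration;
# the real file format is not shown in this excerpt.
import subprocess

def write_source_commit(path='git_commits.json'):
    commit = subprocess.check_output(['git', 'rev-parse', 'HEAD'], text=True).strip()
    with open(path, 'w') as commit_file:
        json.dump({'commit': commit}, commit_file)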
def case_content_wordcloud():
    # Concatenate the title and body of every case's first history entry.
    content = ''
    for case in Case.objects.all():
        content += case.first_history.content
        content += case.first_history.title

    jieba.set_dictionary(str(settings.ROOT_DIR('static/dict.txt')))

    # Strip whitespace, punctuation (ASCII and full-width) and digits before segmentation.
    pattern = re.compile(
        r'[\s+\.\!\/_,$%^*(+"\']+|[+——!,。?“”、~@#¥%……&*()(\d+)]+')
    content = pattern.sub('', content)

    words = [word for word in jieba.cut_for_search(content) if len(word) > 2]
    counter = Counter(words)
    data = [{
        'name': word,
        'weight': weight
    } for word, weight in counter.most_common(20)]
    chart = get_highchart_word_cloud(data=data)
    return chart
def get(self, request, *args, **kwargs):
    student = get_object_or_404(Student, pk=kwargs['pk'])

    # Background images for the four pages of the registration form.
    IMG = str(settings.ROOT_DIR('foton/theme/static/theme/img/fiche1.jpg'))
    IMG2 = str(settings.ROOT_DIR('foton/theme/static/theme/img/fiche2.jpg'))
    IMG3 = str(settings.ROOT_DIR('foton/theme/static/theme/img/fiche3.jpg'))
    IMG4 = str(settings.ROOT_DIR('foton/theme/static/theme/img/fiche4.jpg'))

    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="registration_form.pdf"'

    buffer = BytesIO()
    p = canvas.Canvas(buffer)
    p.setTitle("Registration Form.pdf")

    # Page 1: personal details drawn over the form background.
    p.drawInlineImage(IMG, 0, 0, width=600, height=850)
    p.setFont("Helvetica", 14)
    p.drawString(160, 730, "Academic: {0} / Reference Number:{1}".format(student.year, student.id))
    p.setFont("Helvetica", 16)
    p.drawString(240, 588.5, str(student.last_name))
    p.drawString(150, 551.5, str(student.first_name))
    g = "Female" if student.gender == 0 else "Male"
    p.drawString(120, 514, g)
    p.drawString(130, 476, str(student.birth_date))
    p.drawString(150, 438, str(student.birth_venue))
    p.drawString(120, 400, str(student.residence))
    p.drawString(130, 362, str(student.phone))
    p.drawString(110, 324, str(student.email))
    p.drawString(130, 287, str(student.origin.name))
    p.drawString(135, 249, str(student.national_Id))
    m = "Single" if student.marital_status == 0 else "Married"
    p.drawString(150, 210, m)
    p.showPage()

    # Page 2
    p.drawInlineImage(IMG2, 0, 0, width=600, height=850)
    p.showPage()

    # Page 3: sponsor details.
    p.drawInlineImage(IMG3, 0, 0, width=600, height=850)
    p.drawString(290, 525, str(student.sponsor_full_name))
    p.drawString(220, 492.5, str(student.sponsor_relationship))
    p.drawString(180, 459.5, str(student.sponsor_address))
    p.drawString(130, 426.5, str(student.sponsor_occupation))
    p.drawString(125, 393.5, str(student.sponsor_phone))
    p.drawString(110, 360.5, str(student.sponsor_email))
    p.showPage()

    # Page 4
    p.drawInlineImage(IMG4, 0, 0, width=600, height=850)
    p.showPage()

    p.save()
    pdf = buffer.getvalue()
    buffer.close()
    response.write(pdf)
    return response
)
from datahub.company_referral.models import CompanyReferral
from datahub.event.models import Event
from datahub.feature_flag.models import FeatureFlag
from datahub.interaction.models import Interaction, InteractionDITParticipant
from datahub.investment.project.models import (
    InvestmentProject,
    InvestmentProjectStageLog,
    InvestmentProjectTeamMember,
)
from datahub.investment.project.proposition.models import Proposition
from datahub.oauth.cache import add_token_data_to_cache

logger = logging.getLogger(__name__)

E2E_FIXTURE_DIR = settings.ROOT_DIR('fixtures/test_data.yaml')
TEST_USER_TOKEN_TIMEOUT = 24 * 3600


@api_view(['POST'])
@authentication_classes(())
@permission_classes(())
@schema(None)
def reset_fixtures(request):
    """
    Reset db to a known state.

    This view is to facilitate End to End testing. It has no authentication and
    should only be enabled to run tests and never in production!

    The database will have its objects (except Metadata) removed and reset to
    the state in the
def createPNG(self, fileName=None, forGeotiff=False):
    '''Draw processed data on a map and save it as a .png file
    '''
    if not forGeotiff:
        fig = plt.figure(figsize=(18, 12))
        ax = plt.axes()
    else:
        fig = plt.figure()
        ax = fig.add_axes((0, 0, 1, 1))

    if not fileName:
        fileName = self.args.pngFileName

    e = self.getExtent()
    m = Basemap(llcrnrlon=e[0], llcrnrlat=e[1], urcrnrlon=e[2], urcrnrlat=e[3],
                projection='cyl', resolution='l', ax=ax)
    if not forGeotiff:
        m.arcgisimage(server='http://services.arcgisonline.com/ArcGIS',
                      service='Ocean_Basemap')

    # Plot ADCP drift tracks, labelled with their depth.
    for depth, drift in list(self.adcpDrift.items()):
        m.plot(drift['lon'], drift['lat'], '-', c='black', linewidth=1)
        plt.text(drift['lon'][-1], drift['lat'][-1], '%i m' % depth, size='small')

    # Plot platform drift tracks.
    for platform, drift in list(self.trackDrift.items()):
        # Ad hoc coloring of platforms...
        if platform.startswith('stella'):
            color = 'yellow'
        elif platform.startswith('daphne'):
            color = 'orange'
        elif platform.startswith('makai'):
            color = 'magenta'
        else:
            color = 'red'
        m.plot(drift['lon'], drift['lat'], '-', c=color, linewidth=2)
        plt.text(drift['lon'][-1], drift['lat'][-1], platform, size='small')

    # Plot each data point with its own color based on the activity statistics from STOQS
    coloredDotSize = 30
    clt = readCLT(os.path.join(settings.ROOT_DIR('static'), 'colormaps', 'jetplus.txt'))
    cm_jetplus = matplotlib.colors.ListedColormap(np.array(clt))
    for key, drift in list(self.stoqsDrift.items()):
        # The third and fourth comma-separated fields of the key carry the color-scale
        # limits; cast them to float before passing them as vmin/vmax.
        dmin, dmax = (float(v) for v in key.split(',')[2:4])
        ax.scatter(drift['lon'], drift['lat'], c=drift['datavalue'],
                   s=coloredDotSize, cmap=cm_jetplus, lw=0, vmin=dmin, vmax=dmax)
        label = '%s from %s' % tuple(key.split(',')[:2])
        plt.text(drift['lon'][-1], drift['lat'][-1], label, size='small')

    nowLocal = str(pytz.utc.localize(datetime.now()).astimezone(
        pytz.timezone('America/Los_Angeles'))).split('.')[0]
    plt.text(0.99, 0.01, 'Created: ' + nowLocal + ' Local',
             horizontalalignment='right', verticalalignment='bottom',
             transform=ax.transAxes)

    if not forGeotiff:
        m.drawparallels(np.linspace(e[1], e[3], num=3),
                        labels=[True, False, False, False], linewidth=0)
        m.drawmeridians(np.linspace(e[0], e[2], num=3),
                        labels=[False, False, False, True], linewidth=0)
        try:
            plt.title(self.title)
        except AttributeError:
            pass
        fig.savefig(fileName)
        print('Wrote file', fileName)
    else:
        plt.axis('off')
        try:
            plt.text(0.5, 0.95, self.title, horizontalalignment='center',
                     verticalalignment='top', transform=ax.transAxes)
        except AttributeError:
            pass
        fig.savefig(fileName, transparent=True, dpi=300,
                    bbox_inches='tight', pad_inches=0)

    plt.clf()
    plt.close()