def global_calendar(request, *args, **kwargs):
    """Render the global task calendar page.

    Reads the logged-in username from the session, marks superusers as
    admins, and builds a dhtmlxScheduler-style event list from every
    Taskorder row (newest first).  The entire local namespace is passed
    to the template via ``locals()`` (existing project convention), so
    the per-task loop locals are deliberately kept as named variables.
    """
    user_now = request.session.get('USERNAME', False)
    # Fetch the user once — the original issued two identical queries
    # (one for head_portrait, one for is_superuser).
    user = User.objects.get(username=user_now)
    user_head_portrait = user.head_portrait
    if user.is_superuser:
        admin = True
        global_manage = True
    tasks = Taskorder.objects.all().order_by('-time')
    calendar = []
    for task in tasks:
        start_date = task.start_date
        end_date = task.end_date
        summary = task.summary
        color = task.type.color
        textColor = task.type.textColor
        calendar.append({
            'id': task.id,
            'text': summary,
            'start_date': start_date,
            'end_date': end_date,
            'color': color,
            'textColor': textColor
        })
    scheduler_css = 'dhtmlxscheduler_flat.css'
    scheduler_css_url = 'codebase/' + scheduler_css
    return render_to_response('index/index_calendar.html', locals())
def calendar(self) -> List[List[int]]:
    """Return the month grid for ``self._date`` as a list of week rows.

    Each inner list holds the seven day-numbers of one week (0 marks a
    day belonging to a neighbouring month).  A month spanning exactly
    five weeks gets one extra all-zero row so such grids render with six
    rows.  NOTE: four-week months (e.g. a non-leap February starting on
    the first weekday) are left unpadded, matching previous behaviour.
    """
    year, month = self._date.year, self._date.month
    calendar: List[List[int]] = self._calendar.monthdayscalendar(year, month)
    # Was `len(calendar) is 5`: identity comparison on an int only works
    # through CPython's small-int cache and warns on 3.8+; use ==.
    if len(calendar) == 5:
        calendar.append([0] * 7)
    return calendar
def import_gtfs_feed(dbhost, dbuser, dbpass, dbname, feeddir, action):
    """Load a GTFS feed directory into the MySQL database.

    ``action`` is "-append" to add rows to the existing tables; anything
    else (normally "-refresh") recreates the database first and reloads
    every table.  Exits the process with status 1 if ``feeddir`` does
    not exist.
    """
    # print() form is valid on both Python 2 and 3 (was py2-only print
    # statements, a SyntaxError under py3).
    print("\nUniBus-MySQL-Tool Suite")
    print("Import GTFS data from csv format to MySQL database.")
    print("Copyright @ Zhenwang.Yao 2008. \n")
    if not os.path.exists(feeddir):
        print("Error: Path %s does not exist!!!" % feeddir)
        sys.exit(1)
    # (loader module, feed file) pairs, in the original load order.
    tables = [
        (agency, "agency.txt"),
        (calendar, "calendar.txt"),
        (calendar_dates, "calendar_dates.txt"),
        (routes, "routes.txt"),
        (shapes, "shapes.txt"),
        (stops, "stops.txt"),
        (stop_times, "stop_times.txt"),
        (trips, "trips.txt"),
    ]
    if action == "-append":
        for module, filename in tables:
            module.append(dbhost, dbuser, dbpass, dbname,
                          os.path.join(feeddir, filename))
    else:  # action == "-refresh"
        citydb.create(dbhost, dbuser, dbpass, dbname)
        for module, filename in tables:
            module.refresh(dbhost, dbuser, dbpass, dbname,
                           os.path.join(feeddir, filename))
def changeToresult():
    """Reshape child.json into chart-friendly series and write childData.json.

    For every id key in ``../processDataJson/child.json``:
      * "circleS" (a 7x24 matrix — presumably day-of-week x hour, TODO
        confirm against the producer) becomes a flat list of
        [day, hour, value] triples; every value is also appended to the
        module-level ``circleMax`` list.
      * "calendar" (a date -> value mapping) becomes a list of
        [date, value] pairs; every value is also appended to the
        module-level ``calendarMax`` list.

    The transformed structure is written to
    ``../processDataJson/childData.json``.
    """
    with open('../processDataJson/child.json', 'r', encoding='utf-8') as f:
        data = json.load(f)
    # Mutates values in place; no keys are added or removed, so
    # iterating .items() while assigning into each record is safe.
    for key, record in data.items():
        heat = []
        for day in range(7):
            for hour in range(24):
                value = record["circleS"][day][hour]
                heat.append([day, hour, value])
                circleMax.append(value)
        record["circleS"] = heat
        daily = []
        for pair in record['calendar'].items():
            pair = list(pair)
            daily.append(pair)
            calendarMax.append(pair[1])
        record["calendar"] = daily
    with open('../processDataJson/childData.json', 'w', encoding='utf-8') as f:
        f.write(json.dumps(data))
def compute_delay(self, baseline, batches):
    """Compute the total monthly production overload, in hours.

    Splits the batches belonging to ``baseline`` between production
    Line 1 and Line 2, sums the workload falling into each calendar
    month, and returns how much that workload exceeds each line's
    monthly capacity (taken as the number of hours in the month).

    Parameters
    ----------
    baseline : iterable
        Opportunity ids used to filter ``batches``.
    batches : pandas.DataFrame
        Must provide opportunity_id, cable_production_line,
        cable_voltage, cable_area, cable_kind, delivery_date and
        workload columns.

    Side effect: stores the latest delivery date in ``self.end_date``.
    """
    # 1. select the interested batches
    sel_bat = batches[batches.opportunity_id.isin(baseline)]
    # Line 1: explicitly assigned, or small enough in voltage AND area.
    l1sel = (sel_bat.cable_production_line == 'Line1') | \
            ((sel_bat.cable_voltage <= self.MAX_VOLTAGE_L1) &
             (sel_bat.cable_area <= self.MAX_AREA_L1))
    # Line 2: explicitly assigned, or too big for Line 1, or SEG cables.
    l2sel = (sel_bat.cable_production_line == 'Line2') | \
            ((sel_bat.cable_voltage > self.MAX_VOLTAGE_L1) |
             (sel_bat.cable_area > self.MAX_AREA_L1) |
             (sel_bat.cable_kind == 'SEG'))
    sel_bat_L1 = sel_bat[l1sel]
    sel_bat_L2 = sel_bat[l2sel]

    # 2. find the first production month and walk month by month.
    d = sel_bat.delivery_date.min()
    start_date = datetime(d.year, d.month, 1)
    self.end_date = sel_bat.delivery_date.max()
    curr_date = start_date + relativedelta(months=1)
    L1_workload = []
    L2_workload = []
    # Hours available per month.  (Was a local named `calendar`, which
    # shadowed an unused `import calendar`; both cleaned up.)
    month_hours = []
    while start_date < self.end_date:
        l1batch = sel_bat_L1[(sel_bat_L1.delivery_date >= start_date) &
                             (sel_bat_L1.delivery_date < curr_date)]
        l2batch = sel_bat_L2[(sel_bat_L2.delivery_date >= start_date) &
                             (sel_bat_L2.delivery_date < curr_date)]
        L1_workload.append(l1batch.workload.sum())
        L2_workload.append(l2batch.workload.sum())
        month_hours.append((curr_date - start_date).days * 24.)
        start_date = start_date + relativedelta(months=1)
        curr_date = curr_date + relativedelta(months=1)
    L1_workload = np.asarray(L1_workload)
    L2_workload = np.asarray(L2_workload)
    month_hours = np.asarray(month_hours)
    overload = np.sum((L1_workload - month_hours).clip(min=0) +
                      (L2_workload - month_hours).clip(min=0))
    return overload
def import_to_offline_db(dbname, feeddir, action, version):
    """Import GTFS csv data into the offline sqlite database.

    ``action`` is "-append" to add rows to the existing tables; anything
    else (normally "-refresh") rebuilds the tables from the feed.  The
    feed ``version`` is recorded via ``dbinfo.add`` in both cases.
    Exits the process with status 1 if ``feeddir`` does not exist.
    """
    # print() form is valid on both Python 2 and 3 (was py2-only print
    # statements); "Covert" typo in the banner fixed to "Convert".
    print("\nUniBus-SQLite-Tool Suite")
    print("Convert GTFS data from csv format to sqlite format.")
    print("Copyright @ Zhenwang.Yao 2008. \n")
    if not os.path.exists(feeddir):
        print("Error: Path %s does not exist!!!" % feeddir)
        sys.exit(1)
    # (loader module, feed file) pairs handled by the offline database.
    tables = [
        (calendar, "calendar.txt"),
        (calendar_dates, "calendar_dates.txt"),
        (stop_times, "stop_times.txt"),
        (trips, "trips.txt"),
    ]
    if action == "-append":
        for module, filename in tables:
            module.append(dbname, os.path.join(feeddir, filename))
    else:  # action == "-refresh"
        for module, filename in tables:
            module.refresh(dbname, os.path.join(feeddir, filename))
    # Both branches recorded the version; hoisted out of the if/else.
    dbinfo.add(dbname, version)
def regenerate_web(self):
    """Regenerate the static HTML site from ``self.d``.

    Emits, under the export root (resolved by ``self._path``):
      * ``static/`` assets copied from the package's ``web_static`` data,
      * an ``index.html`` listing notebook and schedule covers,
      * per-notebook directories with one HTML page per note page,
      * per-schedule directories with a week-grid calendar index and one
        HTML page per schedule page.

    Expects ``self.d`` to provide 'forms', 'notebooks', 'pages',
    'schedules' and 'sch_pages' mappings (structure inferred from the
    accesses below — confirm against the loader).
    """
    # Path-building helpers: directory/file names derive from numeric ids.
    notebook_dirname = lambda x: 'n{:03}'.format(x)
    page_filename = lambda p, n: 'n{:03}/p{:06}.html'.format(n, p)
    notebook_covername = lambda x: 'images/note/n{:07x}_0.png'.format(x)
    schedule_dirname = lambda x: 's{:03}'.format(x)
    sch_page_filename = lambda p, n: 's{:03}/p{:06}.html'.format(n, p)
    schedule_covername = lambda x: 'images/schedule/s{:07x}_0.png'.format(x)

    def form_filename(id_, thumb=False):
        # A form image lives in a different directory depending on where
        # the form came from; -1 and 0 are sentinel notebook ids.
        notebook = self.d['forms'][id_]['notebook']
        if notebook == -1:
            # uform
            return 'images/{thumb}uform/f{id:07x}_0.png'.format(
                thumb='thumbnail/' if thumb else '', id=id_)
        elif notebook == 0:
            # built-in form
            return 'images/{thumb}form/f{id:07x}_0.png'.format(
                thumb='thumbnail/' if thumb else '', id=id_)
        else:
            # imported form
            return 'images/{thumb}impt/n{nb:06x}/f{id:07x}_0.png'.format(
                thumb='thumbnail/' if thumb else '', id=id_, nb=notebook)

    def page_imagename(id_, notebook, layer, thumb=False):
        # Thumbnails use a 't' prefix, full pages a 'p' prefix.
        return 'images/{thumb}page/n{nb:06x}/{tp}{id:07x}_{layer}.png'.format(
            thumb='thumbnail/' if thumb else '',
            tp='t' if thumb else 'p',
            id=id_, nb=notebook, layer=layer)

    def sch_form_filename(id_, schedule):
        return 'images/sch_form/s{sch:06x}/f{id:07x}_0.png'.format(
            sch=schedule, id=id_)

    def sch_page_imagename(id_, schedule, layer, thumb=False):
        return 'images/{thumb}sch_page/s{sch:06x}/{tp}{id:07x}_{layer}.png'.format(
            thumb='thumbnail/' if thumb else '',
            tp='t' if thumb else 'p',
            id=id_, sch=schedule, layer=layer)

    # copy over static files
    self._mkdir('static')
    provider = get_provider('chicraccoon')
    static_dir = provider.get_resource_filename(ResourceManager(),
                                                'web_static')
    for entry in os.scandir(static_dir):
        shutil.copy(entry.path, self._path('static'))

    # generate HTML
    env = Environment(loader=PackageLoader('chicraccoon', 'web_templates'),
                      autoescape=select_autoescape(['html']),
                      trim_blocks=True,
                      lstrip_blocks=True)

    # generate index page
    index_template = env.get_template('index.html')
    notebooks = []
    for id_ in self.d['notebooks']:
        # Fall back to a stock cover when no rendered cover image exists.
        cover = notebook_covername(id_)
        if not os.path.exists(self._path(cover)):
            cover = 'static/notebook_default.png'
        notebooks.append({
            'link': '{}/index.html'.format(notebook_dirname(id_)),
            'cover': cover
        })
    schedules = []
    for id_ in self.d['schedules']:
        cover = schedule_covername(id_)
        if not os.path.exists(self._path(cover)):
            cover = 'static/schedule_default.png'
        schedules.append({
            'link': '{}/index.html'.format(schedule_dirname(id_)),
            'cover': cover
        })
    with open(self._path('index.html'), 'w') as f:
        f.write(
            index_template.render(notebooks=notebooks, schedules=schedules))

    # generate note and notebook pages
    notebook_template = env.get_template('notebook.html')
    page_template = env.get_template('notebook_page.html')
    for id_, notebook in self.d['notebooks'].items():
        self._mkdir(notebook_dirname(id_))
        pages = []
        page_ids = notebook['pages']
        for i, page_id in enumerate(page_ids):
            page = self.d['pages'][page_id]
            # Layer 0 on disk implies layer 1 also exists — only the
            # layer-0 file is checked before appending both.
            thumb_layers = [form_filename(page['form'], True)]
            layers = [form_filename(page['form'])]
            if os.path.exists(self._path(page_imagename(page_id, id_, 0))):
                thumb_layers.append(page_imagename(page_id, id_, 0, True))
                thumb_layers.append(page_imagename(page_id, id_, 1, True))
                layers.append(page_imagename(page_id, id_, 0))
                layers.append(page_imagename(page_id, id_, 1))
            # prev/next links are omitted at the ends of the notebook.
            prev_link = None
            if i != 0:
                prev_link = page_filename(page_ids[i - 1], id_)
            next_link = None
            if i != len(page_ids) - 1:
                next_link = page_filename(page_ids[i + 1], id_)
            with open(self._path(page_filename(page_id, id_)), 'w') as f:
                f.write(
                    page_template.render(layers=layers,
                                         base_dir='../',
                                         page_num=i + 1,
                                         pages_total=len(page_ids),
                                         prev_link=prev_link,
                                         next_link=next_link))
            pages.append({
                'layers': thumb_layers,
                'link': page_filename(page_id, id_)
            })
        with open(self._path(notebook_dirname(id_), 'index.html'),
                  'w') as f:
            f.write(notebook_template.render(pages=pages, base_dir='../'))

    # generate schedule pages
    one_day = datetime.timedelta(days=1)
    # Timestamps in self.d are seconds since the epoch, read as UTC —
    # NOTE(review): confirm the producer writes UTC.
    parse_date = lambda x: datetime.datetime.utcfromtimestamp(x).date()
    schedule_template = env.get_template('schedule.html')
    sch_page_template = env.get_template('schedule_page.html')
    for id_, schedule in self.d['schedules'].items():
        self._mkdir(schedule_dirname(id_))
        page_objects = list([(x, self.d['sch_pages'][x])
                             for x in schedule['pages']])
        start_date = parse_date(schedule['start_date'])
        end_date = parse_date(schedule['end_date'])
        # Build a week-grid calendar: one entry per completed week, with
        # a month label on the first week of each new month.
        calendar = []
        last_month = -1
        week = []
        date = start_date
        date -= one_day * date.weekday()  # go to beginning of the week
        page = 0
        while date <= end_date:
            # Advance to the schedule page covering this date.
            while (page < len(page_objects) - 1) and \
                  (date > parse_date(page_objects[page][1]['end_date'])):
                page += 1
            week.append({
                'day': date.day,
                'date': date.strftime('%Y-%m-%d'),
                'link': sch_page_filename(page_objects[page][0], id_),
                'touched': page_objects[page][1]['touched']
            })
            if date.weekday() == 6:  # Sunday closes out the week row
                if last_month != date.month:
                    calendar.append({
                        'days': week,
                        'month': date.strftime('%B %Y')
                    })
                    last_month = date.month
                else:
                    calendar.append({'days': week})
                week = []
            date += one_day
        if len(week) > 0:
            # NOTE(review): a trailing partial week is appended as a bare
            # list, while full weeks are appended as {'days': week} dicts —
            # verify the schedule.html template handles both shapes.
            calendar.append(week)
            week = []
        with open(self._path(schedule_dirname(id_), 'index.html'),
                  'w') as f:
            f.write(
                schedule_template.render(calendar=calendar, base_dir='../'))
        for i, (page_id, page) in enumerate(page_objects):
            layers = [sch_form_filename(page_id, id_)]
            if os.path.exists(
                    self._path(sch_page_imagename(page_id, id_, 0))):
                layers.append(sch_page_imagename(page_id, id_, 0))
                layers.append(sch_page_imagename(page_id, id_, 1))
            prev_link = None
            if i != 0:
                prev_link = sch_page_filename(page_objects[i - 1][0], id_)
            next_link = None
            if i != len(page_objects) - 1:
                next_link = sch_page_filename(page_objects[i + 1][0], id_)
            start_date = parse_date(page['start_date']).isoformat()
            end_date = parse_date(page['end_date']).isoformat()
            with open(self._path(sch_page_filename(page_id, id_)),
                      'w') as f:
                f.write(
                    sch_page_template.render(base_dir='../',
                                             layers=layers,
                                             prev_link=prev_link,
                                             next_link=next_link,
                                             start_date=start_date,
                                             end_date=end_date))