def import_zip(path, filename, version):
    """Import an AVV DINO feed from the local 'dino' database and insert it.

    path and version are accepted only for interface compatibility with the
    other importers; this variant reads everything from the database.
    """
    conn = psycopg2.connect("dbname='dino'")
    prefix = "AVV"
    try:
        data = {}
        data["OPERATOR"] = getOperator(conn, prefix=prefix, website="http://www.avv.de")
        # Replace everything previously imported from this datasource.
        data["MERGESTRATEGY"] = [{"type": "DATASOURCE", "datasourceref": "1"}]
        data["DATASOURCE"] = getDataSource()
        data["VERSION"] = getVersion(conn, prefix=prefix, filename=filename)
        data["DESTINATIONDISPLAY"] = getDestinationDisplays(conn, prefix=prefix)
        data["LINE"] = getLines(conn, prefix=prefix)
        data["STOPPOINT"] = getStopPoints(conn, prefix=prefix)
        data["STOPAREA"] = getStopAreas(conn, prefix=prefix)
        data["AVAILABILITYCONDITION"] = getAvailabilityConditions(conn, prefix=prefix)
        data["PRODUCTCATEGORY"] = getProductCategories(conn, prefix=prefix)
        data["ADMINISTRATIVEZONE"] = {"AVV": {"operator_id": "AVV", "name": "Aachener Verkehrsverbund"}}
        data["TIMEDEMANDGROUP"] = getTimeDemandGroups(conn, prefix=prefix)
        data["ROUTE"] = clusterPatternsIntoRoute(conn, prefix=prefix)
        data["JOURNEYPATTERN"] = getJourneyPatterns(conn, data["ROUTE"], prefix=prefix)
        data["JOURNEY"] = getJourneys(conn, prefix=prefix)
        data["NOTICEASSIGNMENT"] = {}
        data["NOTICE"] = {}
        data["NOTICEGROUP"] = {}
    finally:
        # FIX: the connection previously leaked when any getter raised; the old
        # 'try/except: raise' wrapper only closed it on the success path.
        conn.close()
    insert(data)
def main():
    """Parse the course-evaluation document named on the command line and store it."""
    raw_text = removePageBreaks(sys.argv[1])
    term, academic_year = extractTiming(raw_text)
    stripped = removePreambles(raw_text)
    course_evals = splitCourseEvals(stripped)
    inserter.insert(course_evals, term, academic_year)
def import_zip(path, filename, version):
    """Import an HTMBUZZ feed: reject it when already expired, otherwise build
    the full import document and insert it.

    :param path: directory holding the zip
    :param filename: name of the zip file
    :param version: unused here; kept for interface compatibility
    """
    meta, conn = load(path, filename)
    # Feeds whose end date lies more than one day in the past are rejected
    # rather than imported.
    if datetime.strptime(meta['enddate'].replace('-', ''), '%Y%m%d') < (datetime.now() - timedelta(days=1)):
        data = {}  # BUG FIX: 'data' was never initialised in this branch (NameError)
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {
            'privatecode': 'HTMBUZZ:' + filename,
            'datasourceref': '1',
            'operator_id': 'HTMBUZZ:' + filename,
            'startdate': meta['startdate'],
            'enddate': meta['enddate'],
            'error': 'ALREADY_EXPIRED',
            'description': filename
        }
        logger.info('Reject ' + filename + '\n' + str(data['VERSION']['1']))
        reject(data)
        conn.commit()
        conn.close()
        return
    try:
        data = {}
        cleanDest(conn)
        if pool_generation_enabled:
            generatePool(conn)
        data['OPERATOR'] = getOperator()
        data['MERGESTRATEGY'] = getMergeStrategies(conn)
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {
            'privatecode': 'HTMBUZZ:' + filename,
            'datasourceref': '1',
            'operator_id': 'HTMBUZZ:' + filename,
            'startdate': meta['startdate'],
            'enddate': meta['enddate'],
            'description': filename
        }
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLineWithGeneratedNames(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditionsUsingOperday(conn)
        data['PRODUCTCATEGORY'] = getBISONproductcategories()
        data['ADMINISTRATIVEZONE'] = getAdministrativeZones(conn)
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool811)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn, data['ROUTE'])
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn)
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
        insert(data)
        setLineColors()
    finally:
        # FIX: guarantee the connection is released even when a getter raises.
        conn.close()
def import_zip(path,filename,version):
    # Import an IFF (rail) feed: load it, report pass-time stops that have no
    # matching quay, then build the import document and insert it.
    # NOTE(review): 'print row' below is Python 2 print-statement syntax; this
    # function appears to target Python 2.
    print (path,filename)
    meta,conn = load(path,filename)
    cur = conn.cursor()
    # Diagnostic: list station:platform ids in passtimes that are unknown in quays.
    cur.execute("SELECT DISTINCT station||':'||coalesce(platform,'0') FROM passtimes WHERE station||':'||coalesce(platform,'0') not in (select id from quays)")
    for row in cur.fetchall():
        print row
    cur.close()
    try:
        data = {}
        data['OPERATOR'] = getOperator(conn)
        # Replace all data previously imported from this datasource.
        data['MERGESTRATEGY'] = [{'type' : 'DATASOURCE', 'datasourceref' : '1'}]
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = getVersion(conn,filename)
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLines(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditions(conn)
        data['PRODUCTCATEGORY'] = getProductCategories(conn)
        data['ADMINISTRATIVEZONE'] = {}
        timedemandGroupRefForJourney,data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn)
        routeRefForPattern,data['ROUTE'] = clusterPatternsIntoRoute(conn,getPoolIFF)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern,conn,data['ROUTE'])
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney,conn)
        data['JOURNEYTRANSFERS'] = getTripTransfers(conn)
        data['NOTICEASSIGNMENT'] = getNoticeAssignments(conn)
        data['NOTICE'] = getNotices(conn)
        data['NOTICEGROUP'] = getNoticeGroups(conn)
        conn.close()
        insert(data,recycle_journeyids=recycle_journeyids)
    except:
        raise
def import_zip(path, filename, meta=None):
    """Build a GVB import document from a loaded feed and insert it.

    The version identity is derived from meta's key/dataownerversion fields.
    """
    deprecated, conn = load(path, filename)
    try:
        version_key = ':'.join(['GVB', meta['key'], meta['dataownerversion']])
        data = {
            'DATASOURCE': getDataSource(),
            'OPERATOR': getOperator(),
            'MERGESTRATEGY': [],
            'VERSION': {
                '1': {
                    'privatecode': version_key,
                    'datasourceref': '1',
                    'operator_id': version_key,
                    'startdate': meta['validfrom'],
                    'versionmajor': meta['index'],
                    'versionminor': meta['dataownerversion'],
                    'enddate': meta['validthru'],
                    'description': filename,
                }
            },
            'DESTINATIONDISPLAY': getDestinationDisplays(conn),
            'LINE': getLines(conn),
            'STOPPOINT': getStopPoints(conn),
            'STOPAREA': getStopAreas(conn),
            'AVAILABILITYCONDITION': getAvailabilityConditionsFromSchedvers(conn),
            'PRODUCTCATEGORY': getBISONproductcategories(),
            'ADMINISTRATIVEZONE': getAdministrativeZones(conn),
        }
        journey_td_refs, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroupsGVB(conn)
        pattern_route_refs, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool811)
        data['JOURNEYPATTERN'] = getJourneyPatterns(pattern_route_refs, conn, data['ROUTE'])
        data['JOURNEY'] = getJourneys(journey_td_refs, conn)
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
        insert(data)
        conn.close()
        setLineColors()
    except:
        raise
def import_zip(path, filename, version):
    """Import a SYNTUS feed and insert it into the database.

    :param path: directory holding the zip
    :param filename: name of the zip file
    :param version: unused here; kept for interface compatibility
    """
    meta, conn = load(path, filename)
    try:
        removeZeroPoints(conn)
        data = {}
        data['OPERATOR'] = getOperator()
        data['MERGESTRATEGY'] = getMergeStrategies(conn)
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {'privatecode': 'SYNTUS:' + filename,
                                'datasourceref': '1',
                                'operator_id': 'SYNTUS:' + filename,
                                'startdate': meta['startdate'],
                                'enddate': meta['enddate'],
                                'description': filename}
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLineWithGeneratedNames(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditionsUsingOperday(conn)
        data['PRODUCTCATEGORY'] = getBISONproductcategories()
        data['ADMINISTRATIVEZONE'] = getAdministrativeZones(conn)
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool805)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn, data['ROUTE'])
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn)
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
    finally:
        # FIX: the connection previously leaked when any getter raised; the old
        # 'try/except: raise' wrapper only closed it on the success path.
        conn.close()
    insert(data)
def import_zip(path, filename, version):
    # Import a Connexxion (CXX) feed, keeping only journeys whose service
    # calendar exists, then insert the document.
    # NOTE(review): 'print validfrom' is Python 2 print-statement syntax.
    # NOTE(review): validthru is hard-coded; every import is cut off at 2015-01-04.
    validthru = '2015-01-04'
    meta, conn = load(path, filename)
    validfrom = version['validfrom']
    print validfrom
    cur = conn.cursor()
    # Speed up pool lookups used later by clusterPatternsIntoRoute.
    cur.execute("""create index on pool(userstopcodebegin,userstopcodeend);""")
    cur.close()
    reconstruct_excopday(conn)
    try:
        data = {}
        data['_validfrom'] = version['validfrom']
        data['OPERATOR'] = getOperator()
        # Replace previously imported data from this datasource from validfrom on.
        data['MERGESTRATEGY'] = [{
            'type': 'DATASOURCE',
            'fromdate': validfrom,
            'datasourceref': '1'
        }]
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {
            'privatecode': 'CXX:' + filename,
            'datasourceref': '1',
            'operator_id': 'CXX:' + filename,
            'startdate': validfrom,
            'enddate': validthru,
            'description': filename
        }
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLines(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditionsFromCalendars(conn)
        data['JOURNEY'] = {}
        # Drop journeys referencing a missing service calendar; log them instead.
        for key, journey in getJourneysFromPujo(conn).items():
            if journey['availabilityconditionref'] not in data['AVAILABILITYCONDITION']:
                logging.warning('Servicecalendar %s missing for %s' %
                                (journey['availabilityconditionref'],
                                 journey['operator_id']))
            else:
                data['JOURNEY'][key] = journey
        data['PRODUCTCATEGORY'] = getBISONproductcategories()
        data['ADMINISTRATIVEZONE'] = getAdministrativeZones(conn)
        data['TIMEDEMANDGROUP'] = getTimeDemandGroups(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool805)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn,
                                                    data['ROUTE'])
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
        insert(data, recycle_journeyids=recycle_journeyids)
        conn.commit()
        conn.close()
    except:
        raise
def import_subzip(zip, versionname, unitcode, remove_old=False):
    """Import one TEC sub-zip (a single unit) into the database.

    :param zip: zip file to load (name shadows the builtin; kept for callers)
    :param versionname: version label stored with the import
    :param unitcode: unit identifier; the 3-char prefix is stripped ([3:]) before use
    :param remove_old: when True, wipe all previous data for this datasource first
    """
    meta, conn = load(zip)
    conn.commit()
    try:
        data = {}
        data['OPERATOR'] = {
            'TEC': {
                'url': 'http://www.infotec.be',
                'language': 'nl',
                'phone': '0',
                'timezone': 'Europe/Amsterdam',
                'operator_id': 'TEC',
                'name': 'TEC',
                'privatecode': 'TEC'
            }
        }
        data['MERGESTRATEGY'] = []
        if remove_old:
            data['MERGESTRATEGY'].append({
                'type': 'DATASOURCE',
                'datasourceref': '1'
            })
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = getVersion(conn, versionname, prefix='TEC')
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn, prefix='TEC')
        data['LINE'] = getLines(conn, prefix='TEC', operatorref='TEC', unitcode=unitcode[3:])
        data['STOPPOINT'] = getStopPoints(conn, prefix='TEC')
        data['STOPAREA'] = getStopAreas(conn, prefix='TEC')
        data['AVAILABILITYCONDITION'] = getAvailabilityConditions(conn, prefix='TEC', unitcode=unitcode[3:])
        data['PRODUCTCATEGORY'] = getProductCategories(conn)
        data['ADMINISTRATIVEZONE'] = {}
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn, prefix='TEC', unitcode=unitcode[3:])
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getFakePool, prefix='TEC', unitcode=unitcode[3:])
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn, data['ROUTE'], prefix='TEC')
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn, prefix='TEC', unitcode=unitcode[3:])
        data['NOTICEASSIGNMENT'] = getNoticeAssignments(conn, prefix='TEC')
        data['NOTICE'] = getNotices(conn, prefix='TEC')
        data['NOTICEGROUP'] = getNoticeGroups(conn, prefix='TEC')
    finally:
        # FIX: close the connection even when a getter raises (was success-path only).
        conn.close()
    insert(data)
def import_zip(path, filename, version):
    """Import a QBUZZ feed, optionally merging in the Utrecht tram (utram) pool.

    :param path: directory holding the zip
    :param filename: name of the zip file
    :param version: unused here; kept for interface compatibility
    """
    meta, conn = load(path, filename, point_from_pool=True)
    try:
        cur = conn.cursor()
        cur.execute("SELECT COUNT(*) FROM link WHERE transporttype = 'TRAM'")
        if pool_generation_enabled and cur.fetchone()[0] > 0:
            # Re-align several point coordinates with the utram pool and merge
            # the utram point/pool tables into the live ones.
            # NOTE(review): the final subselect has no FROM clause, so the IN
            # condition compares each row against itself — verify intent.
            cur.execute("""
                UPDATE pool_utram set linkvalidfrom = (SELECT DISTINCT validfrom FROM LINK where transporttype = 'TRAM');
                update point set locationx_ew = '135335', locationy_ns = '451223' where locationx_ew = '135639' and locationy_ns = '451663';
                update point set locationx_ew = '134669', locationy_ns = '450853' where locationx_ew = '134591' and locationy_ns = '450911';
                update point set locationx_ew = '133029', locationy_ns = '447900' where locationx_ew = '132473' and locationy_ns = '448026';
                update point set locationx_ew = '132907', locationy_ns = '447965' where locationx_ew = '132672' and locationy_ns = '448044';
                update point set locationx_ew = '135335', locationy_ns = '451314' where locationx_ew = '135533' and locationy_ns = '451628';
                update point set locationx_ew = '134356', locationy_ns = '448631' where locationx_ew = '134318' and locationy_ns = '448697';
                update point set locationx_ew = '131710', locationy_ns = '448728' where locationx_ew = '131731' and locationy_ns = '448705';
                insert into POINT (SELECT * from point_utram);
                insert into POOL (SELECT * FROM pool_utram WHERE userstopcodebegin||':'||userstopcodeend in (SELECT userstopcodebegin||':'||userstopcodeend));""")
        cur.close()  # FIX: cursor was never closed
        data = {}
        data['OPERATOR'] = getOperator()
        data['MERGESTRATEGY'] = []  # getMergeStrategies(conn)
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {
            'privatecode': 'QBUZZ:' + filename,
            'datasourceref': '1',
            'operator_id': 'QBUZZ:' + filename,
            'startdate': meta['startdate'],
            'enddate': meta['enddate'],
            'description': filename
        }
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLines(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditionsUsingOperday(conn)
        data['PRODUCTCATEGORY'] = getBISONproductcategories()
        data['ADMINISTRATIVEZONE'] = getAdministrativeZones(conn)
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool811)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn, data['ROUTE'])
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn)
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
        insert(data)
        setLineColors()
    finally:
        # FIX: guarantee the connection is released even when a step raises.
        conn.close()
def import_zip(path, filename, version):
    """Import an HTM feed: reject it when already expired, otherwise build the
    full import document and insert it.

    :param path: directory holding the zip
    :param filename: name of the zip file
    :param version: unused here; kept for interface compatibility
    """
    meta, conn = load(path, filename)
    # Feeds whose end date lies more than one day in the past are rejected
    # rather than imported.
    if datetime.strptime(meta['enddate'].replace('-', ''), '%Y%m%d') < (datetime.now() - timedelta(days=1)):
        data = {}
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {'privatecode': 'HTM:' + filename,
                                'datasourceref': '1',
                                'operator_id': 'HTM:' + filename,
                                'startdate': meta['startdate'],
                                'enddate': meta['enddate'],
                                'error': 'ALREADY_EXPIRED',
                                'description': filename}
        logger.info('Reject ' + filename + '\n' + str(data['VERSION']['1']))
        reject(data)
        conn.commit()
        conn.close()
        return
    try:
        cleanDest(conn)
        if pool_generation_enabled:
            generatePool(conn)
        data = {}
        data['OPERATOR'] = getOperator()
        data['MERGESTRATEGY'] = getMergeStrategies(conn)
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {'privatecode': 'HTM:' + filename,
                                'datasourceref': '1',
                                'operator_id': 'HTM:' + filename,
                                'startdate': meta['startdate'],
                                'enddate': meta['enddate'],
                                'description': filename}
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLineWithGeneratedNames(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditionsUsingOperday(conn)
        data['PRODUCTCATEGORY'] = getBISONproductcategories()
        data['ADMINISTRATIVEZONE'] = getAdministrativeZones(conn)
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool811)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn, data['ROUTE'])
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn)
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
        insert(data)
        setLineColors()
    finally:
        # FIX: guarantee the connection is released even when a getter raises.
        conn.close()
def import_zip(path, filename, version):
    """Import a QBUZZ feed, optionally merging in the Utrecht tram (utram) pool.

    :param path: directory holding the zip
    :param filename: name of the zip file
    :param version: unused here; kept for interface compatibility
    """
    meta, conn = load(path, filename, point_from_pool=True)
    try:
        cur = conn.cursor()
        cur.execute("SELECT COUNT(*) FROM link WHERE transporttype = 'TRAM'")
        if pool_generation_enabled and cur.fetchone()[0] > 0:
            # Re-align several point coordinates with the utram pool and merge
            # the utram point/pool tables into the live ones.
            # NOTE(review): the final subselect has no FROM clause, so the IN
            # condition compares each row against itself — verify intent.
            cur.execute("""
                UPDATE pool_utram set linkvalidfrom = (SELECT DISTINCT validfrom FROM LINK where transporttype = 'TRAM');
                update point set locationx_ew = '135335', locationy_ns = '451223' where locationx_ew = '135639' and locationy_ns = '451663';
                update point set locationx_ew = '134669', locationy_ns = '450853' where locationx_ew = '134591' and locationy_ns = '450911';
                update point set locationx_ew = '133029', locationy_ns = '447900' where locationx_ew = '132473' and locationy_ns = '448026';
                update point set locationx_ew = '132907', locationy_ns = '447965' where locationx_ew = '132672' and locationy_ns = '448044';
                update point set locationx_ew = '135335', locationy_ns = '451314' where locationx_ew = '135533' and locationy_ns = '451628';
                update point set locationx_ew = '134356', locationy_ns = '448631' where locationx_ew = '134318' and locationy_ns = '448697';
                update point set locationx_ew = '131710', locationy_ns = '448728' where locationx_ew = '131731' and locationy_ns = '448705';
                insert into POINT (SELECT * from point_utram);
                insert into POOL (SELECT * FROM pool_utram WHERE userstopcodebegin||':'||userstopcodeend in (SELECT userstopcodebegin||':'||userstopcodeend));""")
        cur.close()  # FIX: cursor was never closed
        data = {}
        data['OPERATOR'] = getOperator()
        data['MERGESTRATEGY'] = []  # getMergeStrategies(conn)
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {'privatecode': 'QBUZZ:' + filename,
                                'datasourceref': '1',
                                'operator_id': 'QBUZZ:' + filename,
                                'startdate': meta['startdate'],
                                'enddate': meta['enddate'],
                                'description': filename}
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLines(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditionsUsingOperday(conn)
        data['PRODUCTCATEGORY'] = getBISONproductcategories()
        data['ADMINISTRATIVEZONE'] = getAdministrativeZones(conn)
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool811)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn, data['ROUTE'])
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn)
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
        insert(data)
        setLineColors()
    finally:
        # FIX: guarantee the connection is released even when a step raises.
        conn.close()
def import_zip(path, filename, version):
    # Import a Connexxion (CXX) feed, keeping only journeys whose service
    # calendar exists, then insert the document.
    # NOTE(review): 'print validfrom' is Python 2 print-statement syntax.
    # NOTE(review): validthru is hard-coded; every import is cut off at 2015-01-04.
    validthru = '2015-01-04'
    meta, conn = load(path, filename)
    validfrom = version['validfrom']
    print validfrom
    cur = conn.cursor()
    # Speed up pool lookups used later by clusterPatternsIntoRoute.
    cur.execute("""create index on pool(userstopcodebegin,userstopcodeend);""")
    cur.close()
    reconstruct_excopday(conn)
    try:
        data = {}
        data['_validfrom'] = version['validfrom']
        data['OPERATOR'] = getOperator()
        # Replace previously imported data from this datasource from validfrom on.
        data['MERGESTRATEGY'] = [{'type': 'DATASOURCE',
                                  'fromdate': validfrom,
                                  'datasourceref': '1'}]
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = {'privatecode': 'CXX:' + filename,
                                'datasourceref': '1',
                                'operator_id': 'CXX:' + filename,
                                'startdate': validfrom,
                                'enddate': validthru,
                                'description': filename}
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLines(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditionsFromCalendars(conn)
        data['JOURNEY'] = {}
        # Drop journeys referencing a missing service calendar; log them instead.
        for key, journey in getJourneysFromPujo(conn).items():
            if journey['availabilityconditionref'] not in data['AVAILABILITYCONDITION']:
                logging.warning('Servicecalendar %s missing for %s' %
                                (journey['availabilityconditionref'],
                                 journey['operator_id']))
            else:
                data['JOURNEY'][key] = journey
        data['PRODUCTCATEGORY'] = getBISONproductcategories()
        data['ADMINISTRATIVEZONE'] = getAdministrativeZones(conn)
        data['TIMEDEMANDGROUP'] = getTimeDemandGroups(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool805)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn,
                                                    data['ROUTE'])
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
        insert(data, recycle_journeyids=recycle_journeyids)
        conn.commit()
        conn.close()
    except:
        raise
def import_zip(path, filename, meta=None):
    """Import a GVB feed and insert it into the database.

    :param path: directory holding the zip
    :param filename: name of the zip file
    :param meta: dict with key/dataownerversion/index/validfrom/validthru fields
                 used to build the version record
    """
    deprecated, conn = load(path, filename)
    try:
        data = {}
        data['DATASOURCE'] = getDataSource()
        data['OPERATOR'] = getOperator()
        data['MERGESTRATEGY'] = []
        data['VERSION'] = {}
        data['VERSION']['1'] = {
            'privatecode':
            ':'.join(['GVB', meta['key'], meta['dataownerversion']]),
            'datasourceref': '1',
            'operator_id':
            ':'.join(['GVB', meta['key'], meta['dataownerversion']]),
            'startdate': meta['validfrom'],
            'versionmajor': meta['index'],
            'versionminor': meta['dataownerversion'],
            'enddate': meta['validthru'],
            'description': filename
        }
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLines(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditionsFromSchedvers(conn)
        data['PRODUCTCATEGORY'] = getBISONproductcategories()
        data['ADMINISTRATIVEZONE'] = getAdministrativeZones(conn)
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroupsGVB(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPool811)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn, data['ROUTE'])
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn)
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
        insert(data)
        setLineColors()
    finally:
        # FIX: guarantee the connection is released even when a getter raises.
        conn.close()
def import_zip(path, filename, version):
    # Import an IFF (rail) feed: load it, report pass-time stops that have no
    # matching quay, then build the import document and insert it.
    # NOTE(review): 'print row' below is Python 2 print-statement syntax even
    # though the first print uses call syntax — this function targets Python 2.
    print(path, filename)
    meta, conn = load(path, filename)
    cur = conn.cursor()
    # Diagnostic: list station:platform ids in passtimes that are unknown in quays.
    cur.execute(
        "SELECT DISTINCT station||':'||coalesce(platform,'0') FROM passtimes WHERE station||':'||coalesce(platform,'0') not in (select id from quays)"
    )
    for row in cur.fetchall():
        print row
    cur.close()
    try:
        data = {}
        data['OPERATOR'] = getOperator(conn)
        # Replace all data previously imported from this datasource.
        data['MERGESTRATEGY'] = [{'type': 'DATASOURCE', 'datasourceref': '1'}]
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = getVersion(conn, filename)
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn)
        data['LINE'] = getLines(conn)
        data['STOPPOINT'] = getStopPoints(conn)
        data['STOPAREA'] = getStopAreas(conn)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditions(conn)
        data['PRODUCTCATEGORY'] = getProductCategories(conn)
        data['ADMINISTRATIVEZONE'] = {}
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn)
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getPoolIFF)
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn,
                                                    data['ROUTE'])
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn)
        data['JOURNEYTRANSFERS'] = getTripTransfers(conn)
        data['NOTICEASSIGNMENT'] = getNoticeAssignments(conn)
        data['NOTICE'] = getNotices(conn)
        data['NOTICEGROUP'] = getNoticeGroups(conn)
        conn.close()
        insert(data, recycle_journeyids=recycle_journeyids)
    except:
        raise
def import_zip(path, filename, version):
    """Import an AVV DINO feed from the local 'dino' database and insert it.

    path and version are accepted only for interface compatibility with the
    other importers; this variant reads everything from the database.
    """
    conn = psycopg2.connect("dbname='dino'")
    prefix = 'AVV'
    try:
        data = {}
        data['OPERATOR'] = getOperator(conn, prefix=prefix, website='http://www.avv.de')
        # Replace everything previously imported from this datasource.
        data['MERGESTRATEGY'] = [{'type': 'DATASOURCE', 'datasourceref': '1'}]
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = getVersion(conn, prefix=prefix, filename=filename)
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn, prefix=prefix)
        data['LINE'] = getLines(conn, prefix=prefix)
        data['STOPPOINT'] = getStopPoints(conn, prefix=prefix)
        data['STOPAREA'] = getStopAreas(conn, prefix=prefix)
        data['AVAILABILITYCONDITION'] = getAvailabilityConditions(conn, prefix=prefix)
        data['PRODUCTCATEGORY'] = getProductCategories(conn, prefix=prefix)
        data['ADMINISTRATIVEZONE'] = {
            'AVV': {
                'operator_id': 'AVV',
                'name': 'Aachener Verkehrsverbund'
            }
        }
        data['TIMEDEMANDGROUP'] = getTimeDemandGroups(conn, prefix=prefix)
        data['ROUTE'] = clusterPatternsIntoRoute(conn, prefix=prefix)
        data['JOURNEYPATTERN'] = getJourneyPatterns(conn, data['ROUTE'], prefix=prefix)
        data['JOURNEY'] = getJourneys(conn, prefix=prefix)
        data['NOTICEASSIGNMENT'] = {}
        data['NOTICE'] = {}
        data['NOTICEGROUP'] = {}
    finally:
        # FIX: the connection previously leaked when any getter raised; the old
        # 'try/except: raise' wrapper only closed it on the success path.
        conn.close()
    insert(data)
def import_subzip(zip, versionname, unitcode, remove_old=False):
    """Import one TEC sub-zip (a single unit) into the database.

    :param zip: zip file to load (name shadows the builtin; kept for callers)
    :param versionname: version label stored with the import
    :param unitcode: unit identifier; the 3-char prefix is stripped ([3:]) before use
    :param remove_old: when True, wipe all previous data for this datasource first
    """
    meta, conn = load(zip)
    conn.commit()
    try:
        data = {}
        data['OPERATOR'] = {'TEC': {'url': 'http://www.infotec.be',
                                    'language': 'nl',
                                    'phone': '0',
                                    'timezone': 'Europe/Amsterdam',
                                    'operator_id': 'TEC',
                                    'name': 'TEC',
                                    'privatecode': 'TEC'}}
        data['MERGESTRATEGY'] = []
        if remove_old:
            data['MERGESTRATEGY'].append({'type': 'DATASOURCE', 'datasourceref': '1'})
        data['DATASOURCE'] = getDataSource()
        data['VERSION'] = {}
        data['VERSION']['1'] = getVersion(conn, versionname, prefix='TEC')
        data['DESTINATIONDISPLAY'] = getDestinationDisplays(conn, prefix='TEC')
        data['LINE'] = getLines(conn, prefix='TEC', operatorref='TEC', unitcode=unitcode[3:])
        data['STOPPOINT'] = getStopPoints(conn, prefix='TEC')
        data['STOPAREA'] = getStopAreas(conn, prefix='TEC')
        data['AVAILABILITYCONDITION'] = getAvailabilityConditions(conn, prefix='TEC', unitcode=unitcode[3:])
        data['PRODUCTCATEGORY'] = getProductCategories(conn)
        data['ADMINISTRATIVEZONE'] = {}
        timedemandGroupRefForJourney, data['TIMEDEMANDGROUP'] = calculateTimeDemandGroups(conn, prefix='TEC', unitcode=unitcode[3:])
        routeRefForPattern, data['ROUTE'] = clusterPatternsIntoRoute(conn, getFakePool, prefix='TEC', unitcode=unitcode[3:])
        data['JOURNEYPATTERN'] = getJourneyPatterns(routeRefForPattern, conn, data['ROUTE'], prefix='TEC')
        data['JOURNEY'] = getJourneys(timedemandGroupRefForJourney, conn, prefix='TEC', unitcode=unitcode[3:])
        data['NOTICEASSIGNMENT'] = getNoticeAssignments(conn, prefix='TEC')
        data['NOTICE'] = getNotices(conn, prefix='TEC')
        data['NOTICEGROUP'] = getNoticeGroups(conn, prefix='TEC')
    finally:
        # FIX: close the connection even when a getter raises (was success-path only).
        conn.close()
    insert(data)
def import_zip(path, filename, version):
    """Import the hard-coded OHM2013 shuttle-bus timetable.

    All data is inlined; path/filename/version are accepted only for
    interface compatibility with the other importers.
    """
    data = {}
    data['OPERATOR'] = {
        'OHMSHUTTLE': {'privatecode': 'OHMSHUTTLE',
                       'operator_id': 'OHMSHUTTLE',
                       'name': 'OHM shuttlebus',
                       'phone': '0',
                       'url': 'http://ohm2013.org/site/',
                       'timezone': 'Europe/Amsterdam',
                       'language': 'nl'}
    }
    # This replaces all previous timetables in the database.
    data['MERGESTRATEGY'] = [{'type': 'DATASOURCE', 'datasourceref': '1'}]
    data['DATASOURCE'] = {
        '1': {'operator_id': 'OHM',
              'name': 'OHM shuttlebus',
              'description': 'OHM shuttlebus special',
              'email': None,
              'url': None}
    }
    data['VERSION'] = {
        '1': {'privatecode': 'OHMSHUTTLE:2013',
              'datasourceref': '1',
              'operator_id': 'OHMSHUTTLE:2013',
              'startdate': '2013-07-06',
              'enddate': '2013-08-08',
              'description': 'Speciale dienstregeling shuttle OHM2013'}
    }
    data['DESTINATIONDISPLAY'] = {
        'OHMSHUTTLE:OHM2013': {'privatecode': 'OHMSHUTTLE:OHM2013',
                               'operator_id': 'OHMSHUTTLE:OHM2013',
                               'name': 'Shuttlebus naar OHM2013',
                               'shortname': 'OHM2013',
                               'vianame': None},
        'OHMSHUTTLE:STATION': {'privatecode': 'OHMSHUTTLE:STATION',
                               'operator_id': 'OHMSHUTTLE:STATION',
                               'name': 'Shuttlebus naar Station',
                               'shortname': 'Station',
                               'vianame': None}
    }
    data['LINE'] = {
        'OHMSHUTTLE:1': {'operatorref': 'OHMSHUTTLE',
                         'privatecode': 'OHMSHUTTLE:1',
                         'operator_id': 'OHMSHUTTLE:1',
                         'transportmode': 'BUS',
                         'publiccode': 'OHMShuttle',
                         'name': 'OHM2013 Shuttleservice',
                         'monitored': False}
    }
    data['STOPPOINT'] = {
        'OHM2013': {'privatecode': 'OHM2013',
                    'operator_id': 'OHMSHUTTLE:OHM2013',
                    'isscheduled': True,
                    'stoparearef': None,
                    'name': 'Shuttle stop OHM2013',
                    'town': 'Warmenhuizen',
                    'latitude': 52.694511,
                    'longitude': 4.755342},
        'STATION': {'privatecode': 'STATION',
                    'operator_id': 'OHMSHUTTLE:STATION',
                    'isscheduled': True,
                    'stoparearef': None,
                    'name': 'Station Heerhugowaard',
                    'town': 'Heerhugowaard',
                    'latitude': 52.669693,
                    'longitude': 4.824374}
    }
    data['STOPAREA'] = {}
    # Festival period used for the 'tussen' (in-between days) calendar below.
    d1 = date(2013, 7, 27)
    # FIX: was date(2013,8,07) — a leading-zero integer literal is a
    # SyntaxError in Python 3 (octal in Python 2, where 07 == 7); the intended
    # value is 7, so behaviour is unchanged.
    d2 = date(2013, 8, 7)
    data['AVAILABILITYCONDITION'] = {
        'start': {'operator_id': 'OHMSHUTTLE:startdag',
                  'privatecode': 'startdag',
                  'unitcode': 'OHM2013',
                  'versionref': '1',
                  'fromdate': '2013-07-26',
                  'todate': '2013-07-26',
                  'DAYS': {'validdates': ['2013-07-26']}},
        'tussen': {'operator_id': 'OHMSHUTTLE:tussendagen',
                   'unitcode': 'OHM2013',
                   'privatecode': 'startdag',
                   'versionref': '1',
                   'fromdate': str(d1),
                   'todate': str(d2),
                   # every day from d1 through d2 inclusive
                   'DAYS': {'validdates': [str(d1 + timedelta(days=x)) for x in range((d2 - d1).days + 1)]}},
        'eind': {'operator_id': 'OHMSHUTTLE:einddag',
                 'unitcode': 'OHM2013',
                 'privatecode': 'startdag',
                 'versionref': '1',
                 'fromdate': '2013-08-08',
                 'todate': '2013-08-08',
                 'DAYS': {'validdates': ['2013-08-08']}}
    }
    data['PRODUCTCATEGORY'] = {
        'SHUTTLEBUS': {'operator_id': 'SHUTTLEBUS',
                       'privatecode': 'SHUTTLEBUS',
                       'shortname': 'Shuttlebus',
                       'name': 'Shuttlebus'}
    }
    data['ADMINISTRATIVEZONE'] = {
        '1': {'operator_id': 'OHMSHUTTLE:1',
              'privatecode': 'OHMSHUTTLE:1',
              'name': 'OHM2013 shuttlevervoer',
              'description': 'OHM2013 organistatie'}
    }
    data['TIMEDEMANDGROUP'] = {
        '1': {'operator_id': 'OHMSHUTTLE:1',
              'privatecode': 'OHMSHUTTLE',
              'POINTS': [{'pointorder': 1, 'totaldrivetime': 0, 'stopwaittime': 0},
                         {'pointorder': 2, 'totaldrivetime': 780, 'stopwaittime': 0}]}
    }
    data['ROUTE'] = {
        'HEEN': {'operator_id': 'OHMSHUTTLE:heen',
                 'lineref': 'OHMSHUTTLE:1',
                 'POINTS': [{'pointorder': 1, 'latitude': 52.669693, 'longitude': 4.824374, 'distancefromstart': 0},
                            {'pointorder': 2, 'latitude': 52.694511, 'longitude': 4.755342, 'distancefromstart': 8200},
                            ]},
        'TERUG': {'operator_id': 'OHMSHUTTLE:terug',
                  'lineref': 'OHMSHUTTLE:1',
                  'POINTS': [{'pointorder': 1, 'latitude': 52.694511, 'longitude': 4.755342, 'distancefromstart': 0},
                             {'pointorder': 2, 'latitude': 52.669693, 'longitude': 4.824374, 'distancefromstart': 8200},
                             ]}
    }
    data['JOURNEYPATTERN'] = {
        'HEEN': {'operator_id': 'OHMSHUTTLE:heen',
                 'routeref': 'HEEN',
                 'directiontype': '1',
                 'destinationdisplayref': 'OHMSHUTTLE:OHM2013',
                 'POINTS': [{'pointorder': 1, 'pointref': 'STATION', 'onwardpointref': 'OHM2013',
                             'iswaitpoint': True, 'forboarding': True, 'foralighting': False,
                             'destinationdisplayref': 'OHMSHUTTLE:OHM2013'},
                            {'pointorder': 2, 'pointref': 'OHM2013', 'onwardpointref': None,
                             'iswaitpoint': False, 'forboarding': False, 'foralighting': True,
                             'destinationdisplayref': 'OHMSHUTTLE:OHM2013', 'distancefromstartroute': 8200},
                            ]},
        'TERUG': {'operator_id': 'OHMSHUTTLE:terug',
                  'routeref': 'TERUG',
                  'directiontype': '2',
                  'destinationdisplayref': 'OHMSHUTTLE:STATION',
                  'POINTS': [{'pointorder': 1, 'pointref': 'OHM2013', 'onwardpointref': 'STATION',
                              'iswaitpoint': True, 'forboarding': True, 'foralighting': False,
                              'destinationdisplayref': 'OHMSHUTTLE:STATION'},
                             {'pointorder': 2, 'pointref': 'STATION', 'onwardpointref': None,
                              'iswaitpoint': False, 'forboarding': False, 'foralighting': True,
                              'destinationdisplayref': 'OHMSHUTTLE:STATION', 'distancefromstartroute': 8200},
                             ]}
    }
    data['JOURNEY'] = getJourneys()
    data['NOTICEASSIGNMENT'] = {}
    data['NOTICE'] = {}
    data['NOTICEGROUP'] = {}
    insert(data)