def NoonCrt(start, end):
    """Return the hours of the 12:00-13:00 lunch break that fall inside
    [start, end], clamped to [0, 1].

    start, end -- System.DateTime values on the same working day.
    The result is subtracted from the raw worked hours so the lunch hour
    is not counted as overtime (see DataChanged).
    """
    # Ticks are 100 ns units: ticks / 10**7 -> seconds, / 3600 -> hours.
    noonStart = DateTime(start.Year, start.Month, start.Day, 12, 0, 0)
    noonEnd = DateTime(end.Year, end.Month, end.Day, 13, 0, 0)
    if start.CompareTo(noonStart) == -1:
        # Work starts before noon: overlap is (end - 12:00), capped at the
        # full lunch hour and floored at zero (work may end before noon).
        crtHour = min(
            max(
                float(end.ToUniversalTime().Ticks -
                      noonStart.ToUniversalTime().Ticks) / 10000000 / 3600,
                0),
            1)
    elif end.CompareTo(noonEnd) in (-1, 0):
        # Work starts at/after 12:00 and ends by 13:00: the whole interval
        # lies inside the lunch window.
        crtHour = float(end.ToUniversalTime().Ticks -
                        start.ToUniversalTime().Ticks) / 10000000 / 3600
    else:
        # Work starts during lunch and ends after 13:00: overlap is
        # (13:00 - start), floored at zero for starts after 13:00.
        crtHour = max(
            float(noonEnd.ToUniversalTime().Ticks -
                  start.ToUniversalTime().Ticks) / 10000000 / 3600,
            0)
    return crtHour
def testMarketFromRecordString(self):
    """Market.fromRecordString parses every '~'-separated field of a market
    record, honouring the '\\~' escape inside the name field.

    Uses assertEqual: the assertEquals alias is deprecated and removed in
    Python 3.12.
    """
    market = betfair.Market.fromRecordString(
        "12~"
        "Market \\~ name~"
        "Type~"
        "Status~"
        "31536000000~"          # 365 days in ms -> 1971-01-01
        "\\Menu\\Path\\To\\Market~"
        "event hierarchy~"
        "bet delay~"
        "12345~"
        "country code~"
        "94608000000~"          # 1095 days in ms -> 1972-12-31
        "55~"
        "2~"
        "1.234556~"
        "N~"
        "Y")
    self.assertEqual(market.Id, 12)
    self.assertEqual(market.Name, "Market \\~ name")
    self.assertEqual(market.Type, "Type")
    self.assertEqual(market.Status, "Status")
    self.assertEqual(market.StartDate, DateTime(1971, 1, 1))
    self.assertEqual(market.Path, "\\Menu\\Path\\To\\Market")
    self.assertEqual(market.EventHierarchy, "event hierarchy")
    self.assertEqual(market.BetDelay, "bet delay")
    self.assertEqual(market.ExchangeId, 12345)
    self.assertEqual(market.CountryCode, "country code")
    self.assertEqual(market.LastRefresh, DateTime(1972, 12, 31))
    self.assertEqual(market.NumberOfRunners, 55)
    self.assertEqual(market.NumberOfWinners, 2)
    self.assertEqual(market.TotalAmountMatched, 1.234556)
    self.assertEqual(market.BSPMarket, False)
    self.assertEqual(market.TurningInPlay, True)
def f2_2_11():
    # monitorType = 11
    # Query monitoring web view #11 for tunnel 24 / installation 3 over the
    # window 2017-02-06 .. 2018-02-06, print every returned table/chart,
    # and return the view object for further inspection.
    tunnelId = 24
    installId = 3
    from System import DateTime
    startDate = DateTime(2017, 2, 6)
    endDate = DateTime(2018, 2, 6)
    # _log wraps the service call for logging; see its definition elsewhere.
    view = _log(monitorSrv.QueryWebView11)(tunnelId, installId, startDate, endDate)
    # Section headers below are user-facing Chinese strings — left untouched.
    print '\n地質資訊'
    print view.InstallInfo
    print '\n監測資料'
    showTable(view.GeoDefRecords)
    print '\nGoogle Map 測站資料'
    showTable(view.GeoDefMarksRecords)
    print '\n歷時曲線圖(East)'
    showChart(view.TimeChartEast)
    print '\n歷時曲線圖(North)'
    showChart(view.TimeChartNorth)
    print '\n歷時曲線圖(Up)'
    showChart(view.TimeChartUp)
    return view
def testDateTimeFromPosix(self):
    """DateTimeFromPosix converts POSIX milliseconds since the epoch into a
    System.DateTime.

    Uses assertEqual: the assertEquals alias is deprecated and removed in
    Python 3.12.
    """
    self.assertEqual(DateTime(1970, 1, 1), betfair.DateTimeFromPosix(0))
    self.assertEqual(DateTime(1970, 1, 1, 1, 0, 0),
                     betfair.DateTimeFromPosix(3600000))
    self.assertEqual(DateTime(1970, 1, 2),
                     betfair.DateTimeFromPosix(3600000 * 24))
    self.assertEqual(DateTime(1971, 1, 1),
                     betfair.DateTimeFromPosix(3600000 * 24 * 365))
    # 3650 days is ten 365-day years, i.e. 10 calendar years minus the two
    # leap days (1972, 1976) -> 1979-12-30, not 1980-01-01.
    self.assertEqual(DateTime(1979, 12, 30),
                     betfair.DateTimeFromPosix(3600000 * 24 * 365 * 10))
def wait_until_ready():
    '''Block until __QUERY_DELAY_MS milliseconds have elapsed since the
    previous call; return immediately if that much time has already passed.

    Rate-limits successive queries via the module-level globals
    __next_query_time_ms (next permitted query time, POSIX ms) and
    __QUERY_DELAY_MS (minimum spacing between queries, ms).
    '''
    global __next_query_time_ms, __QUERY_DELAY_MS
    # Current time in milliseconds since the Unix epoch (local clock, as
    # in the original implementation).
    time_ms = (DateTime.Now - DateTime(1970, 1, 1)).TotalMilliseconds
    wait_ms = __next_query_time_ms - time_ms
    if wait_ms > 0:
        # Sleep directly on the calling thread.  The original spawned a
        # worker thread whose only job was to Sleep(), then Join()ed it —
        # that blocks the caller for the same duration with extra overhead,
        # and invoked the static Sleep through Thread.CurrentThread.
        # int(): Thread.Sleep takes Int32 milliseconds, wait_ms is a float.
        Thread.Sleep(int(wait_ms))
        time_ms = (DateTime.Now - DateTime(1970, 1, 1)).TotalMilliseconds
    __next_query_time_ms = time_ms + __QUERY_DELAY_MS
def UpdateTOF():
    """
    <Script>
    <Author>ANK</Author>
    <Description>This script modifies scenario dates </Description>
    </Script>
    """
    scenarioFullPath = "/Group of cali/cali/Scenario of cali"
    # Time of forecast with a one-day simulation window on either side.
    TOF = DateTime(2014, 2, 1)
    SOS = TOF.AddDays(-1)   # start of simulation
    EOS = TOF.AddDays(1)    # end of simulation
    scmgr = app.Modules.Get("Scenario Manager")
    scenario = scmgr.ScenarioList.Fetch(scenarioFullPath)
    # First reset all three dates so they can be reassigned without
    # violating the invariant SOS <= TOF <= EOS.
    scenario.SimulationStartDate = DateTime.MinValue
    scenario.SimulationTimeOfForecast = DateTime.MinValue
    scenario.SimulationEndDate = DateTime.MinValue
    # Then set the dates end-first so the invariant holds after every step.
    scenario.SimulationEndDate = EOS
    scenario.SimulationTimeOfForecast = TOF
    scenario.SimulationStartDate = SOS
def test_System_DateTime_conversion(self):
    """clr.Convert turns a Python datetime into an equal System.DateTime."""
    py_value = datetime.datetime(2015, 4, 25, 8, 39, 54)
    net_value = clr.Convert(py_value, DateTime)
    self.assertIsInstance(net_value, DateTime)
    self.assertEqual(DateTime(2015, 4, 25, 8, 39, 54), net_value)
def to_date(self, date):
    """Convert a Python datetime into a .NET System.DateTime.

    Sub-second precision is not carried over (seconds resolution only).
    """
    fields = (date.year, date.month, date.day,
              date.hour, date.minute, date.second)
    return DateTime(*fields)
def test_System_DateTime_binding(self):
    """Subtracting a Python datetime from a System.DateTime binds to the
    DateTime overload and yields the expected one-hour TimeSpan."""
    one_hour_later = DateTime(2015, 4, 25, 9, 39, 54)
    py_dt = datetime.datetime(2015, 4, 25, 8, 39, 54)
    difference = one_hour_later.Subtract(py_dt)
    self.assertEqual(TimeSpan(1, 0, 0), difference)
def __init__(self, pandasSeries):
    """Build one OHLCV bar from a pandas row: the row's name supplies the
    date (leading 'YYYY-MM-DD'), the columns supply prices and volume."""
    year, month, day = (int(part)
                        for part in str(pandasSeries.name)[:10].split("-"))
    self.Date = DateTime(year, month, day)
    # Same assignment order as before: Open, High, Low, Close, Volume.
    for field in ("Open", "High", "Low", "Close", "Volume"):
        setattr(self, field, Decimal(float(pandasSeries[field])))
def f2_3_8(): tunnelId = 5 # 雪山隧道 structureId = 24 # 南下線 projectId = 2000 # 2050年TEST_2050測試專案 from System import DateTime startDate = DateTime(2018, 1, 1) endDate = DateTime(2018, 12, 31) view = _log(inspectSrv.QueryWebView8)( structureId, projectId, startDate, endDate) print '排水設施資料表' showTable(view.Records) print return view
def fromPlatformIndDateTime(dt):
    """Parse a platform-independent 'dd/mm/yyyy_HH:MM:SS' value (anything
    whose str() has that form) into a System.DateTime."""
    date_part, time_part = str(dt).split("_")
    day, month, year = (int(piece) for piece in date_part.split("/"))
    hour, minute, second = (int(piece) for piece in time_part.split(":"))
    return DateTime(year, month, day, hour, minute, second)
def setup_date_mapping(year=2024):
    """Return {curve name: first-of-month System.DateTime}.

    'HenryHub' maps to the first day of the current month in *year*;
    'Contract1'..'Contract4' map to the following four months.

    year -- anchor year.  Defaults to the historically hard-coded 2024 so
            existing callers see identical behaviour.
            NOTE(review): the original had an anchor on DateTime.Today.Year
            commented out — confirm whether pinning 2024 is intentional.
    """
    date_from = dict()
    dt = DateTime(year, DateTime.Today.Month, 1)
    date_from['HenryHub'] = dt
    for offset in range(1, 5):
        date_from['Contract%d' % offset] = dt.AddMonths(offset)
    return date_from
def from_datetime_like(datetime_like, time_period_type):
    """ Converts either a pandas Period, datetime or date to a .NET Time Period"""
    if hasattr(datetime_like, 'hour'):
        hour, minute, second = (datetime_like.hour, datetime_like.minute,
                                datetime_like.second)
    else:
        # Plain dates carry no time component; use midnight.
        hour = minute = second = 0
    dotnet_dt = DateTime(datetime_like.year, datetime_like.month,
                         datetime_like.day, hour, minute, second)
    return TimePeriodFactory.FromDateTime[time_period_type](dotnet_dt)
def Cleanup(numberOfDays, scenarioFullPath, jobName):
    """
    <Script>
    <Author>SNI</Author>
    <Description>This script will cleanup the simulations and job logs older than specified number of days.</Description>
    <Parameters>
    <Parameter name="numberOfDays" type="int">Number of days.</Parameter>
    <Parameter name="scenarioFullPath" type="string">Full path of the scenario whose old simulations are to be cleaned.</Parameter>
    <Parameter name="jobName" type="string">Name of the job whose old instances are to be cleaned.</Parameter>
    </Parameters>
    <ReturnValue type="IType">Function returns object of type IType</ReturnValue>
    </Script>
    """
    # Anything last run/executed before this moment gets deleted.
    cutoffDT = datetime.datetime.now() - timedelta(days=numberOfDays)
    cutoffDateTime = DateTime(cutoffDT.year, cutoffDT.month, cutoffDT.day,
                              cutoffDT.hour, cutoffDT.minute, cutoffDT.second)

    # --- simulations ------------------------------------------------------
    scmgr = app.Modules.Get("Scenario Manager")
    scenario = scmgr.ScenarioList.Fetch(scenarioFullPath)
    # FetchAll + filter: the list API offers no per-scenario fetch here.
    simulationList = [s for s in scmgr.SimulationList.FetchAll()
                      if s.ScenarioId == scenario.Id]
    # Collect first, then delete, so we never delete while filtering.
    simulationsToBeDeleted = [s for s in simulationList
                              if s.TimeOfSimulationRun < cutoffDateTime]
    for simulation in simulationsToBeDeleted:
        print('Deleting Simulation: ' + simulation.Name)
        scmgr.SimulationList.Delete(simulation, True)

    # --- job logs older than the same cutoff ------------------------------
    jobmgr = app.Modules.Get("Job Manager")
    job = jobmgr.JobList.Fetch(jobName)
    jobInstances = jobmgr.JobInstanceList.FetchJobInstance(job.Id)
    jobInstancesToBeDeleted = [j for j in jobInstances
                               if j.ExecutedAt < cutoffDateTime]
    for jobInstance in jobInstancesToBeDeleted:
        # Fixed: the original printed 'jobInstace.Name' — a misspelled name
        # that raised NameError as soon as any instance qualified.
        print('Deleting Job Instance: ' + jobInstance.Name)
        jobmgr.JobInstanceList.Delete(jobInstance)
def DataChanged(e):
    # Form field-change event handler (ERP form plugin style: `this` is the
    # plugin instance injected by the host — TODO confirm host framework).
    # Desktop clients only; mobile clients skip all of this.
    if str(this.Context.ClientType) != 'Mobile':
        if e.Key == 'FType':
            # 1 => weekday overtime; 2 => public-holiday overtime; 3 => weekend overtime
            # Payment method: 1 = paid; 2 = compensatory time off
            ftype = this.View.Model.GetValue('FType')
            if ftype in ('1', '3'):
                # Weekday/weekend overtime forces "compensatory time off"
                # and locks the field.
                this.View.Model.SetValue('FPayType', 2)
                this.View.StyleManager.SetEnabled("FPayType", "", False)
            elif ftype == '2':
                this.View.StyleManager.SetEnabled("FPayType", "", True)
        # Recompute expected overtime hours when either boundary changes.
        if e.Key == 'F_SDatetime' or e.Key == 'F_EDatetime':
            _FStart = this.View.Model.GetValue('F_SDatetime', e.Row)
            _FEnd = this.View.Model.GetValue('F_EDatetime', e.Row)
            if _FStart != None and _FEnd != None:
                #sql = 'select * from T_ENG_WorkCalData where FDay='.format()
                #DBUtils.ExecuteDataSet(this.Context, sql).Tables[0]
                ## If the start is before 17:30 and the end is after 18:00
                #if _FStart.CompareTo(DateTime.Parse('17:30:00')) in (-1, 0) and _FEnd.CompareTo(DateTime.Parse('18:00:00')) in (0, 1):
                #    _FStart = DateTime.Parse('18:00:00')
                # The value loaded from the form carries sub-second noise;
                # rebuild the DateTime at whole-second resolution to fix it.
                _FStart = DateTime(_FStart.Year, _FStart.Month, _FStart.Day,
                                   _FStart.Hour, _FStart.Minute,
                                   _FStart.Second)
                # Ticks are 100 ns units: /10**7 -> seconds, /3600 -> hours.
                hours = float(
                    _FEnd.ToUniversalTime().Ticks -
                    _FStart.ToUniversalTime().Ticks) / 10000000 / 3600
                if hours > 0:
                    # Despite the name, this truncates x DOWN to the nearest
                    # 0.5 via string math (2.7 -> 2.5, 2.3 -> 2.0).
                    # NOTE(review): raises IndexError when str(x) has no '.'
                    # (exact integers) and misbehaves for scientific
                    # notation — confirm inputs are always plain floats.
                    floor = lambda x: int(str(x).split('.')[0]) + 0.5 if float(
                        '0.' + str(x).split('.')[1]) >= 0.5 else int(
                        str(x).split('.')[0])
                    # Subtract the lunch-break overlap before rounding.
                    last_hours = floor(hours - NoonCrt(_FStart, _FEnd))
                    this.View.Model.SetValue('F_ora_PreTime', last_hours, e.Row)
                    this.View.Model.SetValue('F_ora_RealTime', last_hours, e.Row)
                else:
                    # End precedes start: warn (user-facing Chinese string
                    # kept verbatim) and zero both hour fields.
                    this.View.ShowMessage('加班结束日期不能小于开始日期')
                    #this.View.BillModel.SetValue('F_ora_EndTime', _FStart, e.Row)
                    this.View.Model.SetValue('F_ora_PreTime', 0, e.Row)
                    this.View.Model.SetValue('F_ora_RealTime', 0, e.Row)
def Run(self):
    """Wire AAPL and MSFT into the BollingerBands strategy, configure the
    2013 simulation window and time bars, then start the backtest."""
    instrument1 = self.InstrumentManager.Instruments["AAPL"]
    instrument2 = self.InstrumentManager.Instruments["MSFT"]
    self.strategy = MyStrategy(self.framework, "BollingerBands")
    self.strategy.AddInstrument(instrument1)
    self.strategy.AddInstrument(instrument2)
    # Simulate calendar year 2013.  (Originally written with 01-style
    # leading-zero literals, which are a SyntaxError on Python 3.)
    self.DataSimulator.DateTime1 = DateTime(2013, 1, 1)
    self.DataSimulator.DateTime2 = DateTime(2013, 12, 31)
    self.BarFactory.Add(instrument1, BarType.Time, self.barSize)
    self.BarFactory.Add(instrument2, BarType.Time, self.barSize)
    self.StartStrategy()
def __getattr__(self, key):
    """Read field *key* from the underlying ADK record.

    Dispatches on the field's ADK type to the matching AdkGet* reader and
    returns the value part of its (status, value) result.  Field types
    with no reader fall through and yield None, matching prior behaviour.
    """
    try:
        field_type = self.get_type(key)
    except AttributeError:
        raise AttributeError(f"{key} is not a valid field of {self.db_name}")
    read_args = (self.data, getattr(self.api, key.upper()))
    adk_types = self.api.ADK_FIELD_TYPE
    if field_type == adk_types.eChar:
        return self.api.AdkGetStr(*read_args, String(""))[1]
    if field_type == adk_types.eDouble:
        return self.api.AdkGetDouble(*read_args, Double(0.0))[1]
    if field_type == adk_types.eBool:
        return self.api.AdkGetBool(*read_args, Boolean(0))[1]
    if field_type == adk_types.eDate:
        return self.api.AdkGetDate(*read_args, DateTime())[1]
def onCompleted(task):
    # Task-completion callback: scans the module-level entryList for entries
    # modified after the global high-water mark `dateTime`, alerts on the
    # new ones, and enqueues an "Activate" sequence for the first new entry
    # whose title matches a known word.
    # NOTE(review): block nesting reconstructed from collapsed source —
    # confirm against the original file before relying on it.
    global dateTime
    if entryList.Count > 0:
        dt = DateTime(0)  # newest Modified stamp seen in this pass
        newEntryList = List[Entry]()
        for entry in entryList:
            if entry.Modified > dateTime:
                newEntryList.Add(entry)
            if entry.Modified > dt:
                dt = entry.Modified
        # Advance the high-water mark; if nothing was newer, reset to now.
        if dt > dateTime:
            dateTime = dt
        else:
            dateTime = DateTime.Now
        if newEntryList.Count > 0:
            Script.Instance.Alert(newEntryList)
            # Index known word names by their first character, the shape
            # getTermList() consumes.
            dictionary = Dictionary[Char, List[String]]()
            for word in Script.Instance.Words:
                if word.Name.Length > 0:
                    if not dictionary.ContainsKey(word.Name[0]):
                        dictionary.Add(word.Name[0], List[String]())
                    dictionary[word.Name[0]].Add(word.Name)
            for entry in newEntryList:
                termList = getTermList(dictionary, entry.Title)
                if termList.Count > 0:
                    # Collect every sequence named "Activate", then stop
                    # after the first successful enqueue.
                    sequenceList = List[Sequence]()
                    for sequence in Script.Instance.Sequences:
                        if sequence.Name.Equals("Activate"):
                            sequenceList.Add(sequence)
                    if Script.Instance.TryEnqueue(
                            Script.Instance.Prepare(
                                sequenceList, None, termList)):
                        break
def __init__(self): """DateTimePickerSample class init function.""" # set up form self.Text = "DateTimePicker control" # set up label self.label = Label() self.label.Text = "The date you select is:" self.label.Size = Size(260, 30) self.label.Location = Point(0, 50) # set up domainupdown self.date_time_picker = DateTimePicker() self.date_time_picker.ShowCheckBox = True self.date_time_picker.Width = self.Width - 10 self.date_time_picker.ValueChanged += self.dt_value_changed # fixed date to 2009-01-01 self.date_time_picker.Value = DateTime(2009, 1, 1) # add controls self.Controls.Add(self.label) self.Controls.Add(self.date_time_picker)
def is_end_time_empty(self, date):
    """True when no end time has been recorded for *date*'s calendar day
    (the accessor returns DateTime.MinValue as its 'not set' sentinel)."""
    midnight = DateTime(date.date.year, date.date.month, date.date.day)
    return DateTime.MinValue == self.accessor.ReadEndTime(midnight)
def converttoDateTime(self, date, hours):
    """Merge a date-like object and a time-like object into a single
    System.DateTime (seconds resolution)."""
    ymd = (date.year, date.month, date.day)
    hms = (hours.hour, hours.minute, hours.second)
    return DateTime(*(ymd + hms))
def ImportFromSentekIrrimaxWeb(spreadsheetpath):
    """
    <Script>
    <Author>ARE</Author>
    <Description>This will import measurements of Sentek agricultural probes from vendors online platform (www.irrimaxlive.com). All Settings and further description are provided through the configuration spreadsheet </Description>
    <Parameters>
    <Parameter name="spreadsheetpath" type="String">path to config spreadsheet</Parameter>
    </Parameters>
    <ReturnValue type="IType">None</ReturnValue>
    </Script>
    """
    # write your code here
    # (Converted from a notebook — the "In[ ]" markers below are cell
    # boundaries left over from the conversion.)

    # In[ ]:

    # get Settings from Spreadsheet
    print("reading settings from {}:".format(spreadsheetpath))
    sheetName = "Configuration"
    sheetMgr = app.Modules.Get("Spreadsheet Manager")
    importSheetConfig = sheetMgr.OpenSpreadsheet(spreadsheetpath)
    api_key = sheetMgr.GetCellValue(importSheetConfig, sheetName, 2, 1)
    print("\tusing API Key: "+api_key)
    from_time_d = sheetMgr.GetCellValue(importSheetConfig, sheetName, 3, 1)
    # Spreadsheet dates are day counts since 1899-12-30 (Excel serial dates).
    from_time = DateTime(1899,12,30).AddDays(from_time_d)
    print("\tabsolute import period starts {} ".format(from_time))
    to_time_d = sheetMgr.GetCellValue(importSheetConfig, sheetName, 4, 1)
    to_time = DateTime(1899,12,30).AddDays(to_time_d)
    print("\tabsolute import period ends {}".format(to_time))
    basepath = sheetMgr.GetCellValue(importSheetConfig, sheetName, 5, 1)
    print("\troot folder for import "+basepath)
    relative_to_now = sheetMgr.GetCellValue(importSheetConfig, sheetName, 6, 1)
    print("\timport interval relative to current time: {}".format(relative_to_now))
    relative_interval = sheetMgr.GetCellValue(importSheetConfig, sheetName, 7, 1)
    print("\trelative import period {} h".format(relative_interval))
    # Relative mode overrides the absolute window with [now - interval, now + 1 day].
    if relative_to_now:
        from_time = System.DateTime.Now.AddHours(-relative_interval)
        to_time = System.DateTime.Now.AddDays(1)
        print("Importing data for last {} h + 1 day lead time ({} to {}).".format(relative_interval, from_time, to_time))
    else:
        print("Importing all available data between {} and {}".format(from_time, to_time))

    # In[ ]:

    # get list of loggers from API
    url_getloggers = "http://www.irrimaxlive.com/api/?cmd=getloggers&key="+api_key
    print("reading " + url_getloggers)
    xml_string = urllib2.urlopen(url_getloggers).read()
    # remove encoded characters (strip anything non-printable before parsing)
    printable = set(string.printable)
    xml_string = filter(lambda x: x in printable, xml_string)
    # parse string to XML object
    xml_tree = etree.ElementTree(etree.fromstring(xml_string))

    # In[ ]:

    # print all loggers
    for logger in xml_tree.iter("Logger"):
        print("found logger {} (id={})".format(logger.attrib["name"], logger.attrib["id"]))

    # In[ ]:

    # Compact 'YYYYMMDDHHMMSS' stamps for the getreadings query string.
    datetimeformat = "{:04d}{:02d}{:02d}{:02d}{:02d}{:02d}"
    from_str = datetimeformat.format(from_time.Year, from_time.Month, from_time.Day, from_time.Hour, from_time.Minute, from_time.Second)
    to_str = datetimeformat.format(to_time.Year, to_time.Month, to_time.Day, to_time.Hour, to_time.Minute, to_time.Second)
    url_getreadings_byid = "http://www.irrimaxlive.com/api/?cmd=getreadings&key={}&id={}&from={}&to={}"

    # In[ ]:

    def timeseries_exists(path):
        # True if a time series already exists at `path`.
        # NOTE(review): this helper is defined twice in this function
        # (see the identical copy further down); the second definition wins.
        tsmgr = app.Modules.Get('Time series Manager')
        if tsmgr is None:
            raise NameError('Could not load time series manager')
        if tsmgr.TimeSeriesList.Fetch(path) is None:
            return False
        else:
            return True

    # In[ ]:

    # NOTE(review): leftover debug output from the notebook conversion.
    print(System.Double(1.))

    # In[ ]:

    def CreateTimeSeries(timeSeries, unitType, unitVariable, valueType):
        """
        <Script>
        <Author>jga/are</Author>
        <Description>Create time series</Description>
        <Parameters>
        <Parameter name="timeSeries" type="string">destination time series name</Parameter>
        <Parameter name="unitType" type="string">unit type</Parameter>
        <Parameter name="unitVariable" type="string">Variable type</Parameter>
        <Parameter name="valueType" type="string">"Instantaneous", "Accumulated", "Step Accumulated" or "Reverse Mean Step Accumulated"</Parameter>
        </Parameters>
        </Script>
        """
        timeSeriesManager = app.Modules.Get('Time series Manager')
        if timeSeriesManager is None:
            raise NameError('Could not load time series manager')
        dataSeries = GetDataSeries(timeSeries)
        if dataSeries is None:
            # Series does not exist yet: create, configure and register it.
            dataSeries = timeSeriesManager.TimeSeriesList.CreateNew(timeSeries)
            dataSeries.YAxisVariable = unitType
            # for Rainfall Depth time series, create as Accumulated, Rainfall Step Accumulated others default to Instantaneous
            if valueType == "Instantaneous":
                dataSeries.ValueType = DataSeriesValueType.Instantaneous
            elif valueType == "Accumulated":
                dataSeries.ValueType = DataSeriesValueType.Accumulated
            elif valueType == "Step Accumulated":
                dataSeries.ValueType = DataSeriesValueType.Step_Accumulated
            elif valueType == "Reverse Mean Step Accumulated":
                dataSeries.ValueType = DataSeriesValueType.Reverse_Mean_Step_Accumulated
            elif type(valueType) == DataSeriesValueType:
                # if dataseries value type has been provided, assign it
                dataSeries.ValueType = valueType
            try:
                dataSeries.SetYAxisUnit(unitVariable, False)
            # robustness againast unit name change between MIKE 2014 -> 2016
            except System.Exception as e:
                if unitVariable == "m^3/day":  # 2014 unit
                    unitVariable = "m^3/d"  # 2016 unit
                    dataSeries.SetYAxisUnit(unitVariable, False)
                elif unitVariable == "m^3/d":  # 2016 unit
                    unitVariable = "m^3/day"  # 2014 unit
                    dataSeries.SetYAxisUnit(unitVariable, False)
                else:
                    raise e  # something else is wrong
            timeSeriesManager.TimeSeriesList.Add(dataSeries)
        else:
            # Series exists: wipe its data so the import starts clean.
            dataSeries.DeleteAll()
            dataSeries.ClearData()
        del dataSeries

    # In[ ]:

    def timeseries_exists(path):
        # Duplicate of the helper above (see NOTE there).
        tsmgr = app.Modules.Get('Time series Manager')
        if tsmgr is None:
            raise NameError('Could not load time series manager')
        if tsmgr.TimeSeriesList.Fetch(path) is None:
            return False
        else:
            return True

    # In[ ]:

    def GetDataSeries(timeSeries):
        """
        <Script>
        <Author>admin</Author>
        <Description>write python list to time series</Description>
        <Parameters>
        <Parameter name="timeSeries" type="string">destination time series path</Parameter>
        </Parameters>
        </Script>
        """
        timeSeriesManager = app.Modules.Get('Time series Manager')
        if timeSeriesManager is None:
            raise NameError('Could not load time series manager')
        dataSeries = timeSeriesManager.TimeSeriesList.Fetch(timeSeries)
        return dataSeries

    # In[ ]:

    def add_steps_to_ts(plist, ts_path):
        # Append (DateTime, float) pairs from plist that are newer than the
        # series' last existing step, then persist the series.
        tmgr = app.Modules.Get('Time series Manager')
        ts = tmgr.TimeSeriesList.Fetch(ts_path)
        timestepts = ts.FetchAll()
        if(timestepts.Count > 0):
            lastTimestep = timestepts[timestepts.Count - 1].XValue
        else:
            lastTimestep = DateTime.MinValue
        count = 0
        for x, y in plist:
            date = x
            if date > lastTimestep:
                value = System.Double(y)
                step = ts.CreateNew()
                step.XValue = date
                step.YValue = value
                ts.Add(step)
                count+=1
        tmgr.TimeSeriesList.Update(ts)

    # In[ ]:

    # iterate all loggers:
    for logger in xml_tree.iter("Logger"):
        print("logger {} (id={})".format(logger.attrib["name"], logger.attrib["id"]))
        # download logger data (CSV; one column per sensor)
        logger_id = logger.attrib["id"]
        url_request = url_getreadings_byid.format(api_key, logger_id , from_str, to_str)
        print("reading data from "+url_request)
        csv_string = urllib2.urlopen(url_request).read()
        # create dictionary {header name: column number}
        # (header text before '(' is the sensor name; the rest is its unit)
        headers = StringIO(csv_string).readline().split(",")
        header_of = {headers[i].split("(")[0]:i for i in range(len(headers))}
        # iterate over sites > probes > sensors
        for site in logger.iter("Site"):
            print("\tsite {}".format(site.attrib["name"]))
            for probe in site.iter("Probe"):
                print("\t\tprobe {}".format(probe.attrib["name"]))
                for sensor in probe.iter("Sensor"):
                    print("\t\t\tsensor {}: {} ({})".format(sensor.attrib["name"], sensor.attrib["type"], sensor.attrib["unit"]))
                    column = header_of[sensor.attrib["name"]]
                    # sensor logics: map vendor sensor type/unit onto the
                    # MIKE variable/unit names; unknown types are skipped.
                    sensor_variable = None
                    sensor_unit = None
                    sensor_factor = 1.
                    sensor_comment = ""
                    if sensor.attrib["type"] == "Voltage":
                        sensor_variable = "Voltage"
                        sensor_comment = sensor.attrib["description"]
                        if sensor.attrib["unit"] == "V":
                            sensor_unit = "V"
                    if sensor.attrib["type"] == "Soil Water Content":
                        sensor_variable = "Volumetric Water Content"
                        sensor_comment = "{} cm".format(sensor.attrib["depth_cm"])
                        if sensor.attrib["unit"] == "mm":
                            sensor_unit = "%"
                    if sensor.attrib["type"] == "V.I.C.":
                        sensor_variable = "Undefined"
                        sensor_comment = "{} cm".format(sensor.attrib["depth_cm"])
                        if sensor.attrib["unit"] == "VIC":
                            sensor_unit = "-"
                    if sensor.attrib["type"] == "Temperature":
                        sensor_variable = "Temperature"
                        sensor_comment = "{} cm".format(sensor.attrib["depth_cm"])
                        if sensor.attrib["unit"] == "C":
                            sensor_unit = "deg C"
                    if sensor_variable is None:
                        print("unknown sensor type "+sensor.attrib["type"])
                    if sensor_unit is None:
                        print("unknow sensor unit "+sensor.attrib["unit"])
                    if sensor_variable is None or sensor_unit is None:
                        print("skipped.")
                        continue
                    # set path of time series
                    ts_path = basepath+"/{}/{}/{}/{}({})".format(logger.attrib["name"], site.attrib["name"], probe.attrib["name"], sensor.attrib["name"], sensor_comment)
                    # check if TS exists and create if necessary
                    if not timeseries_exists(ts_path):
                        print("\t\t\t\tCreating Time Series "+str(ts_path))
                        CreateTimeSeries(ts_path, sensor_variable, sensor_unit, "Instantaneous")
                    # Add new measurements to time series
                    # create data list [(DateTime, float)] from column in csv
                    ts = []
                    csv = StringIO(csv_string)
                    csv.readline()  # discard headers
                    while True:
                        line = csv.readline()
                        if line == "":
                            break
                        words = line.split(',')
                        # parse string to DateTime (via datetime)
                        dt = datetime.datetime.strptime(words[0], "%Y/%m/%d %H:%M:%S")
                        DT = DateTime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
                        # parse string to float
                        ts.append((DT, float(words[column])))
                    if len(ts) == 0:
                        print("\t\t\t\tNo new measurements found.")
                    else:
                        print("\t\t\t\tAdding {} measurments to {} ".format(len(ts), ts_path))
                        add_steps_to_ts(ts, ts_path)
# -*- coding: utf-8 -*- """ jinja2._ipysupport ~~~~~~~~~~~~~~~~~~ IronPython support library. This library exports functionality from the CLR to Python that is normally available in the standard library. :copyright: Copyright 2008 by Armin Ronacher. :license: BSD. """ from System import DateTime from System.IO import Path, File, FileInfo epoch = DateTime(1970, 1, 1) class _PathModule(object): """A minimal path module.""" sep = str(Path.DirectorySeparatorChar) altsep = str(Path.AltDirectorySeparatorChar) pardir = '..' def join(self, path, *args): args = list(args[::-1]) while args: path = Path.Combine(path, args.pop()) return path def isfile(self, filename):
def ImportFromPesslFieldclimate(spreadsheetpath):
    """
    <Script>
    <Author>ARE</Author>
    <Description>This will import measurements of Pessl weather station from the vendors online platform (www.fieldclimate.com). All Settings and further description are provided through the configuration spreadsheet. </Description>
    <Parameters>
    <Parameter name="spreadsheetpath" type="String">path to config spreadsheet</Parameter>
    </Parameters>
    <ReturnValue type="IType">None</ReturnValue>
    </Script>
    """
    # SCRIPT PARAMETER
    #spreadsheetpath = "/DataImport/Timeseries/ImportFromPesslFieldclimateWeb/Configuration"
    # (Converted from a notebook — the "In[ ]" markers below are cell
    # boundaries left over from the conversion.)

    # In[ ]:

    # get Settings from Spreadsheet
    print("reading settings from {}:".format(spreadsheetpath))
    sheetName = "Configuration"
    sheetMgr = app.Modules.Get("Spreadsheet Manager")
    importSheetConfig = sheetMgr.OpenSpreadsheet(spreadsheetpath)
    # read settings
    auth_user = sheetMgr.GetCellValue(importSheetConfig, sheetName, 2, 1)
    if auth_user is None:
        print("\tuser name:\tNOT SET!")
    else:
        print("\tuser name:\t{}".format(auth_user))
    auth_passw = sheetMgr.GetCellValue(importSheetConfig, sheetName, 3, 1)
    if auth_passw is None:
        print("\tpassword: NOT SET!")
    else:
        # Never echo the password itself, only its length.
        print("\tpassword:\t" + "*" * len(auth_passw))
    basepath = sheetMgr.GetCellValue(importSheetConfig, sheetName, 4, 1)
    print("\troot folder:\t " + basepath)
    relative_to_now = sheetMgr.GetCellValue(importSheetConfig, sheetName, 5, 1)
    print("\timport interval relative to current time: {}".format(
        relative_to_now))
    from_time_d = sheetMgr.GetCellValue(importSheetConfig, sheetName, 6, 1)
    # Spreadsheet dates are day counts since 1899-12-30 (Excel serial dates).
    from_time = DateTime(1899, 12, 30).AddDays(from_time_d)
    print("\tabsolute import period starts {} ".format(from_time))
    to_time_d = sheetMgr.GetCellValue(importSheetConfig, sheetName, 7, 1)
    to_time = DateTime(1899, 12, 30).AddDays(to_time_d)
    print("\tabsolute import period ends {}".format(to_time))
    relative_interval = sheetMgr.GetCellValue(importSheetConfig, sheetName, 8, 1)
    print("\trelative import period {} h".format(relative_interval))
    max_rows = int(sheetMgr.GetCellValue(importSheetConfig, sheetName, 9, 1))
    print("\tmax rows to read: {}".format(max_rows))
    # Relative mode overrides the absolute window with [now - interval, now + 1 day].
    if relative_to_now:
        from_time = System.DateTime.Now.AddHours(-relative_interval)
        to_time = System.DateTime.Now.AddDays(1)
        print("Importing data for last {} h + 1 day lead time ({} to {}).".
              format(relative_interval, from_time, to_time))
    else:
        print("Importing all available data between {} and {}".format(
            from_time, to_time))

    # In[ ]:

    def api_request(command, parameters=None):
        # GET {base}{command}?user_name=..&user_passw=..[&extra params]
        # and decode the JSON response.
        url_base = "http://www.fieldclimate.com/api/"
        str_auth = "user_name={}&user_passw={}".format(auth_user, auth_passw)
        str_request = url_base + command + "?" + str_auth
        if parameters is not None:
            str_request += "&" + "&".join(parameters)
        print("reading " + str_request)
        try:
            json_str = urllib2.urlopen(str_request).read()
            json_dict = json.loads(json_str)
            return json_dict
        except urllib2.HTTPError as e:
            print("HTTPError reading " + str_request)
            raise e

    # In[ ]:

    def api_request2(command, parameters=[]):
        # Variant of api_request against the index.php action endpoint.
        # NOTE(review): mutable default argument — `parameters` is appended
        # to, so repeated calls relying on the default would accumulate
        # entries.  Latent here (every call site passes a fresh list), but
        # should be fixed before wider use.
        url_base = "http://www.fieldclimate.com/api/index.php?"
        parameters.append("action=" + command)
        parameters.append("user_name=" + auth_user)
        parameters.append("user_passw=" + auth_passw)
        str_request = url_base + "&".join(parameters)
        print(str_request)
        try:
            json_str = urllib2.urlopen(str_request).read()
            json_dict = json.loads(json_str)
            return json_dict
        except urllib2.HTTPError as e:
            print("HTTPError reading " + str_request)
            raise e

    # In[ ]:

    def timeseries_exists(path):
        # True if a time series already exists at `path`.
        tsmgr = app.Modules.Get('Time series Manager')
        if tsmgr is None:
            raise NameError('Could not load time series manager')
        if tsmgr.TimeSeriesList.Fetch(path) is None:
            return False
        else:
            return True

    # In[ ]:

    def GetDataSeries(timeSeries):
        """
        <Script>
        <Author>admin</Author>
        <Description>write python list to time series</Description>
        <Parameters>
        <Parameter name="timeSeries" type="string">destination time series path</Parameter>
        </Parameters>
        </Script>
        """
        timeSeriesManager = app.Modules.Get('Time series Manager')
        if timeSeriesManager is None:
            raise NameError('Could not load time series manager')
        dataSeries = timeSeriesManager.TimeSeriesList.Fetch(timeSeries)
        return dataSeries

    # In[ ]:

    def add_steps_to_ts(plist, ts_path):
        # Append (DateTime, float) pairs from plist that are newer than the
        # series' last existing step, then persist the series.
        tmgr = app.Modules.Get('Time series Manager')
        ts = tmgr.TimeSeriesList.Fetch(ts_path)
        timestepts = ts.FetchAll()
        if (timestepts.Count > 0):
            lastTimestep = timestepts[timestepts.Count - 1].XValue
        else:
            lastTimestep = DateTime.MinValue
        count = 0
        for x, y in plist:
            date = x
            if date > lastTimestep:
                value = System.Double(y)
                step = ts.CreateNew()
                step.XValue = date
                step.YValue = value
                ts.Add(step)
                count += 1
        tmgr.TimeSeriesList.Update(ts)

    # In[ ]:

    def CreateTimeSeries(timeSeries, unitType, unitVariable, valueType):
        """
        <Script>
        <Author>jga/are</Author>
        <Description>Create time series</Description>
        <Parameters>
        <Parameter name="timeSeries" type="string">destination time series name</Parameter>
        <Parameter name="unitType" type="string">unit type</Parameter>
        <Parameter name="unitVariable" type="string">Variable type</Parameter>
        <Parameter name="valueType" type="string">"Instantaneous", "Accumulated", "Step Accumulated" or "Reverse Mean Step Accumulated"</Parameter>
        </Parameters>
        </Script>
        """
        timeSeriesManager = app.Modules.Get('Time series Manager')
        if timeSeriesManager is None:
            raise NameError('Could not load time series manager')
        dataSeries = GetDataSeries(timeSeries)
        if dataSeries is None:
            # Series does not exist yet: create, configure and register it.
            dataSeries = timeSeriesManager.TimeSeriesList.CreateNew(timeSeries)
            dataSeries.YAxisVariable = unitType
            # for Rainfall Depth time series, create as Accumulated, Rainfall Step Accumulated others default to Instantaneous
            if valueType == "Instantaneous":
                dataSeries.ValueType = DataSeriesValueType.Instantaneous
            elif valueType == "Accumulated":
                dataSeries.ValueType = DataSeriesValueType.Accumulated
            elif valueType == "Step Accumulated":
                dataSeries.ValueType = DataSeriesValueType.Step_Accumulated
            elif valueType == "Reverse Mean Step Accumulated":
                dataSeries.ValueType = DataSeriesValueType.Reverse_Mean_Step_Accumulated
            elif type(
                    valueType
            ) == DataSeriesValueType:  # if dataseries value type has been provided, assign it
                dataSeries.ValueType = valueType
            try:
                dataSeries.SetYAxisUnit(unitVariable, False)
            # robustness againast unit name change between MIKE 2014 -> 2016
            except System.Exception as e:
                if unitVariable == "m^3/day":  # 2014 unit
                    unitVariable = "m^3/d"  # 2016 unit
                    dataSeries.SetYAxisUnit(unitVariable, False)
                elif unitVariable == "m^3/d":  # 2016 unit
                    unitVariable = "m^3/day"  # 2014 unit
                    dataSeries.SetYAxisUnit(unitVariable, False)
                else:
                    raise e  # something else is wrong
            timeSeriesManager.TimeSeriesList.Add(dataSeries)
        else:
            # Series exists: wipe its data so the import starts clean.
            dataSeries.DeleteAll()
            dataSeries.ClearData()
        del dataSeries

    # In[ ]:

    def to_datetime(DT):
        # NOTE(review): the DT argument is ignored — this always converts
        # the enclosing from_time.  All current call sites happen to pass
        # from_time, so the bug is latent; fix before calling it with
        # anything else.
        return datetime.datetime(from_time.Year, from_time.Month,
                                 from_time.Day, from_time.Hour,
                                 from_time.Minute, from_time.Second)

    to_datetime(from_time)

    # ## Dev Script

    # In[ ]:

    # iterate over list of weather stations
    weather_stations = api_request(
        "CIDIStationList/GetStations")['ReturnDataSet']
    print("found {} weather stations:".format(len(weather_stations)))
    for station_id in weather_stations:
        # get information on station and its sensors
        station_name = weather_stations[station_id]["custom_name"]
        print("\t{} ({})".format(station_id, station_name))
        station_sensors = api_request(
            "CIDIStationSensors/Get",
            ["station_name=" + station_id])['ReturnDataSet']
        # station name: f_name / serial number
        print("\tfound {} sensors:".format(len(station_sensors)))
        print("\ttchannel\tcode\tunit\tdescription")
        for sensor_id in range(len(station_sensors)):
            sensor_info = station_sensors[sensor_id]
            sensor_channel = sensor_info["f_sensor_ch"]
            sensor_code = sensor_info["f_sensor_code"]
            sensor_name = sensor_info["f_name"]
            sensor_name_user = sensor_info["f_sensor_user_name"]
            sensor_unit = sensor_info["f_unit"]
            print(u"\t{}\t{}\t[{}]\t{} ('{}')".format(sensor_channel,
                                                      sensor_code,
                                                      sensor_unit,
                                                      sensor_name,
                                                      sensor_name_user))
        # Lookup table used below to resolve channel/name from a sensor code.
        sensor_by_code = {s["f_sensor_code"]: s for s in station_sensors}
        # read out stations measurment data
        response = api_request2("CIDIStationData3_GetFromDate", [
            "station_name=" + station_id, "row_count=" + str(max_rows),
            "dt_from=" + to_datetime(from_time).strftime("%Y-%m-%dT%H:%M:%S")
        ])
        for reading in response["ReturnDataSet"]:
            reading_time = reading["f_date"]
            sys.stdout.write(
                "\nReading Measurements for {}:".format(reading_time))
            # parse string to DateTime (via datetime)
            dt = datetime.datetime.strptime(reading_time, "%Y-%m-%d %H:%M:%S")
            DT = DateTime(dt.year, dt.month, dt.day, dt.hour, dt.minute,
                          dt.second)
            for sensor in reading.keys():
                # skip if not a sensor: keys look like
                # "sens_<type>_<channel>_<code>"
                if not sensor.split("_")[0] == "sens":
                    continue
                _, sensor_type, sensor_channel, sensor_code = sensor.split("_")
                sensor_reading = reading[sensor]
                # skip min/max values or if no reading was taken
                if sensor_type in ["min", "max"]:
                    continue
                if sensor_reading is None:
                    continue
                # sensor logics: map vendor sensor code/type onto the MIKE
                # variable, unit, scale factor and value type; codes not
                # listed below leave value_type None and are skipped.
                sensor_variable = None
                sensor_unit = None
                sensor_factor = 1.
                sensor_comment = ""
                value_type = None
                if sensor_code == "0":  # air temperature
                    sensor_variable = "Temperature"
                    sensor_unit = "deg C"
                    if sensor_type == "last":
                        value_type = "Instantaneous"
                    if sensor_type == "aver":
                        value_type = "Reverse Mean Step Accumulated"  # TODO: check if this makes sense in EUM
                if sensor_code == "5":  # Wind Speed
                    sensor_variable = "Wind speed"
                    sensor_unit = "m/s"
                    if sensor_type == "last":
                        value_type = "Instantaneous"
                    if sensor_type == "aver":
                        value_type = "Reverse Mean Step Accumulated"  # TODO: check if this makes sense in EUM
                if sensor_code == "6":  # precipitation
                    sensor_variable = "Rainfall"
                    sensor_unit = "mm"
                    if sensor_type == "sum":
                        value_type = "Reverse Mean Step Accumulated"  # TODO: check if this makes sense in EUM
                if sensor_code == "7":  # Battery Voltage
                    sensor_variable = "Voltage"
                    sensor_unit = "V"
                    sensor_factor = 1 / 1000.  # reported in mV
                    if sensor_type == "last":
                        value_type = "Instantaneous"
                    if sensor_type == "aver":
                        value_type = "Reverse Mean Step Accumulated"  # TODO: check if this makes sense in EUM
                if sensor_code == "21":  # Dew Point
                    sensor_variable = "Temperature"
                    sensor_unit = "deg C"
                    if sensor_type == "last":
                        value_type = "Instantaneous"
                    if sensor_type == "aver":
                        value_type = "Reverse Mean Step Accumulated"  # TODO: check if this makes sense in EUM
                if sensor_code == "1201":  # ET_0
                    sensor_variable = "Evaporation"
                    sensor_unit = "mm"
                    if sensor_type == "aver":
                        value_type = "Reverse Mean Step Accumulated"  # TODO: check if this makes sense in EUM
                # TODO: sensor codes not yet mapped:
                #f_sensor_code f_name f_sensor_user_name f_unit
                #4 Leaf Wetness Leaf Wetness Min
                #16 Soil temperature Bodentemperatur C
                #25 VPD VPD kPa
                #30 Solar Panel Solarpanel mV
                #43 Water meter 1L - Differential Water Meter 1L - Differential L
                #506 HC Air temperature HC Lufttemperatur C
                #506 HC Air temperature HC Air temperature C
                #507 HC Relative humidity HC Luftfeuchte %
                #507 HC Relative humidity HC Relative humidity %
                #508 HC Serial Number HC Serial Number
                #600 Solar radiation Solar radiation Dgt W/mm
                # set path of time series
                ts_path = basepath + "/{} ({})/ch{:04d} {}".format(
                    station_id, station_name,
                    int(sensor_by_code[sensor_code]["f_sensor_ch"]),
                    sensor_by_code[sensor_code]["f_name"])
                desc = "ch{:04d} {}".format(
                    int(sensor_by_code[sensor_code]["f_sensor_ch"]),
                    sensor_by_code[sensor_code]["f_name"])
                if sensor_variable is None or sensor_unit is None or value_type is None:
                    #print("\t\t\t\tskipping {}".format(desc))
                    continue
                # check if TS exists and create if necessary
                if not timeseries_exists(ts_path):
                    print("\nCreating Time Series " + str(ts_path))
                    _ = CreateTimeSeries(ts_path, sensor_variable,
                                         sensor_unit, value_type)
                sys.stdout.write(" ch{:04d}".format(
                    int(sensor_by_code[sensor_code]["f_sensor_ch"])))
                #print("\t\t\t\tAdding {}".format(desc))
                add_steps_to_ts([(DT, float(sensor_reading) * sensor_factor)],
                                ts_path)
def update(self):
    '''
    Overridden to implement abstract method defined in superclass.
    Writes all eligible properties in this object out to their
    counterparts in ComicRack (i.e. back into the ComicBook object that
    was passed into __init__.)

    Raises an Exception if any property reported by updated_properties()
    was not recognized and written out.
    '''
    ok_to_update = self.updated_properties()

    # removes commas from the given string.  commas are ComicRack's
    # list separator in multi-value fields, so they may not appear
    # inside a single value.
    def cleanup(s):
        s = re.sub(r",(\s+)", r"\1", s) if s else ""
        return re.sub(r",", r" ", s)

    # scalar properties map 1:1 onto a ComicRack ComicBook property
    scalar_props = (
        ("series_s", "Series"),
        ("issue_num_s", "Number"),
        ("volume_year_n", "Volume"),
        ("format_s", "Format"),
        ("title_s", "Title"),
        ("summary_s", "Summary"),
        ("publisher_s", "Publisher"),
        ("imprint_s", "Imprint"),
        ("notes_s", "Notes"),
        ("webpage_s", "Web"),
        ("rating_n", "CommunityRating"),
    )
    for key, cr_prop in scalar_props:
        if key in ok_to_update:
            setattr(self.__crbook, cr_prop, getattr(self, key))
            ok_to_update.remove(key)

    # list-valued properties are flattened into one comma-separated
    # string, after scrubbing embedded commas out of each element
    list_props = (
        ("crossovers_sl", "AlternateSeries"),
        ("characters_sl", "Characters"),
        ("teams_sl", "Teams"),
        ("locations_sl", "Locations"),
        ("writers_sl", "Writer"),
        ("pencillers_sl", "Penciller"),
        ("inkers_sl", "Inker"),
        ("colorists_sl", "Colorist"),
        ("letterers_sl", "Letterer"),
        ("cover_artists_sl", "CoverArtist"),
        ("editors_sl", "Editor"),
        ("tags_sl", "Tags"),
    )
    for key, cr_prop in list_props:
        if key in ok_to_update:
            setattr(self.__crbook, cr_prop,
                    ', '.join([cleanup(x) for x in getattr(self, key)]))
            ok_to_update.remove(key)

    # scraper bookkeeping keys are persisted as ComicRack custom values
    if "issue_key_s" in ok_to_update:
        self.__crbook.SetCustomValue(
            PluginBookData.__ISSUE_KEY, sstr(self.issue_key_s))
        ok_to_update.remove("issue_key_s")
    if "series_key_s" in ok_to_update:
        self.__crbook.SetCustomValue(
            PluginBookData.__SERIES_KEY, sstr(self.series_key_s))
        ok_to_update.remove("series_key_s")

    # dates are a little special. any element in the data could be blank
    # (missing), and we only update the released date if NONE of the
    # elements are missing.  the published date, however, can have a
    # missing day, or a missing day and month, and we'll still update
    # the rest
    if "rel_year_n" in ok_to_update and \
            "rel_month_n" in ok_to_update and \
            "rel_day_n" in ok_to_update:
        if self.rel_year_n != BookData.blank("rel_year_n") and \
                self.rel_month_n != BookData.blank("rel_month_n") and \
                self.rel_day_n != BookData.blank("rel_day_n"):
            date = DateTime(self.rel_year_n, self.rel_month_n,
                            self.rel_day_n)
            self.__crbook.ReleasedTime = date
        ok_to_update.remove("rel_year_n")
        ok_to_update.remove("rel_month_n")
        ok_to_update.remove("rel_day_n")

    if "pub_year_n" in ok_to_update:
        if self.pub_year_n != BookData.blank("pub_year_n"):
            self.__crbook.Year = self.pub_year_n
        ok_to_update.remove("pub_year_n")
    if "pub_month_n" in ok_to_update:
        if self.pub_year_n != BookData.blank("pub_year_n") and \
                self.pub_month_n != BookData.blank("pub_month_n"):
            self.__crbook.Month = self.pub_month_n
        ok_to_update.remove("pub_month_n")
    if "pub_day_n" in ok_to_update:
        if self.pub_year_n != BookData.blank("pub_year_n") and \
                self.pub_month_n != BookData.blank("pub_month_n") and \
                self.pub_day_n != BookData.blank("pub_day_n"):
            self.__crbook.Day = self.pub_day_n
        ok_to_update.remove("pub_day_n")

    # we only download and install a thumbnail for fileless CR books, and
    # even then, only if the user's prefs indicate that they want us to
    if "cover_url_s" in ok_to_update:
        already_has_thumb = self.__crbook.CustomThumbnailKey
        book_is_fileless = not self.path_s
        config = self.__scraper.config
        if not self.cover_url_s or not book_is_fileless or \
                not config.download_thumbs_b or \
                (already_has_thumb and config.preserve_thumbs_b):
            pass
        else:
            image = db.query_image(self.cover_url_s)
            if not image:
                log.debug("ERROR: can't download thumbnail: ",
                          self.cover_url_s)
            else:
                cr = self.__scraper.comicrack.App
                success = cr.SetCustomBookThumbnail(self.__crbook, image)
                if not success:
                    log.debug("ERROR: can't set thumbnail: ",
                              self.cover_url_s)
        ok_to_update.remove("cover_url_s")

    # a nice safety check to make sure we didn't miss anything
    if len(ok_to_update) > 0:
        for s in ok_to_update:
            log.debug(self.__class__.__name__ +
                      " can't update property: " + s)
        raise Exception("unhandled properties: " + ", ".join(ok_to_update))
print(value.FullName) # set an object's Geo Location FieldDevice = connection.GetObject("SELogger.2820015789 DLLTE4-SA.Logger" ) GeoAgg = FieldDevice.Aggregates["GISLocationSource"]; print( GeoAgg.Enabled); GeoAgg.ClassName = "CGISLocationSrcDynamic" print( GeoAgg.ClassName); GeoAgg["Latitude"] = 3 GeoAgg["Longitude"] = 4 # Find and set internal point values pointObject = connection.GetObject("Example Projects.Oil and Gas.Transportation.Graphics.End Station.Valve 3.Position Control" ) for i in range(1,100,30): pointObject.InvokeMethod("CurrentValue", i ) print( "Point set to: " + str(pointObject.GetProperty("CurrentValue" ) ) ) # Find a historic point pointObject2 = connection.GetObject("Example Projects.Oil and Gas.Transportation.Inflow Computer.GasFlow" ) from System import DateTime # To support .Net date/time # Historic arguments are start, end, index(=0), maxrecords, forwards=true, reason="All" hisStart = DateTime( 2021,1,19,0,0,0 ) hisEnd = DateTime( 2021,1,20,0,0,0 ) hisArgs = [ hisStart, hisEnd, 0, 100, True, "All" ] # Call methods to get values and times. Could also read quality, or use .ProcessedValue to get fixed interval data hisValues = pointObject2.InvokeMethod("Historic.RawValues", hisArgs ) hisQualities = pointObject2.InvokeMethod("Historic.RawQualities", hisArgs ) hisTimeStamps = pointObject2.InvokeMethod("Historic.RawTimestamps", hisArgs ) for i in range( hisTimeStamps.Length): print( hisTimeStamps[i], hisValues[i], hisQualities[i] )
def importJIRATasks(actionContext):
    '''
    time cockpit server action: synchronizes JIRA issues into Task
    entities, one pass per Project row that has a JiraProject set.

    For each such project it queries JIRA for issues updated since the
    newest JiraUpdated value already stored, inserts tasks for new
    issues and updates existing tasks whose imported fields changed.
    Each project is processed in its own transaction; failures roll
    back and are logged as warnings.  (IronPython 2 / .NET interop.)
    '''
    from TimeCockpit.Data import EntityObject, DataContextType
    dc = actionContext.DataContext
    # imports must run server-side where the full data context is available
    if dc.DataContextType != DataContextType.Server:
        raise ArgumentException(
            "Action can only be executed on a server data context.")

    clr.AddReference("TimeCockpit.Common")
    from System.Collections.Generic import List
    from System.Globalization import CultureInfo
    from TimeCockpit.Common import Logger, LogLevel
    from System import DateTime, String, Array, Convert
    from System.Text import Encoding
    clr.AddReference("System.Core")
    import System
    clr.ImportExtensions(System.Linq)  # enables .NET LINQ extension methods

    # JIRA API
    class Issue(object):
        # plain data holder for one JIRA issue as returned by Jira.search
        def __init__(self, key=None, type=None, summary=None, link=None,
                     status=None, updated=None, timeOriginalEstimate=None,
                     subTaskKeys=None):
            self.Key = key
            self.Type = type
            self.Summary = summary
            self.Link = link
            self.Status = status
            self.Updated = updated
            self.TimeOriginalEstimate = timeOriginalEstimate
            self.SubTaskKeys = subTaskKeys

    class Jira(object):
        # minimal JIRA REST client using basic authentication
        def __init__(self, repository, username, password):
            from System import Uri
            self.repository = Uri(repository)
            self.username = username
            self.password = password
            # only these issue fields are requested from the REST API
            self.requestedFields = [
                "summary", "issuetype", "status", "updated",
                "timeoriginalestimate", "subtasks"
            ]

        def search(self, jql):
            # runs the given JQL query against /rest/api/2/search and
            # returns a list of Issue objects
            clr.AddReference("System.Web")
            from System.Web import HttpUtility
            from System.Net import HttpWebRequest
            from System.IO import StreamReader
            clr.AddReference("Newtonsoft.Json")
            from Newtonsoft.Json import JsonTextReader
            from Newtonsoft.Json.Linq import JObject
            from System import Decimal
            import Newtonsoft.Json
            clr.ImportExtensions(Newtonsoft.Json.Linq)
            # HTTP basic auth header value
            usernamepw = Convert.ToBase64String(
                Encoding.UTF8.GetBytes(
                    String.Format("{0}:{1}", self.username, self.password)))
            fieldsparam = String.Join(",", self.requestedFields)
            requestUri = String.Format(
                "{0}rest/api/2/search?jql={1}&fields={2}",
                self.repository.AbsoluteUri, HttpUtility.UrlEncode(jql),
                fieldsparam)
            Logger.Write(LogLevel.Verbose, "Jira.Search: {0}", requestUri)
            request = HttpWebRequest.Create(requestUri)
            request.ContentType = "application/json"
            request.Headers.Add("Authorization", "Basic " + usernamepw)
            request.Method = "GET"
            with request.GetResponse() as response:
                with StreamReader(response.GetResponseStream()) as sr:
                    with JsonTextReader(sr) as jr:
                        result = JObject.Load(jr)
            issues = result["issues"]
            items = list()
            for issue in issues:
                item = Issue()
                item.Key = Newtonsoft.Json.Linq.Extensions.Value[
                    String](issue["key"])
                fields = issue["fields"]
                item.Updated = Newtonsoft.Json.Linq.Extensions.Value[
                    DateTime](fields["updated"])
                # transform seconds to hours
                estimate = Newtonsoft.Json.Linq.Extensions.Value[
                    System.Object](fields["timeoriginalestimate"])
                if estimate is not None:
                    estimate = Newtonsoft.Json.Linq.Extensions.Value[
                        Decimal](fields["timeoriginalestimate"])
                    estimate = estimate / (60.0 * 60.0)
                item.TimeOriginalEstimate = estimate
                status = fields["status"]
                item.Status = Newtonsoft.Json.Linq.Extensions.Value[
                    String](status["name"])
                item.Summary = Newtonsoft.Json.Linq.Extensions.Value[
                    String](fields["summary"])
                type = fields["issuetype"]
                item.Type = Newtonsoft.Json.Linq.Extensions.Value[
                    String](type["name"])
                # browse URL for the issue in the JIRA web UI
                item.Link = self.repository.ToString(
                    ) + "browse/" + item.Key
                subTasks = fields["subtasks"]
                item.SubTaskKeys = System.Linq.Enumerable.Cast[
                    JObject](subTasks).Select(
                        lambda t: Newtonsoft.Json.Linq.Extensions.Value[
                            String](t["key"])).ToArray[String]()
                items.Add(item)
            return items

    # commit=False turns the whole run into a dry run (rollback only)
    commit = True
    # budget values closer than this (in hours) count as unchanged
    timeDelta = 0.01
    jira = Jira("https://....atlassian.net/", "...", "...")
    jiraProjects = dc.Select(
        "From P In Project Where :IsNullOrEmpty(P.JiraProject) = False Select P"
    )
    for jiraProject in jiraProjects:
        dc.BeginTransaction()
        try:
            jiraName = jiraProject.JiraProject
            Logger.Write(LogLevel.Information,
                         "JiraImport: Handling project '{0}'", jiraName)
            projectUuid = jiraProject.ProjectUuid
            # newest JIRA update timestamp we have already imported
            lastUpdated = dc.SelectSingleWithParams({
                "Query":
                "From T In Task Where T.Project = @ProjectUuid Select New With { .LastUpdated = Max(T.JiraUpdated) }",
                "@ProjectUuid": projectUuid
            }).LastUpdated
            if lastUpdated is None:
                # no tasks imported yet -> fetch everything
                lastUpdated = DateTime(1970, 1, 1)
            jqlAdditionalCondition = String.Format(
                " and updated >= '{0}' order by updated asc",
                lastUpdated.ToString("yyyy-MM-dd HH:mm",
                                     CultureInfo.InvariantCulture))
            jql = String.Format("project='{0}'{1}", jiraName,
                                jqlAdditionalCondition)
            issues = jira.search(jql).ToDictionary(lambda i: i.Key)
            if issues.Any():
                # load the existing tasks matching the fetched issue keys
                query = String.Format(
                    "From T In Task.Include(*) Where T.Project = @ProjectUuid And T.Code In ({0}) Select T",
                    String.Join(
                        ", ",
                        issues.Select(lambda i: String.Format('"{0}"', i.Key)).
                        ToArray()))
                tasks = dc.SelectWithParams({
                    "Query": query,
                    "@ProjectUuid": projectUuid
                }).GroupBy(lambda t: t.Code).ToDictionary(
                    lambda g: g.Key, lambda g: g.Single())
                # split fetched issues into ones without / with a task row
                newIssues = issues.Keys.Except(tasks.Keys).ToArray()
                updatedIssues = issues.Keys.Except(newIssues).ToArray()
                Logger.Write(
                    LogLevel.Information,
                    "JiraImport: {0} new issues, {1} updated issues for query {2}",
                    newIssues.Length, updatedIssues.Length, jql)
                for key in newIssues:
                    issue = issues[key]
                    task = dc.CreateTask()
                    task.APP_BudgetInHours = issue.TimeOriginalEstimate
                    task.APP_Code = issue.Key
                    task.APP_Project = jiraProject
                    task.USR_JiraLink = issue.Link
                    task.USR_JiraStatus = issue.Status
                    task.USR_JiraType = issue.Type
                    task.USR_JiraUpdated = issue.Updated
                    task.APP_Description = issue.Summary
                    Logger.Write(LogLevel.Information,
                                 "JiraImport: Adding task {0}", key)
                    dc.SaveObject(task)
                for key in updatedIssues:
                    # copy changed fields over; save only if anything changed
                    changed = False
                    task = tasks[key]
                    issue = issues[key]
                    if task.APP_BudgetInHours <> issue.TimeOriginalEstimate:
                        # only treat as changed when None-ness differs or the
                        # numeric difference exceeds the tolerance
                        if (task.APP_BudgetInHours is None
                                and issue.TimeOriginalEstimate is not None
                            ) or (task.APP_BudgetInHours is not None
                                  and issue.TimeOriginalEstimate is None) or (
                                      abs(task.APP_BudgetInHours -
                                          issue.TimeOriginalEstimate) >
                                      timeDelta):
                            Logger.Write(
                                LogLevel.Verbose,
                                "JiraImport: Changed property for task {0}: {1}",
                                key, "TimeOriginalEstimate")
                            task.APP_BudgetInHours = issue.TimeOriginalEstimate
                            changed = True
                    if task.USR_JiraLink <> issue.Link:
                        Logger.Write(
                            LogLevel.Verbose,
                            "JiraImport: Changed property for task {0}: {1}",
                            key, "Link")
                        task.USR_JiraLink = issue.Link
                        changed = True
                    if task.USR_JiraStatus <> issue.Status:
                        Logger.Write(
                            LogLevel.Verbose,
                            "JiraImport: Changed property for task {0}: {1}",
                            key, "Status")
                        task.USR_JiraStatus = issue.Status
                        changed = True
                    if task.USR_JiraType <> issue.Type:
                        Logger.Write(
                            LogLevel.Verbose,
                            "JiraImport: Changed property for task {0}: {1}",
                            key, "Type")
                        task.USR_JiraType = issue.Type
                        changed = True
                    if task.USR_JiraUpdated <> issue.Updated:
                        Logger.Write(
                            LogLevel.Verbose,
                            "JiraImport: Changed property for task {0}: {1}",
                            key, "Updated")
                        task.USR_JiraUpdated = issue.Updated
                        changed = True
                    if task.APP_Description <> issue.Summary:
                        Logger.Write(
                            LogLevel.Verbose,
                            "JiraImport: Changed property for task {0}: {1}",
                            key, "Summary")
                        task.APP_Description = issue.Summary
                        changed = True
                    if changed:
                        Logger.Write(LogLevel.Information,
                                     "JiraImport: Updating task {0}", key)
                        dc.SaveObject(task)
                    else:
                        Logger.Write(
                            LogLevel.Information,
                            "JiraImport: Skipping unchanged task {0}", key)
            if commit:
                dc.TryCommitTransaction()
            else:
                dc.TryRollbackTransaction()
        except System.Exception, e:
            # one failing project must not abort the remaining projects
            dc.TryRollbackTransaction()
            Logger.Write(
                LogLevel.Warning,
                "JiraImport: Exception while handling {0}: {1}\r\n{2}",
                jiraProject.JiraProject, e.Message, e.StackTrace)
# get slice of date ( just testing some stuff) # # # http://api.mongodb.org/csharp/1.2/html/92a76252-d1b1-1acb-4584-ad2eaeb66091.htm # import clr # external libraries clr.AddReference('MongoDB.Bson') # mongo-db clr.AddReference('MongoDB.Driver') from MongoDB.Bson import * from MongoDB.Driver import * from System import DateTime server = MongoServer.Create('mongodb://localhost') database = server.GetDatabase('henry') collection = database.GetCollection('item.base') idnt = BsonObjectId('51efd5cb773e6f0ab8354d68') start = BsonDateTime(DateTime(2012, 1, 1)) end = BsonDateTime(DateTime(2012, 12, 31)) query = Builders.Query.GT("creationTime", start).LT(end) #print start.ToString() #print end .ToString() #collection.Find({created_on: {$gte: start, $lt: end}}); for row in collection.Find(query): if row['_id'] < idnt: print row['_id'].ToString() print 'danke fuer Ihre Aufmerksamkeit !!'
APPLE_MAIL_READ_STATUS = { model_mail.SEND_STATUS_UNREAD: MessageStatus.Unread, model_mail.SEND_STATUS_READ: MessageStatus.Read, } status_dict = { 0: MessageStatus.Unread, 1: MessageStatus.Read } content_type_priority = { 'text/html': 1, 'text/plain': 2 } epoch = DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc) hexdigits = set(string.hexdigits) # iron python bug fix quopri.b2a_qp = quopri.a2b_qp = None DataRecord = namedtuple("DataRecord", "part partial complete data file") patt = re.compile(r'["\']*([a-zA-Z0-9@&+=)(.,_ -]+)["\']* *(?:<([a-zA-Z0-9@+=._-]+)>)*') @parse_decorator def analyze_emails(mail_dir, extractDeleted, extractSource): pr = ParserResults() res = [] try: