def next(self, dt=None):
    """Print the OHLC values of the current bar to stdout.

    Args:
        dt: Optional timestamp; defaults to the current bar's datetime.
    """
    dt = dt or self.datas[0].datetime.datetime(0)
    bar = self.datas[0]
    stamp = dt.isoformat()
    print('next-start')
    print('%s open: %s' % (stamp, bar.open[0]))
    print('%s high: %s' % (stamp, bar.high[0]))
    print('%s low: %s' % (stamp, bar.low[0]))
    print('%s close: %s' % (stamp, bar.close[0]))
    print('next-end')
def MSG(payload_bytes, counter, datetime, data_rate, coding_date, rssi, snr):
    """ Re-implementation of the MSG class of TTN MQTT hidden and buried somewhere in their code
    :param payload_bytes: Payload bytes (bytes-array)
    :param counter: Counter of the packet (up to 2**16-1)
    :param datetime: Timestamp
    :param data_rate: Data Rate index (from 7 to 12)
    :param coding_date: Coding Ratio index (from 5 to 8)
    :param rssi: RSSI of the transmission
    :param snr: SNR of the transmission
    :return:
    """
    # No decoded fields are simulated; consumers see an explicit error marker.
    payload_fields = {'error': 'empty'}
    # Single synthetic gateway record mimicking a TTN uplink's gateway metadata.
    gateways = {
        'gtw_id': 'gateway_under_test',
        'gtw_trusted': True,
        'timestamp': datetime.timestamp(),
        'time': datetime.isoformat(),
        'channel': 0,
        'rssi': rssi,
        'snr': snr,
        'rf_chain': 1,
        'latitude': 0,
        'longitude': 0,
        'location_source': 'registry'
    }
    metadata = {
        'time': datetime.isoformat(),
        'frequency': '868.1',
        'modulation': 'LORA',
        # LoRa data-rate string, e.g. "SF7BW125".
        'data_rate': 'SF' + str(data_rate) + 'BW125',
        'airtime': 0,
        # Coding ratio rendered as "4/<index>", e.g. "4/5".
        'coding_rate': '4/' + str(coding_date),
        'gateways': [convert(gateways)]
    }
    # TTN transports the raw payload base64-encoded.
    payload_raw = base64.b64encode(payload_bytes).decode('utf-8')
    msg_dict = {
        'app_id': 'app-prueba-2',
        'dev_id': 'cambridge_1',
        'hardware_serial': 'XXXXXXXX',
        'port': 2,
        'counter': counter,
        'payload_raw': payload_raw,
        'payload_fields': convert(payload_fields),
        'metadata': convert(metadata)
    }
    # NOTE(review): convert() presumably wraps the dict in an attribute-style
    # object like the real TTN MSG class -- confirm against its definition.
    msg = convert(msg_dict)
    return msg
def normalize_date(datestring):
    """Normalize a date-like string to ISO format when possible.

    Strings shorter than "YYYYMMDD" pass through unchanged, since
    dateutil.parser.parse would happily misread short strings like
    "319/1" as dates. PDF-style dates ("D:YYYYMMDD...") are retried
    with the prefix and quote characters stripped.
    """
    if len(datestring) < 8:
        return datestring
    try:
        datestring = normalize_malformed_date(datestring)
        parsed = dateutil.parser.parse(datestring)
        iso = parsed.isoformat()
        # "1972-01-19T00:00:00" appears to act as a sentinel for a bogus
        # parse result here -- only adopt the ISO form when it differs.
        if iso != "1972-01-19T00:00:00":
            datestring = iso
    except Exception:
        if datestring.startswith("D:"):
            return normalize_date(datestring[2:].replace("'", ""))
    return datestring
def log(self, txt, dt=None, fgPrint=False):
    """Enhanced log helper with an fgPrint on/off switch and elapsed time."""
    if not (self.params.fgPrint or fgPrint):
        return
    dt = dt or self.datas[0].datetime.date(0)
    elapsed = tq.timNSec('', self.tim0wrk)
    print('%s, %s,tim:%.2f s' % (dt.isoformat(), txt, elapsed))
def format_datetime(self, datetime):
    """Convert a datetime instance into its string representation.

    Subclasses can override this method to customise the date string
    produced by render().
    """
    rendered = datetime.isoformat()
    return rendered
def poke(self, context, session=None):
    # Sensor check: returns True once every targeted execution date of the
    # external DAG/task has reached an allowed state; raises AirflowException
    # when the external entity is missing/deleted or has reached a failed state.
    if self.execution_delta:
        dttm = context['execution_date'] - self.execution_delta
    elif self.execution_date_fn:
        dttm = self.execution_date_fn(context['execution_date'])
    else:
        dttm = context['execution_date']
    dttm_filter = dttm if isinstance(dttm, list) else [dttm]
    # NOTE: the comprehension variable shadows any module-level `datetime`.
    serialized_dttm_filter = ','.join(
        [datetime.isoformat() for datetime in dttm_filter])
    self.log.info('Poking for %s.%s on %s ... ', self.external_dag_id,
                  self.external_task_id, serialized_dttm_filter)
    DM = DagModel
    # we only do the check for 1st time, no need for subsequent poke
    if self.check_existence and not self.has_checked_existence:
        dag_to_wait = session.query(DM).filter(
            DM.dag_id == self.external_dag_id).first()
        if not dag_to_wait:
            raise AirflowException('The external DAG '
                                   '{} does not exist.'.format(
                                       self.external_dag_id))
        else:
            if not os.path.exists(dag_to_wait.fileloc):
                raise AirflowException('The external DAG '
                                       '{} was deleted.'.format(
                                           self.external_dag_id))
        if self.external_task_id:
            # Re-parse the DAG file to make sure the task still exists.
            refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(
                self.external_dag_id)
            if not refreshed_dag_info.has_task(self.external_task_id):
                raise AirflowException(
                    'The external task'
                    '{} in DAG {} does not exist.'.format(
                        self.external_task_id, self.external_dag_id))
        self.has_checked_existence = True
    # Count dates in allowed states; only query failed states when configured.
    count_allowed = self.get_count(dttm_filter, session, self.allowed_states)
    count_failed = -1
    if len(self.failed_states) > 0:
        count_failed = self.get_count(dttm_filter, session, self.failed_states)
    session.commit()
    # Fail fast if *every* targeted date ended in a failed state.
    if count_failed == len(dttm_filter):
        if self.external_task_id:
            raise AirflowException(
                'The external task {} in DAG {} failed.'.format(
                    self.external_task_id, self.external_dag_id))
        else:
            raise AirflowException('The external DAG {} failed.'.format(
                self.external_dag_id))
    return count_allowed == len(dttm_filter)
def log(self, txt, dt=None, fgPrint=False):
    '''Logging function for this strategy, gated by the fgPrint switch.'''
    should_print = self.params.fgPrint or fgPrint
    if should_print:
        when = dt or self.datas[0].datetime.date(0)
        seconds = tq.timNSec('', self.tim0wrk)
        print('%s, %s,tim:%.2f s' % (when.isoformat(), txt, seconds))
def get_rain(bbox, width, height, datetime, srs='epsg:28992', layer='radar:5min', server='https://raster.lizard.net'):
    """ Fetches rain from raster server. """
    # NOTE(review): uses urllib.urlencode/urlopen and urlparse -- Python 2 API.
    x1, x2, y1, y2 = bbox
    # Build the WMS-style getgeotiff request; the timestamp selects the frame.
    parameters = {'sr': srs,
                  'width': width,
                  'height': height,
                  'layers': layer,
                  'compress': 'deflate',
                  'request': 'getgeotiff',
                  'time': datetime.isoformat(),
                  'geom': POLYGON.format(x1=x1, y1=y1, x2=x2, y2=y2)}
    url = '{path}?{pars}'.format(pars=urllib.urlencode(parameters),
                                 path=urlparse.urljoin(server, 'data'))
    logger.info('Loading rain data from %s...' % url)
    # receive tif into temporary file
    fileno, path = tempfile.mkstemp()
    url_file = urllib.urlopen(url)
    os.write(fileno, url_file.read())
    os.close(fileno)
    # read array and remove tempfile
    rain = gdal.Open(path).ReadAsArray()
    os.remove(path)
    return rain
def getDataForJSON(self, dateIni, dateEnd, returnData=True, returnInfo=True):
    """Fetch data for [dateIni, dateEnd] and trim it into JSON-serializable lists.

    returnInfo controls whether lon/lat/idxs metadata is included;
    returnData controls whether the per-timestep values are included.
    Returns {} when no data was retrieved.
    """
    # get data
    data = self.getData(dateIni, dateEnd)
    # Flat indices of grid cells with at least one finite value over time
    # (the +1 distinguishes genuinely-zero sums from all-NaN columns).
    idxs = np.where((np.nansum(data['data']+1, axis=0)!=0).ravel())[0]
    idxsList = idxs.tolist()
    # trim data
    if len(data)>0:
        data['dates'] = [dt.isoformat() for dt in data['dates']]
        data['missing'] = data['missing'].tolist()
        if returnInfo:
            data['lon'] = data['lon'].tolist()
            data['lat'] = data['lat'].tolist()
            data['idxs'] = idxsList
        else:
            data.pop('lon', None)
            data.pop('lat', None)
            data.pop('idxs', None)
        if returnData:
            tmp = []
            for i0 in range(data['data'].shape[0]):
                # Keep only valid cells; mark NaNs with the -999 sentinel,
                # then drop zero values to keep the payload sparse.
                tmpValidData = data['data'][i0,:,:].ravel()[idxsList]
                tmpValidData[np.isnan(tmpValidData)] = -999;
                tmpPositiveIdxs = np.where(tmpValidData!=0)[0]
                tmp.append({'idxs': idxs[tmpPositiveIdxs].tolist(),
                            'values': tmpValidData[tmpPositiveIdxs].tolist()})
            data['data'] = tmp
        else:
            data.pop('data', None)
        return data
    else:
        return {}
def poke(self, context, session=None):
    # Sensor check: True once every targeted execution date of the external
    # DAG (or task) is in one of self.allowed_states.
    if self.execution_delta:
        dttm = context['execution_date'] - self.execution_delta
    elif self.execution_date_fn:
        dttm = self._handle_execution_date_fn(context=context)
    else:
        dttm = context['execution_date']
    dttm_filter = dttm if isinstance(dttm, list) else [dttm]
    # NOTE: the comprehension variable shadows any module-level `datetime`.
    serialized_dttm_filter = ','.join(
        [datetime.isoformat() for datetime in dttm_filter])
    self.log.info('Poking for %s.%s on %s ... ', self.external_dag_id,
                  self.external_task_id, serialized_dttm_filter)
    DM = DagModel
    TI = TaskInstance
    DR = DagRun
    if self.check_existence:
        dag_to_wait = session.query(DM).filter(
            DM.dag_id == self.external_dag_id).first()
        if not dag_to_wait:
            raise AirflowException('The external DAG '
                                   '{} does not exist.'.format(
                                       self.external_dag_id))
        else:
            if not os.path.exists(dag_to_wait.fileloc):
                raise AirflowException('The external DAG '
                                       '{} was deleted.'.format(
                                           self.external_dag_id))
        if self.external_task_id:
            # Re-parse the DAG file to make sure the task still exists.
            refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(
                self.external_dag_id)
            if not refreshed_dag_info.has_task(self.external_task_id):
                raise AirflowException(
                    'The external task'
                    '{} in DAG {} does not exist.'.format(
                        self.external_task_id, self.external_dag_id))
    if self.external_task_id:
        # .count() is inefficient
        count = session.query(func.count()).filter(
            TI.dag_id == self.external_dag_id,
            TI.task_id == self.external_task_id,
            TI.state.in_(self.allowed_states),
            TI.execution_date.in_(dttm_filter),
        ).scalar()
    else:
        # .count() is inefficient
        count = session.query(func.count()).filter(
            DR.dag_id == self.external_dag_id,
            DR.state.in_(self.allowed_states),
            DR.execution_date.in_(dttm_filter),
        ).scalar()
    session.commit()
    return count == len(dttm_filter)
def isoDateString(datetime):
    """Returns a date time string formatted correctly for international utc.

    Used for serializing our objects to ensure that we get a real,
    honest-to-goodness ISO formatted string including time zone that other
    apps can read in correctly.

    NOTE: YES, ALL OUR TIMESTAMPS ARE UTC. YOURS SHOULD BE TOO!
    """
    stamp = datetime.isoformat()
    return "{}+00:00".format(stamp)
def transform_date(self, key, content):
    """Parse the 'datetime' field into an ISO string; pass other keys through.

    Returns None when the 'datetime' content cannot be parsed.
    """
    if key != 'datetime':
        return content
    parsed = dateparser.parse(content)
    return parsed.isoformat(sep=" ") if parsed else None
async def change_avatar(current_user: User = Depends(get_current_active_user)):
    """Change the current user's avatar, rate-limited to one change per 10 days.

    Returns a {result, message} dict; on refusal the message carries the
    remaining cooldown time.
    """
    user = User(**users.find_one({"username": current_user.username}))
    # BUG FIX: the original called datetime.strptime(user.modified_at,
    # datetime.isoformat()) -- isoformat() is an instance method, not a format
    # string, so that line always raised TypeError. Parse the stored ISO
    # timestamp with fromisoformat instead.
    time_delta = datetime.now(timezone.utc) - datetime.fromisoformat(user.modified_at)
    # BUG FIX: the cooldown is 10 days (see time_left below); the original
    # compared against 0, which effectively allowed a change on every request.
    if time_delta.days >= 10:
        save_avatar(current_user)
        user.modified_at = datetime.now(timezone.utc).isoformat()
        return {"result": True, "message": "Avatar changed successfully"}
    time_left = timedelta(days=10) - time_delta
    return {"result": False, "message": f"Time left: {time_left}"}
def log(self, txt: str, datetime=None):
    """Logging function for this strategy.

    Args:
        txt: A string output message.
        datetime: A datetime object; defaults to the current bar's time.
    """
    when = datetime or self.data.datetime.datetime(0)
    print('%s, %s' % (when.isoformat(), txt))
def default(self, obj):
    """JSON fallback encoder: datetimes become ISO strings, iterables lists.

    Anything else is delegated to JSONEncoder.default (which raises TypeError).
    """
    try:
        if isinstance(obj, datetime):
            # BUG FIX: the original returned datetime.isoformat() -- calling
            # the instance method on the *class* with no argument raises
            # TypeError. Serialize the actual object instead.
            return obj.isoformat()
        iterable = iter(obj)
    except TypeError:
        pass
    else:
        return list(iterable)
    return JSONEncoder.default(self, obj)
def _calc_times():
    """
    Calculates open/close times from miles, using rules described at
    https://rusa.org/octime_alg.html.
    Expects one URL-encoded argument, the number of miles.
    """
    km = request.args.get('km', 0, type=float)
    distance = request.args.get('distance', 0, type=float)
    date = request.args.get('date', 0, type=str)
    time = request.args.get('time', 0, type=str)
    # Combine the date and time fields into a single UTC timestamp.
    start = arrow.get(date + ' ' + time + ':00', 'YYYY-MM-DD HH:mm:ss',
                      tzinfo=tz.tzutc())
    stamp = start.isoformat()
    times = {
        "open": acp_times.open_time(abs(km), distance, stamp),
        "close": acp_times.close_time(abs(km), distance, stamp),
    }
    return jsonify(result=times)
def get_departures(header, eva_number, datetime):
    """Query the Deutsche Bahn departure board for one station.

    Args:
        header: HTTP headers (must carry the API credentials).
        eva_number: Station EVA number.
        datetime: Timestamp for which departures are requested.

    Returns the decoded JSON response; asserts the request succeeded.
    """
    url = ("https://api.deutschebahn.com/fahrplan-plus/v1/departureBoard/"
           + str(eva_number))
    params = {'date': datetime.isoformat()}
    response = requests.get(url, params, headers=header)
    assert response.status_code == 200
    return response.json()
def buildTileRequestDocument(tileorigin, tilesource, x, y, z, status, datetime, ip):
    """Build a log document describing one tile request.

    x, y, z are expected as strings; datetime stamps the request at several
    granularities for downstream aggregation.
    """
    origin = tileorigin if tileorigin else ""
    doc = {
        'ip': ip,
        'origin': origin,
        'source': tilesource,
        'location': z + '/' + x + '/' + y,
        'z': z,
        'status': status,
        'year': datetime.strftime('%Y'),
        'month': datetime.strftime('%Y-%m'),
        'date': datetime.strftime('%Y-%m-%d'),
        'date_iso': datetime.isoformat(),
    }
    return doc
def buildTileRequestDocument(tileorigin, tilesource, x, y, z, status, datetime, ip):
    """Assemble the per-request tile log record (x, y, z as strings)."""
    record = {'ip': ip}
    record['origin'] = tileorigin if tileorigin else ""
    record['source'] = tilesource
    record['location'] = z + '/' + x + '/' + y
    record['z'] = z
    record['status'] = status
    # Timestamp at year/month/day granularity plus the full ISO stamp.
    record['year'] = datetime.strftime('%Y')
    record['month'] = datetime.strftime('%Y-%m')
    record['date'] = datetime.strftime('%Y-%m-%d')
    record['date_iso'] = datetime.isoformat()
    return record
def addUpdate(self, trainer, xp, datetime=None):
    """Add an Update object to the database.

    Args:
        trainer: Trainer identifier the update belongs to.
        xp: Experience value to record.
        datetime: Timestamp of the update; defaults to the current UTC time.

    Returns the new record's id on success, or the (non-None) result of
    raise_for_status on an HTTP error.

    BUG FIX: the original declared datetime=datetime.datetime.utcnow() as the
    default, which is evaluated once at import time -- every call without an
    explicit timestamp logged the process start time. Defaulting to None and
    resolving per call keeps the interface compatible while fixing that.
    """
    if datetime is None:
        from datetime import datetime as _dt
        datetime = _dt.utcnow()
    url = self.url + 'update/'
    payload = {
        'trainer': trainer,
        'xp': xp,
        'datetime': datetime.isoformat()
    }
    r = requests.post(url, data=json.dumps(payload), headers=self.headers)
    print("{}: {} - {}".format(inspect.currentframe().f_code.co_name,
                               r.status_code, r.json()))
    # raise_for_status() raises on 4xx/5xx and otherwise returns None,
    # so the success path falls through to returning the new id.
    status = r.raise_for_status()
    if status is not None:
        return status
    else:
        return r.json()['id']
def upload_file(self,file_path, name=None): """Upload a file to the repository and return the URL, or an exception on errors""" from datetime import datetime import os import urlparse import re # see ckan/public/application.js:makeUploadKey for why the file_key # is derived this way. ts = datetime.isoformat(datetime.now()).replace(':','').split('.')[0] if name is None: name = os.path.basename(file_path) norm_name = name.replace(' ', '-') file_key = os.path.join(ts, norm_name) # Inexplicably, this URL can't have the version number url = re.sub('\/\d$','', self.url)+'/storage/auth/form/{}'.format(file_key.strip('/')) r = requests.get(url,headers = self.auth_headers) url_path = r.json()['action'] files = [('file', os.path.basename(file_key), open(base_path).read())] fields = [('key', file_key)] content_type, body = self._encode_multipart_formdata(fields, files) headers= self.auth_headers headers['Content-Type'] = content_type headers['Content-Length'] = str(len(body)) # And this one not only doesn't have the api version, it also doesn't have # 'api' netloc = urlparse.urlparse(self.url).netloc url = 'http://'+ netloc+ url_path r = requests.post(url,headers = headers,data=body) try: r.raise_for_status() except: print 'ERROR for url: {}'.format(url) print r.content raise return '%s/storage/f/%s' % (re.sub('/api\/\d$','', self.url), file_key)
def __init__(self, charset='utf-8', indent=None, serializers=None):
    """Configure a JSON response body.

    Args:
        charset: Encoding used for the payload and for decoding bytes values.
        indent: Optional indent passed through to the JSON encoder.
        serializers: Optional mapping of type -> callable; the defaults below
            are always installed on top of it.
    """
    super(JsonBody, self).__init__()
    if serializers is None:
        serializers = {}
    # Default encoders for types the json module cannot handle natively.
    defaults = {
        surly.Url: lambda value: str(value),
        tuple: lambda value: list(value),
        set: lambda value: list(value),
        datetime.date: lambda value: value.isoformat(),
        datetime.datetime: lambda value: value.isoformat(),
        bytes: lambda value: value.decode(charset),
        numpy.ndarray: lambda value: value.tolist(),
    }
    for kind, encode in defaults.items():
        serializers[kind] = encode
    self.serializers = serializers
    self.indent = indent
    self.charset = charset
    self.content_type = 'application/json; charset={}'.format(charset)
    self.content_encoding = None
    self._cached_content = None
    self._compression_level = None
    self._compression_threshold = None
    self._compression_requested = False
def __init__(self, charset='utf-8', indent=None, serializers=None):
    """Set up JSON body state: serializer table, charset, content headers."""
    super(JsonBody, self).__init__()
    serializers = {} if serializers is None else serializers
    # Install the stock encoders (overriding any caller-supplied entries
    # for the same types, matching the original behaviour).
    serializers[surly.Url] = lambda u: str(u)
    serializers[tuple] = lambda t: list(t)
    serializers[set] = lambda s: list(s)
    serializers[datetime.date] = lambda d: d.isoformat()
    serializers[datetime.datetime] = lambda d: d.isoformat()
    serializers[bytes] = lambda b: b.decode(charset)
    serializers[numpy.ndarray] = lambda a: a.tolist()
    self.serializers = serializers
    self.indent = indent
    self.charset = charset
    self.content_type = 'application/json; charset={}'.format(charset)
    self.content_encoding = None
    self._cached_content = None
    self._compression_level = None
    self._compression_threshold = None
    self._compression_requested = False
def ping():
    """Ping this host once and return a dict with time, date, createdAt and
    the measured ping value (-1 when no latency could be parsed).

    Uses the Unix `ping -c 1` flavour and scrapes its output from ping.txt.
    """
    hostname = hostName.hostName()
    data = {}
    # find the start time and convert it to zero-padded hours:minutes
    startTime = datetime.datetime.now()
    startHour = str(startTime.hour)
    startMinute = str(startTime.minute)
    if len(startMinute) == 0:
        startMinute = "00"
    elif len(startMinute) == 1:
        startMinute = "0" + startMinute
    if len(startHour) == 1:
        startHour = "0" + startHour
    startTime = startHour + ":" + startMinute
    data["time"] = startTime
    # get the current date as well
    startDate = datetime.date.today()
    data["date"] = str(startDate)
    # BUG FIX: the original called datetime.isoformat() on the *module*,
    # which raises AttributeError. Take the ISO form of the current time.
    createdAt = datetime.datetime.now().isoformat()
    # BUG FIX: the original only stored createdAt on the failure path;
    # record it up front so both return paths carry it.
    data["createdAt"] = createdAt
    # call one ping, get the results and return the dictionary
    response = os.system("ping -c 1 " + hostname + " > ping.txt")
    terminal = open("ping.txt", "r")
    for line in terminal:
        if(len(line) > 2):
            pingLag = "mdev = "
            result = line.find(pingLag)
            if(result != -1):
                result = re.findall(r'\d+', line)
                if(result):
                    data["ping"] = result[0]
                    terminal.close()
                    return data
    data["ping"] = -1
    terminal.close()
    return data
def get_rain(bbox, width, height, datetime, srs='epsg:28992', layer='radar:5min', server='https://raster.lizard.net'):
    """ Fetches rain from raster server. """
    # NOTE(review): urllib.urlencode/urlopen and urlparse are the Python 2 API.
    x1, x2, y1, y2 = bbox
    # getgeotiff request; 'time' selects the radar frame, 'geom' the area.
    parameters = {
        'sr': srs,
        'width': width,
        'height': height,
        'layers': layer,
        'compress': 'deflate',
        'request': 'getgeotiff',
        'time': datetime.isoformat(),
        'geom': POLYGON.format(x1=x1, y1=y1, x2=x2, y2=y2)
    }
    url = '{path}?{pars}'.format(pars=urllib.urlencode(parameters),
                                 path=urlparse.urljoin(server, 'data'))
    logger.info('Loading rain data from %s...' % url)
    # receive tif into temporary file
    fileno, path = tempfile.mkstemp()
    url_file = urllib.urlopen(url)
    os.write(fileno, url_file.read())
    os.close(fileno)
    # read array and remove tempfile
    rain = gdal.Open(path).ReadAsArray()
    os.remove(path)
    return rain
def addTrainer(self, username, team, has_cheated=False, last_cheated=None,
               currently_cheats=False, statistics=True, daily_goal=None,
               total_goal=None, prefered=True, datetime=None, account=None):
    """Add a trainer to the database.

    Returns the new record's id on success, or the (non-None) result of
    raise_for_status on an HTTP error.

    BUG FIX: datetime previously defaulted to datetime.datetime.utcnow(),
    which is evaluated once at import time -- every call without an explicit
    timestamp recorded the process start time as last_modified. Defaulting to
    None and resolving per call keeps the interface compatible.
    """
    if datetime is None:
        from datetime import datetime as _dt
        datetime = _dt.utcnow()
    url = self.url + 'trainers/'
    payload = {
        'username': username,
        'faction': team,
        'has_cheated': has_cheated,
        'last_cheated': last_cheated,
        'currently_cheats': currently_cheats,
        'statistics': statistics,
        'daily_goal': daily_goal,
        'total_goal': total_goal,
        'prefered': prefered,
        'last_modified': datetime.isoformat(),
        'account': account
    }
    r = requests.post(url, data=json.dumps(payload), headers=self.headers)
    print("{}: {} - {}".format(inspect.currentframe().f_code.co_name,
                               r.status_code, r.json()))
    # raise_for_status() raises on 4xx/5xx and otherwise returns None.
    status = r.raise_for_status()
    if status is not None:
        return status
    else:
        return r.json()['id']
#!/usr/bin/python import datetime from dateutil.parser import parse date1 = datetime.date(2001, 9, 11) date2 = datetime.date(2011, 4, 30) date3 = datetime.date.today() datetime1 = datetime.datetime(2001, 9, 11, 23, 59, 59) datetime2 = datetime.datetime.now() datetime3 = parse("2009-01-05 22:14:39") delta1 = date3 - date2 delta2 = date3 - date1 delta3 = datetime.timedelta(days=1, hours=1, microseconds=-1) print "Dates" for date in [date1, date2, date3]: print date print "\nDatetimes:" for datetime in [datetime1, datetime2, datetime3]: print datetime, datetime.strftime("%d-%b-%Y"), datetime.isoformat() print "\nDeltas:" for delta in [delta1, delta2, delta3]: print delta print delta.days, delta.seconds, delta.microseconds
def isoformat(datetime):
    """Serialize *datetime* as ISO-8601, rewriting a trailing '000' as 'Z'.

    The default microsecond tail often ends in three zeroes; chopping them
    and appending 'Z' yields a close-enough UTC-terminated stamp.
    """
    stamp = datetime.isoformat()
    if stamp.endswith("000"):
        return stamp[:-3] + "Z"
    return stamp
def logTileRequest(tileorigin, tilesource, x, y, z, ext, status, datetime, ip, gw_logs=None):
    """Log one tile request to a dated TSV file and a GeoWatch log stream.

    gw_logs may carry an already-provisioned stream producer; when absent a
    new one is provisioned per call.
    """
    log_root = settings.LOG_REQUEST_ROOT
    log_format = settings.LOG_REQUEST_FORMAT
    if log_root and log_format:
        # One TSV file per day, keyed by the request's date.
        log_file = log_root+os.sep+"requests_tiles_"+datetime.strftime('%Y-%m-%d')+".tsv"
        line = log_format.format(status=status, tileorigin=tileorigin,
                                 tilesource=tilesource, z=z, x=x, y=y,
                                 ext=ext, ip=ip,
                                 datetime=datetime.isoformat())
        # Write to File
        client, producer = provision_producer("file", path=log_file,
                                              codec="GeoWatchCodecPlain",
                                              verbose=True)
        producer.send_text(line)
        # Write to Log Stream
        if not gw_logs:
            topic = settings.TILEJET_GEOWATCH_TOPIC_LOGS
            client, gw_logs = provision_geowatch_producer(
                topic, "GeoWatchCodecPlain", max_tries=3, sleep_period=5,
                verbose=True)
        gw_logs.send_text(line)
# Exploration transcript of the datetime module's constructors and accessors.
from datetime import datetime
datetime.time
from datetime import datetime as Datetime
import datetime as dt
dt.time
Datetime.time
# Explicit construction and the various "now" factories.
dt.datetime(2003, 5, 12, 14, 33, 22, 245323)
dt.datetime.today()
dt.datetime.now()
dt.datetime.utcnow()
dt.datetime.fromordinal(5)
dt.datetime.fromtimestamp(3635352)
dt.datetime.utcfromtimestamp(3635352)
d = dt.date.today()
d
t = dt.time(8, 15)
t
dt.datetime.combine(d, t)
# NOTE: this rebinding replaces the `dt` module alias with a datetime
# instance -- the module is no longer reachable as `dt` after this line.
dt = dt.datetime.strptime('Monday 6 January 2014, 12:13:31',
                          '%A %d %B %Y, %H:%M:%S' )
dt
dt.date()
dt.time()
dt.day
dt.isoformat()
def log(self, txt, dt=None, fgPrint=False):
    """Enhanced log helper with an fgPrint on/off switch."""
    if self.params.fgPrint or fgPrint:
        when = dt or self.datas[0].datetime.date(0)
        print('%s, %s' % (when.isoformat(), txt))
def log(self, txt, dt=None):
    """Write a dated log line to stdout."""
    when = dt if dt else self.datas[0].datetime.date(0)
    print('%s, %s' % (when.isoformat(), txt))
def log(self, txt, dt=None):
    '''Logging function for this strategy.'''
    stamp = dt or self.datas[0].datetime.date(0)
    line = '%s, %s' % (stamp.isoformat(), txt)
    print(line)
def create_rfq_campaign(request):
    """Create an RFQ campaign: save the form, seed its related tables,
    notify superadmins and allocate vendors; re-render with errors on
    invalid POST, render the empty form on GET."""
    # countries1 = countries.objects.all()
    countries1 = countries_list.objects.all()
    region_list = region1.objects.all()
    # region_list=region.objects.all()
    industry_types = industry_type.objects.all()
    company_sizes = company_size.objects.all()
    vendor_type = VendorType.objects.all()
    revenue_size = RevenueSize.objects.all()
    message, success, title = "", 0, "error"
    abm_status, suppression_status, lead_validation_list = 0, 0, []
    if request.method == "POST":
        form = CampaignForm(request.POST)
        if form.is_valid():
            form.save()
            # NOTE(review): latest("id") assumes no concurrent campaign
            # creation between save() and this lookup -- confirm.
            campaign = Campaign.objects.latest("id")
            campaign_id = campaign.id
            # converting normal datetime to iso format for passing to js function
            expiry = datetime.strptime(request.POST.get('rfq_timer'),
                                       '%Y/%m/%d %H:%M')
            expiry = datetime.isoformat(expiry)
            expiry_timer = Campaign.objects.get(id=campaign_id)
            expiry_timer.rfq_timer = expiry
            expiry_timer.save()
            # create other table with empty data
            # so we can open edit case paer page directly
            if 'abm_status' not in request.POST:
                abm_status = 0
            else:
                abm_status = 1
            if 'suppression_status' not in request.POST:
                suppression_status = 0
            else:
                suppression_status = 1
            specification = Specification.objects.create(
                campaign_id=campaign_id,
                abm_status=abm_status,
                suppression_status=suppression_status)
            # Multi-select fields are persisted as comma-joined strings.
            mapping = Mapping.objects.create(
                campaign_id=campaign_id,
                industry_type=','.join(request.POST.getlist(u'industry_type')),
                special_instructions=request.POST.get('special_instructions'),
                job_title_function=request.POST.get('job_title_function'),
                country=','.join(request.POST.getlist('geo')),
                company_size=','.join(request.POST.getlist('company_size')),
                revenue_size=','.join(request.POST.getlist('revenue_size')),
                custom_question=int(request.POST.get('custom_question', 0)))
            terms = Terms.objects.create(campaign_id=campaign_id)
            delivery = Delivery.objects.create(campaign_id=campaign_id)
            # store default lead validation in database
            lead_validation = LeadValidationComponents.objects.filter(
                is_default=1)
            for lead in lead_validation:
                lead_validation_list.append(lead.function_name)
            SelectedLeadValidation.objects.create(
                campaign=campaign, component_list=lead_validation_list)
            HeaderSpecsValidation.objects.create(campaign=campaign,
                                                 company_limit=4)
            message = "Campaign Created Successfully"
            # add notification
            title = 'New RFQ Campaign Created'
            desc = "Campaign named '" + \
                str(campaign.name) + "' created successfully"
            sender_id = request.session['userid']
            receiver_id = request.session['userid']
            superadmins = user.objects.filter(usertype__id=4)
            from client.utils import noti_via_mail
            # Mail + in-app notification for every superadmin (usertype 4).
            for superad in superadmins:
                noti_via_mail(superad.id, title, desc, mail_noti_new_campaign)
                RegisterNotification(sender_id, superad.id, desc, title, 1,
                                     campaign, None)
            # send request to vendors for rfq campaign
            rfq_vendor_allocation(campaign_id, sender_id)
            # add set as default data
            # objects_list = [campaign, specification, mapping, terms, delivery]
            # hook_add_campaign_data(campaign.user, objects_list)
            return redirect('client_pending_campaign')
        else:
            message += str(form.errors)
        specificationform = SpecificationForm()
        print(message)
        # return HttpResponse(message)
        context = {
            'campaignform': form,
            'region_list': region_list,
            'countries': countries1,
            "industry_types": industry_types,
            'mappingform': MappingForm(),
            "specificationform": specificationform,
            "company_sizes": company_sizes,
            "vendor_type": vendor_type,
            "revenue_size": revenue_size,
            "company_limit": 4,
        }
        # send with errors
        return render(request, 'campaign/create_rfq_campaign.html', context)
    # get method
    else:
        campaignform = CampaignForm()
        specificationform = SpecificationForm()
        context = {
            'campaignform': campaignform,
            'region_list': region_list,
            'countries': countries1,
            "industry_types": industry_types,
            'mappingform': MappingForm(),
            "specificationform": specificationform,
            "company_sizes": company_sizes,
            "vendor_type": vendor_type,
            "revenue_size": revenue_size,
            "company_limit": 4,
        }
        # return render(request,'campaign/createcampaign.html', context)
        return render(request, 'campaign/create_rfq_campaign.html', context)
def iso_string_from_datetime(datetime):
    """Serialize a naive, assumed-UTC datetime as ISO-8601 with a 'Z' suffix."""
    return "%sZ" % datetime.isoformat()
def logTileRequest(tileorigin, tilesource, x, y, z, status, datetime, ip):
    """Log one tile request to a dated TSV file and MongoDB, then update
    aggregate stats (async via Celery or inline). DB/queue failures are
    swallowed and appended to a dated errors file."""
    #starttime = time.clock()
    #==#
    log_root = settings.LOG_REQUEST_ROOT
    #log_format = settings.LOG_REQUEST_FORMAT['tile_request']
    log_format = settings.LOG_REQUEST_FORMAT
    if log_root and log_format:
        #if not os.path.exists(log_root):
        #    os.makedirs(log_root)
        # One TSV file per day, keyed by the request's date.
        log_file = log_root+os.sep+"requests_tiles_"+datetime.strftime('%Y-%m-%d')+".tsv"
        with open(log_file, 'a') as f:
            line = log_format.format(status=status, tileorigin=tileorigin.name,
                                     tilesource=tilesource.name, z=z, x=x, y=y,
                                     ip=ip, datetime=datetime.isoformat())
            f.write(line+"\n")
    # Import Gevent and monkey patch
    from gevent import monkey
    monkey.patch_all()
    # Update MongoDB
    from pymongo import MongoClient
    client = None
    db = None
    r = None
    try:
        #client = MongoClient('localhost', 27017)
        client = MongoClient('/tmp/mongodb-27017.sock')
        db = client.ittc
        r = buildTileRequestDocument(tileorigin.name, tilesource.name,
                                     x, y, z, status, datetime, ip)
    except:
        client = None
        db = None
        errorline = "Error: Could not connet to log database. Most likely issue with connection pool"
        error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+datetime.strftime('%Y-%m-%d')+"_errors.txt"
        with open(error_file, 'a') as f:
            f.write(errorline+"\n")
    # Update Mongo Logs
    if client and db and r:
        try:
            # w=0: fire-and-forget write, no acknowledgement awaited.
            db[settings.LOG_REQUEST_COLLECTION].insert(r, w=0)
        except:
            errorline = "Error: Could not write log entry into database. Most likely socket issue. For the following: "+line
            error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+datetime.strftime('%Y-%m-%d')+"_errors.txt"
            with open(error_file, 'a') as f:
                f.write(errorline+"\n")
        # Update Mongo Aggregate Stats
        stats = buildStats(r)
        # Sync stats
        if settings.ASYNC_STATS:
            try:
                taskIncStats.apply_async(
                    args=[stats], kwargs=None, queue="statistics")
            except:
                errorline = "Error: Could not queue taskIncStats. Most likely issue with rabbitmq."
                error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+datetime.strftime('%Y-%m-%d')+"_errors.txt"
                with open(error_file, 'a') as f:
                    f.write(errorline+"\n")
        else:
            incStats(db, stats)
def isoformat(datetime):
    """Serialize *datetime* to ISO-8601, forcing 'T' as the date/time
    separator even if the object renders with a space."""
    stamp = datetime.isoformat()
    return "T".join(stamp.split(" "))
# Dump one decoded heating-controller message to stdout (Python 2 script
# fragment): timestamped log entries, temperature frames, sync frames, and a
# raw fallback, then feed the message to the state machine.
datetime = msg.getDatetime()
try:
    note = msg.note
except AttributeError:
    note = ''
#if msg.separator:
#    print (' %s=%i' % (binascii.hexlify(msg.separator) , len(msg.separator)) )
t1=''
t2=''
t3=''
t4=''
ret=''
if datetime:
    # Timestamp frame: show the hex dump plus the decoded log time.
    print ('%s=%i \n -> %s logtime:"%s"' % (msg.dump(), len(msg.bytes) , datetime.isoformat(' '), note))
elif msg.istTemperatur():
    # Temperature frame: outdoor, boiler setpoint/actual and parameter P33.
    t1 = 'Aussen=%.2f' % msg.getTemperaturAussen()
    t2 = 'KesselSoll=%.2f' % msg.getTemperaturKesselSoll()
    t3 = 'KesselIst=%.2f' % msg.getTemperaturKesselIst()
    t4 = 'P33=%.2f' % msg.getP33()
    print ('%s=%i\n -> %s %s %s logtime:"%s"' % (msg.dump(), len(msg.bytes) , t1, t2, t3, note))
elif msg.istSync():
    print ('%s=%i' % (binascii.hexlify(msg.bytes), len(msg.bytes) ))
else:
    print ('%s=%i\n -> %s' % (msg.dump(), len(msg.bytes), msg.toString()) )
#
ret = state.consumeMessage(msg)
if ret:
    print ret + ' %s %s %s %s' % (t1, t2, t3, t4)
sys.stderr.flush()
sys.stdout.flush()
def serialize_date(datetime):
    """Dates are serialized as ISO 8601-compatible strings."""
    serialized = datetime.isoformat()
    return serialized
def default_datetime(self, datetime):
    """Fallback serializer: render datetimes as ISO-8601 strings."""
    iso = datetime.isoformat()
    return iso
def log(txt, dt=None, data=None):
    """Log a message stamped with a backtrader numeric date.

    Falls back to the current bar's numeric datetime when dt is not given,
    then converts the number to a datetime via bt.num2date.
    """
    stamp = dt or data.datetime[0]
    stamp = bt.num2date(stamp)
    _logger.info('%s, %s' % (stamp.isoformat(), txt))
def iso8601s(tzinfo: tzinfo | None = None,
             end_datetime: dt.datetime | None = None) -> SearchStrategy[str]:
    """Hypothesis strategy yielding ISO-8601 datetime strings."""
    base = date_times(tzinfo=tzinfo, end_datetime=end_datetime)
    return base.map(lambda value: value.isoformat())
def dtg2iso(dtg, full=False):
    '''convert YYYYMMDDHH to YYYY-mm-ddTHH:MM:SS

    With full=True the input is expected as YYYYMMDDHHMMSS.
    '''
    import datetime
    pattern = '%Y%m%d%H%M%S' if full else '%Y%m%d%H'
    parsed = datetime.datetime.strptime(dtg, pattern)
    return parsed.isoformat()
def from_datetime(cls, datetime):
    """Build the value from a datetime as its ISO-8601 string form."""
    iso_value = datetime.isoformat()
    return iso_value
def convert_datetime_to_iso(datetime):
    """Return the 'T'-separated ISO form of *datetime*, or None for None."""
    if datetime is None:
        return None
    return datetime.isoformat("T")