def runAction(self, node, op=""): """ The actual proccessing of the node object takes place here. Read out the values of attrname and accesstype if any. Generate the ACL-rule, and save it. """ l_date = node.get(self.get('attrname')) if l_date: if date.validateDateString(l_date): try: node.set('updatetime', date.format_date(date.parse_date(l_date))) formated_date = date.format_date(date.parse_date(l_date), "dd.mm.yyyy") d = formated_date.split('.') rule = get_or_add_defer_daterange_rule( int(d[2]), int(d[1]), int(d[0])) for access_type in self.get('accesstype').split(';'): special_access_ruleset = node.get_or_add_special_access_ruleset( ruletype=access_type) special_access_ruleset.rule_assocs.append( AccessRulesetToRule(rule=rule)) db.session.commit() except ValueError: logg.exception( "exception in workflow step defer, runAction failed")
def get_datelists(nodes):
    '''
    helper function to update the default context before calling the TAL interpreter
    '''
    update_date = []
    if len(nodes) == 1:
        for node in nodes:
            if node.updatetime:
                try:
                    date = parse_date(node.updatetime, "%Y-%m-%dT%H:%M:%S")
                    datestr = format_date(date, format='%d.%m.%Y %H:%M:%S')
                except:
                    datestr = node.updatetime
                update_date.append([node.get("updateuser"), datestr])

    creation_date = []
    if len(nodes) == 1:
        for node in nodes:
            if node.get("creationtime"):
                try:
                    date = parse_date(node.get("creationtime"), "%Y-%m-%dT%H:%M:%S")
                    datestr = format_date(date, format='%d.%m.%Y %H:%M:%S')
                except:
                    datestr = node.get("creationtime")
                creation_date.append([node.get("creator"), datestr])

    return update_date, creation_date
def pretty_format_update_time(self):
    """XXX: move this somewhere else!"""
    if self.updatetime:
        return format_date(parse_date(self.updatetime), '%d.%m.%Y, %H:%M:%S')
    if self.creationtime:
        return format_date(parse_date(self.creationtime), '%d.%m.%Y, %H:%M:%S')
    return ''
def runAction(self, node, op=""): """ The actual proccessing of the node object takes place here. Read out the values of attrname and accesstype if any. Generate the ACL-rule, and save it. """ l_date = node.get(self.get('attrname')) if l_date: if date.validateDateString(l_date): try: node.set('updatetime', date.format_date(date.parse_date(l_date))) formated_date = date.format_date(date.parse_date(l_date), "dd.mm.yyyy") for item in self.get('accesstype').split(';'): node.setAccess(item, "{date >= %s}" % formated_date) node.getLocalRead() if self.get('recipient'): # if the recipient-email was entered, create a scheduler attr_dict = {'single_trigger': l_date, 'function': "test_sendmail01", 'nodelist': list(node.id), 'attr_recipient': self.get('recipient'), 'attr_subject': "{} ID: {}".format(self.get('subject'), node.id), 'attr_body': self.get('body')} schedules.create_schedule("WorkflowStep_Defer", attr_dict) except ValueError as e: print "Error: %s" % e
def getUpdatedDate(self, format=None):
    if format is None:
        format = STANDARD_FORMAT
    # NOTE: the format argument is currently not used; the output format is hard-coded below
    if self.get('updatetime'):
        return format_date(parse_date(self.get('updatetime')), '%d.%m.%Y, %H:%M:%S')
    if self.get('creationtime'):
        return format_date(parse_date(self.get('creationtime')), '%d.%m.%Y, %H:%M:%S')
    return ''
def replace_or_add_defer_date(node, iso_date_old, iso_date_new, access_types=[u'read', u'data']):
    """
    examples:
        # set new date in the future (for read and data access)
        replace_or_add_defer_date(node, u"2016-11-03T00:00:00", u"2017-11-03T00:00:00")
        # remove existing rule
        replace_or_add_defer_date(node, u"2016-11-03T00:00:00", u"")
        # add new rule
        replace_or_add_defer_date(node, u"", u"2017-11-03T00:00:00")
    """
    if (not (iso_date_old or iso_date_new)) or (not access_types):
        print("nothing to be done")
        return

    import utils.date as date
    from psycopg2.extras import DateRange
    import datetime
    from core import AccessRule, AccessRulesetToRule, db
    q = db.query
    from core.permission import get_or_add_access_rule

    if iso_date_old:
        formatted_date_old = date.format_date(date.parse_date(iso_date_old), "dd.mm.yyyy")
        day_old, month_old, year_old = map(int, formatted_date_old.split('.'))
        dateranges_old = set([DateRange(datetime.date(year_old, month_old, day_old),
                                        datetime.date(9999, 12, 31), '[)')])
        # !!! exits if more than one rule for this daterange exists;
        # should be fixed by looping through the node's rules
        rule_old = q(AccessRule).filter_by(group_ids=None, dateranges=dateranges_old,
                                           subnets=None, invert_group=False,
                                           invert_date=False, invert_subnet=False).one()
        if not rule_old:
            raise ValueError('rule not found for old date {}'.format(iso_date_old))
        print("found old rule {}".format(rule_old.to_dict()))
    else:
        rule_old = None

    if iso_date_new:
        formatted_date_new = date.format_date(date.parse_date(iso_date_new), "dd.mm.yyyy")
        day_new, month_new, year_new = map(int, formatted_date_new.split('.'))
        dateranges_new = set([DateRange(datetime.date(year_new, month_new, day_new),
                                        datetime.date(9999, 12, 31), '[)')])
        rule_new = get_or_add_access_rule(dateranges=dateranges_new)
    else:
        rule_new = None

    for access_type in access_types:
        special_access_ruleset = node.get_or_add_special_access_ruleset(ruletype=access_type)
        if rule_old:
            arr_old = q(AccessRulesetToRule).filter_by(ruleset=special_access_ruleset, rule=rule_old).one()
            special_access_ruleset.rule_assocs.remove(arr_old)
        if rule_new:
            special_access_ruleset.rule_assocs.append(AccessRulesetToRule(rule=rule_new))
def write_iptc_tags(image_path, tag_dict):
    '''
    Writes IPTC tags with exiftool to a given image path (overwrites the source file).
    Empty tags (tagname='') will be removed.
    :param image_path: image path to write
    :param tag_dict: tagname / tagvalue
    :return status
    '''
    try:
        utils.process.call(['exiftool'])
    except OSError:
        logg.error('No exiftool installed.')
        return
    image_path = os.path.abspath(image_path)
    if not os.path.exists(image_path):
        logg.info(u'Image {} for writing IPTC metadata does not exist.'.format(image_path))
        return
    if not isinstance(tag_dict, dict):
        logg.error(u'No dictionary of tags.')
        return

    command_list = [u'exiftool']
    command_list.append(u'-overwrite_original')
    command_list.append(u'-charset')
    command_list.append(u'iptc=UTF8')
    command_list.append(image_path)
    for tag_name in tag_dict.keys():
        tag_value = tag_dict[tag_name]
        if tag_dict[tag_name] == '':
            command_list.append(u'-{}='.format(tag_name))
        elif tag_name == u'DateCreated':
            if validateDate(parse_date(tag_value.split('T')[0], format='%Y-%m-%d')):
                tag_value = format_date(parse_date(tag_value.split('T')[0], format='%Y-%m-%d'), '%Y:%m:%d')
            else:
                logg.error(u'Could not validate {}.'.format(tag_value))
            # pass the charset option and its value as separate argv elements, as in the global options above
            command_list.append(u'-charset')
            command_list.append(u'iptc=UTF8')
        command_list.append(u'-{}={}'.format(tag_name, tag_value))

    logg.info(u'Command: {} will be executed.'.format(command_list))
    process = utils.process.Popen(command_list, stdout=subprocess.PIPE)
    output, error = process.communicate()
    if error is not None:
        logg.info('Exiftool output: {}'.format(output))
        logg.error('Exiftool error: {}'.format(error))
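# Hedged usage sketch for write_iptc_tags above (not part of the original module): the file
# path and tag values are illustrative assumptions; ObjectName, Keywords, Caption-Abstract and
# DateCreated are standard IPTC tag names understood by exiftool.
example_tags = {
    u'ObjectName': u'Sample title',
    u'Keywords': u'archive;scan',
    u'Caption-Abstract': u'',                # empty string: the tag is removed from the file
    u'DateCreated': u'2017-05-04T00:00:00',  # reformatted to YYYY:MM:DD before writing
}
write_iptc_tags(u'/tmp/sample.jpg', example_tags)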
def get_iptc_tags(image_path, tags=None):
    """
    get the IPTC tags/values from a given image file
    :rtype : object
    :param image_path: path to the image file
    :param tags: dictionary with wanted iptc tags
    :return: dictionary with tag/value
    """
    if tags is None:
        tags = get_wanted_iptc_tags()
    if not isinstance(tags, dict):
        logg.warn('No Tags to read.')
        return
    if image_path is None:
        logg.warn('No file path for reading iptc.')
        return
    if not os.path.exists(image_path):
        logg.warn('Could not read IPTC metadata from non existing file.')
        return
    if os.path.basename(image_path).startswith('-'):
        logg.warn('Will not read IPTC metadata from files starting with a hyphen, caused by exiftool security issues.')
        return

    # fetch metadata dict from exiftool
    exiftool_exe = config.get("external.exiftool", "exiftool")
    with exiftool.ExifTool(exiftool_exe) as et:
        iptc_metadata = et.get_metadata(image_path)

    ret = {}
    for iptc_tag in tags.keys():
        key = "IPTC:" + iptc_tag
        if key in iptc_metadata:
            value = iptc_metadata[key]
            # format dates for date fields
            if iptc_tag == 'DateCreated':
                if validateDate(parse_date(value, format='%Y:%m:%d')):
                    value = format_date(parse_date(value, format='%Y:%m:%d'))
                else:
                    logg.error('Could not validate: {} as date value.'.format(value))
            # join lists to strings
            if isinstance(value, list):
                value = ';'.join(ensure_unicode(e, silent=True) for e in value)
            ret[iptc_tag] = ensure_unicode(value, silent=True)
    logg.info('{} read from file.'.format(ret))
    return ret
def str_is_date(s: str) -> bool:
    """Check whether or not a string is a date"""
    try:
        parse_date(s)
        date.fromisoformat(s)
    except ValueError:
        return False
    else:
        return True
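# Minimal check of str_is_date (an illustration, assuming parse_date raises ValueError on
# non-dates, like the date.fromisoformat fallback above):
assert str_is_date("2021-06-01") is True
assert str_is_date("not a date") is False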
def parseDate(string):
    if string.endswith("Z"):
        string = string[:-1]
    try:
        return date.parse_date(string, "%Y-%m-%d")
    except:
        try:
            return date.parse_date(string, "%Y-%m-%dT%H:%M:%S")
        except:
            return date.parse_date(string[12:], "%Y-%m-%d")
def try_node_date(node, attr_list=attr_list):
    for key in attr_list:
        timevalue = node.get(key)
        if date_pattern_standard.match(timevalue):
            rfc822_time = format_date(parse_date(timevalue), 'rfc822')
            break
        elif date_pattern_armus.match(timevalue):
            rfc822_time = format_date(parse_date(timevalue, format="%m/%d/%Y"), 'rfc822')
            break
        else:
            rfc822_time = ''
    return rfc822_time
def fill_cache(start_date: date):
    query_keys = ["sensor_data"]
    devices = get_devices()
    start_date = parse_date(start_date)
    end_date: date = date.today()
    number_of_days = (end_date - start_date).days
    number_of_queries = number_of_days * len(devices)
    progress = 1
    print(f"Cache queries for dates: {start_date} - {end_date}.")
    print(f"Number of queries: {number_of_queries}")

    queries = list()
    for day in date_range(start_date, end_date):
        for device in devices:
            for key in query_keys:
                queries.append(Query(day, device, key))
    print(f"Number of cached queries: {number_of_queries - len(queries)}")
    hline()

    for query in queries:
        day_str = query.day.strftime('%Y-%m-%d')
        print(colors.bold | f"[{progress:>4}/{len(queries)}]", end=" ")
        print(f"Fetch {day_str}, {query.device:<13} ...", end=" ", flush=True)
        try:
            cache_sensor_data(query.device, query.day)
            print(colors.green | "done")
        except:
            print(colors.red | "failed")
        progress += 1
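# Hedged example call (assumes parse_date accepts an ISO date string, as the body above
# implies despite the date annotation on start_date):
# fill_cache("2024-01-01")   # caches sensor_data for every device from 2024-01-01 until today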
def show_workflow_node(self, node, req):
    attrname = self.get("attrname")
    niss = self.get("niss")
    if attrname == "":
        attrname = "urn"

    # create urn only for nodes with files
    if len(node.files) > 0:
        urn = node.get(attrname)
        if urn:
            node.set(attrname, utilsurn.increaseURN(node.get(attrname)))
        else:
            for var in re.findall(r'\[(.+?)\]', niss):
                if var == "att:id":
                    niss = niss.replace("[" + var + "]", unicode(node.id))
                elif var.startswith("att:"):
                    val = node.get(var[4:])
                    try:
                        val = date.format_date(date.parse_date(val), '%Y%m%d')
                    except:
                        logg.exception("exception in workflow step urn, date formatting failed, ignoring")
                    niss = niss.replace("[" + var + "]", val)
            node.set(attrname, utilsurn.buildNBN(self.get("snid1"), self.get("snid2"), niss))
            db.session.commit()
    return self.forwardAndShow(node, True, req)
def formatdate(value, f='%d.%m.%Y %H:%M:%S'):
    if not isinstance(value, unicode):
        value = unicode(value)
    try:
        return format_date(parse_date(value, "%Y-%m-%dT%H:%M:%S"), format=f)
    except ValueError:
        return value
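# Illustrative behaviour of the formatdate filter above (assuming parse_date/format_date wrap
# strptime/strftime): an ISO timestamp is reformatted, anything else falls through unchanged.
# formatdate(u"2016-11-03T09:30:00")        -> u"03.11.2016 09:30:00"
# formatdate(u"03.11.2016", f='%d.%m.%Y')   -> u"03.11.2016"  (ValueError, returned as-is)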
def show_workflow_step(self, req):
    access = acl.AccessData(req)
    if not access.hasWriteAccess(self):
        return "<i>" + t(lang(req), "permission_denied") + "</i>"
    c = []
    for item in self.getChildren():
        c.append({
            "id": str(item.id),
            "creationtime": date.format_date(date.parse_date(item.get("creationtime")), "dd.mm.yyyy HH:MM:SS"),
            "name": item.getName(),
        })
    c.sort(lambda x, y: cmp(x["name"], y["name"]))
    return req.getTAL("workflow/workflow.html",
                      {"children": c,
                       "workflow": self.getParents()[0],
                       "step": self,
                       "nodelink": "/mask?id=" + self.id + "&obj=",
                       "currentlang": lang(req)},
                      macro="workflow_show")
def show_workflow_step(self, req):
    if not self.has_write_access():
        return '<i>' + t(lang(req), "permission_denied") + '</i>'
    c = []
    display_name_attr = self.parents[0].display_name_attribute
    i = 0
    for item in self.children:
        c.append({
            "id": unicode(item.id),
            "creationtime": date.format_date(date.parse_date(item.get('creationtime')), 'dd.mm.yyyy HH:MM:SS')
        })
        if display_name_attr:
            c[i]["name"] = item.get(display_name_attr)
        else:
            c[i]["name"] = item.name
        i += 1
    c.sort(lambda x, y: cmp(x['name'], y['name']))
    return req.getTAL("workflow/workflow.html",
                      {"children": c,
                       "workflow": self.parents[0],
                       "step": self,
                       "nodelink": "/mask?id={}&obj=".format(self.id),
                       'currentlang': lang(req),
                       "csrf": req.csrf_token.current_token},
                      macro="workflow_show")
def show_workflow_node(self, node, req):
    attrname = self.get("attrname")
    niss = self.get("niss")
    if attrname == "":
        attrname = "urn"

    # create urn only for nodes with files
    if len(node.getFiles()) > 0:
        urn = node.get(attrname)
        if urn:
            node.set(attrname, utilsurn.increaseURN(node.get(attrname)))
        else:
            for var in re.findall(r"\[(.+?)\]", niss):
                if var == "att:id":
                    niss = niss.replace("[" + var + "]", node.id)
                elif var.startswith("att:"):
                    val = node.get(var[4:])
                    try:
                        val = date.format_date(date.parse_date(val), "%Y%m%d")
                    except:
                        pass
                    niss = niss.replace("[" + var + "]", val)
            node.set(attrname, utilsurn.buildNBN(self.get("snid1"), self.get("snid2"), niss))
    return self.forwardAndShow(node, True, req)
def writeRecord(req, node, metadataformat, mask=None):
    if not SET_LIST:
        initSetList(req)
    updatetime = node.get(DATEFIELD)
    if updatetime:
        d = ISO8601(date.parse_date(updatetime))
    else:
        d = ISO8601(date.DateTime(EARLIEST_YEAR - 1, 12, 31, 23, 59, 59))

    set_specs = getSetSpecsForNode(node)
    if DEBUG:
        timetable_update(req, " in writeRecord: getSetSpecsForNode: node: '%s, %s', metadataformat='%s' set_specs:%s" %
                         (ustr(node.id), node.type, metadataformat, ustr(set_specs)))

    record_str = """
       <record>
        <header><identifier>%s</identifier>
          <datestamp>%sZ</datestamp>
          %s
        </header>
        <metadata>""" % (mkIdentifier(node.id), d, set_specs)

    if DEBUG:
        timetable_update(req, " in writeRecord: writing header: node.id='%s', metadataformat='%s'" % (ustr(node.id), metadataformat))

    if metadataformat == "mediatum":
        record_str += core.xmlnode.getSingleNodeXML(node)
    # in [masknode.name for masknode in getMetaType(node.getSchema()).getMasks() if masknode.get('masktype')=='exportmask']:
    # elif nodeHasOAIExportMask(node, metadataformat.lower()):
    #     mask = getMetaType(node.getSchema()).getMask(u"oai_" + metadataformat.lower())
    elif mask:
        if DEBUG:
            timetable_update(req, """ in writeRecord: mask = getMetaType(node.getSchema()).getMask(u"oai_"+metadataformat.lower()): node.id='%s', metadataformat='%s'""" %
                             (ustr(node.id), metadataformat))
        # XXX: fixXMLString is gone, do we need to sanitize XML here?
        record_str += mask.getViewHTML([node], flags=8).replace('lang=""', 'lang="unknown"')  # for testing only, remove!
        if DEBUG:
            timetable_update(req, " in writeRecord: req.write(mask.getViewHTML([node], flags=8)): node.id='%s', metadataformat='%s'" %
                             (ustr(node.id), metadataformat))
    else:
        record_str += '<recordHasNoXMLRepresentation/>'

    record_str += '</metadata></record>'
    req.write(record_str)
    if DEBUG:
        timetable_update(req, "leaving writeRecord: node.id='%s', metadataformat='%s'" % (ustr(node.id), metadataformat))
def ListIdentifiers(req):
    if not SET_LIST:
        initSetList(req)
    nodes, tokenstring, metadataformat = getNodes(req)
    if nodes is None:
        return writeError(req, tokenstring)
    if not len(nodes):
        return writeError(req, 'noRecordsMatch')

    req.write('<ListIdentifiers>')
    for n in nodes:
        updatetime = n.get(DATEFIELD)
        if updatetime:
            d = ISO8601(date.parse_date(updatetime))
        else:
            d = ISO8601()
        req.write('<header><identifier>%s</identifier><datestamp>%sZ</datestamp>%s\n</header>\n' %
                  (mkIdentifier(n.id), d, getSetSpecsForNode(n)))
    if tokenstring:
        req.write(tokenstring)
    req.write('</ListIdentifiers>')
    if DEBUG:
        timetable_update(req, "leaving ListIdentifiers")
def archive_thread(self):
    if not time:
        return
    while True:
        time.sleep(int(config.get("archive.interval", 60)))
        archive_nodes_3 = db.getNodeIdByAttribute("archive_state", "3")
        archive_nodes_2 = []
        # the same (truncated) "yyym..." pattern is used for both timestamps, so the comparison stays consistent
        date_now = format_date(now(), "yyymmddhhmmss")

        for manager in self.manager:
            # search for nodes to archive after access over period (state 2)
            for n in db.getNodeIdByAttribute("archive_state", "2"):
                try:
                    node = tree.getNode(n)
                    if node.get("archive_date"):
                        date_archive = format_date(parse_date(node.get("archive_date"), "%Y-%m-%dT%H:%M:%S"), "yyymmddhhmmss")
                        if date_now >= date_archive:
                            archive_nodes_2.append(long(node.id))
                except:
                    pass

            # union to get all nodes with state 3 and 2 with over period
            archive_nodes = union((archive_nodes_3, archive_nodes_2))
            nodes = intersection((db.getNodeIdByAttribute("archive_type", str(manager)), archive_nodes))

            # run action defined in manager
            try:
                self.manager[manager].actionArchive(nodes)
            except:
                pass
def getFormatedValue(self, field, node, language=None, html=1): value = node.get(field.getName()) if not value or value == "0000-00-00T00:00:00": # dummy for unknown return (field.getLabel(), "") else: try: d = parse_date(value) except ValueError: return (field.getLabel(), value) value = format_date(d, format=field.getValues()) value_list = [] if re.search(r'\d{2}\W\d{2}\W', value): day_month = re.sub(r'00\W', '', re.search(r'\d{2}\W\d{2}\W', value).group()) value_list.append(day_month) if re.search(r'\d{4}\W\d{2}', value): year_month = re.sub(r'\W00', '', re.search(r'\d{4}-\d{2}', value).group()) value_list.append(year_month) elif re.search(r'\d{4}', value): value_list.append(re.search(r'\d{4}', value).group()) return (field.getLabel(), ''.join(value_list))
def getFormattedValue(self, metafield, maskitem, mask, node, language, html=True):
    '''
    Uses regular expressions to check which date parts the value contains,
    collects the recognizable parts in a list and returns them joined.
    :param metafield: metadatafield
    :param node: node with fields
    :return: formatted value
    '''
    value = node.get(metafield.getName())
    if not value or value == "0000-00-00T00:00:00":  # dummy for unknown
        return (metafield.getLabel(), u"")
    else:
        try:
            d = parse_date(value)
        except ValueError:
            return (metafield.getLabel(), value)
        value = format_date(d, format=metafield.getValues())

    value_list = []
    if re.search(r'\d{2}\W\d{2}\W', value):
        day_month = re.sub(r'00\W', '', re.search(r'\d{2}\W\d{2}\W', value).group())
        value_list.append(day_month)
    if re.search(r'\d{4}\W\d{2}', value):
        year_month = re.sub(r'\W00', '', re.search(r'\d{4}-\d{2}', value).group())
        value_list.append(year_month)
    elif re.search(r'\d{4}', value):
        value_list.append(re.search(r'\d{4}', value).group())
    return (metafield.getLabel(), ''.join(value_list))
def _get_datetime_from_iso_8601(datestring):
    try:
        return datetime.strptime(datestring, "%Y-%m-%dT%H:%M:%S")
    except (TypeError, ValueError):
        try:
            return parse_date(datestring, "%Y-%m-%dT%H:%M:%S")
        except:
            return datetime(year=1970, month=1, day=1)
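# Sketch of the fallback chain above (illustrative values): a well-formed ISO string parses,
# anything else, including None, ends up as the epoch-like default.
# _get_datetime_from_iso_8601("2016-11-03T09:30:00")  -> datetime(2016, 11, 3, 9, 30)
# _get_datetime_from_iso_8601(None)                   -> datetime(1970, 1, 1, 0, 0)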
def execute(self, attrs=None):
    if self.op and self.op != "=":
        if self.op == ">=":
            v1 = parse_date(self.value).daynum()
            v2 = 2147483648
        elif self.op == ">":
            v1 = parse_date(self.value).daynum() + 1
            v2 = 2147483648
        elif self.op == "<":
            v1 = 0
            v2 = parse_date(self.value).daynum() + 1
        elif self.op == "<=":
            v1 = 0
            v2 = parse_date(self.value).daynum()
        return numquery(self.field, v1, v2)
    else:
        return query(self.field, self.value)
def writeRecord(req, node, metadataformat):
    if not SET_LIST:
        initSetList(req)
    updatetime = node.get(DATEFIELD)
    if updatetime:
        d = ISO8601(date.parse_date(updatetime))
    else:
        d = ISO8601(date.DateTime(EARLIEST_YEAR - 1, 12, 31, 23, 59, 59))

    set_specs = getSetSpecsForNode(node)
    if DEBUG:
        timetable_update(req, " in writeRecord: getSetSpecsForNode: node: '%s, %s', metadataformat='%s' set_specs:%s" %
                         (str(node.id), node.type, metadataformat, str(set_specs)))

    req.write("""
       <record>
        <header><identifier>%s</identifier>
          <datestamp>%sZ</datestamp>
          %s
        </header>
        <metadata>""" % (mkIdentifier(node.id), d, set_specs))

    if DEBUG:
        timetable_update(req, " in writeRecord: writing header: node.id='%s', metadataformat='%s'" % (str(node.id), metadataformat))

    if metadataformat == "mediatum":
        req.write(core.xmlnode.getSingleNodeXML(node))
    # in [masknode.name for masknode in getMetaType(node.getSchema()).getMasks() if masknode.get('masktype')=='exportmask']:
    elif nodeHasOAIExportMask(node, metadataformat.lower()):
        mask = getMetaType(node.getSchema()).getMask("oai_" + metadataformat.lower())
        if DEBUG:
            timetable_update(req, """ in writeRecord: mask = getMetaType(node.getSchema()).getMask("oai_"+metadataformat.lower()): node.id='%s', metadataformat='%s'""" %
                             (str(node.id), metadataformat))
        try:
            req.write(fixXMLString(mask.getViewHTML([node], flags=8)))  # fix xml errors
        except:
            req.write(mask.getViewHTML([node], flags=8))
        if DEBUG:
            timetable_update(req, " in writeRecord: req.write(mask.getViewHTML([node], flags=8)): node.id='%s', metadataformat='%s'" %
                             (str(node.id), metadataformat))
    else:
        req.write('<recordHasNoXMLRepresentation/>')

    req.write('</metadata></record>')
    if DEBUG:
        timetable_update(req, "leaving writeRecord: node.id='%s', metadataformat='%s'" % (str(node.id), metadataformat))
def listdir(self, path, long=0):
    olddir = self.dir
    oldnode = self.node
    upload_dir = self.dir[0]
    if path:
        self.cwd(path)
    l = []

    # convert nodefiles to nodes
    for nodefile in self.dir[-1].getFiles():
        if not nodefile.getType().startswith('tile') and "ftp_" in nodefile.retrieveFile() and os.path.exists(nodefile.retrieveFile()):
            file_to_node(nodefile, upload_dir)

    # display folders and nodes
    for node in self.dir[-1].getChildren():
        if node.getName().strip() != "" and node.isContainer():
            nodedate = node.get("creationtime")
            if nodedate:
                t = parse_date(nodedate)
                l += ["drwxrwxrwx 1 1001 100 4096 %d %d %d %s" % (t.month, t.day, t.year, node.getName())]
            else:
                l += ["drwxrwxrwx 1 1001 100 4096 Jan 10 2008 %s" % node.getName()]
        else:
            l += ["-rw-rw-rw- 1 1001 100 0 %d %d %d (%s) %s" % (1, 1, 2000, node.id, node.getName())]

    # display any unconverted files
    for nodefile in self.dir[-1].getFiles():
        if not nodefile.getType().startswith('tile') and "ftp_" in nodefile.retrieveFile() and os.path.exists(nodefile.retrieveFile()):
            t = os.stat(nodefile.retrieveFile())[8]  # last modification date
            l += ["-rw-rw-rw- 1 1001 100 %8d %d %d %d %s" %
                  (nodefile.getSize(), time.localtime(t)[1], time.localtime(t)[2], time.localtime(t)[0], nodefile.getName())]

    self.dir = olddir
    self.node = oldnode
    return athana.list_producer(l)
def runAction(self, node, op=""): """ The actual proccessing of the node object takes place here. Read out the values of attrname and accesstype if any. Generate the ACL-rule, and save it. """ l_date = node.get(self.get('attrname')) if l_date: if date.validateDateString(l_date): try: node.set('updatetime', date.format_date(date.parse_date(l_date))) formated_date = date.format_date(date.parse_date(l_date), "dd.mm.yyyy") for item in self.get('accesstype').split(';'): node.setAccess(item, "{date >= %s}" % formated_date) node.getLocalRead() if self.get( 'recipient' ): # if the recipient-email was entered, create a scheduler attr_dict = { 'single_trigger': l_date, 'function': "test_sendmail01", 'nodelist': list(node.id), 'attr_recipient': self.get('recipient'), 'attr_subject': "{} ID: {}".format(self.get('subject'), node.id), 'attr_body': self.get('body') } schedules.create_schedule("WorkflowStep_Defer", attr_dict) except ValueError as e: print "Error: %s" % e
def backend_plot_sensor(device: Optional[str] = None,
                        start_date: Optional[str] = None,
                        end_date: Optional[str] = None,
                        sensor: str = "temperature",
                        sample_rate: str = "AUTO"):
    logging.warning(f"{start_date} - {end_date}")
    if not end_date or not start_date:
        start_date = date.today() - timedelta(days=1)
        end_date = date.today() - timedelta(days=1)
    else:
        from utils.date import parse_date
        start_date = parse_date(start_date)
        end_date = parse_date(end_date)

    if not device:
        device = "PTL_RD_AT_001"

    all_sensors = ["temperature", "humidity", "pressure", "brightness", "gas", "presence"]
    if sensor == "ALL":
        active_sensors = all_sensors
    else:
        active_sensors = [sensor]

    ajax = PlotSensors(plot_parameters={'start_date': start_date,
                                        'end_date': end_date,
                                        'device': device,
                                        'sensors': active_sensors,
                                        'sample_rate': sample_rate})
    data = ajax.fetch_data()
    if data.empty:
        return dict()
    plot = ajax._plot(data)

    from ajax_plots import reactify_bokeh
    return reactify_bokeh(plot)
def getFormatedValue(self, field, node, language=None, html=1): value = node.get(field.getName()) if not value or value == "0000-00-00T00:00:00": # dummy for unknown return (field.getLabel(), "") else: try: d = parse_date(value) except ValueError: return (field.getLabel(), value) value = format_date(d, format=field.getValues()) return (field.getLabel(), value)
def format_request_value_for_db(self, field, params, item, language=None):
    value = params.get(item)
    f = field.getSystemFormat(str(field.getValues()))
    if not f:
        return ""
    try:
        d = parse_date(str(value), f.getValue())
    except ValueError:
        return ""
    if not validateDate(d):
        return ""
    return format_date(d, format='%Y-%m-%dT%H:%M:%S')
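# Hedged example of the normalisation above (the '%d.%m.%Y' system format is an assumption;
# the real format comes from the field definition): a form value of u"03.11.2016" would be
# stored as u"2016-11-03T00:00:00", while an unparseable or invalid value yields "".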
def get_iptc_values(file_path, tags=None):
    """
    get the IPTC tags/values from a given image file
    :rtype : object
    :param file_path: path to the image file
    :param tags: dictionary with wanted iptc tags
    :return: dictionary with tag/value
    """
    if not isinstance(tags, dict):
        return
    ret = {}
    if not os.path.exists(file_path):
        logger.info('Could not read IPTC metadata from non existing file.')
        return {}
    if os.path.basename(file_path).startswith('-'):
        logger.error('Will not read IPTC metadata from files starting with a hyphen, caused by exiftool security issues.')
        return {}

    with exiftool.ExifTool() as et:
        for tag in tags.keys():
            if et.get_tag_batch(tag.split('iptc_')[-1], [file_path])[0] is not None:
                ret[tag.split('iptc_')[-1]] = et.get_tag_batch(tag.split('iptc_')[-1], [file_path])[0]
            if tag.split('iptc_')[-1] == 'DateCreated':
                if 'DateCreated' in ret.keys():
                    if validateDate(parse_date(ret['DateCreated'], format='%Y:%m:%d')):
                        ret['DateCreated'] = format_date(parse_date(ret['DateCreated'], format='%Y:%m:%d'))
                    else:
                        logger.error('Could not validate: {}.'.format(ret['DateCreated']))
    logger.info('{} read from file.'.format(ret))
    return ret
def runAction(self, node, op=""): """ The actual proccessing of the node object takes place here. Read out the values of attrname and accesstype if any. Generate the ACL-rule, and save it. """ l_date = node.get(self.get('attrname')) if l_date: if date.validateDateString(l_date): try: node.set('updatetime', date.format_date(date.parse_date(l_date))) formated_date = date.format_date(date.parse_date(l_date), "dd.mm.yyyy") d = formated_date.split('.') rule = get_or_add_defer_daterange_rule(int(d[2]), int(d[1]), int(d[0])) for access_type in self.get('accesstype').split(';'): special_access_ruleset = node.get_or_add_special_access_ruleset(ruletype=access_type) special_access_ruleset.rule_assocs.append(AccessRulesetToRule(rule=rule)) db.session.commit() except ValueError: logg.exception("exception in workflow step defer, runAction failed")
def createUrn(node, namespace, urn_type): """ @param node for which the URN should be created @param namespace of the urn; list of the namespaces can be found here: http://www.iana.org/assignments/urn-namespaces/urn-namespaces.xml @param urn_type e.q. diss, epub, etc """ if node.get('urn') and (node.get('urn').strip() != ''): # keep the existing urn, if there is one logging.getLogger('everything').info('urn already exists for node %s' % node.id) else: try: d = date.parse_date(node.get('date-accepted')) except: d = date.now() niss = '%s-%s-%s-0' % (urn_type, date.format_date(d, '%Y%m%d'), node.id) node.set('urn', urn.buildNBN(namespace, config.get('urn.institutionid'), niss))
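# Illustrative NISS built by createUrn (values are assumptions): for urn_type='diss',
# date-accepted 2016-11-03 and node id 12345 the '%s-%s-%s-0' pattern yields
# 'diss-20161103-12345-0', which buildNBN then wraps into the full URN.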
def test_create_update(session, req, guest_user, some_user, enable_athana_continuum_plugin):
    session.commit()
    req.app_cache = {}
    req.session["user_id"] = some_user.id
    node = DocumentFactory()
    session.add(node)
    node["testattr"] = "new"
    session.commit()
    req.app_cache = {}
    # well, guest users shouldn't update nodes, but it's ok for a test ;)
    req.session["user_id"] = guest_user.id
    node["testattr"] = "changed"
    session.commit()
    assert node.creator == some_user.getName()
    assert node.updateuser == guest_user.getName()
    assert node.creationtime <= node.updatetime
    assert parse_date(node.updatetime)
async def search(self, ctx, *, name: str):
    data = await self.kitsu.get_anime(name)
    if len(data) == 0:
        return await ctx.send(':no_entry_sign: Not found.')
    anime = AttrDict(data[0])
    attrs = AttrDict(anime.attributes)
    embed = discord.Embed(color=randomness.random_colour())
    embed.title = f'{attrs.titles["ja_jp"]} ({attrs.titles["en_jp"]})'
    if not attrs.nsfw:
        embed.set_thumbnail(url=attrs.posterImage['medium'])
    embed.description = attrs.synopsis
    embed.url = f'https://kitsu.io/anime/{attrs.slug}'
    embed.set_footer(text='Powered by kitsu.io')
    embed.add_field(name='Started On', value=date.parse_date(attrs.startDate))
    embed.add_field(name='Ended On', value='Ongoing' if attrs.status != 'finished' else date.parse_date(attrs.endDate))
    await ctx.send(embed=embed)
def getEditorHTML(self, field, value="", width=400, lock=0, language=None, required=None): d = field.getSystemFormat(field.getValues()) if value == "?": value = date.format_date(date.now(), d.getValue()) with suppress(Exception, warn=False): value = date.format_date(date.parse_date(value), d.getValue()) return tal.getTAL("metadata/date.html", {"lock": lock, "value": value, "width": width, "name": field.getName(), "field": field, "pattern": self.get_input_pattern(field), "title": self.get_input_title(field), "placeholder": self.get_input_placeholder(field), "required": self.is_required(required)}, macro="editorfield", language=language)
def getAccessRights(node):
    """
    Get access rights for the public.
    The values returned descend from
    http://wiki.surffoundation.nl/display/standards/info-eu-repo/#info-eu-repo-AccessRights.
    These values are used by the OpenAIRE portal.
    """
    try:
        # if node.get('updatetime') is empty, parse_date would raise an exception
        l_date = parse_date(node.get('updatetime'))
    except:
        l_date = date.now()

    guest_user = get_guest_user()
    if date.now() < l_date:
        return "embargoedAccess"
    elif node.has_read_access(user=guest_user):
        if node.has_data_access(user=guest_user):
            return "openAccess"
        else:
            return "restrictedAccess"
    else:
        return "closedAccess"
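# Summary of the decision ladder in getAccessRights (illustration; guest permissions assumed):
#   updatetime lies in the future                 -> "embargoedAccess"
#   guest can read and has data (file) access     -> "openAccess"
#   guest can read but lacks data access          -> "restrictedAccess"
#   guest cannot read the node                    -> "closedAccess"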
def getEditorHTML(self, field, value="", width=400, lock=0, language=None, required=None): d = field.getSystemFormat(str(field.getValues())) if value == "?": value = date.format_date(date.now(), d.getValue()) try: value = date.format_date(date.parse_date(value), d.getValue()) except: pass return tal.getTAL("metadata/date.html", {"lock": lock, "value": value, "width": width, "name": field.getName(), "field": field, "pattern": self.get_input_pattern(field), "title": self.get_input_title(field), "placeholder": self.get_input_placeholder(field), "required": self.is_required(required)}, macro="editorfield", language=language)
def xml_start_element(self, name, attrs):
    if name == "access":
        self.access = StatAccess()
        self.access.id = self.currentnodeid
        for key in attrs:
            if key == "date":
                self.access.date = attrs[key]
            elif key == "time":
                self.access.time = attrs[key]
            elif key == "visitor_number":
                self.access.visitor = attrs[key]
            elif key == "country":
                self.access.country = attrs[key].encode("utf-8")
    elif name == "nodelist":
        if "created" in attrs.keys():
            self.created = parse_date(attrs["created"], "yyyy-mm-dd HH:MM:SS")
    elif name == "node":
        if "id" in attrs.keys():
            self.currentnodeid = attrs["id"]