def __init__(self, obj):
    """Initialize default tags from *obj*, stamp encoding/tagging times, and strip forbidden tags.

    :param obj: source object forwarded to the parent constructor.
    """
    super(DefaultTags, self).__init__(obj)
    # Snapshot "now" ONCE so the three derived tags are mutually consistent;
    # the original called datetime.now() three times and could straddle a
    # second (or even a year) boundary between calls.
    now_ = datetime.now(tz=timezone(DFTTIMEZONE))
    self._tags["encodingtime"] = int(now_.timestamp())
    self._tags["encodingyear"] = now_.strftime("%Y")
    self._tags["taggingtime"] = format_date(now_, template=TEMPLATE3)
    # These tags must never survive; a missing key is not an error.
    for tag in ["copyright", "description", "mediaprovider", "profile", "purchasedate"]:
        with suppress(KeyError):
            del self._tags[tag]
def rippeddiscs(collection):
    """Build an XML tree describing ripped discs grouped by month.

    :param collection: iterable of ``(key, group)`` pairs where *key* is a
                       "YYYYMM" string and *group* yields rows whose columns
                       match the ``RippedDisc`` fields below.
    :return: the ``rippeddiscs`` root :class:`xml.etree.ElementTree.Element`.
    """
    # BUG FIX: the field was misspelled "tear"; the loop below reads row.year,
    # which raised AttributeError on every row.
    RippedDisc = namedtuple("RippedDisc", "rowid ripped readable_year readable_month artistsort albumsort origyear year genre album upc disc tracks bootleg")
    root = ET.Element("rippeddiscs")
    se = ET.SubElement(root, "created")
    se.text = now()
    collection = iter(collection)
    for key, group in collection:
        group = list(group)
        # Key label: "$month $Y" rendered from the YYYYMM key (day forced to 1).
        se = ET.SubElement(root, "key", attrib={"label": format_date(date(int(key[:4]), int(key[-2:]), 1), template="$month $Y"), "count": str(len(group))})
        for row in map(RippedDisc._make, group):
            sse = ET.SubElement(se, "disc", attrib={"uid": str(row.rowid)})
            # One child element per simple attribute (data-driven instead of
            # eight copy-pasted SubElement stanzas).
            for tag, value in (
                ("artistsort", row.artistsort),
                ("albumsort", row.albumsort),
                ("genre", row.genre),
                ("upc", str(row.upc)),
                ("album", row.album),
                ("discid", str(row.disc)),
                ("origyear", str(row.origyear)),
                ("year", str(row.year)),
            ):
                ET.SubElement(sse, tag).text = value
            # NOTE(review): "readable_date" is NOT a declared RippedDisc field
            # (only readable_year/readable_month are) — this still raises
            # AttributeError. The intended source of the ripped timestamp/text
            # cannot be determined from this file; TODO confirm against the
            # query that produces `collection`.
            element = ET.SubElement(sse, "ripped", attrib=dict(ts=str(row.readable_date[1])))
            element.text = row.readable_date[0]
    return root
def __init__(self, metadata):
    """Initialize from *metadata*, stamp encoding/tagging times, and strip forbidden tags.

    :param metadata: mapping forwarded to the parent constructor.
    """
    super(RemoveMetaData, self).__init__(metadata)
    # Snapshot "now" ONCE so the three derived tags are mutually consistent;
    # three independent datetime.now() calls could straddle a second/year
    # boundary (same fix as the sibling tag classes).
    now_ = datetime.now(tz=timezone(DFTTIMEZONE))
    self["encodingtime"] = int(now_.timestamp())
    self["encodingyear"] = now_.strftime("%Y")
    self["taggingtime"] = format_date(now_, template=TEMPLATE3)
    # These tags must never survive; a missing key is not an error.
    for tag in ["copyright", "description", "mediaprovider", "profile", "purchasedate"]:
        with suppress(KeyError):
            del self[tag]
# ================
# Initializations.
# ================
level, pp, arguments = 1, PrettyPrinter(indent=4, width=160), parser.parse_args()

# ===============
# Main algorithm.
# ===============

# 1. Open the database connection. The try/finally guarantees the connection
#    is closed even if the query or the JSON dump raises (the original leaked
#    the connection on any exception).
conn = sqlite3.connect(arguments.db)
try:
    # 2. Dump the requested table.
    # NOTE(review): the table name is interpolated into the SQL text because
    # SQLite cannot bind identifiers as parameters; arguments.table comes from
    # the command line, so this is only acceptable for a local admin script.
    r = [
        format_date(UTC.localize(datetime.utcnow()).astimezone(LOCAL), template=TEMPLATE2),
        list(conn.execute("SELECT * FROM {table} ORDER BY rowid".format(table=arguments.table))),
    ]
    if arguments.print:
        pp.pprint(r)
    with open(OUTFILE, mode=WRITE) as fp:
        json.dump(r, fp, indent=4, ensure_ascii=False)
    level = 0
finally:
    # 3. Close the database connection.
    conn.close()
sys.exit(level)
def __init__(self, obj):
    """Initialize from *obj* and record the encoder provenance tag.

    :param obj: source object forwarded to the parent constructor.
    """
    super(EncodedFromLegalDSDFile, self).__init__(obj)
    # Conversion timestamp rendered with the project-wide TEMPLATE3 format.
    converted_on = format_date(datetime.now(tz=timezone(DFTTIMEZONE)), template=TEMPLATE3)
    self._tags["encodedby"] = "dBpoweramp Batch Converter on {0} from original nugs.net DSD file".format(converted_on)
arguments = parser.parse_args()

# ================
# Local functions.
# ================
rjustify = partial(rjustify_index, width=4)


def _make_entry(image, stamp):
    """Return (path, formatted local datetime, POSIX timestamp) for one CSV row.

    The stamp is presumably an EXIF-style "YYYY:MM:DD HH:MM:SS" string — the
    first two colons are swapped for dashes so it parses as a date; TODO
    confirm against the CSV producer. Parsing happens ONCE per row (the
    original parsed the same string twice).
    """
    parsed = parse(stamp.replace(":", "-", 2))
    return Path(image), format_date(LOCAL.localize(parsed), template=TEMPLATE2), int(parsed.timestamp())


# =======================
# Templating environment.
# =======================
template = TemplatingEnvironment(_MYPARENT / "Templates")
template.set_environment(filters={"rjustify": rjustify})

# ============
# Main script.
# ============
stream = csv.reader(arguments.collection, CustomDialect())
# BUG FIX: the loop variable was previously named "datetime", shadowing the
# datetime import for the rest of the comprehension.
collection = [_make_entry(image, stamp) for image, stamp in stream]  # type: Any
# Sort by timestamp first, then (stable sort) by parent directory.
collection = sorted(collection, key=itemgetter(2))
collection = sorted(collection, key=itemgetter_(0)(attrgetter("parent")))
collection = enumerate(collection, start=1)
# Keep (index, path, formatted date); drop the raw timestamp (compress mask [1, 1, 0]).
collection = [((index,), tuple(compress(file, [1, 1, 0]))) for index, file in collection]
collection = [tuple(chain.from_iterable(item)) for item in collection]
print(template.get_template("T01").render(collection=collection))
parser = argparse.ArgumentParser(parents=[database_parser])
parser.add_argument("--table", default="sqlite_master")
parser.add_argument("--print", action="store_true")

# ==========
# Constants.
# ==========
OUTFILE = os.path.join(os.path.expandvars("%TEMP%"), "sqlite_master.json")

# ================
# Initializations.
# ================
pp, arguments = PrettyPrinter(indent=4, width=160), parser.parse_args()

# ===============
# Main algorithm.
# ===============

# 1. Open the database connection. The try/finally guarantees the connection
#    is closed even if the query or the JSON dump raises (the original leaked
#    the connection on any exception).
conn = sqlite3.connect(arguments.db)
try:
    # 2. Dump the requested table.
    # NOTE(review): the table name is interpolated into the SQL text because
    # SQLite cannot bind identifiers as parameters; arguments.table comes from
    # the command line, so this is only acceptable for a local admin script.
    r = [
        format_date(UTC.localize(datetime.utcnow()).astimezone(LOCAL), template=TEMPLATE2),
        list(conn.execute("SELECT * FROM {table} ORDER BY rowid".format(table=arguments.table))),
    ]
    if arguments.print:
        pp.pprint(r)
    with open(OUTFILE, mode=WRITE) as fp:
        json.dump(r, fp, indent=4, ensure_ascii=False)
finally:
    # 3. Close the database connection.
    conn.close()
def __init__(self, metadata):
    """Initialize from *metadata* and record the encoder provenance tag.

    :param metadata: mapping forwarded to the parent constructor.
    """
    super(EncodedFromNugsDSDFile, self).__init__(metadata)
    # Conversion timestamp rendered with the project-wide TEMPLATE3 format.
    converted_on = format_date(datetime.now(tz=timezone(DFTTIMEZONE)), template=TEMPLATE3)
    self["encodedby"] = "dBpoweramp Batch Converter on {0} from original nugs.net DSD file".format(converted_on)
def __init__(self, mandatory=True):
    """Initialize per-instance storage with the current year as default value.

    :param mandatory: when True, a value is presumably required before use —
                      TODO confirm against the class that reads _mandatory.
    """
    # Per-instance value store keyed on owner instances without keeping them alive.
    self._data = WeakKeyDictionary()
    self._mandatory = mandatory
    # NOTE(review): localizing a naive utcnow() with LOCAL treats UTC wall
    # time as local time; sibling scripts use UTC.localize(...).astimezone(LOCAL)
    # instead. Harmless for a "$Y" (year-only) render except around New Year —
    # confirm intent before changing.
    current_year = format_date(LOCAL.localize(datetime.datetime.utcnow()), template="$Y")
    self._default = current_year