def push():
    """Attach the collected metadata to the target entry and save it.

    If the entry has no metadata yet, the collected block is installed
    wholesale; otherwise it is merged into the existing metadata.
    """
    # NOTE(review): 'self' and 'metadata' are free names here — the
    # signature looks truncated (likely `def push(self):` with metadata
    # computed earlier); confirm against the original source.
    entry = get_entry_by_id(self.options.entry_id)
    if entry.metadata is not None:
        entry.metadata.merge(metadata)
    else:
        entry.metadata = metadata
    update_entry_by_id(entry.id, entry)
def push():
    """Register every pending file reference on the entry and persist it.

    Each element of `filerefs` is copied into a fresh FileReference and
    appended to the entry's file list before the entry is written back.
    """
    # NOTE(review): 'step' and 'filerefs' are free names — presumably
    # bound by the enclosing scope; confirm against the original source.
    entry = get_entry_by_id(step.options.entry_id)
    entry.files.extend(
        FileReference(
            purpose=ref.purpose,
            version=ref.version,
            reference=ref.reference,
            mime_type=ref.mime_type,
        )
        for ref in filerefs
    )
    update_entry_by_id(entry.id, entry)
def push():
    """Create the managed File record and link it to the entry.

    Registers a File (status=managed) whose URL comes from main storage,
    then appends a matching FileReference to the entry and persists it.
    """
    # NOTE(review): 'self', 'file_ref' and 'main_path' are free names —
    # signature looks truncated; confirm against the original source.
    entry = get_entry_by_id(self.options.entry_id)
    managed_file = File(
        reference=self.reference,
        url=self.system.main_storage.get_file_url(main_path),
        mime_type=self.source.mime_type,
        status=FileStatus.managed,
    )
    create_file(managed_file)
    entry.files.append(
        FileReference(
            purpose=file_ref.purpose,
            version=file_ref.version,
            reference=self.reference,
            mime_type=file_ref.mime_type,
        )
    )
    update_entry_by_id(entry.id, entry)
def register_parts(self): raw = None original = None derivative = None entry = None source = None root_path = None for part in self.options.parts: url = part.get_url(self.system) root_path = root_path or part.root_path f = File(url=url, reference=url, status=FileStatus.new, mime_type=part.mime_type) try: f = create_file(f) except Conflict: f = get_file_by_url(url) if f.reference is not None: entry = next( iter(get_entries_by_reference(f.reference).entries), None) if f is None: logging.error('Bad file: %s', f.to_json()) if part.is_raw: raw = f source = part.source elif raw is None: original = f source = part.source else: derivative = f primary = raw or original or derivative if primary is None: logging.error('No valid file!\n%s', self.step.to_json()) return if entry is None: entry = Entry( type=EntryType.image, metadata=DefaultEntryMetadata( original_filename=os.path.basename(primary.url), source=source, ), ) entry = create_entry(entry) jobs = [] for f, p in ((raw, FilePurpose.raw), (original, FilePurpose.original), (derivative, FilePurpose.derivative)): if f is None: continue entry.files.append( FileReference( reference=f.reference, purpose=p, version=0, mime_type=f.mime_type, )) jobs.append( Job(steps=[ ToCut.AsStep( source_root_path=root_path, source_url=f.url, ), CalculateHash.AsStep(), ReadMetadata.AsStep( entry_id=entry.id, mime_type=f.mime_type, ), ToMain.AsStep( entry_id=entry.id, source_url=f.url, ), CreateProxy.AsStep( entry_id=entry.id, source_url=f.url, ), CleanCut.AsStep(), ])) update_entry_by_id(entry.id, entry) with QueueClient('ipc://job_queue') as q: for job in jobs: q.send(job)