def serialize_PartParameterData(fpart_parameter, part_parameter=None):
    """Serialize a database part parameter into an API PartParameterData.

    :param fpart_parameter: database parameter object to read from
    :param part_parameter: optional target object; allocated when None
    :return: the populated PartParameterData
    """
    target = PartParameterData() if part_parameter is None else part_parameter
    target.name = fpart_parameter.name
    target.description = fpart_parameter.description
    if fpart_parameter.unit:
        target.unit = raise_on_error(find_unit(fpart_parameter.unit.id))
    target.numeric = fpart_parameter.numeric
    if fpart_parameter.text_value:
        target.text_value = fpart_parameter.text_value
    # min/nom/max share the same copy pattern: the raw value is copied
    # verbatim, the prefix is resolved through the unit-prefix lookup.
    for bound in ('min', 'nom', 'max'):
        value = getattr(fpart_parameter, bound + '_value')
        if value:
            setattr(target, bound + '_value', value)
        prefix = getattr(fpart_parameter, bound + '_prefix')
        if prefix:
            setattr(target, bound + '_prefix',
                    raise_on_error(find_unit_prefix(prefix.id)))
    return target
def serialize_Model(fmodel, model=None):
    """Serialize a database model into an API Model object.

    :param fmodel: database model object to read from
    :param model: optional target object; allocated when None
    :return: the populated Model
    """
    model = model if model is not None else Model()
    model.id = fmodel.id
    # copy the plain data fields first, then resolve the relations
    serialize_ModelData(fmodel, model)
    if fmodel.category:
        model.category = raise_on_error(
            find_models_category(fmodel.category.id))
    if fmodel.image:
        model.image = raise_on_error(find_upload_file(fmodel.image.id))
    if fmodel.model:
        model.model = raise_on_error(find_upload_file(fmodel.model.id))
    return model
def serialize_Footprint(ffootprint, footprint=None):
    """Serialize a database footprint into an API Footprint object.

    :param ffootprint: database footprint object to read from
    :param footprint: optional target object; allocated when None
    :return: the populated Footprint
    """
    footprint = footprint if footprint is not None else Footprint()
    footprint.id = ffootprint.id
    # copy the plain data fields first, then resolve the relations
    serialize_FootprintData(ffootprint, footprint)
    if ffootprint.category:
        footprint.category = raise_on_error(
            find_footprints_category(ffootprint.category.id))
    if ffootprint.image:
        footprint.image = raise_on_error(
            find_upload_file(ffootprint.image.id))
    if ffootprint.footprint:
        footprint.footprint = raise_on_error(
            find_upload_file(ffootprint.footprint.id))
    return footprint
def serialize_Part(fpart, part=None, with_offers=True, with_parameters=True,
                   with_childs=True, with_distributors=True,
                   with_manufacturers=True, with_storages=True,
                   with_attachements=True):
    """Serialize a database part into an API Part object.

    :param fpart: database part object to read from
    :param part: optional target object; allocated when None
    :param with_offers: forwarded to the recursive child lookup
    :param with_parameters: include part parameters
    :param with_childs: recursively serialize child parts
    :param with_distributors: include distributor offers
    :param with_manufacturers: include manufacturer entries
    :param with_storages: include storage locations
    :param with_attachements: include file attachements
    :return: the populated Part
    """
    if part is None:
        part = Part()
    part.id = fpart.id
    serialize_PartData(fpart, part, with_parameters)
    if fpart.category:
        part.category = raise_on_error(find_parts_category(fpart.category.id))
    if fpart.footprint:
        part.footprint = raise_on_error(
            find_versioned_file(fpart.footprint.id))
    if fpart.symbol:
        part.symbol = raise_on_error(find_versioned_file(fpart.symbol.id))
    # extract childs: each child is fully re-resolved through find_part so
    # the same include flags propagate down the whole tree
    if with_childs:
        part.childs = []
        for fchild in fpart.childs.all():
            part.childs.append(
                raise_on_error(
                    find_part(fchild.id,
                              with_offers=with_offers,
                              with_parameters=with_parameters,
                              with_childs=with_childs,
                              with_distributors=with_distributors,
                              with_manufacturers=with_manufacturers,
                              with_storages=with_storages,
                              with_attachements=with_attachements)))
    part.has_childs = (fpart.childs.count() > 0)
    if with_distributors:
        part.distributors = raise_on_error(find_part_distributors(fpart.id))
    if with_manufacturers:
        part.manufacturers = raise_on_error(find_part_manufacturers(fpart.id))
    if with_storages:
        part.storages = raise_on_error(find_part_storages(fpart.id))
    if with_attachements:
        part.attachements = []
        for fattachement in fpart.attachements.all():
            # renamed from 'file' to avoid shadowing the builtin
            fupload = raise_on_error(find_upload_file(fattachement.file.id))
            attachement = PartAttachement()
            attachement.id = fattachement.file.id
            attachement.description = fattachement.description
            attachement.source_name = fupload.source_name
            attachement.storage_path = fupload.storage_path
            part.attachements.append(attachement)
    return part
def serialize_PartParameterDescription(fpart_parameter, part_parameter=None):
    """Serialize a parameter-description dict into a PartParameterDescription.

    :param fpart_parameter: dict with 'name', 'description', 'unit', 'numeric'
    :param part_parameter: optional target object; allocated when None
    :return: the populated PartParameterDescription
    """
    if part_parameter is None:
        part_parameter = PartParameterDescription()
    part_parameter.name = fpart_parameter['name']
    part_parameter.description = fpart_parameter['description']
    unit_id = fpart_parameter['unit']
    if unit_id:
        part_parameter.unit = raise_on_error(find_unit(unit_id))
    part_parameter.numeric = fpart_parameter['numeric']
    return part_parameter
def serialize_Storage(fstorage, storage=None):
    """Serialize a database storage into an API Storage object.

    :param fstorage: database storage object to read from
    :param storage: optional target object; allocated when None
    :return: the populated Storage
    """
    storage = storage if storage is not None else Storage()
    storage.id = fstorage.id
    # copy the plain data fields first, then resolve the category relation
    serialize_StorageData(fstorage, storage)
    if fstorage.category:
        storage.category = raise_on_error(
            find_storages_category(fstorage.category.id))
    return storage
def serialize_PartData(fpart, part=None, with_parameters=True):
    """Copy the scalar fields of a database part into an API PartData.

    :param fpart: database part object to read from
    :param part: optional target object; allocated when None
    :param with_parameters: when True (and the part is saved), attach the
        resolved parameter list
    :return: the populated PartData
    """
    if part is None:
        part = PartData()
    part.name = fpart.name
    part.description = fpart.description
    part.comment = fpart.comment
    # optional scalar fields are only copied when set
    for field in ('octopart', 'octopart_uid', 'updated'):
        value = getattr(fpart, field)
        if value:
            setattr(part, field, value)
    if fpart.id and with_parameters:
        part.parameters = raise_on_error(find_part_parameters(fpart.id))
    return part
def deserialize_FootprintNew(footprint, ffootprint=None):
    """Deserialize a new API footprint into a database footprint.

    Resolves the category, image and footprint-file relations; a missing
    relation target is reported through raise_on_error with code 1000.
    Bare ``except:`` clauses were narrowed to the specific Django
    ``DoesNotExist`` exceptions so unrelated errors are no longer swallowed.

    :param footprint: API footprint object to read from
    :param ffootprint: optional database target, forwarded to
        deserialize_FootprintData
    :return: the populated database footprint
    """
    ffootprint = deserialize_FootprintData(footprint, ffootprint)
    if footprint.category:
        try:
            ffootprint.category = api.models.FootprintCategory.objects.get(
                id=footprint.category.id)
        except api.models.FootprintCategory.DoesNotExist:
            raise_on_error(
                Error(code=1000,
                      message='Category %d does not exists' %
                      footprint.category.id))
    else:
        ffootprint.category = None
    if footprint.image:
        try:
            ffootprint.image = api.models.File.objects.get(
                id=footprint.image.id)
        except api.models.File.DoesNotExist:
            raise_on_error(
                Error(code=1000,
                      message='Image %d does not exists' %
                      footprint.image.id))
    else:
        ffootprint.image = None
    if footprint.footprint:
        try:
            ffootprint.footprint = api.models.File.objects.get(
                id=footprint.footprint.id)
        except api.models.File.DoesNotExist:
            raise_on_error(
                Error(code=1000,
                      message='Footprint %d does not exists' %
                      footprint.footprint.id))
    else:
        ffootprint.footprint = None
    return ffootprint
def deserialize_PartNew(part, fpart=None):
    """Deserialize a new API part into a database part.

    Resolves the category/footprint/symbol relations and the child list.
    The child list is walked recursively so a part can never become,
    directly or transitively, its own child.

    Fixes: bare ``except:`` clauses narrowed to Django ``DoesNotExist``;
    the missing-child error now reports the child's id instead of the
    parent's; the tree-walk no longer reuses its own loop variable.

    :param part: API part object to read from
    :param fpart: optional database target, forwarded to deserialize_PartData
    :return: the populated database part
    """
    fpart = deserialize_PartData(part, fpart)
    if part.category:
        try:
            fpart.category = api.models.PartCategory.objects.get(
                pk=part.category.id)
        except api.models.PartCategory.DoesNotExist:
            raise_on_error(
                Error(code=1000,
                      message='Category %d does not exists' %
                      part.category.id))
    else:
        fpart.category = None
    if part.footprint:
        try:
            fpart.footprint = api.models.VersionedFile.objects.get(
                pk=part.footprint.id)
        except api.models.VersionedFile.DoesNotExist:
            raise_on_error(
                Error(code=1000,
                      message='Footprint %d does not exists' %
                      part.footprint.id))
    else:
        fpart.footprint = None
    if part.symbol:
        try:
            fpart.symbol = api.models.VersionedFile.objects.get(
                pk=part.symbol.id)
        except api.models.VersionedFile.DoesNotExist:
            raise_on_error(
                Error(code=1000,
                      message='Symbol %d does not exists' %
                      part.symbol.id))
    else:
        fpart.symbol = None
    if part.childs is not None:
        fchilds = []
        fchilds_check = []
        for child in part.childs:
            try:
                fchild = api.models.Part.objects.get(pk=child.id)
            except api.models.Part.DoesNotExist:
                # BUG FIX: previously reported part.id instead of the
                # actually-missing child id
                raise_on_error(
                    Error(code=1000,
                          message='Part %d does not exists' % child.id))
            else:
                fchilds.append(fchild)
                fchilds_check.append(fchild)
        # recursive check: walk down the child tree looking for a cycle
        # back to the part being deserialized
        while len(fchilds_check) > 0:
            fchild = fchilds_check.pop()
            if fchild.pk == part.id:
                raise_on_error(
                    Error(code=1000,
                          message='Part cannot be child of itself'))
            for fgrandchild in fchild.childs.all():
                fchilds_check.append(fgrandchild)
        fpart.childs.set(fchilds)
    return fpart
def update_versioned_files(files, force=None):
    """
    update_versioned_files
    Update a fileset

    :param files: File list to update
    :type files: list | bytes
    :param force: force the update even for conflicting files
    :type force: bool
    :rtype: List[VersionedFile]

    Returns ``(conflict_files, 403)`` when conflicts exist and force is not
    set; otherwise the list of refreshed/deleted file descriptors.
    """
    update_files = []
    conflict_files = []
    if connexion.request.is_json:
        files = [
            VersionedFile.from_dict(d) for d in connexion.request.get_json()
        ]
    if force is None:
        force = False

    to_update = []
    to_delete = []
    # possible states set by update_file_state:
    #   conflict_add / conflict_change / conflict_del
    #   income_add / income_change / income_del
    #   outgo_add / outgo_change / outgo_del
    has_conflicts = False
    # check if given files are allowed to commit
    for file in files:
        raise_on_error(update_file_state(file))
        if force:
            # forced update: conflicting and outgoing states are coerced to
            # their incoming counterpart so the server version wins
            if file.state in ('conflict_add', 'outgo_add'):
                file.state = 'income_add'
                to_update.append(file)
            elif file.state in ('conflict_change', 'outgo_change'):
                file.state = 'income_change'
                to_update.append(file)
            elif file.state == 'conflict_del':
                file.state = 'income_del'
                to_delete.append(file)
            elif file.state == 'outgo_del':
                # a locally deleted file is re-added from the server
                file.state = 'income_add'
                to_update.append(file)
            elif file.state in ('income_add', 'income_change'):
                to_update.append(file)
            elif file.state == 'income_del':
                to_delete.append(file)
        else:
            if file.state in ('conflict_change', 'conflict_del',
                              'conflict_add'):
                has_conflicts = True
                conflict_files.append(file)
            if file.state in ('income_add', 'income_change'):
                to_update.append(file)
            elif file.state == 'income_del':
                to_delete.append(file)

    if has_conflicts and not force:
        # in case of a conflict return conflicted files
        return conflict_files, 403

    storage = api.versioned_file_storage.VersionedFileStorage()
    for file in to_update:
        # pick the latest stored revision, by id or by source path
        if file.id:
            ffile = api.models.VersionedFile.objects.filter(
                id=file.id).latest('id')
        else:
            ffile = api.models.VersionedFile.objects.filter(
                source_path=file.source_path).latest('id')
        file = serialize_VersionedFile(ffile, file)
        # NOTE(review): content is fetched with the incoming file.id before
        # it is refreshed from ffile below — possibly should use ffile.id
        # (file.id may be None when matched by source_path); confirm
        file.content = storage.get_file_content(file.id)
        file.id = ffile.id
        file.version = ffile.version
        update_files.append(file)
    for file in to_delete:
        file.storage_path = None
        update_files.append(file)
    return update_files
def commit_versioned_files(files, force=None):
    """
    commit_versioned_files
    Commit a fileset

    :param files: File list to commit
    :type files: list | bytes
    :param force: Force commit
    :type force: bool
    :rtype: List[VersionedFileStatus]

    Returns ``(conflict_files, 403)`` when conflicts exist and force is not
    set, ``([file], 403)`` when a file to store has no content, otherwise
    the list of committed file statuses.
    """
    commit_files = []
    conflict_files = []
    if connexion.request.is_json:
        files = [
            VersionedFile.from_dict(d) for d in connexion.request.get_json()
        ]
    if force is None:
        force = False

    to_add = []
    to_change = []
    to_delete = []
    # possible states set by update_file_state:
    #   conflict_add / conflict_change / conflict_del
    #   income_add / income_change / income_del
    #   outgo_add / outgo_change / outgo_del
    has_conflicts = False
    # check if given files are allowed to commit
    for file in files:
        raise_on_error(update_file_state(file))
        if force:
            # forced commit: the client version wins, conflicts are
            # committed as changes/deletions
            if file.state == 'outgo_add':
                to_add.append(file)
            elif file.state in ('outgo_change', 'conflict_add',
                                'conflict_change'):
                to_change.append(file)
            elif file.state in ('outgo_del', 'conflict_del'):
                to_delete.append(file)
            elif file.state in ('income_add', 'income_change', 'income_del'):
                to_change.append(file)
        else:
            if file.state in ('conflict_change', 'conflict_del',
                              'conflict_add'):
                has_conflicts = True
                conflict_files.append(file)
            elif file.state == 'outgo_add':
                to_add.append(file)
            elif file.state == 'outgo_change':
                to_change.append(file)
            elif file.state == 'outgo_del':
                to_delete.append(file)

    if has_conflicts and not force:
        # in case of a conflict return conflicted files
        return conflict_files, 403

    storage = api.versioned_file_storage.VersionedFileStorage()
    for file in to_add:
        # add file to file storage; content is mandatory
        if file.content is not None:
            commit_files.append(storage.add_file(file))
        else:
            return [file], 403
    for file in to_change:
        # modify file in file storage; content is mandatory
        if file.content is not None:
            commit_files.append(storage.add_file(file))
        else:
            return [file], 403
    for file in to_delete:
        # delete file from file storage
        commit_files.append(storage.delete_file(file))
    return commit_files
def synchronize_versioned_files(files, root_path=None, category=None):
    """
    synchronize_versioned_files
    Get synchronization status of a fileset

    :param files: File list to test synchronization
    :type files: list | bytes
    :param root_path: Path from which to synchronize
    :type root_path: str
    :param category: Category of files to see
    :type category: str
    :rtype: List[VersionedFile]

    Known server files that were not part of the input list are appended
    with state 'income_add'.
    """
    sync_files = []
    if connexion.request.is_json:
        files = [
            VersionedFile.from_dict(d) for d in connexion.request.get_json()
        ]
    exclude_id = []
    exclude_path = []
    # check given files and remember them so they are not listed twice
    for file in files:
        if file.source_path:
            raise_on_error(update_file_state(file))
            sync_files.append(file)
            if file.id:
                exclude_id.append(file.id)
            else:
                exclude_path.append(file.source_path)
    # check files not in list
    ffile_request = api.models.VersionedFile.objects
    # limit to category
    if category:
        ffile_request = ffile_request.filter(category=category)
    # limit to root_path
    if root_path:
        ffile_request = ffile_request.filter(source_path__startswith=root_path)
    # exclude files already given in input
    if len(exclude_id) > 0:
        ffile_request = ffile_request.filter(~Q(id__in=exclude_id))
    if len(exclude_path) > 0:
        ffile_request = ffile_request.filter(~Q(source_path__in=exclude_path))
    # exclude deleted files
    ffile_request = ffile_request.exclude(
        state=api.models.VersionedFileState.deleted)
    for ffile in ffile_request.all():
        file = serialize_VersionedFile(ffile)
        file.id = ffile.id
        file.version = ffile.version
        # files unknown to the client are reported as incoming additions
        file.state = 'income_add'
        sync_files.append(file)
    return sync_files