+ case VFIO_IOMMU_MAP_DMA: {
+ vfio_iommu_spapr_tce_dma_map param;
+ struct iommu_table *tbl = container->tbl;
+ enum dma_data_direction direction;
+ unsigned long locked, lock_limit;
+
+ if (WARN_ON(!tbl))
+ return -ENXIO;
+
+ minsz = offsetofend(vfio_iommu_spapr_tce_dma_map, size);
+
+ if (copy_from_user(&param, (void __user *)arg, minsz))
+ return -EFAULT;
+
+ if (param.argsz < minsz)
+ return -EINVAL;
+
+ if ((param.flags & VFIO_DMA_MAP_FLAG_READ) &&
+ (param.flags & VFIO_DMA_MAP_FLAG_WRITE))
+ direction = DMA_BIDIRECTIONAL;
+ else if (param.flags & VFIO_DMA_MAP_FLAG_READ)
+ direction = DMA_TO_DEVICE;
+ else if (param.flags & VFIO_DMA_MAP_FLAG_WRITE)
+ direction = DMA_FROM_DEVICE;
+ else
+ return -EINVAL;
The flags field needs to be sanitized too: return -EINVAL if any unknown
bit is set. Otherwise sloppy users may set stray bits, making it very
difficult to put those reserved flag bits to use later.