可以通过以下代码示例将ARKit项目中的2D点转换为捕获图像的坐标系统:
// Convert a 2D point in the AR view's coordinate space (`point`) into the
// captured camera image's pixel coordinate space, then find the nearest raw
// feature point and its world / screen positions.
guard let currentFrame = sceneView.session.currentFrame else { return }
let imageBuffer = currentFrame.capturedImage

// Captured image size in pixels. Reading the pixel buffer directly avoids
// creating an intermediate CIImage just to query its extent.
let imageSize = CGSize(width: CVPixelBufferGetWidth(imageBuffer),
                       height: CVPixelBufferGetHeight(imageBuffer))

// `UIApplication.shared.statusBarOrientation` is deprecated since iOS 13;
// read the interface orientation from the view's window scene instead.
let orientation = sceneView.window?.windowScene?.interfaceOrientation ?? .portrait

// `displayTransform(for:viewportSize:)` maps *normalized image* coordinates
// to *normalized view* coordinates; invert it to go view -> image. This
// replaces the broken manual mapping, which produced values in
// [-width, width] instead of [0, width].
let viewBounds = sceneView.bounds  // was an undefined name in the original
let viewToImage = currentFrame
    .displayTransform(for: orientation, viewportSize: viewBounds.size)
    .inverted()

// Normalize the view point to [0, 1], apply the inverted display transform
// (which also accounts for the image's orientation/aspect cropping), then
// scale up to pixel coordinates.
let normalizedViewPoint = CGPoint(x: point.x / viewBounds.width,
                                  y: point.y / viewBounds.height)
let normalizedImagePoint = normalizedViewPoint.applying(viewToImage)
let imagePoint = CGPoint(x: normalizedImagePoint.x * imageSize.width,
                         y: normalizedImagePoint.y * imageSize.height)

// ARPointCloud has no `pointsNearest(to:)` API. Find the raw feature point
// whose screen projection lies closest to the tapped view point instead.
// NOTE(review): projectPoint returns viewport coordinates; assumes `point`
// is in the same space — confirm against the caller.
guard let pointCloud = currentFrame.rawFeaturePoints else { return }
let nearest = pointCloud.points.min { lhs, rhs in
    let pl = sceneView.projectPoint(SCNVector3(lhs))
    let pr = sceneView.projectPoint(SCNVector3(rhs))
    let dl = hypot(CGFloat(pl.x) - point.x, CGFloat(pl.y) - point.y)
    let dr = hypot(CGFloat(pr.x) - point.x, CGFloat(pr.y) - point.y)
    return dl < dr
}
guard let featurePoint = nearest else { return }

// Final results: the 3D world position of the nearest feature point and its
// projection back onto the screen.
let worldPos = SCNVector3(featurePoint)
let screenPos = sceneView.projectPoint(worldPos)
首先,需要获取当前帧以及捕获图像的方向和尺寸。然后,可以将ARKit项目中的2D点转换为捕获图像的坐标系,并应用捕获图像的方向转换,最终得到对应的图像坐标、最近特征点的3D世界坐标及其投影回屏幕的2D坐标。