I have a problem with my SwiftUI app where I'm trying to implement video recording functionality. I'm not sure why it isn't working: I get a black preview screen every time the camera opens.
Just to be clear, I've already added the camera and microphone usage descriptions to my Info.plist file. I've included the view and view model code below. If anyone has dealt with this before, I'd really appreciate the help.
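In case it helps with debugging, this is the kind of quick check I can run to confirm the authorization state at runtime (just a minimal sketch with a made-up helper name, not part of the app code below):

import AVFoundation

// Quick debug check: print the current camera and microphone authorization status.
// AVAuthorizationStatus raw values: 0 = notDetermined, 1 = restricted, 2 = denied, 3 = authorized.
func logCaptureAuthorizationStatus() {
    let video = AVCaptureDevice.authorizationStatus(for: .video)
    let audio = AVCaptureDevice.authorizationStatus(for: .audio)
    print("Video authorization: \(video.rawValue), audio authorization: \(audio.rawValue)")
}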
CameraViewModel.swift
import SwiftUI
import UIKit
import AVFoundation

// MARK: - Camera View Model
class CameraViewModel: NSObject, ObservableObject {
    @Published var isCameraAuthorized = false
    @Published var isRecording = false
    @Published var previewLayer: AVCaptureVideoPreviewLayer?
    @Published var capturedImage: UIImage?
    @Published var recordedVideoURL: URL?
    @Published var isPreviewingMedia = false
    @Published var isFlashOn = false
    @Published var isFrontCamera = false
    @Published var recordingProgress: CGFloat = 0

    private var captureSession: AVCaptureSession?
    private var videoOutput: AVCaptureMovieFileOutput?
    private var photoOutput: AVCapturePhotoOutput?
    private var recordingTimer: Timer?
    private let maxRecordingDuration: CGFloat = 10.0 // Maximum recording duration in seconds

    override init() {
        super.init()
        checkPermissions()
    }
    private func checkPermissions() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            setupCamera()
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in
                if granted {
                    DispatchQueue.main.async {
                        self?.setupCamera()
                    }
                }
            }
        default:
            break
        }
    }
    private func setupCamera() {
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            let session = AVCaptureSession()
            // Configure the session for high-quality video
            if session.canSetSessionPreset(.high) {
                session.sessionPreset = .high
            }
            // Set up video input
            guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera,
                                                            for: .video,
                                                            position: .back),
                  let videoInput = try? AVCaptureDeviceInput(device: videoDevice) else {
                return
            }
            if session.canAddInput(videoInput) {
                session.addInput(videoInput)
            }
            // Set up audio input
            if let audioDevice = AVCaptureDevice.default(for: .audio),
               let audioInput = try? AVCaptureDeviceInput(device: audioDevice),
               session.canAddInput(audioInput) {
                session.addInput(audioInput)
            }
            // Set up photo output
            let photo = AVCapturePhotoOutput()
            if session.canAddOutput(photo) {
                session.addOutput(photo)
                self?.photoOutput = photo
            }
            // Set up video output
            let video = AVCaptureMovieFileOutput()
            if session.canAddOutput(video) {
                session.addOutput(video)
                self?.videoOutput = video
            }
            // Create and set up the preview layer
            let previewLayer = AVCaptureVideoPreviewLayer(session: session)
            previewLayer.videoGravity = .resizeAspectFill
            if #available(iOS 17.0, *) {
                previewLayer.connection?.videoRotationAngle = 90
            } else {
                previewLayer.connection?.videoOrientation = .portrait
            }
            // Start the session
            session.startRunning()
            DispatchQueue.main.async {
                self?.captureSession = session
                self?.previewLayer = previewLayer
                self?.isCameraAuthorized = true
            }
        }
    }
    func toggleCamera() {
        guard let session = captureSession else { return }
        session.beginConfiguration()
        // Remove existing inputs
        for input in session.inputs {
            session.removeInput(input)
        }
        // Switch camera position
        let position: AVCaptureDevice.Position = isFrontCamera ? .back : .front
        guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera,
                                                        for: .video,
                                                        position: position),
              let videoInput = try? AVCaptureDeviceInput(device: videoDevice) else {
            return
        }
        if session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }
        // Re-add audio input
        if let audioDevice = AVCaptureDevice.default(for: .audio),
           let audioInput = try? AVCaptureDeviceInput(device: audioDevice) {
            if session.canAddInput(audioInput) {
                session.addInput(audioInput)
            }
        }
        session.commitConfiguration()
        isFrontCamera.toggle()
    }
    func toggleFlash() {
        guard let device = AVCaptureDevice.default(for: .video) else { return }
        try? device.lockForConfiguration()
        if device.hasTorch {
            if device.torchMode == .off {
                try? device.setTorchModeOn(level: 1.0)
                isFlashOn = true
            } else {
                device.torchMode = .off
                isFlashOn = false
            }
        }
        device.unlockForConfiguration()
    }
    func capturePhoto() {
        guard let photoOutput = photoOutput else { return }
        let settings = AVCapturePhotoSettings()
        if isFlashOn {
            settings.flashMode = .on
        }
        photoOutput.capturePhoto(with: settings, delegate: self)
    }

    func startRecording() {
        guard let videoOutput = videoOutput else { return }
        let tempURL = FileManager.default.temporaryDirectory.appendingPathComponent("\(UUID().uuidString).mov")
        videoOutput.startRecording(to: tempURL, recordingDelegate: self)
        isRecording = true
        // Start the progress timer
        recordingProgress = 0
        recordingTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            guard let self = self else { return }
            self.recordingProgress = min(self.recordingProgress + 0.1 / self.maxRecordingDuration, 1.0)
            if self.recordingProgress >= 1.0 {
                self.stopRecording()
            }
        }
    }

    func stopRecording() {
        videoOutput?.stopRecording()
        recordingTimer?.invalidate()
        recordingTimer = nil
        isRecording = false
    }
}
// MARK: - Photo Capture Delegate
extension CameraViewModel: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation(),
              let image = UIImage(data: imageData) else {
            return
        }
        DispatchQueue.main.async {
            self.capturedImage = image
            self.isPreviewingMedia = true
        }
    }
}

// MARK: - Video Recording Delegate
extension CameraViewModel: AVCaptureFileOutputRecordingDelegate {
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if error == nil {
            DispatchQueue.main.async {
                self.recordedVideoURL = outputFileURL
                self.isPreviewingMedia = true
            }
        }
    }
}
// MARK: - Camera Preview View
struct CameraPreviewView: UIViewRepresentable {
    let previewLayer: AVCaptureVideoPreviewLayer

    func makeUIView(context: Context) -> UIView {
        let view = UIView()
        view.backgroundColor = .black
        view.layer.addSublayer(previewLayer)
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        previewLayer.frame = uiView.bounds
        previewLayer.videoGravity = .resizeAspectFill
        // Make sure layout happens on the main thread
        DispatchQueue.main.async {
            uiView.layer.layoutIfNeeded()
        }
    }
}
// MARK: - Progress Bar View
struct RecordingProgressBar: View {
    let progress: CGFloat

    var body: some View {
        GeometryReader { geometry in
            ZStack(alignment: .leading) {
                Rectangle()
                    .fill(Color.white.opacity(0.3))
                    .frame(height: 4)
                Rectangle()
                    .fill(Color.red)
                    .frame(width: geometry.size.width * progress, height: 4)
            }
        }
        .frame(height: 4)
    }
}
// MARK: - Media Preview View
struct MediaPreviewView: View {
    let image: UIImage?
    let videoURL: URL?
    @Binding var isShowing: Bool
    var onSend: () -> Void

    var body: some View {
        ZStack {
            if let image = image {
                Image(uiImage: image)
                    .resizable()
                    .aspectRatio(contentMode: .fit)
            } else if let url = videoURL {
                VideoPlayer(url: url)
            }
            VStack {
                HStack {
                    Button(action: { isShowing = false }) {
                        Image(systemName: "xmark")
                            .foregroundColor(.white)
                            .font(.title)
                            .padding()
                    }
                    Spacer()
                }
                Spacer()
                Button(action: onSend) {
                    Text("Send")
                        .font(.headline)
                        .foregroundColor(.white)
                        .padding(.horizontal, 40)
                        .padding(.vertical, 12)
                        .background(Color.blue)
                        .cornerRadius(25)
                }
                .padding(.bottom, 40)
            }
        }
        .edgesIgnoringSafeArea(.all)
        .background(Color.black)
    }
}
// MARK: - Video Player View
struct VideoPlayer: UIViewRepresentable {
    let url: URL

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: .zero)
        let player = AVPlayer(url: url)
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(playerLayer)
        player.play()
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        if let playerLayer = uiView.layer.sublayers?.first as? AVPlayerLayer {
            playerLayer.frame = uiView.bounds
        }
    }
}
VideoRecordingView.swift
import SwiftUI
import CoreLocation

struct VideoRecordingView: View {
    let coordinate: CLLocationCoordinate2D
    @Environment(\.dismiss) var dismiss
    //@StateObject private var videoRecorder = VideoRecorder()
    @StateObject private var viewModel = CameraViewModel()
    @State private var isRecording = false
    @State private var showErrorAlert = false
    @State private var errorMessage = ""
    @State private var isUploading = false
    //@StateObject private var viewModel = CameraViewModel()
    var body: some View {
        GeometryReader { geometry in
            ZStack {
                // Camera preview
                if let previewLayer = viewModel.previewLayer {
                    CameraPreviewView(previewLayer: previewLayer)
                        .frame(width: geometry.size.width, height: geometry.size.height)
                } else {
                    Color.black
                }
                // Recording progress bar
                if viewModel.isRecording {
                    RecordingProgressBar(progress: viewModel.recordingProgress)
                        .padding(.top, 5)
                        .frame(maxHeight: .infinity, alignment: .top)
                }
                // Controls
                VStack {
                    // Top controls
                    HStack {
                        Button(action: { viewModel.toggleFlash() }) {
                            Image(systemName: viewModel.isFlashOn ? "bolt.fill" : "bolt.slash")
                                .foregroundColor(.white)
                                .font(.system(size: 20))
                                .padding()
                        }
                        Spacer()
                        Button(action: { viewModel.toggleCamera() }) {
                            Image(systemName: "camera.rotate")
                                .foregroundColor(.white)
                                .font(.system(size: 20))
                                .padding()
                        }
                    }
                    .padding(.top, 44)
                    Spacer()
                    // Bottom controls
                    HStack {
                        Spacer()
                        // Camera button: tap to capture a photo, long-press to record
                        Button(action: {
                            if viewModel.isRecording {
                                viewModel.stopRecording()
                            } else {
                                viewModel.capturePhoto()
                            }
                        }) {
                            Circle()
                                .strokeBorder(Color.white, lineWidth: 4)
                                .frame(width: 80, height: 80)
                        }
                        .simultaneousGesture(
                            LongPressGesture(minimumDuration: 0.5)
                                .onEnded { _ in
                                    viewModel.startRecording()
                                }
                        )
                        Spacer()
                    }
                    .padding(.bottom, 40)
                }
                // Media preview
                if viewModel.isPreviewingMedia {
                    MediaPreviewView(
                        image: viewModel.capturedImage,
                        videoURL: viewModel.recordedVideoURL,
                        isShowing: $viewModel.isPreviewingMedia
                    ) {
                        // Handle sending media
                        print("Send media")
                    }
                }
            }