I'm building an app where my camera preview plays in a component, with the following code:
import SwiftUI
import AVFoundation
import AVKit
struct CameraPreview: UIViewControllerRepresentable {
func makeUIViewController(context: Context) -> UIViewController {
let controller = CameraPreviewViewController()
return controller
}
func updateUIViewController(_ uiViewController: UIViewController, context: Context) { }
}
class CameraPreviewViewController: UIViewController {
var session: AVCaptureSession?
var previewLayer: AVCaptureVideoPreviewLayer?
var player: AVPlayer?
var playerLayer: AVPlayerLayer?
var pipController: AVPictureInPictureController?
override func viewDidLoad() {
super.viewDidLoad()
setupSession()
setupPreview()
startPIP()
}
func setupSession() {
session = AVCaptureSession()
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front)
guard let device = deviceDiscoverySession.devices.first else { return }
guard let input = try? AVCaptureDeviceInput(device: device) else { return }
session?.addInput(input)
}
func setupPreview() {
previewLayer = AVCaptureVideoPreviewLayer(session: session!)
previewLayer?.videoGravity = AVLayerVideoGravity.resize
previewLayer?.frame = CGRect(x: 0, y: 0, width: 200, height: 200)
view.layer.insertSublayer(previewLayer!, at: 0)
player = AVPlayer()
playerLayer = AVPlayerLayer(player: player)
playerLayer?.frame = previewLayer!.frame
view.layer.addSublayer(playerLayer!)
session?.startRunning()
player?.play()
}
func startPIP() {
guard AVPictureInPictureController.isPictureInPictureSupported() else { return }
pipController = AVPictureInPictureController(playerLayer: playerLayer!)
pipController?.startPictureInPicture()
}
}
And I've been trying to launch this view in Picture in Picture mode with the Pipify package.
import SwiftUI
import Pipify
struct ContentView: View {
@State var isPresentedThree = false
var body: some View {
VStack {
HStack{
Image(systemName: "camera")
.imageScale(.large)
.foregroundColor(.accentColor)
Image(systemName: "person")
.imageScale(.large)
.foregroundColor(.accentColor)
}
Text("FLOATING CAMERA")
.padding(6.0)
Button("Launch PiP mode") { isPresentedThree.toggle() }
.pipify(isPresented: $isPresentedThree) {
CameraPreview()
.foregroundColor(.red)
.padding()
.onPipSkip { _ in }
.onPipPlayPause { _ in }
}
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
I'm new to SwiftUI, so I'm quite confused: this code doesn't work for my CameraPreview, but it does work for text and such.
How could I fix this, or do it another way?
Thanks!
I expect the camera preview to display in Picture in Picture as soon as the user taps the "Launch PiP mode" button.
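Note that AVPictureInPictureController(playerLayer:) only mirrors AVPlayer content, so an empty AVPlayer gives PiP nothing to render; showing a live camera feed in PiP needs the iOS 15+ sample-buffer content source instead. In any case, PiP also tends to fail silently without an active playback audio session and the "Audio, AirPlay, and Picture in Picture" background mode enabled in the target's capabilities. A minimal sketch of the audio-session part, assuming it runs before startPIP():

import AVFoundation

func configureAudioSessionForPiP() {
    // PiP requires a playback-category audio session; without it,
    // startPictureInPicture() typically does nothing.
    do {
        try AVAudioSession.sharedInstance().setCategory(.playback)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("Audio session setup failed: \(error)")
    }
}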
I have a strange bug I can't resolve.
I'm trying to create an app where the user interacts with blend shapes instead of buttons.
I have my ContentView, where I define an array of animals of type Card.
When the user blinks, a sheetView is shown. This sheetView displays a CardView with a random animal taken from the array. The user can play that animal's sound with a button (for now).
The user can also close the sheetView by opening their mouth.
I'll show my code.
ContentView.swift
import SwiftUI
import AVKit
class ViewModel: ObservableObject {
@Published var changeAnimal = false
@Published var realMouthOpen = false
}
struct ContentView: View {
@ObservedObject var viewModel = ViewModel()
let animals: [Card] = [Card(image: "monkey", heading: "monkey", tag: 1, callSound: "Monkey", animal: .monkey), Card(image: "dog", heading: "dog", tag: 2, callSound: "dog", animal: .dog), Card(image: "chick", heading: "chick", tag: 3, callSound: "chick", animal: .chick)]
var body: some View {
ZStack {
SwiftUIViewController(viewModel: viewModel) // UIKit view controller wrapper
CardViewPreviewScroll(card: Card(image: "unknown", heading: "random animal", tag: 0, callSound: "", animal: .unknown))
.sheet(isPresented: $viewModel.changeAnimal) { // BUG: the view changes twice
changeAnimal() // when the current animal changes, the view is refreshed
}
}
}
//MARK: Functions
func changeAnimal() -> (CardView?) {
let animal = animals.randomElement()!
return (CardView(card: animal))
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
Card.swift
import Foundation
import SwiftUI
enum TypeAnimal: Identifiable {
case monkey, dog, chick, unknown
var id: Int {
hashValue
}
}
struct Card: Hashable{
var image: String
var heading: String
var id = UUID()
var tag: Int
var callSound: String
var animal: TypeAnimal
static let example = Card(image: "chick", heading: "chick", tag: 0, callSound: "chick", animal: .chick)
}
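Since Card already stores a UUID in its id property, it can adopt Identifiable with an empty extension. This is not in the original code, but it enables the .sheet(item:) pattern sketched after the ViewController below:

extension Card: Identifiable {}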
CardView.swift
import SwiftUI
import AVKit
struct CardView: View {
let card: Card
@State var audioPlayer: AVAudioPlayer!
var body: some View {
ZStack {
RoundedRectangle(cornerRadius: 30)
.fill(.green)
.shadow(radius: 3)
VStack {
Image(card.image)
.resizable()
.aspectRatio(contentMode: .fit)
VStack(alignment: .leading) {
Text(card.heading)
.font(.title)
.fontWeight(.black)
.foregroundColor(.primary)
.lineLimit(3)
HStack {
Spacer()
Button(action: {
self.audioPlayer.play()
}) {
Image(systemName: "play.circle.fill").resizable()
.frame(width: 50, height: 50)
.aspectRatio(contentMode: .fit)
}
Spacer()
Button(action: {
self.audioPlayer.pause()
}) {
Image(systemName: "pause.circle.fill").resizable()
.frame(width: 50, height: 50)
.aspectRatio(contentMode: .fit)
}
Spacer()
}
}
.layoutPriority(100)
.padding()
}
}
.onAppear {
let sound = Bundle.main.path(forResource: card.callSound, ofType: "mp3")
self.audioPlayer = try! AVAudioPlayer(contentsOf: URL(fileURLWithPath: sound!))
}
}
}
struct CardView_Previews: PreviewProvider {
static var previews: some View {
CardView(card: Card.example)
}
}
And this is my ViewController, wrapped in UIViewControllerRepresentable.
This ViewController conforms to the ARSessionDelegate protocol so that I can change the @Published property defined in my ObservableObject ViewModel, which in turn changes the $viewModel.changeAnimal binding in .sheet.
The problem is that the CardView in the sheetView changes whenever a blend shape of type browInnerUp is detected; I tried to catch this value only once, without finding a solution.
The best I managed is that, when the sheetView is displayed, an instance of CardView appears and is immediately replaced by another one.
I tried DispatchQueue, timers, and other threading approaches to catch only a single value of the blend shape parameter in question, but I failed.
ViewController.swift
import UIKit
import SwiftUI
import ARKit
class ViewController: UIViewController, ARSessionDelegate {
var viewModel: ViewModel?
var session: ARSession!
override func viewDidLoad() {
super.viewDidLoad()
session = ARSession()
session.delegate = self
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
guard ARFaceTrackingConfiguration.isSupported else { print("iPhone X required"); return }
let configuration = ARFaceTrackingConfiguration()
session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
}
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
if let faceAnchor = anchors.first as? ARFaceAnchor {
update(withFaceAnchor: faceAnchor)
}
}
func update(withFaceAnchor faceAnchor: ARFaceAnchor) {
let blendShapes: [ARFaceAnchor.BlendShapeLocation: Any] = faceAnchor.blendShapes
guard let jawOpen = blendShapes[.jawOpen] as? Float else { return }
guard let browInnerUp = blendShapes[.browInnerUp] as? Float else { return }
if browInnerUp > 0.85 {
self.session.pause()
self.viewModel?.changeAnimal = true
// print("eyeBlinkLeft: \(eyeBlinkLeft)") // right eye
}
if jawOpen > 0.85 {
self.session.pause()
self.viewModel?.changeAnimal = false
// print("eyeBlinkRight: \(jawOpen)") // left eye
}
}
}
struct SwiftUIViewController: UIViewControllerRepresentable {
var viewModel: ViewModel
func makeUIViewController(context: Context) -> ViewController{
let controller = ViewController()
controller.viewModel = viewModel
return controller
}
func updateUIViewController(_ uiViewController: ViewController, context: Context) {
}
}
The question is: is there a way to fix this bug, or did I just mess up?
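One common cause of this bug is that a sheet's content closure can be re-evaluated while the sheet is up, so calling animals.randomElement() inside it re-rolls the animal. A sketch of one fix, assuming Card conforms to Identifiable as noted above: pick the random card once, when the blend shape fires, and let .sheet(item:) present the stored value.

class ViewModel: ObservableObject {
    @Published var selectedCard: Card?   // a non-nil value drives the sheet
    @Published var realMouthOpen = false

    let animals: [Card] = [] // same array as in ContentView

    func presentRandomAnimal() {
        // Called once from the ARSessionDelegate when browInnerUp fires;
        // hop to the main queue before touching @Published state.
        DispatchQueue.main.async {
            self.selectedCard = self.animals.randomElement()
        }
    }
}

// ContentView then presents from the stored value, so the content
// closure no longer re-rolls a random animal on each re-render:
// .sheet(item: $viewModel.selectedCard) { card in
//     CardView(card: card)
// }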
I want to make a button that takes a photo when pressed; after I take the photo, I want to convert it directly to base64 so it can be posted to the API.
This is my code :)
import SwiftUI
import UIKit
struct ImagePicker: UIViewControllerRepresentable {
@Binding var selectedImage: UIImage
@Environment(\.presentationMode) private var presentationMode
var sourceType: UIImagePickerController.SourceType = .photoLibrary
func makeUIViewController(context: UIViewControllerRepresentableContext<ImagePicker>) -> UIImagePickerController {
let imagePicker = UIImagePickerController()
imagePicker.allowsEditing = false
imagePicker.sourceType = sourceType
imagePicker.delegate = context.coordinator
return imagePicker
}
func updateUIViewController(_ uiViewController: UIImagePickerController, context: Context) {
//leave alone for right now
}
final class Coordinator: NSObject, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
var parent: ImagePicker
init(_ parent: ImagePicker) {
self.parent = parent
}
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[UIImagePickerController.InfoKey.originalImage] as? UIImage {
parent.selectedImage = image
}
parent.presentationMode.wrappedValue.dismiss()
}
}
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
}
class ImageConverter {
func base64ToImage(_ base64String: String) -> UIImage? {
guard let imageData = Data(base64Encoded: base64String) else { return nil }
return UIImage(data: imageData)
}
func imageToBase64(_ image: UIImage) -> String? {
return image.jpegData(compressionQuality: 1)?.base64EncodedString()
}
}
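For reference, a quick round-trip sketch of how ImageConverter would be used:

let converter = ImageConverter()
if let photo = UIImage(systemName: "camera"),
   let b64 = converter.imageToBase64(photo) {
    // base64ToImage should give back a decodable UIImage
    let restored = converter.base64ToImage(b64)
    print("encoded \(b64.count) base64 characters, decoded: \(restored != nil)")
}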
This is part of my view. I want pressing the button to take me to the camera, and when I take a photo, to convert it to base64 and save it in a string.
import SwiftUI
struct ProfileView: View {
let imageManager = ImageConverter()
@State var changeProfileImage = false
@State var openCameraRoll = false
@State var imageSelected = UIImage()
var body: some View {
ZStack(alignment: .bottomTrailing) {
Button(action: {
changeProfileImage = true
openCameraRoll = true
}, label: {
if changeProfileImage {
Image(uiImage: imageSelected)
.profileImageMod()
} else {
Image("AddProfileImage")
.profileImageMod()
}
})
Image(systemName: "plus")
.frame(width: 30, height: 30)
.foregroundColor(.white)
.background(Color.gray)
.clipShape(Circle())
}.sheet(isPresented: $openCameraRoll) {
ImagePicker(selectedImage: $imageSelected, sourceType: .camera)
}.onAppear {
}
}
}
UPDATE: my ImagePicker and ImageConverter are exactly the code already shown above.
You could use something like this approach, using onDismiss: to convert your imageSelected to a base64 string with your ImageConverter.
In ProfileView:
.sheet(isPresented: $openCameraRoll, onDismiss: didDismiss) {
ImagePicker(selectedImage: $imageSelected, sourceType: .camera)
}
func didDismiss() {
let b64Str = imageManager.imageToBase64(imageSelected)
print("\n---> b64Str: \(b64Str?.count) \n")
}
Also remove the ZStack, or replace it with a VStack.
EDIT-1: here is some example code that works for me:
struct ProfileView: View {
let imageManager = ImageConverter()
@State var changeProfileImage = false
@State var openCameraRoll = false
@State var imageSelected = UIImage()
var body: some View {
VStack {
Button(action: {
openCameraRoll = true
}, label: {
Image(systemName: "plus")
// .profileImageMod()
})
if changeProfileImage {
Image(uiImage: imageSelected)
.resizable() // <-- here
.frame(width: 222, height: 222)
.foregroundColor(.white)
.background(Color.gray)
.clipShape(Circle())
} else {
Image(systemName: "questionmark")
.frame(width: 222, height: 222)
.foregroundColor(.white)
.background(Color.gray)
.clipShape(Circle())
}
}
.sheet(isPresented: $openCameraRoll, onDismiss: didDismiss) {
ImagePicker(selectedImage: $imageSelected, sourceType: .camera)
}
}
func didDismiss() {
changeProfileImage = true
let b64Str = imageManager.imageToBase64(imageSelected)
print("\n---> b64Str: \(b64Str?.count) \n")
}
}
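For the original goal of posting the base64 string to the API, didDismiss is also a natural place to trigger the upload. A hedged sketch; the endpoint URL and the "image" JSON field are placeholders, not a real API:

func uploadProfileImage(_ image: UIImage) {
    guard let b64 = ImageConverter().imageToBase64(image),
          let url = URL(string: "https://example.com/api/profile-image") else { return }
    var request = URLRequest(url: url)
    request.httpMethod = "POST"
    request.setValue("application/json", forHTTPHeaderField: "Content-Type")
    request.httpBody = try? JSONSerialization.data(withJSONObject: ["image": b64])
    URLSession.shared.dataTask(with: request) { _, _, error in
        if let error = error { print("upload failed: \(error)") }
    }.resume()
}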
I was trying to load short videos from a URL, and I found the best way is to use AVPlayer. I found the code below, but it doesn't show an activity indicator, and if the user doesn't have an internet connection it won't play the video. Thanks in advance.
import SwiftUI
import AVKit
struct ContentView: View {
@State var player = AVPlayer(url: URL(string: "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4")!)
@State var isplaying = false
@State var showcontrols = false
@State var value : Float = 0
var body: some View {
VStack{
ZStack{
VideoPlayer(player: $player)
if self.showcontrols{
Controls(player: self.$player, isplaying: self.$isplaying, pannel: self.$showcontrols,value: self.$value)
}
}
.frame(height: UIScreen.main.bounds.height / 3.5)
.onTapGesture {
self.showcontrols = true
}
GeometryReader{_ in
VStack{
Text("Custom Video Player").foregroundColor(.white)
}
}
}
.background(Color.black.edgesIgnoringSafeArea(.all))
.onAppear {
self.player.play()
self.isplaying = true
}
}
}
This is the main screen, which is ContentView.
struct Controls : View {
@Binding var player : AVPlayer
@Binding var isplaying : Bool
@Binding var pannel : Bool
@Binding var value : Float
var body : some View{
VStack{
Spacer()
HStack{
Button(action: {
if self.isplaying{
self.player.pause()
self.isplaying = false
}
else{
self.player.play()
self.isplaying = true
}
if self.player.currentItem?.duration.seconds == self.player.currentTime().seconds{
print("did it")
self.player.seek(to: CMTime.zero)
self.player.play()
self.isplaying = true
}
}) {
Image(systemName: self.isplaying ? "pause.fill" : "play.fill")
.font(.title)
.foregroundColor(.white)
.padding(20)
}
}
}.padding()
.onTapGesture {
self.pannel = false
}
}
func getSliderValue()->Float{
return Float(self.player.currentTime().seconds / (self.player.currentItem?.duration.seconds)!)
}
func getSeconds()->Double{
return Double(Double(self.value) * (self.player.currentItem?.duration.seconds)!)
}
}
The button that controls the video.
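Note that nothing in Controls updates the value binding as playback progresses; a periodic time observer is the usual way to drive it. A sketch, e.g. attached once in ContentView's .onAppear (the token would need to be kept and removed later with player.removeTimeObserver(_:)):

let interval = CMTime(seconds: 0.5, preferredTimescale: 600)
let token = player.addPeriodicTimeObserver(forInterval: interval, queue: .main) { time in
    // guard against indefinite (NaN) durations before dividing
    if let duration = player.currentItem?.duration.seconds, duration > 0 {
        value = Float(time.seconds / duration)
    }
}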
class Host : UIHostingController<ContentView>{
override var preferredStatusBarStyle: UIStatusBarStyle{
return .lightContent
}
}
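For this Host subclass to take effect, it has to replace the plain UIHostingController as the window's root view controller, e.g. in SceneDelegate (assuming the UIKit lifecycle template):

window.rootViewController = Host(rootView: ContentView())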
struct VideoPlayer : UIViewControllerRepresentable {
@Binding var player : AVPlayer
func makeUIViewController(context: UIViewControllerRepresentableContext<VideoPlayer>) -> AVPlayerViewController {
let controller = AVPlayerViewController()
controller.player = player
controller.showsPlaybackControls = false
controller.videoGravity = .resize
return controller
}
func updateUIViewController(_ uiViewController: AVPlayerViewController, context: UIViewControllerRepresentableContext<VideoPlayer>) {
}
}
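As for the missing activity indicator: on iOS 14+, one approach is to observe the player's timeControlStatus through its Combine KVO publisher and overlay a ProgressView while the player is waiting, which covers both buffering and a stalled network. A sketch of the pieces to add to ContentView:

@State private var isBuffering = false

// inside the ZStack, after VideoPlayer(player: $player):
// if isBuffering { ProgressView() }

// on the ZStack:
// .onReceive(player.publisher(for: \.timeControlStatus)) { status in
//     // .waitingToPlayAtSpecifiedRate covers buffering and no-network stalls
//     isBuffering = (status == .waitingToPlayAtSpecifiedRate)
// }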
I am trying to display two columns of images in a LazyVGrid embedded in a scroll view; however, the second row of images partially overlaps the row above. I'm not sure if this is an issue with the grid itself or with the Photo.swift view.
The output looks like this
The two view files
ContentView.swift
struct ContentView: View {
@State private var image: Image?
@State private var showingCustomCamera = false
@State private var inputImage: UIImage?
@State private var photos: [UIImage] = []
func addImageToArray() {
guard let inputImage = inputImage else { return }
image = Image(uiImage: inputImage)
let ciImage = CIImage(cgImage: inputImage.cgImage!)
let options = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: options)!
let faces = faceDetector.features(in: ciImage)
if let face = faces.first as? CIFaceFeature {
print("Found face at \(face.bounds)")
print(face.faceAngle)
print(face.hasSmile)
print(face.leftEyeClosed)
print(face.rightEyeClosed)
if face.leftEyeClosed {
print("Left Eye Closed \(face.leftEyePosition)")
}
if face.rightEyeClosed {
print("Right Eye Closed \(face.rightEyePosition)")
}
if face.hasSmile {
print("Person is smiling \(face.mouthPosition)")
}
}
photos.append(inputImage)
}
let columns = [
GridItem(.flexible(), spacing: 20),
GridItem(.flexible(), spacing: 20)
]
var body: some View {
NavigationView {
VStack{
ScrollView {
LazyVGrid(columns: columns, spacing: 20) {
AddPhoto(showCamera: $showingCustomCamera)
ForEach(photos, id: \.self) { photo in
PassportPhoto(img: photo)
}
}
.padding()
}
HStack {
Button(action: {
//
}, label: {
Image(systemName: "printer.fill.and.paper.fill")
Text("Print")
})
.padding()
.foregroundColor(.primary)
Button(action: {
//
}, label: {
Image(systemName: "externaldrive.fill.badge.icloud")
Text("Digital Upload")
})
.padding()
.foregroundColor(.primary)
}
}
.sheet(isPresented: $showingCustomCamera, onDismiss: addImageToArray) {
CustomCameraView(image: self.$inputImage)
}
.navigationTitle("Add Photos")
}
}
}
Photo.swift
struct Photo: View {
var img: UIImage
@State private var overlay: Bool = false
var body: some View {
GeometryReader { geometry in
VStack {
ZStack(alignment: .top) {
Image(uiImage: img)
.resizable()
.aspectRatio(contentMode: .fill)
.frame(width: geometry.size.width, height: geometry.size.width * 1.29, alignment: .top)
.clipped()
.cornerRadius(10)
.onTapGesture {
self.overlay.toggle()
}
if overlay {
Template()
}
}
}
}
}
}
Anyone have any idea? I feel like I'm missing something obvious.
CustomCameraView.swift (as requested)
import SwiftUI
import AVFoundation
struct CustomCameraView: View {
@Binding var image: UIImage?
@State var didTapCapture: Bool = false
@Environment(\.presentationMode) var presentationMode
var body: some View {
VStack(alignment: .center) {
CustomCameraRepresentable(image: self.$image, didTapCapture: $didTapCapture)
.overlay(Template(),alignment: .center)
.overlay(
CaptureButtonView().onTapGesture {
self.didTapCapture = true
}
, alignment: .bottom)
.overlay(
Button(action: {
presentationMode.wrappedValue.dismiss()
}, label: {
Image(systemName: "multiply")
.scaleEffect(2)
.padding(20)
.onTapGesture {
presentationMode.wrappedValue.dismiss()
}
})
.foregroundColor(.white)
.padding()
, alignment: .topTrailing)
}
}
}
struct CustomCameraRepresentable: UIViewControllerRepresentable {
@Environment(\.presentationMode) var presentationMode
@Binding var image: UIImage?
@Binding var didTapCapture: Bool
func makeUIViewController(context: Context) -> CustomCameraController {
let controller = CustomCameraController()
controller.delegate = context.coordinator
return controller
}
func updateUIViewController(_ cameraViewController: CustomCameraController, context: Context) {
if(self.didTapCapture) {
cameraViewController.didTapRecord()
}
}
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
class Coordinator: NSObject, UINavigationControllerDelegate, AVCapturePhotoCaptureDelegate {
let parent: CustomCameraRepresentable
init(_ parent: CustomCameraRepresentable) {
self.parent = parent
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
parent.didTapCapture = false
if let imageData = photo.fileDataRepresentation() {
parent.image = UIImage(data: imageData)
}
parent.presentationMode.wrappedValue.dismiss()
}
}
}
class CustomCameraController: UIViewController {
var image: UIImage?
var captureSession = AVCaptureSession()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?
var photoOutput: AVCapturePhotoOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
//DELEGATE
var delegate: AVCapturePhotoCaptureDelegate?
func didTapRecord() {
let settings = AVCapturePhotoSettings()
photoOutput?.capturePhoto(with: settings, delegate: delegate!)
}
override func viewDidLoad() {
super.viewDidLoad()
setup()
}
func setup() {
setupCaptureSession()
setupDevice()
setupInputOutput()
setupPreviewLayer()
startRunningCaptureSession()
}
func setupCaptureSession() {
captureSession.sessionPreset = AVCaptureSession.Preset.photo
}
func setupDevice() {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera],
mediaType: AVMediaType.video,
position: AVCaptureDevice.Position.unspecified)
for device in deviceDiscoverySession.devices {
switch device.position {
case AVCaptureDevice.Position.front:
self.frontCamera = device
case AVCaptureDevice.Position.back:
self.backCamera = device
default:
break
}
}
self.currentCamera = self.backCamera
}
func setupInputOutput() {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
captureSession.addOutput(photoOutput!)
} catch {
print(error)
}
}
func setupPreviewLayer()
{
let rect = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.width * 1.29)
self.cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
self.cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
self.cameraPreviewLayer?.frame = rect
self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
func startRunningCaptureSession(){
captureSession.startRunning()
}
}
struct CaptureButtonView: View {
@State private var animationAmount: CGFloat = 1
var body: some View {
Image(systemName: "camera").font(.largeTitle)
.padding(30)
.background(Color.red)
.foregroundColor(.white)
.clipShape(Circle())
.overlay(
Circle()
.stroke(Color.red)
.scaleEffect(animationAmount)
.opacity(Double(2 - animationAmount))
.animation(Animation.easeOut(duration: 1)
.repeatForever(autoreverses: false))
)
.padding(.bottom)
.onAppear
{
self.animationAmount = 2
}
}
}
You shouldn’t use GeometryReader from within the ScrollView; it will create all sorts of problems for you. Instead, define it at the top level just under the VStack, and pass the proxy down to the Photo view to set the frame.
Check the code below:
import SwiftUI
struct Test1: View {
@State private var image: Image?
@State private var showingCustomCamera = false
@State private var inputImage: UIImage?
@State private var photos: [UIImage] = []
func addImageToArray() {
guard let inputImage = inputImage else { return }
image = Image(uiImage: inputImage)
let ciImage = CIImage(cgImage: inputImage.cgImage!)
let options = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: options)!
let faces = faceDetector.features(in: ciImage)
if let face = faces.first as? CIFaceFeature {
print("Found face at \(face.bounds)")
print(face.faceAngle)
print(face.hasSmile)
print(face.leftEyeClosed)
print(face.rightEyeClosed)
if face.leftEyeClosed {
print("Left Eye Closed \(face.leftEyePosition)")
}
if face.rightEyeClosed {
print("Right Eye Closed \(face.rightEyePosition)")
}
if face.hasSmile {
print("Person is smiling \(face.mouthPosition)")
}
}
photos.append(inputImage)
}
let columns =
[GridItem(.flexible(),spacing: 20),
GridItem(.flexible(),spacing: 20)]
var body: some View {
NavigationView {
VStack{
GeometryReader { geometry in
ScrollView {
LazyVGrid(columns: columns, spacing: 20) {
// AddPhoto(showCamera: $showingCustomCamera) // Uncomment in your case
ForEach(0..<50, id: \.self) { photo in
Photo(img: "ABC", proxy: geometry) // Pass photo as you were doing
}
}
.padding()
}
}
HStack {
Button(action: {
//
}, label: {
Image(systemName: "printer.fill.and.paper.fill")
Text("Print")
})
.padding()
.foregroundColor(.primary)
Button(action: {
//
}, label: {
Image(systemName: "externaldrive.fill.badge.icloud")
Text("Digital Upload")
})
.padding()
.foregroundColor(.primary)
}
}
.sheet(isPresented: $showingCustomCamera, onDismiss: addImageToArray) {
// CustomCameraView(image: self.$inputImage)
}
.navigationTitle("Add Photos")
}
}
}
struct Photo: View {
var img: String
var proxy:GeometryProxy
@State private var overlay: Bool = false
var body: some View {
// GeometryReader { geometry in
VStack {
ZStack(alignment: .top) {
Image(img)
.resizable()
.aspectRatio(contentMode: .fill)
// .frame(width: 170, height: 200)
.frame(width: proxy.size.width * 0.4, height: proxy.size.width * 0.5, alignment: .top)
.clipped()
.cornerRadius(10)
.onTapGesture {
self.overlay.toggle()
}
if overlay {
// Template()
}
}
}
//}
}
}
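An alternative sketch that avoids GeometryReader entirely: give each cell a fixed aspect ratio with a clear placeholder and lay the image on top of it. The 1/1.29 ratio matches the original Photo view's sizing; the AspectPhoto name is mine:

struct AspectPhoto: View {
    var img: UIImage
    var body: some View {
        Color.clear
            .aspectRatio(1.0 / 1.29, contentMode: .fit)  // cell sizes itself to the grid column
            .overlay(
                Image(uiImage: img)
                    .resizable()
                    .scaledToFill()
            )
            .clipped()
            .cornerRadius(10)
    }
}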
I have the following VStack that contains an AVPlayer (in PlayerView):
struct ContentView: View {
@State var url: URL?
private let openFile = NotificationCenter.default.publisher(for: .openFile)
var body: some View {
VStack {
if isVideo(self.url!) {
PlayerView(url: self.url!)
} else {
Image(nsImage: NSImage(contentsOf: self.url!)!).resizable().aspectRatio(contentMode: .fit)
}
}.onReceive(openFile) { notification in
self.url = notification.object as? URL
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
And this is the PlayerView:
struct PlayerView: NSViewRepresentable {
private var url: URL
init(url: URL) {
self.url = url
}
func updateNSView(_ nsView: PlayerNSView, context _: NSViewRepresentableContext<PlayerView>) {
nsView.play(url: url)
}
func makeNSView(context _: Context) -> PlayerNSView {
PlayerNSView(frame: .zero)
}
func dismantleNSView(coordinator _: Coordinator) {
// never called: the protocol requirement is
// `static func dismantleNSView(_:coordinator:)`, not an instance method
}
}
After updating from a video to an image, the audio of the video keeps playing for a few seconds.
Where can I tell the AVPlayer to pause? Does the VStack notify the PlayerView?
Updated with code from iUrii:
struct ContentView: View {
@State var url: URL?
@State var playerView: PlayerView?
private let openFile = NotificationCenter.default.publisher(for: .openFile)
var body: some View {
VStack {
if let view = playerView {
view
} else {
Image(nsImage: NSImage(contentsOf: self.url!)!).resizable().aspectRatio(contentMode: .fit)
}
}.onReceive(openFile) { notification in
url = notification.object as? URL
if playerView != nil {
playerView!.player.pause()
playerView = nil
}
if videoExtensions.contains(url!.pathExtension.lowercased()) {
playerView = PlayerView(url: url!)
}
}
}
}
This works great when going from video to image to video.
When I go from video to video, though, the first video is paused and the audio from the second video starts playing, but the second video is not shown (the first video is still visible).
I solved it by updating the player with playerView.player.replaceCurrentItem(with: playerItem) instead of replacing the view.
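That approach, reconstructed as a sketch (reusing one player and swapping items inside the .onReceive handler, with the player-exposing PlayerView from the answer below):

if videoExtensions.contains(url!.pathExtension.lowercased()) {
    if let view = playerView {
        // same view, new item: avoids the stale-first-video problem
        view.player.replaceCurrentItem(with: AVPlayerItem(url: url!))
    } else {
        playerView = PlayerView(url: url!)
    }
    playerView?.player.play()
} else {
    playerView?.player.pause()
    playerView = nil
}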
You should manage your PlayerNSView with AVPlayer manually if you want to control its behaviour e.g.:
struct PlayerView: NSViewRepresentable {
let player: AVPlayer
init(url: URL) {
self.player = AVPlayer(url: url)
}
func updateNSView(_ nsView: AVPlayerView, context: NSViewRepresentableContext<Self>) {
}
func makeNSView(context: NSViewRepresentableContext<Self>) -> AVPlayerView {
let playerView = AVPlayerView(frame: .zero)
playerView.player = player
return playerView
}
}
struct ContentView: View {
@State var playerView: PlayerView?
var body: some View {
VStack {
if let view = playerView {
view
} else {
Image(nsImage: NSImage(named: "iphone12")!)
}
Button("Toogle") {
if playerView == nil {
let url = URL(string: "https://www.apple.com/105/media/us/iphone-12-pro/2020/e70ffbd8-50f1-40f3-ac36-0f03a15ac314/films/product/iphone-12-pro-product-tpl-us-2020_16x9.m3u8")!
playerView = PlayerView(url: url)
playerView?.player.play()
}
else {
playerView?.player.pause()
playerView = nil
}
}
}
}
}