I am trying to combine these two tutorials, Instagram Reels UI and Infinite Carousel Slider, so that I get Reels with infinite scrolling. I implemented the Infinite Slider's fix for the flashing, but my offset is never 0; when I printed it, it was something like -201. I suspect my .preference(key: OffsetKey.self, value: proxy.frame(in: .global).minX) returns something else. I tried using minY instead, since my scrolling is vertical (top/bottom), and checking for index == 0 && -offset < size.height && offset < size.height, but this leads to lag/flashing. To test the flashing, just comment out the .onChange(of: offset, perform: {...}).
ReelView:
struct Reels: View {
@State var currentReel = ""
@State var offset: CGFloat = 0
@State var items: [Item] = [
Item(reel: Reel(player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "apple-watch", ofType: "mp4") ?? "")))),
Item(reel: Reel(player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "ibiza", ofType: "mp4") ?? "")))),
Item(reel: Reel(player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "sneaker", ofType: "mp4") ?? "")))),
Item(reel: Reel(player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "rolex", ofType: "mp4") ?? "")))),
Item(reel: Reel(player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "lambo", ofType: "mp4") ?? ""))))
]
var body: some View {
ZStack {
GeometryReader { proxy in
let size = proxy.size
TabView(selection: $currentReel) {
ForEach($items) { $reel in
ReelsPlayer(currentReel: $currentReel, item: $reel)
.frame(width: size.width)
.rotationEffect(.init(degrees: -90))
.ignoresSafeArea(.all, edges: .top)
.tag(reel.id)
.overlay {
GeometryReader { proxy in
Color.clear
.preference(key: OffsetKey.self, value: proxy.frame(in: .global).minX)
}
}
.onPreferenceChange(OffsetKey.self, perform: { offset in
self.offset = offset
})
}
}.tabViewStyle(.page)
.onChange(of: offset, perform: { newValue in
let index = items.lastIndex(where: { item in
item.id == currentReel
}) ?? 0
if index == 0 && offset == 0 {
currentReel = items[items.count - 2].id
}
if index == items.count - 1 && offset == 0 {
currentReel = items[1].id
}
})
.onAppear() {
guard var first = items.first else {
return
}
guard var last = items.last else {
return
}
last.id = UUID().uuidString
first.id = UUID().uuidString
items.append(first)
items.insert(last, at: 0)
currentReel = items[1].id
}
.rotationEffect(.init(degrees: 90))
.frame(width: size.height)
.tabViewStyle(.page(indexDisplayMode: .never))
.frame(width: size.width)
}
}
}
}
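For reference, OffsetKey is used above but never shown; a minimal PreferenceKey consistent with that usage (a sketch, analogous to the ViewOffsetKey defined further down) would be:
struct OffsetKey: PreferenceKey {
    static var defaultValue: CGFloat = 0
    // Last writer wins; each reel reports its own global minX.
    static func reduce(value: inout CGFloat, nextValue: () -> CGFloat) {
        value = nextValue()
    }
}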
ReelsPlayer:
struct ReelsPlayer: View {
@Binding var currentReel: String
@Binding var item: Item
var body: some View {
ZStack {
if let player = item.reel.player {
CustomVideoPlayer(player: player)
.allowsHitTesting(false)
GeometryReader { proxy -> Color in
let minY = proxy.frame(in: .global).minY
let size = proxy.size
DispatchQueue.main.async {
if -minY < (size.height / 2) && minY < (size.height / 2) && currentReel == item.id {
player.play()
} else {
player.pause()
player.seek(to: .zero)
}
}
return Color.clear
}
}
BottomOverlay(item: $item)
.allowsHitTesting(true)
}
}
}
CustomVideoPlayer:
struct CustomVideoPlayer: UIViewControllerRepresentable {
var player: AVPlayer
func makeCoordinator() -> Coordinator {
return Coordinator(parent: self)
}
func makeUIViewController(context: Context) -> AVPlayerViewController {
let controller = AVPlayerViewController()
controller.player = player
controller.showsPlaybackControls = false
controller.videoGravity = .resizeAspectFill
player.actionAtItemEnd = .none
NotificationCenter.default.addObserver(context.coordinator, selector: #selector(context.coordinator.restartPlayback), name: .AVPlayerItemDidPlayToEndTime, object: player.currentItem)
return controller
}
func updateUIViewController(_ uiViewController: AVPlayerViewController, context: Context) {
}
class Coordinator: NSObject {
var parent: CustomVideoPlayer
init(parent: CustomVideoPlayer) {
self.parent = parent
}
@objc func restartPlayback() {
parent.player.seek(to: .zero)
}
}
}
Related
Hi, I am making an app which plays a video only if it's in the centre of the view. I've already managed to get the position in the scroll view, but I can't combine that with playing the video.
This is my main view:
struct MainView: View {
@State var position = 0.0
var body: some View {
ScrollView {
ForEach(videos){ video in
VideoView(player: video.player)
.onChange(of: position) { pos in
if pos > -50 && pos < 400 {
print("Play video")
}else {
print("Stop video")
}
}
}
.background(GeometryReader {
Color.clear.preference(key: ViewOffsetKey.self, value: -$0.frame(in: .named("scroll")).origin.y)
})
}.onPreferenceChange(ViewOffsetKey.self) {
position = $0
}
.coordinateSpace(name: "scroll")
.padding()
}
}
This is my video model:
struct VideoModel: Identifiable {
var id = UUID()
var number: Int
var player: AVPlayer
}
This is video array:
let videos = [
VideoModel(number: 1, player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "video", ofType: "mp4")!))),
VideoModel(number: 3, player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "video", ofType: "mp4")!))),
VideoModel(number: 4, player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "video", ofType: "mp4")!))),
VideoModel(number: 5, player: AVPlayer(url: URL(fileURLWithPath: Bundle.main.path(forResource: "video", ofType: "mp4")!)))
]
And those are structures handling the video player and preference key:
struct ViewOffsetKey: PreferenceKey {
typealias Value = CGFloat
static var defaultValue = CGFloat.zero
static func reduce(value: inout Value, nextValue: () -> Value) {
value += nextValue()
}
}
struct VideoView: View {
var player: AVPlayer
var body: some View {
AVPlayerControllerRepresented(player: player)
.frame(height: height)
}
}
struct AVPlayerControllerRepresented : UIViewControllerRepresentable {
var player : AVPlayer
func makeUIViewController(context: Context) -> AVPlayerViewController {
let controller = AVPlayerViewController()
controller.player = player
controller.showsPlaybackControls = false
return controller
}
func updateUIViewController(_ uiViewController: AVPlayerViewController, context: Context) {
}
}
Please help, I will be so thankful.
First you need to make the AVPlayer a Binding for the changes in VideoView & AVPlayerControllerRepresented to take effect, then add these calls accordingly (note that AVPlayer has play() and pause(); there is no stop()):
player.play() // to play
player.pause() // to stop
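A minimal sketch of that wiring, assuming the position value is passed into each row (the thresholds come from the question; the 400pt height stands in for the question's undefined height):
import SwiftUI
import AVKit

struct VideoView: View {
    @Binding var player: AVPlayer
    var position: Double

    var body: some View {
        AVPlayerControllerRepresented(player: player) // from the question
            .frame(height: 400)
            .onChange(of: position) { pos in
                if pos > -50 && pos < 400 {
                    player.play()  // roughly centred: start playback
                } else {
                    player.pause() // off-centre: pause
                }
            }
    }
}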
I am trying to display two columns of images in a LazyVGrid embedded in a scroll view, however the second row of images partially overlaps the row above. I'm not sure if this is an issue with the grid itself or an issue with the Photo.swift view.
The output looks like this
The two view files
ContentView.swift
struct ContentView: View {
@State private var image: Image?
@State private var showingCustomCamera = false
@State private var inputImage: UIImage?
@State private var photos: [UIImage] = []
func addImageToArray() {
guard let inputImage = inputImage else { return }
image = Image(uiImage: inputImage)
let ciImage = CIImage(cgImage: inputImage.cgImage!)
let options = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: options)!
let faces = faceDetector.features(in: ciImage)
if let face = faces.first as? CIFaceFeature {
print("Found face at \(face.bounds)")
print(face.faceAngle)
print(face.hasSmile)
print(face.leftEyeClosed)
print(face.rightEyeClosed)
if face.leftEyeClosed {
print("Left Eye Closed \(face.leftEyePosition)")
}
if face.rightEyeClosed {
print("Right Eye Closed \(face.rightEyePosition)")
}
if face.hasSmile {
print("Person is smiling \(face.mouthPosition)")
}
}
photos.append(inputImage)
}
let columns = [
GridItem(.flexible(), spacing: 20),
GridItem(.flexible(), spacing: 20)
]
var body: some View {
NavigationView {
VStack{
ScrollView {
LazyVGrid(columns: columns, spacing: 20) {
AddPhoto(showCamera: $showingCustomCamera)
ForEach(photos, id: \.self) { photo in
PassportPhoto(img: photo)
}
}
.padding()
}
HStack {
Button(action: {
//
}, label: {
Image(systemName: "printer.fill.and.paper.fill")
Text("Print")
})
.padding()
.foregroundColor(.primary)
Button(action: {
//
}, label: {
Image(systemName: "externaldrive.fill.badge.icloud")
Text("Digital Upload")
})
.padding()
.foregroundColor(.primary)
}
}
.sheet(isPresented: $showingCustomCamera, onDismiss: addImageToArray) {
CustomCameraView(image: self.$inputImage)
}
.navigationTitle("Add Photos")
}
}
}
Photo.swift
struct Photo: View {
var img: UIImage
@State private var overlay: Bool = false
var body: some View {
GeometryReader { geometry in
VStack {
ZStack(alignment: .top) {
Image(uiImage: img)
.resizable()
.aspectRatio(contentMode: .fill)
.frame(width: geometry.size.width, height: geometry.size.width * 1.29, alignment: .top)
.clipped()
.cornerRadius(10)
.onTapGesture {
self.overlay.toggle()
}
if overlay {
Template()
}
}
}
}
}
}
Anyone have any idea? I feel like I'm missing something obvious.
CustomCameraView.swift (as requested)
import SwiftUI
import AVFoundation
struct CustomCameraView: View {
@Binding var image: UIImage?
@State var didTapCapture: Bool = false
@Environment(\.presentationMode) var presentationMode
var body: some View {
VStack(alignment: .center) {
CustomCameraRepresentable(image: self.$image, didTapCapture: $didTapCapture)
.overlay(Template(),alignment: .center)
.overlay(
CaptureButtonView().onTapGesture {
self.didTapCapture = true
}
, alignment: .bottom)
.overlay(
Button(action: {
presentationMode.wrappedValue.dismiss()
}, label: {
Image(systemName: "multiply")
.scaleEffect(2)
.padding(20)
.onTapGesture {
presentationMode.wrappedValue.dismiss()
}
})
.foregroundColor(.white)
.padding()
, alignment: .topTrailing)
}
}
}
struct CustomCameraRepresentable: UIViewControllerRepresentable {
@Environment(\.presentationMode) var presentationMode
@Binding var image: UIImage?
@Binding var didTapCapture: Bool
func makeUIViewController(context: Context) -> CustomCameraController {
let controller = CustomCameraController()
controller.delegate = context.coordinator
return controller
}
func updateUIViewController(_ cameraViewController: CustomCameraController, context: Context) {
if(self.didTapCapture) {
cameraViewController.didTapRecord()
}
}
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
class Coordinator: NSObject, UINavigationControllerDelegate, AVCapturePhotoCaptureDelegate {
let parent: CustomCameraRepresentable
init(_ parent: CustomCameraRepresentable) {
self.parent = parent
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
parent.didTapCapture = false
if let imageData = photo.fileDataRepresentation() {
parent.image = UIImage(data: imageData)
}
parent.presentationMode.wrappedValue.dismiss()
}
}
}
class CustomCameraController: UIViewController {
var image: UIImage?
var captureSession = AVCaptureSession()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?
var photoOutput: AVCapturePhotoOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
//DELEGATE
var delegate: AVCapturePhotoCaptureDelegate?
func didTapRecord() {
let settings = AVCapturePhotoSettings()
photoOutput?.capturePhoto(with: settings, delegate: delegate!)
}
override func viewDidLoad() {
super.viewDidLoad()
setup()
}
func setup() {
setupCaptureSession()
setupDevice()
setupInputOutput()
setupPreviewLayer()
startRunningCaptureSession()
}
func setupCaptureSession() {
captureSession.sessionPreset = AVCaptureSession.Preset.photo
}
func setupDevice() {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera],
mediaType: AVMediaType.video,
position: AVCaptureDevice.Position.unspecified)
for device in deviceDiscoverySession.devices {
switch device.position {
case AVCaptureDevice.Position.front:
self.frontCamera = device
case AVCaptureDevice.Position.back:
self.backCamera = device
default:
break
}
}
self.currentCamera = self.backCamera
}
func setupInputOutput() {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
captureSession.addOutput(photoOutput!)
} catch {
print(error)
}
}
func setupPreviewLayer()
{
let rect = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.width * 1.29)
self.cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
self.cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
self.cameraPreviewLayer?.frame = rect
self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
func startRunningCaptureSession(){
captureSession.startRunning()
}
}
struct CaptureButtonView: View {
@State private var animationAmount: CGFloat = 1
var body: some View {
Image(systemName: "camera").font(.largeTitle)
.padding(30)
.background(Color.red)
.foregroundColor(.white)
.clipShape(Circle())
.overlay(
Circle()
.stroke(Color.red)
.scaleEffect(animationAmount)
.opacity(Double(2 - animationAmount))
.animation(Animation.easeOut(duration: 1)
.repeatForever(autoreverses: false))
)
.padding(.bottom)
.onAppear
{
self.animationAmount = 2
}
}
}
You shouldn't use GeometryReader from within the ScrollView; it will create all sorts of mess for you. Instead, define it at the top level just under the VStack, and pass the proxy down to the Photo view to set the frame.
Check the code below:
import SwiftUI
struct Test1: View {
@State private var image: Image?
@State private var showingCustomCamera = false
@State private var inputImage: UIImage?
@State private var photos: [UIImage] = []
func addImageToArray() {
guard let inputImage = inputImage else { return }
image = Image(uiImage: inputImage)
let ciImage = CIImage(cgImage: inputImage.cgImage!)
let options = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: options)!
let faces = faceDetector.features(in: ciImage)
if let face = faces.first as? CIFaceFeature {
print("Found face at \(face.bounds)")
print(face.faceAngle)
print(face.hasSmile)
print(face.leftEyeClosed)
print(face.rightEyeClosed)
if face.leftEyeClosed {
print("Left Eye Closed \(face.leftEyePosition)")
}
if face.rightEyeClosed {
print("Right Eye Closed \(face.rightEyePosition)")
}
if face.hasSmile {
print("Person is smiling \(face.mouthPosition)")
}
}
photos.append(inputImage)
}
let columns =
[GridItem(.flexible(),spacing: 20),
GridItem(.flexible(),spacing: 20)]
var body: some View {
NavigationView {
VStack{
GeometryReader { geometry in
ScrollView {
LazyVGrid(columns: columns, spacing: 20) {
// AddPhoto(showCamera: $showingCustomCamera) // Uncomment in your case
ForEach(0..<50, id: \.self) { photo in
Photo(img: "ABC", proxy: geometry) // Pass photo as you were doing
}
}
.padding()
}
}
HStack {
Button(action: {
//
}, label: {
Image(systemName: "printer.fill.and.paper.fill")
Text("Print")
})
.padding()
.foregroundColor(.primary)
Button(action: {
//
}, label: {
Image(systemName: "externaldrive.fill.badge.icloud")
Text("Digital Upload")
})
.padding()
.foregroundColor(.primary)
}
}
.sheet(isPresented: $showingCustomCamera, onDismiss: addImageToArray) {
// CustomCameraView(image: self.$inputImage)
}
.navigationTitle("Add Photos")
}
}
}
struct Photo: View {
var img: String
var proxy:GeometryProxy
@State private var overlay: Bool = false
var body: some View {
// GeometryReader { geometry in
VStack {
ZStack(alignment: .top) {
Image(img)
.resizable()
.aspectRatio(contentMode: .fill)
// .frame(width: 170, height: 200)
.frame(width: proxy.size.width * 0.4, height: proxy.size.width * 0.5, alignment: .top)
.clipped()
.cornerRadius(10)
.onTapGesture {
self.overlay.toggle()
}
if overlay {
// Template()
}
}
}
//}
}
}
I am trying to make an audio player in SwiftUI. The audio player should have this functionality:
Play/stop audio
Play in a loop
Change volume through a slider
Change audio pitch through a slider.
There are two problems I am currently facing:
the audio player is not using the volume and pitch slider values
when I stop and play and then change the volume/pitch slider, the app crashes with the following message.
2020-10-14 17:34:08.957709+0530 SwiftUIAudioPlayer[1369:24886] [avae]
AVAEInternal.h:109 [AVAudioFile.mm:484:-[AVAudioFile
readIntoBuffer:frameCount:error:]:
(ExtAudioFileRead(_imp->_extAudioFile, &ioFrames,
buffer.mutableAudioBufferList)): error -50
Here is the link to project. https://github.com/varun-naharia/SwiftUIAudioPlayer
ContentView.swift
import Foundation
import SwiftUI
struct ContentView: View {
@State var volume:Double = 0.00
@State var pitch:Double = 0.0
@State var musicFiles:[SoundModel] = [SoundModel(file: "metro35", name: "Metronome", fileExtension: "wav"), SoundModel(file: "johnson_tone_down_5min", name: "Johnson", fileExtension: "wav"), SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")]
@State var selectedMusicFile:SoundModel = SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")
@State var showSoundPicker = false
@State var selectedGraph = "skin_conductance"
@State var iconSize:CGFloat = 0.124
@State var iconSpace:CGFloat = 0.015
@State var heart = false
init() {
Player.setPitch(pitch: Float(self.pitch))
Player.setVolume(volume: Float(self.volume))
}
var body: some View {
GeometryReader { geometry in
ZStack{
VStack(alignment: .leading) {
Button(action: {
self.heart = !self.heart
self.selectedGraph = "heart"
if(self.heart)
{
Player.playMusic(musicfile: self.selectedMusicFile.file, fileExtension: self.selectedMusicFile.fileExtension)
}
else
{
Player.stopMusic()
self.selectedGraph = ""
}
})
{
Image(self.selectedGraph == "heart" ? "heart" : "heart_disabled")
.resizable()
.frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
}
.frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
.padding(.bottom, geometry.size.height*(self.iconSpace/2))
Button(action: {
self.showSoundPicker = !self.showSoundPicker
})
{
Image("tone")
.resizable()
.frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
}
.frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
.padding(.bottom, geometry.size.height*(self.iconSpace/2))
HStack{
SwiftUISlider(
thumbColor: .green,
thumbImage: "musicNote 2",
value: self.$volume
).padding(.horizontal)
Button(action: {
})
{
Image("centerGraph")
.resizable()
.frame(width: geometry.size.width*0.05, height: geometry.size.width*0.05)
}
.frame(width: geometry.size.width*0.03, height: geometry.size.width*0.03)
SwiftUISlider(
thumbColor: .green,
thumbImage: "timerSlider 2",
minValue: 0,
maxValue: 20,
value: self.$pitch
)
.padding(.horizontal)
.frame(width: (geometry.size.width/2)-geometry.size.width*0.05, height: geometry.size.width*0.05)
}
.background(Color(UIColor.lightGray))
.frame(width: geometry.size.width, height: geometry.size.height*0.10)
if(self.showSoundPicker)
{
ChooseSoundView(
musicFiles: self.musicFiles,
selectedMusicFile: self.$selectedMusicFile ,
showSoundPicker: self.$showSoundPicker,
isPlaying: self.selectedGraph != ""
)
.frame(width: geometry.size.width*0.6, height: geometry.size.height*0.7, alignment: .center)
.background(Color.white)
}
}
.frame(maxWidth: geometry.size.width,
maxHeight: geometry.size.height)
.background(Color(UIColor.lightGray))
}
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
struct ChooseSoundView: View {
@State var musicFiles:[SoundModel]
@Binding var selectedMusicFile:SoundModel
@Binding var showSoundPicker:Bool
@State var isPlaying:Bool
var body: some View {
GeometryReader { geometry in
VStack(alignment: .leading)
{
List(self.musicFiles, id: \.name)
{ item in
Image(self.selectedMusicFile.file == item.file ? "radio-button_on" : "radio-button_off")
.resizable()
.frame(width: 15, height: 15)
Button(action: {
print(item.name)
self.selectedMusicFile = item
self.showSoundPicker = false
if(self.isPlaying)
{
// Player.stopMusic()
// Player.playMusic(musicfile: self.selectedMusicFile.file, fileExtension: self.selectedMusicFile.fileExtension)
}
}){
Text(item.name)
.frame(width: geometry.size.width*90,
height: 50.0,
alignment: .leading)
}
.frame(width: geometry.size.width*90, height: 50.0)
}
HStack{
Button(action: {
self.showSoundPicker = false
}){
Text("Done")
.frame(width: geometry.size.width*0.45,
height: 50.0,
alignment: .center)
}
.frame(width: geometry.size.width*0.45, height: 50.0)
Button(action: {
self.showSoundPicker = false
}){
Text("Cancel")
.frame(width: geometry.size.width*0.45,
height: 50.0,
alignment: .center)
}
.frame(width: geometry.size.width*0.45, height: 50.0)
}
.background(Color.white)
}
}
}
}
Player.swift
import Foundation
import AVFoundation
class Player {
private static var breathAudioPlayer:AVAudioPlayer?
private static var audioPlayerEngine = AVAudioEngine()
private static let speedControl = AVAudioUnitVarispeed()
private static var pitchControl = AVAudioUnitTimePitch()
private static var audioPlayerNode = AVAudioPlayerNode()
private static var volume:Float = 1.0
private static func playSounds(soundfile: String) {
if let path = Bundle.main.path(forResource: soundfile, ofType: "m4a"){
do{
breathAudioPlayer = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: path))
breathAudioPlayer?.volume = self.volume
breathAudioPlayer?.prepareToPlay()
breathAudioPlayer?.play()
}catch {
print("Error")
}
}
}
static func playMusic(musicfile: String, fileExtension:String) {
if let path = Bundle.main.path(forResource: musicfile, ofType: fileExtension){
do{
// 1: load the file
let audioPlayFile = try AVAudioFile(forReading: URL(fileURLWithPath: path))
let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioPlayFile.fileFormat, frameCapacity: AVAudioFrameCount(audioPlayFile.length))
try? audioPlayFile.read(into: audioFileBuffer!)
// 2: create the audio player
audioPlayerNode = AVAudioPlayerNode()
audioPlayerEngine = AVAudioEngine()
// you can replace mp3 with anything else you like, just make sure you load it from our project
// making sure to clean up the audio hardware to avoid any damage and bugs
audioPlayerNode.stop()
audioPlayerEngine.stop()
audioPlayerEngine.reset()
audioPlayerEngine.attach(audioPlayerNode)
let pitchControl = AVAudioUnitTimePitch()
// assign the speed and pitch
audioPlayerEngine.attach(pitchControl)
audioPlayerEngine.connect(audioPlayerNode, to: pitchControl, format: nil)
audioPlayerEngine.connect(pitchControl, to: audioPlayerEngine.outputNode, format: nil)
audioPlayerNode.scheduleFile(audioPlayFile, at: nil, completionHandler: nil)
// try to start playing the audio
audioPlayerNode.scheduleBuffer(audioFileBuffer!, at: nil, options: .loops, completionHandler: nil)
do {
try audioPlayerEngine.start()
} catch {
print(error)
}
// play the audio
audioPlayerNode.play()
}catch {
print("Error")
}
}
}
static func breathIn() {
// Player.playSounds(soundfile: "breathin")
}
static func breathOut() {
// Player.playSounds(soundfile: "breathout")
}
static func play(musicFile:String, fileExtension:String)
{
Player.playMusic(musicfile: musicFile,fileExtension: fileExtension)
}
static func stopMusic() {
audioPlayerNode.pause()
audioPlayerNode.stop()
}
static func setPitch(pitch:Float) {
pitchControl.pitch = pitch
}
static func setVolume(volume:Float) {
audioPlayerNode.volume = volume
}
}
SwiftUISlider.swift
import Foundation
import SwiftUI
struct SwiftUISlider: UIViewRepresentable {
var onChangeNotification:String = ""
final class Coordinator: NSObject {
// The class property value is a binding: It’s a reference to the SwiftUISlider
// value, which receives a reference to a #State variable value in ContentView.
var value: Binding<Double>
// Create the binding when you initialize the Coordinator
init(value: Binding<Double>) {
self.value = value
}
// Create a valueChanged(_:) action
@objc func valueChanged(_ sender: UISlider) {
self.value.wrappedValue = Double(sender.value)
}
}
var thumbColor: UIColor = .white
var minTrackColor: UIColor?
var maxTrackColor: UIColor?
var thumbImage:String?
var minValue:Float?
var maxValue:Float?
@Binding var value: Double
func makeUIView(context: Context) -> UISlider {
let slider = UISlider(frame: .zero)
slider.thumbTintColor = thumbColor
slider.minimumTrackTintColor = minTrackColor
slider.maximumTrackTintColor = maxTrackColor
slider.value = Float(value)
if(self.minValue != nil)
{
slider.minimumValue = self.minValue!
}
if(self.maxValue != nil)
{
slider.maximumValue = self.maxValue!
}
slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .normal)
slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .focused)
slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .highlighted)
slider.addTarget(
context.coordinator,
action: #selector(Coordinator.valueChanged(_:)),
for: .valueChanged
)
return slider
}
func onValueChange(_ sender: UISlider) {
}
func updateUIView(_ uiView: UISlider, context: Context) {
// Coordinating data between UIView and SwiftUI view
uiView.value = Float(self.value)
}
func makeCoordinator() -> SwiftUISlider.Coordinator {
Coordinator(value: $value)
}
}
SoundModel.swift
import Foundation
import Combine
class SoundModel:ObservableObject, Identifiable
{
@Published var file:String
@Published var name:String
@Published var fileExtension:String
init(file:String, name:String, fileExtension:String) {
self.file = file
self.name = name
self.fileExtension = fileExtension
}
}
Your first problem is that you're not tracking changes of the volume/pitch values. To do so, move them to a class:
class PlayerSetup: ObservableObject {
@Published var volume:Double = 0.00 {
didSet {
Player.setVolume(volume: Float(self.volume))
}
}
@Published var pitch:Double = 0.0 {
didSet {
Player.setPitch(pitch: Float(self.pitch))
}
}
}
Declare in the view:
@ObservedObject var playerSetup = PlayerSetup()
And bind to your views:
SwiftUISlider(
thumbColor: .green,
thumbImage: "musicNote 2",
value: $playerSetup.volume
).padding(.horizontal)
SwiftUISlider(
thumbColor: .green,
thumbImage: "timerSlider 2",
minValue: 0,
maxValue: 20,
value: $playerSetup.pitch
)
It crashes when it finishes playing the file, because try? audioPlayFile.read(into: audioFileBuffer!) fails and the buffer scheduled after the file is empty. It plays the file the first time because of scheduleFile. If you want to loop a single file, try calling this function:
static func scheduleNext(audioPlayFile: AVAudioFile) {
audioPlayerNode.scheduleFile(audioPlayFile, at: nil) {
DispatchQueue.main.async {
scheduleNext(audioPlayFile: audioPlayFile)
}
}
}
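A usage sketch: inside playMusic, schedule the first pass through this helper instead of the scheduleFile/scheduleBuffer pair:
// inside playMusic, after connecting the nodes (already in the outer do/catch):
scheduleNext(audioPlayFile: audioPlayFile) // replaces both schedule calls
try audioPlayerEngine.start()
audioPlayerNode.play()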
pitchControl doesn't work because you're using a local value when starting playback; just remove the local declaration.
As for the volume: as you can see in the documentation, this property is implemented only by the AVAudioEnvironmentNode and AVAudioMixerNode class mixers. So you can't use it on a player node; you need to create a mixer node, add it to the chain of nodes, and change its volume.
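A minimal sketch of that change (mixerNode is a new node, not in the original code; the cleanup snippet below shows where it sits in the chain):
private static let mixerNode = AVAudioMixerNode()

static func setVolume(volume: Float) {
    // AVAudioMixerNode implements the volume property; AVAudioPlayerNode does not.
    mixerNode.outputVolume = volume
}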
Also, to clean up the node setup code, I advise you to use the following:
let nodes = [
audioPlayerNode,
pitchControl,
mixerNode,
]
nodes.forEach { node in
audioPlayerEngine.attach(node)
}
zip(nodes, (nodes.dropFirst() + [audioPlayerEngine.outputNode]))
.forEach { firstNode, secondNode in
audioPlayerEngine.connect(firstNode, to: secondNode, format: nil)
}
It connects all the nodes one by one.
https://github.com/PhilipDukhov/SwiftUIAudioPlayer/tree/fixes
The main problem with your code is that the view and the view model are mixed together. A lot of things that must be in the view model are currently located in the view.
View:
struct ContentView: View {
@State var volume:Double = 0.00
@State var pitch:Double = 0.0
@State var musicFiles:[SoundModel] = [SoundModel(file: "metro35", name: "Metronome", fileExtension: "wav"), SoundModel(file: "johnson_tone_down_5min", name: "Johnson", fileExtension: "wav"), SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")]
@State var selectedMusicFile:SoundModel = SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")
@State var showSoundPicker = false
@State var selectedGraph = "skin_conductance"
@State var iconSize:CGFloat = 0.124
@State var iconSpace:CGFloat = 0.015
@State var heart = false
init() {
Player.setPitch(pitch: Float(self.pitch))
Player.setVolume(volume: Float(self.volume))
}
The view should be:
struct PlayerView: View {
@ObservedObject var viewModel: PlayerViewModel
@State var iconSize:CGFloat = 0.124
@State var iconSpace:CGFloat = 0.015
init() {
// what was written here must be moved to the init of playerViewModel
}
Your view model is currently:
import Foundation
import AVFoundation
class Player {
private static var breathAudioPlayer:AVAudioPlayer?
private static var audioPlayerEngine = AVAudioEngine()
private static let speedControl = AVAudioUnitVarispeed()
private static var pitchControl = AVAudioUnitTimePitch()
private static var audioPlayerNode = AVAudioPlayerNode()
private static var volume:Float = 1.0
private static func playSounds(soundfile: String) {
....
The ViewModel should be:
import Foundation
import AVFoundation
class PlayerViewModel: ObservableObject {
@Published var volume:Double = 0.00
@Published var pitch:Double = 0.0
@Published var musicFiles:[SoundModel] = [SoundModel(file: "metro35", name: "Metronome", fileExtension: "wav"), SoundModel(file: "johnson_tone_down_5min", name: "Johnson", fileExtension: "wav"), SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")]
@Published var selectedMusicFile:SoundModel = SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")
@Published var showSoundPicker = false
@Published var selectedGraph = "skin_conductance"
//@Published var heart = false // SoundViewModel's property
private static var breathAudioPlayer:AVAudioPlayer?
private static var audioPlayerEngine = AVAudioEngine()
private static let speedControl = AVAudioUnitVarispeed()
private static var pitchControl = AVAudioUnitTimePitch()
private static var audioPlayerNode = AVAudioPlayerNode()
private static var volume:Float = 1.0
private static func playSounds(soundfile: String) {
If you move all of this to the proper place, I'm almost sure your code will work better and your problem will possibly be fixed.
At the moment you have to track every change manually, and your code structure is the reason for that; you shouldn't need this additional, useless tracking code.
You need to fix the code structure instead of using "didSet".
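A minimal sketch of that structure (names and the method-based API are hypothetical, condensed from the outline above): the view model owns the audio objects, so the sliders can write through it directly and nothing needs didSet:
import Foundation
import AVFoundation

class PlayerViewModel: ObservableObject {
    @Published var volume: Double = 0.0
    @Published var pitch: Double = 0.0

    private let audioPlayerEngine = AVAudioEngine()
    private let audioPlayerNode = AVAudioPlayerNode()
    private let pitchControl = AVAudioUnitTimePitch()
    private let mixerNode = AVAudioMixerNode()

    func setVolume(_ newValue: Double) {
        volume = newValue
        mixerNode.outputVolume = Float(newValue) // mixers implement volume
    }

    func setPitch(_ newValue: Double) {
        pitch = newValue
        pitchControl.pitch = Float(newValue) // pitch is measured in cents
    }
}

// In the view, a slider writes through the view model:
// SwiftUISlider(value: Binding(get: { viewModel.volume },
//                              set: { viewModel.setVolume($0) }))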
I am using PHCachingImageManager to get thumbnail images for all images in the iOS photo library which are then rendered using a SwiftUI List. It seems to work for a small number of images (e.g. the six that load with the simulator), but given 1000s of images I'm incurring this error many times.
IIO_CreateIOSurfaceWithFormatAndBuffer:594: *** ERROR: IOSurfaceCreate 'RGBA' failed - clientAddress: 0x14d6a0000 allocSize: 0x00072000 size: 256 x 456 rb: 1024 [0x00000400] bpp: 4
What does this mean and what is the root cause? Does access to PHCachingImageManager need to be throttled down?
Below is a class similar to the one in my app that reproduces the issue on my iPhone SE2.
import Foundation
import SwiftUI
import UIKit
import Photos
let thumbnailSize = CGSize(width: 90, height: 90)
struct PhotoSelectView: View {
class ImageRowManager {
let thumbnailImageRequestOptions: PHImageRequestOptions
let cachingImageManager = PHCachingImageManager()
var rows: [SelectableImageRow] = []
init() {
let options = PHImageRequestOptions()
options.isSynchronous = true
options.resizeMode = .fast
options.deliveryMode = .highQualityFormat
options.isNetworkAccessAllowed = false
self.thumbnailImageRequestOptions = options
}
func add(row: SelectableImageRow) {
self.rows.append(row)
}
}
struct SelectableImageRow: Hashable {
var rowIndex: Int
var images: [SelectableImage]
func hash(into hasher: inout Hasher) {
hasher.combine(self.rowIndex)
}
}
class SelectableImage: Hashable, ObservableObject {
@Published var image: UIImage? = nil
let id: String
private let asset: PHAsset
private let imageRowManager: ImageRowManager
init(asset: PHAsset, imageRowManager: ImageRowManager) {
self.id = asset.localIdentifier
self.asset = asset
self.imageRowManager = imageRowManager
self.loadImage()
}
func loadImage() {
DispatchQueue.global(qos: .background).async {
self.imageRowManager.cachingImageManager.requestImage(for: self.asset, targetSize: CGSize(width: 150, height: 150), contentMode: .aspectFill, options: self.imageRowManager.thumbnailImageRequestOptions) { (image, _) in
RunLoop.main.perform {
self.image = image
}
}
}
}
func hash(into hasher: inout Hasher) {
hasher.combine(self.id)
}
static func ==(lhs: SelectableImage, rhs: SelectableImage) -> Bool {
return lhs.id == rhs.id
}
}
let imageRowManager = ImageRowManager()
@State var selected = Set<SelectableImage>()
@State var grid: [SelectableImageRow] = []
var body: some View {
VStack {
VStack {
if !self.grid.isEmpty {
HStack {
Text("Pick images")
Spacer()
}
.padding(.leading)
.padding(.top)
ImagesScrollView(grid: self.$grid, selected: self.$selected)
Button(action: {
self.handleSelectButton()
}) {
Text("Select")
.foregroundColor(Color.black.opacity((self.selected.count != 0) ? 1 : 0.5))
.padding(.vertical,10)
.frame(width: UIScreen.main.bounds.width / 2)
.overlay(
Capsule(style: .continuous)
.stroke(Color.black.opacity((self.selected.count != 0) ? 1 : 0.5), style: StrokeStyle(lineWidth: 5))
)
}
.background(Color.white)
.padding(.bottom)
.disabled((self.selected.count != 0) ? false : true)
}
}
.frame(width: UIScreen.main.bounds.width - CGFloat(horizontalPadding), height: UIScreen.main.bounds.height / 1.5)
.background(Color.white)
.cornerRadius(12)
}
.background(Color.black.opacity(0.1)).edgesIgnoringSafeArea(.all)
.onAppear {
PHPhotoLibrary.requestAuthorization { status in
if status == .authorized {
self.getAllImages()
} else {
print("Cannot access photo library")
}
}
}
}
private func handleSelectButton() {
print("selected images", self.selected)
}
private func getAllImages() {
let fetchOptions = PHFetchOptions()
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
let req = PHAsset.fetchAssets(with: .image, options: fetchOptions)
var rowIndex = 0
for i in stride(from: 0, to: req.count, by: gridItemWidth) {
var iteration : [SelectableImage] = []
for j in i..<i+gridItemWidth {
if j < req.count{
iteration.append(SelectableImage(asset: req[j], imageRowManager: self.imageRowManager))
}
}
let row = SelectableImageRow(rowIndex: rowIndex, images: iteration)
imageRowManager.add(row: row)
rowIndex += 1
}
self.grid = imageRowManager.rows
}
// Subviews
struct ImagesScrollView: View {
@Binding var grid: [SelectableImageRow]
@Binding var selected: Set<SelectableImage>
var body: some View {
List(self.grid, id: \.self) { row in
SelectableImageRowView(row: row, selected: self.$selected)
}
}
}
struct SelectableImageRowView: View {
var row: SelectableImageRow
@Binding var selected: Set<SelectableImage>
var body: some View {
HStack(spacing: 2) {
ForEach(row.images, id: \.self) { img in
SelectableImageCard(data: img, selected: self.$selected)
}
}
}
}
struct SelectableImageCard: View {
@ObservedObject var data: SelectableImage
@Binding var selected: Set<SelectableImage>
var body: some View {
ZStack {
Image(uiImage: self.image()).resizable()
if self.selected.contains(self.data) {
Image(systemName: "checkmark")
.resizable()
.padding(7)
.foregroundColor(.white)
.background(Color.blue)
.clipShape(Circle())
.overlay(Circle().stroke(Color.white, lineWidth: 1))
.frame(width: 30, height: 30, alignment: .topTrailing)
.offset(x: 30, y: -28)
}
}
.frame(width: thumbnailSize.width, height: thumbnailSize.height)
.onTapGesture {
if !self.selected.contains(self.data) {
self.selected.insert(self.data)
} else{
self.selected.remove(self.data)
}
}
}
private func image() -> some UIImage {
if let image = self.data.image {
return image
} else {
return UIImage(systemName: "heart.fill")!
}
}
}
}
I have an app that fetches a list of items with image URLs from a remote API, and then it has to fetch an image per item from the URL given in that item.
The problem is that when scrolling up and down, and thus removing list items from view and moving them back into view, the images do show up. However, on initial load they stay in "loading" status forever until moved out of view and back in.
My code:
import SwiftUI
struct ContentView: View {
@EnvironmentObject var artObjectStore: ArtObjectStore
@State private var pageCount = 1
@State private var tappedLink: String? = nil
@Environment(\.imageCache) var cache: ImageCache
var body: some View {
NavigationView {
Form {
Section(header: Text("Art")) {
List {
ForEach(artObjectStore.artObjects, id: \.self) { artObject in
self.link(for: artObject)
}
Button(action: loadMore) {
Text("")
}
.onAppear {
DispatchQueue.global(qos: .background).asyncAfter(deadline: DispatchTime(uptimeNanoseconds: 10)) {
self.loadMore()
}
}
}
}
}
.navigationBarTitle("Art objects")
}
.onAppear(perform: loadMore)
}
func loadMore() {
pageCount += 1
artObjectStore.loadMore(pageCount)
}
private func link(for artObject: ArtObject) -> some View {
let selection = Binding(get: { self.tappedLink },
set: {
UIApplication.shared.endEditing()
self.tappedLink = $0
})
return NavigationLink(destination: DetailView(artObject: artObject, cache: self.cache),
tag: artObject.id,
selection: selection) {
HStack(alignment: .center) {
VStack(alignment: .leading){
Text("\(artObject.title)").font(.system(size: 12))
Text("\(artObject.principalOrFirstMaker)").font(.system(size: 9)).foregroundColor(.gray)
}
Spacer()
AsyncImage(
url: URL(string: artObject.headerImage.url)!,
cache: self.cache,
width: 200,
height: 50
)
}
}
}
}
extension UIApplication {
func endEditing() {
sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil)
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
Image container:
import SwiftUI
import Combine
import Foundation
class ImageLoader: ObservableObject {
let objectWillChange = ObservableObjectPublisher()
private var cancellable: AnyCancellable?
@Published var image: UIImage? {
willSet {
objectWillChange.send()
}
}
private let url: URL
private var cache: ImageCache?
init(url: URL, cache: ImageCache? = nil) {
self.url = url
self.cache = cache
}
deinit {
cancellable?.cancel()
}
private func cache(_ image: UIImage?) {
image.map { cache?[url] = $0 }
}
func load() {
if let image = cache?[url] {
self.image = image
return
}
cancellable = URLSession.shared.dataTaskPublisher(for: url)
.map { UIImage(data: $0.data) }
.replaceError(with: nil)
.handleEvents(receiveOutput: { [weak self] in self?.cache($0) })
.receive(on: DispatchQueue.main)
.assign(to: \.image, on: self)
}
func cancel() {
cancellable?.cancel()
}
}
struct AsyncImage: View {
@ObservedObject private var loader: ImageLoader
private let width: CGFloat?
private let height: CGFloat?
@State var spin = false
init(url: URL, cache: ImageCache? = nil, width: CGFloat? = nil, height: CGFloat? = nil) {
loader = ImageLoader(url: url, cache: cache)
self.width = width
self.height = height
}
var body: some View {
image
.onAppear(perform: loader.load)
.onDisappear(perform: loader.cancel)
}
private var image: some View {
Group {
if loader.image != nil {
Image(uiImage: loader.image!)
.resizable()
.aspectRatio(contentMode: .fit)
.frame(width: width, height: height)
} else {
Image("loadingCircle")
.resizable()
.frame(width: 20, height: 20)
.rotationEffect(.degrees(spin ? 360 : 0))
.animation(Animation.linear(duration: 0.8).repeatForever(autoreverses: false))
.onAppear() {
self.spin.toggle()
}
}
}
}
}
protocol ImageCache {
subscript(_ url: URL) -> UIImage? { get set }
}
struct TemporaryImageCache: ImageCache {
private let cache = NSCache<NSURL, UIImage>()
subscript(_ key: URL) -> UIImage? {
get { cache.object(forKey: key as NSURL) }
set { newValue == nil ? cache.removeObject(forKey: key as NSURL) : cache.setObject(newValue!, forKey: key as NSURL) }
}
}
struct ImageCacheKey: EnvironmentKey {
static let defaultValue: ImageCache = TemporaryImageCache()
}
extension EnvironmentValues {
var imageCache: ImageCache {
get { self[ImageCacheKey.self] }
set { self[ImageCacheKey.self] = newValue }
}
}
I did try to add willSet on the image, but that doesn't seem to work. Can you help me?
Everything below was tested with Xcode 11.4 / iOS 13.4.
Modified working AsyncImage
I changed Group to VStack and it started updating:
private var image: some View {
VStack { // << here !!
if loader.image != nil {
Modified working image loader:
class ImageLoader: ObservableObject {
@Published var image: UIImage?
private var cancellable: AnyCancellable?
private let url: URL
private var cache: ImageCache?
init(url: URL, cache: ImageCache? = nil) {
self.url = url
self.cache = cache
}
deinit {
cancellable?.cancel()
}
private func cache(_ image: UIImage?) {
self.image = image
image.map { cache?[url] = $0 }
}
func load() {
if let image = cache?[url] {
self.image = image
return
}
cancellable = URLSession.shared.dataTaskPublisher(for: url)
.map { UIImage(data: $0.data) }
.receive(on: DispatchQueue.main)
.replaceError(with: nil)
.sink(receiveValue: { [weak self] in self?.cache($0) })
}
func cancel() {
cancellable?.cancel()
}
}
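For completeness, a usage sketch matching the question's call site (the URL is hypothetical; the imageCache environment key from the question already defaults to a TemporaryImageCache):
struct RowView: View {
    @Environment(\.imageCache) var cache: ImageCache

    var body: some View {
        AsyncImage(
            url: URL(string: "https://example.com/header.jpg")!, // hypothetical
            cache: cache,
            width: 200,
            height: 50
        )
    }
}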