I have an audio file (longer than 60 seconds) that is played in the onAppear() of two of my views (shown below).
When I play the audio file in GamesList.swift, it plays the whole length. But if I play it in ActiveGame.swift, it cuts off after a couple of seconds. I don't know what the difference between these views is, but it does not work in the latter.
GamesList.swift (successfully plays whole file)
struct GamesList: View {
@State var showCreateGameSheet = false
let diameter: CGFloat = 25.0
@State var games: [Game] = []
@EnvironmentObject var gameManager: GameManager
@State var stayPressed = false
@ObservedObject var soundManager = SoundManager()
var body: some View {
NavigationView {
VStack {
// Create game
HStack {
Spacer()
Button(action: { showCreateGameSheet.toggle() }){
Text("+ Create game")
.font(.custom("Seravek-Medium", size: 20))
.foregroundColor(Color.white)
}.buttonStyle(GTButton(color: Color("primary"), stayPressed: stayPressed))
}
.frame(
maxWidth: .infinity,
maxHeight: 80,
alignment: .top
)
ScrollView(.vertical, showsIndicators: true) {
ForEach(self.games.sorted(by: { $0.players.count > $1.players.count }), id: \.id){ game in
AvailableGame(game: game)
.environmentObject(gameManager)
Divider()
}
} // Scrollview
} // VStack
.padding(25)
// .navigationBarHidden(true)
.navigationTitle("Games").foregroundColor(Color("primary"))
.toolbar {
ToolbarItem(placement: .navigationBarTrailing) {
HStack {
Button(action: {
self.games = []
Database().fetchGames(){ game in
if let game = game {
self.games.append(contentsOf: game)
}
}
}){
Image(systemName: "arrow.clockwise")
.resizable()
.aspectRatio(contentMode: .fill)
.frame(width: diameter, height: diameter)
.foregroundColor(.gray)
.padding(.horizontal, 30)
}
NavigationLink(destination: Settings()){
Image("settings")
.resizable()
.aspectRatio(contentMode: .fill)
.frame(width: diameter, height: diameter)
.foregroundColor(.gray)
}
}.padding(.top, 10)
}
}
} // NavigationView
.sheet(isPresented: $showCreateGameSheet, onDismiss: { }) {
CreateGame()
.environmentObject(self.gameManager)
}
.onAppear {
self.soundManager.playSound(name: "duringGame.aiff", loop: false)
Database().fetchGames(){ game in
if let game = game {
self.games.append(contentsOf: game)
}
}
}
.accentColor(Color("primary"))
}
}
ActiveGame.swift (cuts out after a couple of seconds)
struct ActiveGame: View {
@EnvironmentObject var gameManager: GameManager
let diameter: CGFloat = 50.0
@State var image = Image(systemName: "rectangle")
@ObservedObject var soundManager = SoundManager()
var body: some View {
ZStack {
PlayerBlock()
.padding(.horizontal, 10)
.position(x: (UIScreen.main.bounds.size.width / 2), y: 30)
VStack(spacing: 15) {
ZStack { // Question and answers block
if self.gameManager.game?.question == nil {
ProgressView()
.progressViewStyle(CircularProgressViewStyle())
// .scaleEffect(2.0, anchor: .center)
}
VStack {
Text("Round \(self.gameManager.game?.currentRound ?? 1)")
.font(.system(size: 22))
.foregroundColor(Color.gray)
.multilineTextAlignment(.center)
.padding(.bottom, 5)
// QUESTION
Text("\(self.gameManager.game?.question?.text ?? "")")
.font(.custom("Seravek-Bold", size: 28))
.foregroundColor(Color("primary"))
.multilineTextAlignment(.center)
.padding(.horizontal, 30)
.fixedSize(horizontal: false, vertical: true)
// QUESTION IMAGE
(self.gameManager.cachedQuestionImage ?? Image(systemName: "rectangle"))
.resizable()
.scaledToFit()
.aspectRatio(contentMode: .fill)
.frame(height: 200)
// .frame(width: 280, height: 180)
.clipShape(RoundedRectangle(cornerRadius: 10))
.padding(.horizontal, 30)
.opacity(self.gameManager.cachedQuestionImage == nil ? 0.0 : 1.0) // Hide image while it is loading
.onTapGesture {
self.gameManager.deleteGame()
}
// ANSWERS
if 1 > 0 {
MultipleChoice(options: self.gameManager.game?.question?.options ?? [])
} else if (self.gameManager.game?.question?.type == QuestionType.textInput){
TextInput()
}
}.opacity(self.gameManager.game?.question == nil ? 0.0 : 1.0)
.disabled(self.gameManager.game?.question == nil)
// Hide and disable the question block when the next question is loading
}
}.transition(.fadeTransition)
.padding(.top, 40)
}
.onAppear {
print("onAppear")
self.soundManager.playSound(name: "duringGame.aiff", loop: false)
}
.onDisappear {
print("onDisappear")
self.soundManager.player?.stop()
}
}
}
SoundManager.swift - this is the view model that plays the audio
class SoundManager: ObservableObject {
@Published var player: AVAudioPlayer?
func playSound(name: String, loop: Bool){
let path = Bundle.main.path(forResource: name, ofType: nil)
let url = URL(fileURLWithPath: path!)
print("Play URL from name: \(name)")
do {
player = try AVAudioPlayer(contentsOf: url)
if loop {
player?.numberOfLoops = -1 // Loop forever
}
player?.play()
print("Played sound") // Both views print this when the audio plays
} catch {
print("Error playing \(name) sound")
}
}
}
Any idea what the problem is? The SoundManager is stored as an observed object in both views, so it should live as long as the view does. In ActiveGame, onDisappear() never gets called, so the view is still alive and should keep playing the audio until the end.
The first thing I would fix is changing your @ObservedObject wrapper to a @StateObject wrapper. That prevents the SoundManager (and its player) from being deallocated if the view updates at some point while the sound is playing. Let me know if that works...
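For reference, a minimal sketch of that change (the struct name and the reduced body below are just for illustration; the rest of ActiveGame stays as posted):
struct ActiveGameSoundExample: View {
    // @StateObject ties the SoundManager's lifetime to the view's identity, so SwiftUI
    // keeps the same instance (and its AVAudioPlayer) across body re-evaluations instead
    // of recreating it, which is what cuts playback short with @ObservedObject.
    @StateObject var soundManager = SoundManager()

    var body: some View {
        Color.clear
            .onAppear {
                soundManager.playSound(name: "duringGame.aiff", loop: false)
            }
            .onDisappear {
                soundManager.player?.stop()
            }
    }
}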
I have an image slider with a dot indicator named "Onboardingslider". I use it in another screen, "onboard4", and I want the text of the "Next" button to change to "Get started" only on the last image of the slider; otherwise it should stay "Next".
I have tried a lot of things but nothing works.
Please help me, I'm a newbie.
struct onboard4: View {
@State var showModal = false
@State var maxlogoheight: CGFloat = 0
@State var isLinkActive = false
var body: some View {
NavigationView {
ZStack{
//max height will be width of the screen
GeometryReader{ proxy -> AnyView in
let height = proxy.frame(in: .global).height
DispatchQueue.main.async {
if maxlogoheight == 0 {
maxlogoheight = height
}
}
return AnyView (
ZStack{
Image("Logo")
.resizable()
.scaledToFit()
.offset(x: getReact().width/3.5, y: -height/1.25)
}//zstack 2
// .padding(.leading,10)
)//anyview
}//end of gr
.frame(maxHeight: getReact().width)
VStack{
Onboardingslider()
Button(action: {
showModal = true
}) {
ZStack{
Text("Next")
.foregroundColor(Color.white)
.fontWeight(.bold)
.frame (width: 295, height: 30)
.padding()
.background(Color("YellowAccent"))
.cornerRadius(20)
.shadow(color: .gray, radius: 5, x: 0, y: 4)
Image("NextButtonOnboard")
}
}
.fullScreenCover(isPresented: $showModal) {
LoginView() }
.offset(y: getSafearea().bottom + -55)
}//vstack
Button(action: {}, label: {
Text("Continue to the listing service")
.underline()
})
.foregroundColor(Color.black)
.offset(y: getSafearea().bottom + 310)
}//zstack
.background(
NavigationLink(destination: LoginView(), isActive: $isLinkActive) {
EmptyView()
}
.hidden()
)
.toolbar {
ToolbarItem(placement: .navigationBarTrailing) {
Button(action: {
showModal = true
}) {
Text("Skip").underline()
}
.foregroundColor(Color("YellowAccent"))
.font(.system(size: 20,weight: .semibold,design: .serif))
.frame(width: 100, height: 100)
.padding(.top)
.fullScreenCover(isPresented: $showModal) {
LoginView() }
//.padding(.bottom)
}//toolbaritem
}//toolbar
}
}
}
struct onboard4_Previews: PreviewProvider {
static var previews: some View {
onboard4()
}
}
struct Onboardingslider: View {
private let images = ["1", "2", "3", "4"]
init() {
// modify appearance
UIPageControl.appearance().currentPageIndicatorTintColor = .orange
UIPageControl.appearance().pageIndicatorTintColor = .gray
}
var body: some View {
TabView {
ForEach(images, id: \.self) { item in
Image(item)
.padding(.leading,24)
}
}
.frame(height: 600)
.tabViewStyle(PageTabViewStyle(indexDisplayMode: .always))
}
}
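I haven't run this against your project, but a common approach is to drive the TabView with a selection binding and let the parent read the current page. A hedged sketch (currentPage and the page indices are illustrative names, not from your code):
import SwiftUI
import UIKit

struct Onboardingslider: View {
    private let images = ["1", "2", "3", "4"]
    @Binding var currentPage: Int   // the parent (onboard4) owns this with @State

    init(currentPage: Binding<Int>) {
        self._currentPage = currentPage
        // modify appearance, as in the original
        UIPageControl.appearance().currentPageIndicatorTintColor = .orange
        UIPageControl.appearance().pageIndicatorTintColor = .gray
    }

    var body: some View {
        // Binding the TabView selection keeps the parent in sync with the visible slide.
        TabView(selection: $currentPage) {
            ForEach(images.indices, id: \.self) { index in
                Image(images[index])
                    .padding(.leading, 24)
                    .tag(index)
            }
        }
        .frame(height: 600)
        .tabViewStyle(PageTabViewStyle(indexDisplayMode: .always))
    }
}

// In onboard4, something like:
// @State private var currentPage = 0
// Onboardingslider(currentPage: $currentPage)
// Text(currentPage == 3 ? "Get started" : "Next")   // 3 = last index of the four images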
Hi, I have code for working with the camera in Swift and SwiftUI. I'm using the following resource for the camera, integrated with my SwiftUI view: https://betterprogramming.pub/effortless-swiftui-camera-d7a74abde37e
For some reason, when I go to a new view after capturing a photo and then go back to the camera with present.wrappedValue.dismiss(), the following piece of code gets triggered in CameraService.swift:
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
} else {
print("Couldn't add video device input to the session.")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
and it prints "Couldn't add video device input to the session."
Please let me know what the issue could be. It seems that NavigationLink is the problem: I can navigate from another view to the camera view and it works fine, but if I dismiss the view I navigated to via NavigationLink, the camera code breaks.
Minimal reproducible example, made by modifying the CameraView.swift code from the link above; the GitHub repo for that link is:
https://github.com/rorodriguez116/SwiftCamera
import SwiftUI
import Combine
import AVFoundation
final class CameraModel: ObservableObject {
private let service = CameraService()
@Published var photo: Photo!
@Published var showAlertError = false
@Published var isFlashOn = false
@Published var willCapturePhoto = false
var alertError: AlertError!
var session: AVCaptureSession
private var subscriptions = Set<AnyCancellable>()
init() {
self.session = service.session
service.$photo.sink { [weak self] (photo) in
guard let pic = photo else { return }
self?.photo = pic
}
.store(in: &self.subscriptions)
service.$shouldShowAlertView.sink { [weak self] (val) in
self?.alertError = self?.service.alertError
self?.showAlertError = val
}
.store(in: &self.subscriptions)
service.$flashMode.sink { [weak self] (mode) in
self?.isFlashOn = mode == .on
}
.store(in: &self.subscriptions)
service.$willCapturePhoto.sink { [weak self] (val) in
self?.willCapturePhoto = val
}
.store(in: &self.subscriptions)
}
func configure() {
service.checkForPermissions()
service.configure()
}
func capturePhoto() {
service.capturePhoto()
}
func flipCamera() {
service.changeCamera()
}
func zoom(with factor: CGFloat) {
service.set(zoom: factor)
}
func switchFlash() {
service.flashMode = service.flashMode == .on ? .off : .on
}
}
struct CameraView: View {
@StateObject var model = CameraModel()
@State var currentZoomFactor: CGFloat = 1.0
@State var showNewPost: Bool = false
var captureButton: some View {
Button(action: {
model.capturePhoto()
}, label: {
Circle()
.foregroundColor(.white)
.frame(width: 80, height: 80, alignment: .center)
.overlay(
Circle()
.stroke(Color.black.opacity(0.8), lineWidth: 2)
.frame(width: 65, height: 65, alignment: .center)
)
})
}
var capturedPhotoThumbnail: some View {
Group {
if model.photo != nil {
Image(uiImage: model.photo.image!)
.resizable()
.aspectRatio(contentMode: .fill)
.frame(width: 60, height: 60)
.clipShape(RoundedRectangle(cornerRadius: 10, style: .continuous))
.animation(.spring())
.onAppear{
self.showNewPost.toggle()
print("show new post is \(self.showNewPost)")
}
} else {
RoundedRectangle(cornerRadius: 10)
.frame(width: 60, height: 60, alignment: .center)
.foregroundColor(.black)
}
}
}
var flipCameraButton: some View {
Button(action: {
model.flipCamera()
}, label: {
Circle()
.foregroundColor(Color.gray.opacity(0.2))
.frame(width: 45, height: 45, alignment: .center)
.overlay(
Image(systemName: "camera.rotate.fill")
.foregroundColor(.white))
})
}
var body: some View {
GeometryReader { reader in
NavigationView {
ZStack {
Color.black.edgesIgnoringSafeArea(.all)
VStack {
Button(action: {
model.switchFlash()
}, label: {
Image(systemName: model.isFlashOn ? "bolt.fill" : "bolt.slash.fill")
.font(.system(size: 20, weight: .medium, design: .default))
})
.accentColor(model.isFlashOn ? .yellow : .white)
CameraPreview(session: model.session)
.gesture(
DragGesture().onChanged({ (val) in
// Only accept vertical drag
if abs(val.translation.height) > abs(val.translation.width) {
// Get the percentage of vertical screen space covered by drag
let percentage: CGFloat = -(val.translation.height / reader.size.height)
// Calculate new zoom factor
let calc = currentZoomFactor + percentage
// Limit zoom factor to a maximum of 5x and a minimum of 1x
let zoomFactor: CGFloat = min(max(calc, 1), 5)
// Store the newly calculated zoom factor
currentZoomFactor = zoomFactor
// Sets the zoom factor to the capture device session
model.zoom(with: zoomFactor)
}
})
)
.onAppear {
model.configure()
}
.alert(isPresented: $model.showAlertError, content: {
Alert(title: Text(model.alertError.title), message: Text(model.alertError.message), dismissButton: .default(Text(model.alertError.primaryButtonTitle), action: {
model.alertError.primaryAction?()
}))
})
.overlay(
Group {
if model.willCapturePhoto {
Color.black
}
}
)
.animation(.easeInOut)
HStack {
capturedPhotoThumbnail
Spacer()
captureButton
Spacer()
flipCameraButton
}
.padding(.horizontal, 20)
}
NavigationLink(destination: NewPost(), isActive: $showNewPost) {
}
}
}
}
}
}
struct NewPost: View {
@Environment(\.presentationMode) var present
var body: some View {
Button(action: {
print("tapped button")
present.wrappedValue.dismiss() //triggers the configuration failed
}, label: {
Text("new post").foregroundColor(Color.black)
})
}
}
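I can't verify this from the post alone, but one plausible cause: .onAppear { model.configure() } fires again when NewPost is dismissed, so the session tries to add a second video input and session.canAddInput fails. A hedged sketch of guarding the configuration so it only runs once (didConfigure is a hypothetical flag, and only the relevant parts of CameraView are shown):
struct CameraView: View {
    @StateObject var model = CameraModel()
    // Hypothetical flag so configure() runs a single time, even if the view
    // re-appears after NewPost is dismissed via NavigationLink.
    @State private var didConfigure = false

    var body: some View {
        CameraPreview(session: model.session)
            .onAppear {
                guard !didConfigure else { return }
                didConfigure = true
                model.configure()
            }
    }
}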
This bug has had me scratching my head for the past few days, and I still don't know why it arises or what the fix is. I have a camera screen integrated with TOCropViewController (https://github.com/TimOliver/TOCropViewController) so a user can select a picture from their photo library and crop it for a new post. For some reason the image picker detects that it should switch from the camera view to the ImagePicker, but it isn't displayed on iOS 14.4 and below, while it works just fine on iOS 14.5 and above.
Here is my camera view code:
struct CameraView: View {
@StateObject var model = CameraModel()
@State var currentZoomFactor: CGFloat = 1.0
@StateObject var registerData = RegisterViewModel()
@StateObject var newPostData = NewPostModel()
enum SheetType {
case imagePick
case imageCrop
case share
}
@State private var currentSheet: SheetType = .imagePick
@State private var actionSheetIsPresented = false
@State private var sheetIsPresented = false
@State private var originalImage: UIImage?
@State private var image: UIImage?
@State private var croppingStyle = CropViewCroppingStyle.default
@State private var croppedRect = CGRect.zero
@State private var croppedAngle = 0
@StateObject var userData = UserViewModel()
var captureButton: some View {
Button(action: {
let impactMed = UIImpactFeedbackGenerator(style: .light)
impactMed.impactOccurred()
model.capturePhoto()
}, label: {
Circle()
.foregroundColor(.white)
.frame(width: 80, height: 80, alignment: .center)
.overlay(
Circle()
.stroke(Color.black.opacity(0.8), lineWidth: 2)
.frame(width: 65, height: 65, alignment: .center)
)
})
}
var capturedPhotoThumbnail: some View {
Group {
RoundedRectangle(cornerRadius: 10)
.frame(width: 55, height: 55, alignment: .center)
.foregroundColor(Color.gray.opacity(0.2))
.onTapGesture(perform: {
// newPostData.picker.toggle()
self.croppingStyle = .default
self.currentSheet = .imagePick
self.sheetIsPresented = true
print("HERE11 and \(self.currentSheet) and \(self.sheetIsPresented)")
})
.overlay(
Image("gallery")
.renderingMode(.template)
.resizable()
.frame(width: 25, height: 25)
.foregroundColor(Color("white")))
// CODE WITH BUG on iOS 14.4 and below. I also tried a regular sheet, which works in another view on iOS 14.4, but it doesn't work in CameraView()
.sheet(isPresented: $sheetIsPresented) {
if (self.currentSheet == .imagePick) {
ImagePickerView(croppingStyle: self.croppingStyle, sourceType: .photoLibrary, onCanceled: {
// on cancel
}) { (image) in
guard let image = image else {
return
}
self.originalImage = image
DispatchQueue.main.async {
self.currentSheet = .imageCrop
self.sheetIsPresented = true
}
}
} else if (self.currentSheet == .imageCrop) {
ZStack {
Color("imagecropcolor").edgesIgnoringSafeArea(.all)
ImageCropView(croppingStyle: self.croppingStyle, originalImage: self.originalImage!, onCanceled: {
// on cancel
}) { (image, cropRect, angle) in
// on success
self.image = image
model.resetPhoto()
newPostData.newPost.toggle()
}
}
}
}
}
}
var flipCameraButton: some View {
Button(action: {
let impactMed = UIImpactFeedbackGenerator(style: .light)
impactMed.impactOccurred()
model.flipCamera()
}, label: {
Circle()
.foregroundColor(Color.gray.opacity(0.2))
.frame(width: 45, height: 45, alignment: .center)
.overlay(
Image(systemName: "camera.rotate.fill")
.foregroundColor(.white))
})
}
var body: some View {
GeometryReader { reader in
ZStack {
Color.black.edgesIgnoringSafeArea(.all)
VStack {
HStack{
Button(action: {
model.switchFlash()
}, label: {
Image(systemName: model.isFlashOn ? "bolt.fill" : "bolt.slash.fill")
.font(.system(size: 20, weight: .medium, design: .default))
})
.accentColor(model.isFlashOn ? .yellow : .white)
.padding(.leading, 30)
Spacer()
if model.photo != nil {
Text("taken photo").onAppear{
newPostData.newPost.toggle()
}
}
// Image(uiImage: model.photo.image!)
// .resizable()
// .aspectRatio(contentMode: .fill)
// .frame(width: 60, height: 60)
// .clipShape(RoundedRectangle(cornerRadius: 10, style: .continuous))
// .animation(.spring())
//
}
CameraPreview(session: model.session)
.gesture(
DragGesture().onChanged({ (val) in
// Only accept vertical drag
if abs(val.translation.height) > abs(val.translation.width) {
// Get the percentage of vertical screen space covered by drag
let percentage: CGFloat = -(val.translation.height / reader.size.height)
// Calculate new zoom factor
let calc = currentZoomFactor + percentage
// Limit zoom factor to a maximum of 5x and a minimum of 1x
let zoomFactor: CGFloat = min(max(calc, 1), 5)
// Store the newly calculated zoom factor
currentZoomFactor = zoomFactor
// Sets the zoom factor to the capture device session
model.zoom(with: zoomFactor)
}
})
)
.onAppear {
model.configure()
}
.alert(isPresented: $model.showAlertError, content: {
Alert(title: Text(model.alertError.title), message: Text(model.alertError.message), dismissButton: .default(Text(model.alertError.primaryButtonTitle), action: {
model.alertError.primaryAction?()
}))
})
.overlay(
Group {
if model.willCapturePhoto {
Color.black
}
}
)
.animation(.easeInOut)
HStack {
capturedPhotoThumbnail
Spacer()
captureButton
.padding(.trailing, 10)
Spacer()
flipCameraButton
}
.padding(.horizontal, 20)
.padding(.bottom, 20)
}
}.fullScreenCover(isPresented: $newPostData.newPost) {
if model.photo == nil {
NewPost(imageData: (self.image?.pngData())! )
} else {
NewPost(imageData: model.photo.originalData)
}
}
}
}
}
Here is where the CameraView() gets called from my Home Screen
import SwiftUI
import Firebase
struct Home: View {
#AppStorage("current_status") var status = false
#AppStorage("showSheet") var showSheet = false
#State var loadedPost = Post(id: 0, PostUID: "", PostName: "", selectedForPost: false, time: Date())
#State var selectedTab = "camera"
var edges = UIApplication.shared.windows.first?.safeAreaInsets
#StateObject var modelData = ModelView()
#StateObject var userData = UserViewModel()
var body: some View {
VStack(spacing: 15){
VStack (spacing: 0) {
GeometryReader{_ in
ZStack{
if selectedTab == "Post"{
Post(loadedPost: $loadedPost, selectedTab: $selectedTab)
}else if selectedTab == "camera"{
CameraView()
}else if selectedTab == "user"{
User(selectedTab: $selectedTab, loadedPost: $loadedPost)
}
}
}.onChange(of: selectedTab) { (_) in
switch(selectedTab){
case "Post": if
!modelData.isPostLoad{modelData.loadPost()}
case "camera": if
!modelData.isCameraLoad{modelData.loadCamera()}
case "user": if
!modelData.isUserLoad{modelData.loadUser()}
default: ()
}
}
//Tabview hide to show friend modal
if !showSheet{
Divider()
HStack(spacing: 0) {
Spacer(minLength: 0)
TabButton(title: "Post", selectedTab: $selectedTab)
Spacer(minLength: 0)
TabButton(title: "camera", selectedTab: $selectedTab)
.padding(.leading, 30)
.padding(.trailing, 30)
Spacer(minLength: 0)
TabButton(title: "user", selectedTab: $selectedTab)
Spacer(minLength: 0)
}
.padding(.horizontal, 30)
.padding(.bottom, edges!.bottom == 0 ? 15 : edges!.bottom)
.background(Color.black)
}
}
.ignoresSafeArea(.all, edges: .bottom)
.background(Color("Black").ignoresSafeArea(.all, edges: .all))
}
}
}
//Tab Button
struct TabButton : View {
var title: String
@Binding var selectedTab: String
var body: some View {
Button(action: {
withAnimation{selectedTab = title}
}) {
VStack(spacing: 5) {
//Top indicator
//Custom shape...
if title == "user" {
Image(title)
.renderingMode(.template)
.resizable()
.foregroundColor(selectedTab == title ? Color.white : Color("Grey"))
.frame(width: 26.5, height: 26.5)
.padding(.top, UIScreen.screenHeight < 500 ? -5 : 15)
}else if title == "camera"{
Image(title)
.renderingMode(.template)
.resizable()
.foregroundColor(selectedTab == title ? Color.white : Color("Grey"))
.frame(width: 40, height: 40)
.padding(.top, UIScreen.screenHeight < 500 ? -5 : 15)
}else{
Image(title)
.renderingMode(.template)
.resizable()
.foregroundColor(selectedTab == title ? Color.white : Color("Grey"))
.frame(width: 32.5, height: 32.5)
.padding(.top, UIScreen.screenHeight < 500 ? -5 : 15)
}
}
}
}
}
//can update with load views here
class ModelView: ObservableObject {
@Published var isPostLoad = false
@Published var isCameraLoad = false
@Published var isUserLoad = false
init() {
//load initial data
isCameraLoad = true
print("Home Data Loaded")
}
func loadPost(){
print("Post Loaded")
isPostLoad = true
}
func loadCamera(){
print("Camera Loaded")
isCameraLoad = true
}
func loadUser(){
print("User loaded")
isUserLoad = true
}
}
I would greatly appreciate any help on how to get the ImagePicker view to show up on iOS 14.1-14.4. I've been scratching my head, since I assumed that anything that works on iOS 14.5 and above should also work below it, but this specific ImagePicker is not working as intended. Thanks!
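This may or may not be the cause here, but on iOS 14.0-14.4 the content closure of .sheet(isPresented:) reportedly does not pick up later @State changes (behavior that changed around 14.5), so branching on currentSheet inside a single isPresented sheet can show stale or empty content. A hedged sketch of the usual workaround, driving the sheet with an Identifiable item instead (the container view and the Text stand-ins are illustrative only; the real code would present ImagePickerView / ImageCropView):
import SwiftUI

enum ActiveSheet: Identifiable {
    case imagePick, imageCrop, share
    var id: Self { self }
}

struct SheetWorkaroundExample: View {
    @State private var activeSheet: ActiveSheet?

    var body: some View {
        Button("Pick image") { activeSheet = .imagePick }
            // .sheet(item:) rebuilds its content from the current item on every
            // presentation, so it sidesteps the stale-state issue on older iOS 14.
            .sheet(item: $activeSheet) { sheet in
                switch sheet {
                case .imagePick:
                    Text("ImagePickerView goes here")
                case .imageCrop:
                    Text("ImageCropView goes here")
                case .share:
                    Text("Share UI goes here")
                }
            }
    }
}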
I've been trying to animate a ScrollView using ScrollViewReader and withAnimation.
I can't figure out why these two animations aren't working, whether triggered from the Button or from .onAppear.
import SwiftUI
struct ScrollView2: View {
@State private var scrollText = false
var body: some View {
ScrollViewReader { scrollView in
ScrollView {
Button("Scroll to bottom") {
withAnimation(.linear(duration: 30)) {
scrollView.scrollTo(99, anchor: .center)
}
}
ForEach(0..<100) { index in
Text(String(index))
.id(index)
}
.onAppear(perform: {
withAnimation(.linear(duration: 30)) {
scrollView.scrollTo(scrollText ? 99 : 1, anchor: .center)
}
scrollText.toggle()
})
}
}
}
}
It seems that duration doesn't work within withAnimation. As an alternative, I created a function that runs a repeating Timer over 30 seconds, calling scrollTo inside withAnimation on each tick.
struct ScrollView2: View {
@State private var scrollText = false
var body: some View {
ScrollViewReader { scrollView in
ScrollView {
Button("Scroll to bottom") {
animateWithTimer(proxy: scrollView)
}
ForEach(0..<100) { index in
Text(String(index))
.id(index)
}
}
}
}
func animateWithTimer(proxy: ScrollViewProxy) {
let count: Int = 100
let duration: Double = 30.0
let timeInterval: Double = (duration / Double(count))
var counter = 0
let timer = Timer.scheduledTimer(withTimeInterval: timeInterval, repeats: true) { (timer) in
withAnimation(.linear) {
proxy.scrollTo(counter, anchor: .center)
}
counter += 1
if counter >= count {
timer.invalidate()
}
}
timer.fire()
}
}
Note: There is a delay when you press the button initially, because when it tries to scrollTo the first ~40 numbers, they are already high on the screen and the ScrollView doesn't need to scroll anywhere to center them. You can update the timeInterval and counter variables as needed.
Because duration doesn't seem to work with withAnimation yet, I had to be a bit hacky to get the animation effect I wanted.
Here's what I did:
I added a ScrollViewReader to my ScrollView.
I used ForEach and added IDs to the items in my ScrollView.
I used the .offset and .animation modifiers to animate the ScrollView itself (not the items in it).
I used .scrollTo within .onAppear to move the ScrollView at launch to an item further from the start, so the user can scroll both back and forward through the items even while the ScrollView itself is animated from right to left.
Here's what my code looks like:
import SwiftUI
import AVKit
struct ProView: View {
@State private var scrollText = false
var body: some View {
ZStack {
// VStack {
// Color(#colorLiteral(red: 0, green: 0, blue: 0, alpha: 1))
// .ignoresSafeArea(.all)
// }
//
// VStack {
//
// VideoPlayer(player: AVPlayer(url: Bundle.main.url(forResource: "wave-1", withExtension: "mp4")!)) {
// VStack {
// Image("pro-text")
// .resizable()
// .frame(width: 200, height: .infinity)
// .scaledToFit()
// }
// }
// .ignoresSafeArea(.all)
// .frame(width: .infinity, height: 300)
ScrollView(.horizontal, showsIndicators: false) {
ScrollViewReader { value in
HStack(spacing: 5) {
ForEach(0 ..< 100) { i in
HStack {
Image("benefit-1")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-2")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-3")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-4")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-5")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-6")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-7")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-8")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-9")
.resizable()
.frame(width: 120, height: 120)
Image("benefit-10")
.resizable()
.frame(width: 120, height: 120)
}
.id(i)
}
}
.offset(x: scrollText ? -10000 : 20)
.animation(Animation.linear(duration: 300).repeatForever(autoreverses: false))
.onAppear() {
value.scrollTo(50, anchor: .trailing)
scrollText.toggle()
}
}
}
Spacer()
}
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ProView()
}
}
I created a TabView with 4 views. Each view has a view model that loads the data the view needs from an API. It works fine, except the first view is always empty unless I tap another tab and then tap the first tab again.
Any idea how I could make sure the first view's data is there as soon as it loads?
Find my code below.
Thanks.
-ContentView
@ViewBuilder
var body: some View {
if isLoggedIn() {
MainScreen()
} else {
UnAuthenticatedScreen()
}
}
-MainView
@ObservedObject var eventsVM: EventsVM = EventsVM()
var body: some View {
TabView(){
HomeScreen(events: self.eventsVM.events)
.tabItem {
Image(systemName: "house")
Text("Home")
}
.navigationBarHidden(true)
EventsScreen(events: self.eventsVM.events)
.tabItem {
Image(systemName: "calendar")
Text("Events")
}.navigationBarHidden(true)
}
}
- EventsVM
import Foundation
import Combine
class EventsVM: ObservableObject {
let didChange = PassthroughSubject<[EventModel], Never>()
private let eventsService: EventService
@Published var events = [EventModel]()
init() {
self.eventsService = EventService()
self.fetchEvents()
}
private func fetchEvents(){
self.eventsService.getAllEvents { (_events, _error) in
guard let events = _events else { return }
self.events = events
}
}
}
The home view
import SwiftUI
struct HomeScreen: View {
var eventsVM: EventsVM
@State var news: [NewsModel] = []
@State var albums = [AlbumModel]()
init(eventsVM: EventsVM){
self.eventsVM = EventsVM()
}
var body: some View {
GeometryReader { gr in
VStack(alignment: .leading, spacing: 0) {
HStack{
Spacer()
}
HStack {
Spacer()
Text("About Us")
Image("logo_squad")
.resizable()
.frame(width: 50, height: 50)
}
Text("Events").font(Font.custom("Francois One", size: 30)).foregroundColor(.red)
ScrollView(.horizontal, showsIndicators: false){
HStack {
ForEach(self.eventsVM.events, id: \.self) { event in
HomeEventRow(event: event).frame(width: gr.size.width - 60, height: 170)
.overlay(
RoundedRectangle(cornerRadius: 10)
.stroke(Color.gray, lineWidth: 1).shadow(radius: -2)
)
.clipShape(RoundedRectangle(cornerRadius: 10))
}
}
}
// Text("News").font(Font.custom("Francois One", size: 30)).foregroundColor(.red)
// ScrollView(.horizontal, showsIndicators: false){
// HStack {
// ForEach(self.news, id: \.self){ _news in
// HomeNewsRow(news: _news)
// .frame(width: gr.size.width - 60, height: 150)
// .overlay(
// RoundedRectangle(cornerRadius: 10)
// .stroke(Color.gray, lineWidth: 1).shadow(radius: -2)
// )
// .clipShape(RoundedRectangle(cornerRadius: 10))
// }
// }
// }
// Text("Albums").font(Font.custom("Francois One", size: 30)).foregroundColor(.red)
// ScrollView(Axis.Set.horizontal, showsIndicators: true){
// HStack {
// ForEach(self.albums, id: \.self){ album in
// HomeMediaRow(album: album)
// .frame(width: gr.size.width - 60, height: 150)
// }.overlay(
// RoundedRectangle(cornerRadius: 10)
// .stroke(Color.gray, lineWidth: 1).shadow(radius: -2)
// )
// .clipShape(RoundedRectangle(cornerRadius: 10))
// }
// }
Spacer()
}
.padding(EdgeInsets(top: 0, leading: 0, bottom: 0, trailing: 0))
.onAppear {
print("self.eventsVM.events \(self.eventsVM.events)")
}
}.padding(EdgeInsets(top: 0, leading: 5, bottom: 0, trailing: 0))
}
}
struct HomeScreen_Previews: PreviewProvider {
static var previews: some View {
HomeScreen(eventsVM: EventsVM())
}
}
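One thing I noticed in the posted code (a hedged guess, since I can't run it): HomeScreen's init ignores the view model passed in and creates a brand-new EventsVM, and the property is a plain var, so the view is never notified when the events finish loading. A minimal sketch of keeping and observing the instance the parent created (the List body is just a placeholder for your real rows):
struct HomeScreen: View {
    // Observe the shared view model instead of creating a second one,
    // so the first tab refreshes as soon as fetchEvents() publishes data.
    @ObservedObject var eventsVM: EventsVM

    init(eventsVM: EventsVM) {
        self.eventsVM = eventsVM   // keep the instance MainView owns
    }

    var body: some View {
        List(eventsVM.events, id: \.self) { event in
            Text(String(describing: event))   // placeholder; the real view uses HomeEventRow
        }
    }
}

// In MainView, pass the same instance:
// HomeScreen(eventsVM: self.eventsVM)
If the view model really should be created inside MainView, @StateObject would also be a better fit there than @ObservedObject, so the instance isn't recreated when MainView's body is rebuilt.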