After setting the initialPlaybackTime property, the video (HTTP streaming) still plays from the beginning. The same code works fine on iOS <= 8.3:
self.moviePlayer.initialPlaybackTime = self.lastPlaybackTime;
[self.moviePlayer play];
This works for me. Basically, you need to call setCurrentPlaybackTime when the movie starts playing. But you also need a flag, playbackDurationSet, which is set to NO when you present the movie player and set to YES once the movie has been seeked to the saved playback time for the first time.
NOTE: This flag is required because when you seek the movie with the scrubber, moviePlayerPlaybackStateChanged fires with a playbackState of MPMoviePlaybackStatePlaying.
BOOL playbackDurationSet = NO;

- (void)moviePlayerPlaybackStateChanged:(NSNotification *)notification
{
    MPMoviePlayerController *player = (MPMoviePlayerController *)notification.object;
    switch (player.playbackState) {
        case MPMoviePlaybackStatePlaying:
            // Seek only once, on the first transition into the playing state
            if (!playbackDurationSet) {
                [self.moviePlayer setCurrentPlaybackTime:yourStartTime];
                playbackDurationSet = YES;
            }
            break;
        default:
            break;
    }
}

- (void)moviePlayerPresented
{
    playbackDurationSet = NO;
}
I've seen some of these issues as well. Since MPMoviePlayerController is deprecated in iOS 9, they're unlikely to be fixed.
hariszaman's answer worked for me:
- (void)moviePlaybackStateChanged:(NSNotification *)notif
{
    //...
    if (self.videoPlayer.playbackState == MPMoviePlaybackStatePlaying) {
        if (self.currentBookmark) {
            if ([[PSDeviceInfo sharedInstance] is_iOSatLeast84]) {
                NSTimeInterval toTime = [self.currentBookmark.seconds doubleValue];
                [self.videoPlayer setCurrentPlaybackTime:toTime];
                self.currentBookmark = nil;
            }
        }
    }
    if (self.videoPlayer.playbackState == MPMoviePlaybackStatePaused) {
        //...
    }
    if (self.videoPlayer.playbackState == MPMoviePlaybackStateStopped) {
        //...
    }
}
Also:
- (void)configureMoviePlayer
{
    if (self.currentBookmark) {
        if ([[PSDeviceInfo sharedInstance] is_iOSatLeast84]) {
            // will set the start time after playback has started
        } else {
            NSTimeInterval toTime = [self.currentBookmark.seconds doubleValue];
            self.videoPlayer.initialPlaybackTime = toTime;
            self.currentBookmark = nil;
        }
    } else {
        self.videoPlayer.initialPlaybackTime = 0;
    }
    //...
}
The method is_iOSatLeast84:
- (BOOL)is_iOSatLeast84
{
    NSString *version = [[UIDevice currentDevice] systemVersion];
    // Compare against 8.35 rather than 8.4 to sidestep float-precision issues
    BOOL isAtLeast84 = [version floatValue] >= 8.35;
    return isAtLeast84;
}
Although all of this is a workaround, it gets the job done.
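Since MPMoviePlayerController is deprecated in iOS 9, the modern route is AVPlayer. Here is a minimal sketch of the same resume-from-bookmark idea, assuming streamURL and a saved lastPlaybackTime in seconds (both placeholders for your own values):

#import <AVFoundation/AVFoundation.h>

AVPlayer *player = [AVPlayer playerWithURL:streamURL];
CMTime startTime = CMTimeMakeWithSeconds(self.lastPlaybackTime, NSEC_PER_SEC);
[player seekToTime:startTime completionHandler:^(BOOL finished) {
    if (finished) {
        [player play]; // start playback only once the seek has landed
    }
}];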
I'm following along with this doc from Apple on how to implement stereo capture. After setting the polar pattern, it still comes back as omnidirectional instead of stereo. All the outErrors from the setters are nil, and all of the individual setters are returning YES (success). So everything is as expected, except for the final check that the polar pattern is set to "stereo". That check fails.
if (@available(iOS 14, *)) {
    BOOL wasStereoConfigurationSuccessful = [self _configureStereoCapture:outError];
    NSLog(@"wasStereoConfigurationSuccessful=%@", wasStereoConfigurationSuccessful ? @"YES" : @"NO");
}
- (BOOL)_configureStereoCapture:(NSError **)outError {
    BOOL wasBuiltInMicConfigurationSuccessful = [self _configureBuiltInMic:outError];
    if (!wasBuiltInMicConfigurationSuccessful) {
        return NO;
    }
    BOOL wasMicInputDirectionalityConfigurationSuccessful = [self _configureMicrophoneInputDirectionality:outError];
    if (!wasMicInputDirectionalityConfigurationSuccessful) {
        return NO;
    }
    return outError == nil;
}
- (BOOL)_configureBuiltInMic:(NSError **)outError {
    // Enabling stereo requires either AVAudioSessionCategoryRecord or AVAudioSessionCategoryPlayAndRecord
    BOOL canStereoBeEnabled = [_audioSession.category isEqualToString:AVAudioSessionCategoryRecord] ||
                              [_audioSession.category isEqualToString:AVAudioSessionCategoryPlayAndRecord];
    if (!canStereoBeEnabled) {
        return NO;
    }
    // Get the set of available inputs. If there are no audio accessories attached,
    // there will be only one available input: the built-in microphone.
    NSArray<AVAudioSessionPortDescription *> *availableMics = [_audioSession availableInputs];
    AVAudioSessionPortDescription *builtInMic = nil;
    for (AVAudioSessionPortDescription *mic in availableMics) {
        if ([mic.portType isEqualToString:AVAudioSessionPortBuiltInMic]) {
            builtInMic = mic;
            break;
        }
    }
    if (builtInMic == nil) {
        return NO;
    }
    BOOL wasPreferredInputSetSuccessfully = [_audioSession setPreferredInput:builtInMic error:outError];
    if (!wasPreferredInputSetSuccessfully) {
        return NO;
    }
    return wasPreferredInputSetSuccessfully;
}
- (BOOL)_configureMicrophoneInputDirectionality:(NSError **)outError {
    AVAudioSessionPortDescription *preferredInput = _audioSession.preferredInput;
    NSArray<AVAudioSessionDataSourceDescription *> *dataSources = preferredInput.dataSources;
    AVAudioSessionDataSourceDescription *newDataSource = nil;
    AVAudioSessionOrientation audioSessionOrientation = _AudioSessionOrientation(); // Proprietary C helper
    for (AVAudioSessionDataSourceDescription *dataSource in dataSources) {
        if ([dataSource.dataSourceName isEqualToString:audioSessionOrientation]) {
            newDataSource = dataSource;
            break;
        }
    }
    if (newDataSource == nil) {
        *outError = [NSError errorWithDomain:@"class_name"
                                        code:0
                                    userInfo:@{@"message" : @"dataSource.dataSourceName does not equal any available AVAudioSessionOrientation."}];
        return NO;
    }
    NSArray<AVAudioSessionPolarPattern> *supportedPolarPatterns = newDataSource.supportedPolarPatterns;
    BOOL isStereoSupported = NO;
    if (@available(iOS 14, *)) {
        isStereoSupported = [supportedPolarPatterns containsObject:AVAudioSessionPolarPatternStereo];
    }
    if (!isStereoSupported) {
        return NO;
    }
    BOOL wasPreferredPolarPatternSuccessfullySet = NO;
    if (@available(iOS 14, *)) {
        wasPreferredPolarPatternSuccessfullySet = [newDataSource setPreferredPolarPattern:AVAudioSessionPolarPatternStereo error:outError];
    }
    if (!wasPreferredPolarPatternSuccessfullySet) {
        return NO;
    }
    BOOL wasPreferreredDataSourceSuccessfullySet = [preferredInput setPreferredDataSource:newDataSource error:outError];
    if (!wasPreferreredDataSourceSuccessfullySet) {
        return NO;
    }
    BOOL wasPreferredInputOrientationSuccessfullySet = NO;
    if (@available(iOS 14, *)) {
        wasPreferredInputOrientationSuccessfullySet = [_audioSession setPreferredInputOrientation:_AudioStereoOrientationFromAudioSessionOrientation(audioSessionOrientation) error:outError];
    }
    if (!wasPreferredInputOrientationSuccessfullySet) {
        return NO;
    }
    NSLog(@"error is nil=%@", outError == nil ? @"YES" : @"NO");
    NSLog(@"preferredInput=%@", _audioSession.preferredInput);
    NSLog(@"preferredDataSource=%@", _audioSession.preferredInput.preferredDataSource);
    NSLog(@"selectedPolarPattern=%@", _audioSession.preferredInput.preferredDataSource.selectedPolarPattern);
    return outError == nil;
}
This is the printout:
error is nil=YES
preferredInput=<AVAudioSessionPortDescription: 0x282e1d470, type = MicrophoneBuiltIn; name = iPhone Microphone; UID = Built-In Microphone; selectedDataSource = Bottom>
preferredDataSource=<AVAudioSessionDataSourceDescription: 0x282e1cd60, ID = 1835216946; name = Front>
// *** PROBLEM ***
selectedPolarPattern=Omnidirectional // This should be 'Stereo'
wasStereoConfigurationSuccessful=YES
I've been expanding the testing of my video rendering code and noticed something unusual with AVPlayerItemVideoOutput. To test my rendering code, I check for new pixel buffers using hasNewPixelBufferForItemTime:.
While testing, this method never returns YES. But the rendering code works just fine in my app using the same setup, rendering frames to OpenGL textures.
I set up a GitHub project with the very basics showing the error. In the app you can load the video by tapping the button (it is not loaded immediately, to avoid any conflict with the test). This proves at least that the video loads and plays.
The project also has a test that attempts to set up the AVPlayerItemVideoOutput and check for new pixel buffers. This test always fails, but I can't see what I'm doing wrong, or why the exact same steps work in my own app.
The GitHub project is here.
And this is the test method to peruse:
#import <XCTest/XCTest.h>
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h> // for CACurrentMediaTime()

@interface AVPlayerTestTests : XCTestCase
@end

@implementation AVPlayerTestTests

- (void)setUp {
    [super setUp];
    // Put setup code here. This method is called before the invocation of each test method in the class.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
    [super tearDown];
}

- (void)testAVPlayer
{
    NSURL *fileURL = [[NSBundle bundleForClass:self.class] URLForResource:@"SampleVideo_1280x720_10mb" withExtension:@"mp4"];
    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithURL:fileURL];
    [self keyValueObservingExpectationForObject:playerItem
                                        keyPath:@"status"
                                        handler:^BOOL(id _Nonnull observedObject, NSDictionary *_Nonnull change) {
        AVPlayerItem *oPlayerItem = (AVPlayerItem *)observedObject;
        switch (oPlayerItem.status) {
            case AVPlayerItemStatusFailed:
                XCTFail(@"Video failed");
                return YES;
            case AVPlayerItemStatusUnknown:
                return NO;
            case AVPlayerItemStatusReadyToPlay:
                return YES;
            default:
                return NO;
        }
    }];
    AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
    NSDictionary *pbOptions = @{
        (NSString *)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
        (NSString *)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary],
        (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @YES
    };
    AVPlayerItemVideoOutput *output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pbOptions];
    XCTAssertNotNil(output);
    [self waitForExpectationsWithTimeout:100 handler:nil];
    if (playerItem.status == AVPlayerItemStatusReadyToPlay) {
        [playerItem addOutput:output];
        player.rate = 1.0;
        player.muted = YES;
        [player play];
        CMTime vTime = [output itemTimeForHostTime:CACurrentMediaTime()];
        // This is what we're testing
        BOOL foundFrame = [output hasNewPixelBufferForItemTime:vTime];
        XCTAssertTrue(foundFrame);
        if (!foundFrame) {
            // Poll for up to ten seconds
            for (int i = 0; i < 10; i++) {
                sleep(1);
                vTime = [output itemTimeForHostTime:CACurrentMediaTime()];
                foundFrame = [output hasNewPixelBufferForItemTime:vTime];
                if (foundFrame) {
                    NSLog(@"Got frame at %i", i);
                    break;
                }
                if (i == 9) {
                    XCTFail(@"Failed to acquire");
                }
            }
        }
    }
}

@end
EDIT 1: This seems to be a bug, and I've filed a Radar.
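One variation that may be worth trying (an assumption on my part, not a confirmed fix): sleep() blocks the test's main thread, so polling while spinning the run loop keeps it serviced in case frame delivery depends on it:

// Hypothetical variation on the polling loop: turn the run loop instead of sleeping.
BOOL foundFrame = NO;
NSDate *deadline = [NSDate dateWithTimeIntervalSinceNow:10.0];
while (!foundFrame && [deadline timeIntervalSinceNow] > 0) {
    [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
    CMTime vTime = [output itemTimeForHostTime:CACurrentMediaTime()];
    foundFrame = [output hasNewPixelBufferForItemTime:vTime];
}
XCTAssertTrue(foundFrame, @"No pixel buffer was delivered within 10 seconds");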
I've googled this question but did not find any solution.
So, my problem is: I'm applying a video filter to a video file using GPUImage. At the same time, the sound of that video is not playing. I know that sound playback is not supported in GPUImage while filters are being applied.
So, how can I achieve that?
You can achieve sound playback by adding AVPlayer support, like this.
In GPUImageMovie.h:

@property (readwrite, nonatomic) BOOL playSound;

In GPUImageMovie.m, update @interface GPUImageMovie () like this:

@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
{
    // Add this
    BOOL hasAudioTrack;
    .........
    ........
    // Add all three lines below
    AVPlayer *theAudioPlayer;
    CFAbsoluteTime startActualFrameTime;
    CGFloat currentVideoTime;
}
After that, in the -(void)startProcessing method, add the following lines:
- (void)startProcessing
{
    // Add this
    currentVideoTime = 0.0f;
    .....
    .....
    // Add this
    if (self.playSound)
    {
        [self setupSound];
    }
    GPUImageMovie __block *blockSelf = self;
    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^{
        runSynchronouslyOnVideoProcessingQueue(^{
            .........
            .........
            // Add this
            startActualFrameTime = CFAbsoluteTimeGetCurrent() - currentVideoTime;
            .......
            .......
        });
    }];
}
Again, in -(AVAssetReader*)createAssetReader, update it:
- (AVAssetReader *)createAssetReader
{
    .......
    .......
    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil));
    // Add this
    hasAudioTrack = ([audioTracks count] > 0);
    .........
}
In - (void)processAsset,
- (void)processAsset
{
    .......
    .......
    if ([reader startReading] == NO)
    {
        NSLog(@"Error reading from file at URL: %@", self.url);
        return;
    }
    // Add this
    if (self.playSound && hasAudioTrack)
    {
        [theAudioPlayer seekToTime:kCMTimeZero];
        [theAudioPlayer play];
    }
    .........
    .........
}
In -(void)endProcessing,
- (void)endProcessing
{
    .........
    .........
    if (self.playerItem && (displayLink != nil))
    {
        [displayLink invalidate]; // remove from all run loops
        displayLink = nil;
    }
    // Add this
    if (theAudioPlayer != nil)
    {
        [theAudioPlayer pause];
        [theAudioPlayer seekToTime:kCMTimeZero];
    }
    .........
    .........
}
Finally, add the new method -(void)setupSound:
// Add this
- (void)setupSound
{
    if (theAudioPlayer != nil)
    {
        [theAudioPlayer pause];
        [theAudioPlayer seekToTime:kCMTimeZero];
        theAudioPlayer = nil;
    }
    theAudioPlayer = [[AVPlayer alloc] initWithURL:self.url];
}
Finally, when you create your GPUImageMovie object, don't forget to set the playSound flag to YES, like this: movieFile.playSound = YES;
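For reference, a minimal usage sketch (movieURL and filter are placeholders for your own asset and filter chain):

// Sketch: create the movie source with the new playSound flag enabled.
GPUImageMovie *movieFile = [[GPUImageMovie alloc] initWithURL:movieURL];
movieFile.playSound = YES;    // enables the AVPlayer added above
[movieFile addTarget:filter]; // your GPUImage filter chain
[movieFile startProcessing];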
Hope this helps.
How can I pause the audio while the filter is being applied by GPUImage? We can pause the video writing with movieWriter.paused = YES; when I pause the movie writing, I need to pause the audio as well.
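One way to extend the approach above is a pair of hypothetical pauseSound/resumeSound methods on GPUImageMovie alongside setupSound (a sketch, not stock GPUImage API):

// In GPUImageMovie.m - hypothetical additions that pause/resume the companion player.
- (void)pauseSound
{
    if (theAudioPlayer != nil) {
        [theAudioPlayer pause];
    }
}

- (void)resumeSound
{
    if (theAudioPlayer != nil) {
        [theAudioPlayer play];
    }
}

Then, wherever you toggle movieWriter.paused, call [movieFile pauseSound] or [movieFile resumeSound] to keep the audio in step with the writing.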
I have an iPhone application which overlays the camera with a custom view.
I have a button to switch between camera flash modes; this is the code:
switch ([self.imagePickerController cameraFlashMode]) {
    case UIImagePickerControllerCameraFlashModeAuto:
        [self.imagePickerController setCameraFlashMode:UIImagePickerControllerCameraFlashModeOn];
        return @"On";
    case UIImagePickerControllerCameraFlashModeOn:
        [self.imagePickerController setCameraFlashMode:UIImagePickerControllerCameraFlashModeOff];
        return @"Off";
    case UIImagePickerControllerCameraFlashModeOff:
        [self.imagePickerController setCameraFlashMode:UIImagePickerControllerCameraFlashModeAuto];
        return @"Auto";
    default:
        break;
}
This is my problem: it worked perfectly fine on iOS 7.0.x, but on iOS 7.1 the cameraFlashMode property returns UIImagePickerControllerCameraFlashModeAuto regardless of its real state.
The flash mode does change, but I get no indication of that.
Any clues?
Thanks
I solved it like this:
@property (nonatomic) NSInteger flashMode;

if (_flashMode == UIImagePickerControllerCameraFlashModeAuto)
{
    _flashMode = UIImagePickerControllerCameraFlashModeOff;
}
else if (_flashMode == UIImagePickerControllerCameraFlashModeOff)
{
    _flashMode = UIImagePickerControllerCameraFlashModeOn;
}
else if (_flashMode == UIImagePickerControllerCameraFlashModeOn)
{
    _flashMode = UIImagePickerControllerCameraFlashModeAuto;
}
_cameraPicker.cameraFlashMode = (UIImagePickerControllerCameraFlashMode)_flashMode;
Okay, so I researched this in great detail and stumbled upon this helpful article online:
http://www.c2itconsulting.com/2013/10/ios-flash-setting-on-camera-picker-only-available-after-view-is-displayed/
I took their advice, and now I set the flash setting just before the user takes the picture. Instead of checking what the camera's current flash setting is, all I do is check my flash button's titleLabel text to see what the user wants as their flash setting.
Here is the code I came up with, which solves the problem perfectly for me. I hope this helps out all of you with this same problem, which didn't exist on iOS 7.0 but now does on iOS 7.1.
#define deviceHasCameraFlash [UIImagePickerController isFlashAvailableForCameraDevice:UIImagePickerControllerCameraDeviceRear]

- (void)capturePhoto
{
    if (self.cameraDevice != UIImagePickerControllerCameraDeviceFront && deviceHasCameraFlash)
    {
        if ([self.flashButton.titleLabel.text isEqualToString:@"Auto"])
        {
            self.cameraFlashMode = UIImagePickerControllerCameraFlashModeAuto;
        }
        else if ([self.flashButton.titleLabel.text isEqualToString:@"Off"])
        {
            self.cameraFlashMode = UIImagePickerControllerCameraFlashModeOff;
        }
        else if ([self.flashButton.titleLabel.text isEqualToString:@"On"])
        {
            self.cameraFlashMode = UIImagePickerControllerCameraFlashModeOn;
        }
    }
    [self takePicture];
}
Try to use AVCaptureDevice:
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // The lock is only strictly required when changing the configuration, but it is harmless here
    [device lockForConfiguration:nil];
    if ([device hasTorch]) {
        if (device.torchMode == AVCaptureTorchModeAuto) {
            NSLog(@"Auto");
        }
        if (device.torchMode == AVCaptureTorchModeOn) {
            NSLog(@"On");
        }
        if (device.torchMode == AVCaptureTorchModeOff) {
            NSLog(@"Off");
        }
    }
    if ([device hasFlash]) {
        if (device.flashMode == AVCaptureFlashModeAuto) {
            NSLog(@"Auto");
        }
        if (device.flashMode == AVCaptureFlashModeOn) {
            NSLog(@"On");
        }
        if (device.flashMode == AVCaptureFlashModeOff) {
            NSLog(@"Off");
        }
    }
    [device unlockForConfiguration];
}
The answer above didn't work for me on iOS 7.1, @daidai. This is what I did, and it worked for me.
In your .h file, declare the flashMode property.
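The declaration itself isn't shown in the answer; presumably something like this (the exact type is an assumption):

// Hypothetical declaration: track the flash state yourself instead of reading it
// back from the picker, which is unreliable on iOS 7.1.
@property (nonatomic) UIImagePickerControllerCameraFlashMode flashMode;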
- (void)didTapFlash:(id)sender
{
    if (self.flashMode == UIImagePickerControllerCameraFlashModeAuto) {
        // toggle your button to "On"
        [self.imagePicker setCameraFlashMode:UIImagePickerControllerCameraFlashModeOn];
        [self.flashButton setImage:[UIImage imageNamed:@"flashOn"] forState:UIControlStateNormal];
        self.flashMode = UIImagePickerControllerCameraFlashModeOn;
        NSLog(@"On state: %ld", (long)self.flashMode);
    } else if (self.flashMode == UIImagePickerControllerCameraFlashModeOn) {
        // toggle your button to "Off"
        [self.imagePicker setCameraFlashMode:UIImagePickerControllerCameraFlashModeOff];
        [self.flashButton setImage:[UIImage imageNamed:@"flashOf"] forState:UIControlStateNormal];
        self.flashMode = UIImagePickerControllerCameraFlashModeOff;
        NSLog(@"Off state: %ld", (long)self.flashMode);
    } else if (self.flashMode == UIImagePickerControllerCameraFlashModeOff) {
        // toggle your button to "Auto"
        [self.imagePicker setCameraFlashMode:UIImagePickerControllerCameraFlashModeAuto];
        [self.flashButton setImage:[UIImage imageNamed:@"flashAuto"] forState:UIControlStateNormal];
        self.flashMode = UIImagePickerControllerCameraFlashModeAuto;
        NSLog(@"Auto state: %ld", (long)self.flashMode);
    }
}
I'm working on a voice memo app that records sounds from the user and plays them back.
The recording part works well. The recorded file is stored successfully, and when a user taps a saved file it starts playing.
I'm using a UISlider to indicate the play progress. Currently I have set the UISlider's userInteractionEnabled property to NO, so the user cannot interact with the slider bar.
What I need is to set this property to YES, so that one can choose the desired position to play the audio from. For this, I added the following line to my slider code:
[mySlider addTarget:self action:@selector(sliderChanged:) forControlEvents:UIControlEventValueChanged];
By adding the above line, I can find the slider position within the sliderChanged: method. But I don't know how to start the player from the selected position. Each file has a different size, so their playing times also differ.
Is there any sample code available? Your help is needed.
Thanks
Sharing a solution in Swift.
I was able to get my project working with the slider based on the Objective-C implementation zeeran posted, and it even updates the current time continuously now, which mine wasn't doing before. I'm so pleased.
So I want to share the Swift implementation in case it helps anyone out there.
import UIKit
import AVFoundation
import Foundation

class ActivityViewController: UIViewController, AVAudioPlayerDelegate {

    /// The player & sound file object
    var audioPlayer = AVAudioPlayer()
    var activitySound = NSURL()

    var updater : CADisplayLink! = nil   // tracks the time into the track
    var updater_running : Bool = false   // did the updater start?
    var playing : Bool = false           // indicates if track was started playing

    @IBOutlet var playButton: UIButton!
    @IBOutlet var timeLabel: UILabel!    // shows time and duration
    @IBOutlet var audioSlider: UISlider! // slider object

    @IBAction func pressedPlayButton(sender: AnyObject) {
        if (playing == false) {
            updater = CADisplayLink(target: self, selector: Selector("updateProgress"))
            updater.frameInterval = 1
            updater.addToRunLoop(NSRunLoop.currentRunLoop(), forMode: NSDefaultRunLoopMode)
            updater_running = true
            audioPlayer.play()
            playButton.selected = true  // pause image is assigned to "selected"
            playing = true
            updateProgress()
        } else {
            updateProgress()            // update track time
            audioPlayer.pause()         // then pause
            playButton.selected = false // show play image (unselected button)
            playing = false             // note track has stopped playing
        }
    }

    // action for when the slider is moved: set as 'Value Changed' for the UISlider object
    @IBAction func sliderMoved(sender: UISlider) {
        // if the track was playing, remember that so we can restart it after changing the position
        var wasPlaying : Bool = false
        if playing == true {
            audioPlayer.pause()
            wasPlaying = true
        }
        audioPlayer.currentTime = NSTimeInterval(round(audioSlider.value))
        updateProgress()
        // starts playing the track again if it had been playing
        if (wasPlaying == true) {
            audioPlayer.play()
            wasPlaying = false
        }
    }

    // Timer delegate method that updates the current time display in minutes
    func updateProgress() {
        let total = Float(audioPlayer.duration/60)
        let current_time = Float(audioPlayer.currentTime/60)
        audioSlider.minimumValue = 0.0
        audioSlider.maximumValue = Float(audioPlayer.duration)
        audioSlider.setValue(Float(audioPlayer.currentTime), animated: true)
        timeLabel.text = NSString(format: "%.2f/%.2f", current_time, total) as String
    }

    // AVAudioPlayer delegate method - resets things when the track finishes playing
    func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool) {
        if flag {
            playButton.selected = false
            playing = false
            audioPlayer.currentTime = 0.0
            updateProgress()
            updater.invalidate()
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // configure AVAudioPlayer as the audioPlayer object
        let file = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("file1", ofType: "mp3")!)!
        audioPlayer = AVAudioPlayer(contentsOfURL: file, error: nil)
        audioPlayer.delegate = self
        audioSlider.continuous = false
    }

    // make sure the updater gets stopped when leaving the view controller,
    // because otherwise it will run indefinitely in the background
    override func viewWillDisappear(animated: Bool) {
        if playing == true {
            audioPlayer.stop()
        }
        updater.invalidate()
        updater_running = false
        super.viewWillDisappear(animated)
    }
}
Before you load the sound file, refresh the slider's minimum and maximum values:

yourSlider.minimumValue = 0.0;
yourSlider.maximumValue = player.duration;

Then, in the sliderChanged: method:

player.currentTime = yourSlider.value;
The following code sample should get you started. With this, the slider can function as a progress indicator, and the user can scrub it to start playback at a specified position.
/* sliderMoved is invoked when the slider value changes, either when the user
   scrubs it or when it is auto-updated as audio playback progresses */
- (void)sliderMoved:(id)sender
{
    [self.playbackTimer invalidate];
    self.playbackTimer = nil;
    NSInteger position = (NSInteger)self.slider.value * 60; // the slider scale is in minutes, so convert to seconds
    if (self.isPlaying)
    {
        [self.audioPlayer pause];
    }
    [self.audioPlayer setCurrentTime:position];
    if (self.isPlaying)
    {
        [self startOrResumePlaying];
    }
    [self updateSlider];
}

/* updateSlider is a helper function for updating the slider value */
- (void)updateSlider
{
    float total = self.audioPlayer.duration;
    float f = self.audioPlayer.currentTime;
    self.slider.value = f / 60.0;
    self.progressLabel.text = [NSString stringWithFormat:@"%.2f", self.slider.value];
}

/* startOrResumePlaying starts audio playback and initializes a timer that
   auto-increments the progress indicator every 3 seconds */
- (void)startOrResumePlaying
{
    [self.audioPlayer prepareToPlay];
    [self.audioPlayer play];
    self.isPlaying = YES;
    self.playbackTimer = [NSTimer scheduledTimerWithTimeInterval:3.0
                                                          target:self
                                                        selector:@selector(handleTimer:)
                                                        userInfo:nil
                                                         repeats:YES];
    [[NSRunLoop currentRunLoop] addTimer:self.playbackTimer forMode:NSDefaultRunLoopMode];
}
Mine works by combining the answers above from rajagp and Igor Bidiniuc, so I'm going to upvote them both.
#import <AVFoundation/AVFoundation.h>

@property (nonatomic, strong) IBOutlet UIButton *playButton;
@property (nonatomic, strong) IBOutlet UISlider *slider;
@property (nonatomic, strong) IBOutlet UILabel *timeLabel;

@property (nonatomic, strong) AVAudioPlayer *audioPlayer;
@property (nonatomic, strong) NSTimer *timer;

- (IBAction)playPause:(id)sender;
- (IBAction)sliderValueChanged:(id)sender;
Here is the implementation file.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    self.slider.continuous = YES;

    NSError *error = nil;
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:&error];
    if (error == nil) {
        NSLog(@"audio session initialized successfully");
        [self PlayAudio:@"fileName"];
    } else {
        NSLog(@"error initializing audio session: %@", [error description]);
    }
}

- (void)PlayAudio:(NSString *)file {
    NSError *error = nil;
    NSString *soundPath = [[NSBundle mainBundle] pathForResource:file ofType:@"mp3"];
    NSURL *fileURL = [NSURL fileURLWithPath:soundPath];

    if ([self.audioPlayer isPlaying]) {
        [self.audioPlayer pause];
        [self.timer invalidate];
    }

    self.audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:&error];
    self.audioPlayer.delegate = self;
    if (error == nil) {
        NSLog(@"audio player initialized successfully");
        [self playPause:nil];
    } else {
        NSLog(@"error initializing audio player: %@", [error description]);
    }
}

- (void)viewWillDisappear:(BOOL)animated {
    if ([self.audioPlayer isPlaying]) {
        [self.audioPlayer pause];
        [self.timer invalidate];
    }
    [super viewWillDisappear:animated];
}

- (IBAction)playPause:(id)sender
{
    if ([self.audioPlayer isPlaying]) {
        [self.audioPlayer pause];
        self.playButton.imageView.image = [UIImage imageNamed:@"play"];
        [self.timer invalidate];
    } else {
        [self.audioPlayer play];
        self.playButton.imageView.image = [UIImage imageNamed:@"pause"];
        self.timer = [NSTimer scheduledTimerWithTimeInterval:1.0f target:self selector:@selector(updateProgress) userInfo:nil repeats:YES];
    }
}

- (IBAction)sliderValueChanged:(id)sender {
    [self.timer invalidate];
    self.timer = nil;
    float position = self.slider.value;
    if (self.audioPlayer.isPlaying)
    {
        [self playPause:nil];
    }
    [self.audioPlayer setCurrentTime:position];
    if (self.audioPlayer.isPlaying)
    {
        [self playPause:nil];
    }
    [self updateProgress];
}

#pragma mark - Timer delegate

- (void)updateProgress
{
    NSInteger durationMinutes = [self.audioPlayer duration] / 60;
    NSInteger durationSeconds = [self.audioPlayer duration] - durationMinutes * 60;
    NSInteger currentTimeMinutes = [self.audioPlayer currentTime] / 60;
    NSInteger currentTimeSeconds = [self.audioPlayer currentTime] - currentTimeMinutes * 60;
    NSString *progressString = [NSString stringWithFormat:@"%ld:%02ld / %ld:%02ld",
                                (long)currentTimeMinutes, (long)currentTimeSeconds,
                                (long)durationMinutes, (long)durationSeconds];
    self.timeLabel.text = progressString;
    self.slider.minimumValue = 0.0;
    self.slider.maximumValue = [self.audioPlayer duration];
    [self.slider setValue:[self.audioPlayer currentTime] animated:YES];
}

#pragma mark - AVAudioPlayer delegate methods

- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag
{
    if (flag) {
        self.playButton.imageView.image = [UIImage imageNamed:@"play"];
        [self.timer invalidate];
    }
}
AVAudioPlayer has a currentTime property you can set. Please refer to the AVAudioPlayer documentation.
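For illustration, a minimal sketch of such a handler, assuming the slider's maximumValue was set to the player's duration in seconds as in the answers above:

// Jump to the position chosen on the slider; currentTime is an NSTimeInterval in seconds.
- (void)sliderChanged:(UISlider *)sender
{
    self.audioPlayer.currentTime = sender.value;
}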