I'm writing a NativeScript plugin for an image picker. The Android part is finished, and now I'm writing the iOS code. It shows the image picker dialog, but the assigned delegates are never triggered. Please check my code below.
import * as application from "tns-core-modules/application";
import * as frame from "tns-core-modules/ui/frame";
export class Nativemediapicker extends NSObject implements UIImagePickerControllerDelegate {
public static ObjCProtocols = [UIImagePickerControllerDelegate];
get() {
let version = NSBundle.mainBundle.objectForInfoDictionaryKey("CFBundleShortVersionString");
return version;
}
static new(): Nativemediapicker {
return <Nativemediapicker>super.new();
}
private _callback: (result?) => void;
private _errorCallback: (result?) => void;
public initWithCallbackAndOptions(callback: (result?) => void, errorCallback: (result?) => void, options?): Nativemediapicker {
this._callback = callback;
this._errorCallback = errorCallback;
if (options) {
// collect options
}
console.log('initWithCallbackAndOptions')
return this;
}
static registerFileProvider(provider) { }
static pickFiles(mimeType, onResult, onError) {
onError("ERROR: For ios this feature is comming soon.");
}
static takePicture(onResult, onError) {
// if (!UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.Camera)) {
// onError("ERROR: For ios simulator this feature is not supported.");
// return
// }
let imagePicker = UIImagePickerController.new()
imagePicker.delegate = Nativemediapicker.new().initWithCallbackAndOptions(onResult, onError, null)
imagePicker.sourceType = UIImagePickerControllerSourceType.PhotoLibrary
imagePicker.allowsEditing = false
// imagePicker.showsCameraControls = true
let topMostFrame = frame.topmost();
if (topMostFrame) {
let viewController: UIViewController = topMostFrame.currentPage && topMostFrame.currentPage.ios;
if (viewController) {
while (viewController.parentViewController) {
// find the top-most view controller
viewController = viewController.parentViewController;
}
while (viewController.presentedViewController) {
// find last presented modal
viewController = viewController.presentedViewController;
}
viewController.presentViewControllerAnimatedCompletion(imagePicker, true, null);
}
}
}
static recordVideo(onResult, onError) {
onError("ERROR: For ios this feature is comming soon.");
}
static recordAudio(onResult, onError) {
onError("ERROR: For ios this feature is comming soon.");
}
imagePickerControllerDidCancel(picker): void {
console.log("imagePickerControllerDidCancel")
this._errorCallback("ERROR: Image capturing cancelled.");
}
imagePickerControllerDidFinishPickingMediaWithInfo(picker, info): void {
console.log("imagePickerControllerDidFinishPickingMediaWithInfo")
// success path: hand the picked media info to the success callback
this._callback(info);
}
}
I can't figure out what I'm doing wrong and where.
Please help me, guys...
I suspect the reason is that your delegate is being cleaned up by the garbage collector. One important rule with iOS is that you must always keep a reference to the native object in a JS variable to keep it alive.
Try,
private _delegate;
....
this._delegate = Nativemediapicker.new().initWithCallbackAndOptions(onResult, onError, null);
imagePicker.delegate = this._delegate;
After adding this line in the takePicture function, it worked:
imagePicker.modalPresentationStyle = UIModalPresentationStyle.CurrentContext;
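For reference, here is a minimal sketch of takePicture with both fixes combined (my consolidation of the two changes above, assuming the Nativemediapicker class from the question): a static field keeps the delegate alive, and the presentation style is set before presenting.
private static _delegate: Nativemediapicker;
static takePicture(onResult, onError) {
    let imagePicker = UIImagePickerController.new();
    // Hold a strong JS reference so the GC cannot collect the delegate
    // before the picker fires its callbacks.
    Nativemediapicker._delegate = Nativemediapicker.new().initWithCallbackAndOptions(onResult, onError, null);
    imagePicker.delegate = Nativemediapicker._delegate;
    imagePicker.sourceType = UIImagePickerControllerSourceType.PhotoLibrary;
    imagePicker.allowsEditing = false;
    imagePicker.modalPresentationStyle = UIModalPresentationStyle.CurrentContext;
    // ...then locate the top-most view controller and call
    // presentViewControllerAnimatedCompletion as in the question.
}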
Related
I have a KMM app with this code:
fun getWeather(callback: (WeatherInfo) -> Unit) {
println("Start loading")
GlobalScope.launch(ApplicationDispatcher) {
while (true) {
val response = httpClient.get<String>(API_URL) {
url.parameters.apply {
set("q", "Moscow")
set("units", "metric")
set("appid", weatherApiKey())
}
println(url.build())
}
val result = Json {
ignoreUnknownKeys = true
}.decodeFromString<WeatherApiResponse>(response).main
callback(result)
// because ApplicationDispatcher on iOS does not support delay
withContext(Dispatchers.Default) { delay(DELAY_TIME) }
}
}
}
If I replace withContext(Dispatchers.Default) { delay(DELAY_TIME) } with a plain delay(DELAY_TIME), execution never returns to the while loop, so it runs only one iteration.
And ApplicationDispatcher for iOS looks like this:
internal actual val ApplicationDispatcher: CoroutineDispatcher = NsQueueDispatcher(dispatch_get_main_queue())
internal class NsQueueDispatcher(
private val dispatchQueue: dispatch_queue_t
) : CoroutineDispatcher() {
override fun dispatch(context: CoroutineContext, block: Runnable) {
dispatch_async(dispatchQueue) {
block.run()
}
}
}
And from the delay source code I would guess that DefaultDelay should be returned, so the behaviour should be the same with or without withContext(Dispatchers.Default):
/** Returns [Delay] implementation of the given context */
internal val CoroutineContext.delay: Delay get() = get(ContinuationInterceptor) as? Delay ?: DefaultDelay
Thanks!
P.S. I got ApplicationDispatcher from ktor-samples.
ApplicationDispatcher is probably old leftover code; you don't need it anymore:
CoroutineScope(Dispatchers.Default).launch {
}
or
MainScope().launch {
}
And don't forget to use the -native-mt version of coroutines; there is more info in this issue.
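Putting it together, here is a sketch of getWeather without ApplicationDispatcher (assuming the httpClient, API_URL, weatherApiKey, DELAY_TIME, WeatherInfo and WeatherApiResponse from the question); on Dispatchers.Default a plain delay works:
fun getWeather(callback: (WeatherInfo) -> Unit) {
    CoroutineScope(Dispatchers.Default).launch {
        while (true) {
            val response = httpClient.get<String>(API_URL) {
                url.parameters.apply {
                    set("q", "Moscow")
                    set("units", "metric")
                    set("appid", weatherApiKey())
                }
            }
            val result = Json { ignoreUnknownKeys = true }
                .decodeFromString<WeatherApiResponse>(response).main
            // hop back to the main thread in case the callback touches UI
            withContext(Dispatchers.Main) { callback(result) }
            delay(DELAY_TIME) // plain delay works on Dispatchers.Default
        }
    }
}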
I am trying to implement Stripe in Xamarin.Forms with Apple Pay.
I am following this tutorial, but I am stuck on one problem: I can't find the STPApplePayContext class in Xamarin.
The libraries I am using:
iOS Bindings or Xamarin.Stripe.iOS
Both provide iOS bindings for Stripe.
Following the tutorial, I can't find STPApplePayContext, or anything similar, in either of those libraries.
My code with iOS Bindings
public Task<bool> InitializeAsync(string clientToken)
{
// STPPaymentConfiguration
StripeSdk.STPPaymentConfiguration.SharedConfiguration().PublishableKey =
"pk_test_123456";
_canPay = Stripe_ApplePay.DeviceSupportsApplePay;
return Task.FromResult(true);
}
public bool CreatePaymentRequest(double amount)
{
_paymentRequest = _stripe.PaymentRequestWithMerchantIdentifier(_merchantID,"US","USD");
_paymentRequest.SupportedNetworks = _paymentNetworks;
_paymentRequest.MerchantCapabilities = PKMerchantCapability.ThreeDS;
_paymentRequest.PaymentSummaryItems = new[]
{
new PKPaymentSummaryItem()
{
Label = "Item",
Amount = new NSDecimalNumber(amount)
}
};
return true;
}
public bool Pay()
{
    // var applePayContext = STPApplePayContext;
    // if (applePayContext)
    // {
    // }
    // Swift equivalent from the tutorial:
    // if let applePayContext = STPApplePayContext(paymentRequest: paymentRequest, delegate: self) {
    //     // Present Apple Pay payment sheet
    //     applePayContext.presentApplePay(on: self)
    // } else {
    //     // There is a problem with your Apple Pay configuration
    // }
    return false; // placeholder until STPApplePayContext (or an equivalent) is available
}
Can anybody point me in the right direction?
I am getting an error where my TwilioVideo module, which expects a capturer (camera or microphone), is not receiving that input. This error started after we switched to CocoaPods for installing the SDK and the PureLayout UI library. Previously we had installed all of these dependencies manually in Xcode.
I am developing with React Native 0.40.0 for iOS and react-native-cli 1.0.0, using Xcode 8.2.1 (8C1002) with the iPhone 6 simulator on iOS 10.2, CocoaPods 1.2.0, and TwilioVideo SDK 1.0.0-beta5. There is also a 1.0.0-beta6 version, which I have tried as well (with the same result). Reverting to 1.0.0-beta4 does remove the error, which suggests a problem with the way I register the audio and video tracks.
Here is my Podfile:
source 'https://github.com/CocoaPods/Specs'
source 'https://github.com/twilio/cocoapod-specs'
target 'MyApp' do
# Uncomment the next line if you're using Swift or would like to use dynamic frameworks
# use_frameworks!
# Pods for MyApp
pod 'TwilioVideo', '1.0.0-beta5'
pod 'PureLayout', '~> 3.0'
target 'MapleNativeProviderTests' do
inherit! :search_paths
# Pods for testing
end
end
I have implemented a TwilioVideo module in Xcode based on this repository: react-native-twilio-video-webrtc. He recently updated the repository to work with React Native 0.40.0, which changed the import syntax. I have tried both the old and the new import syntax, and I continue to get the following error when I try to mount my video component:
Here is the documentation for the TwilioVideo SDK. This is the TVIVideoCapturer.
I made a modification to react-native-twilio-video-webrtc, which is essentially a thin wrapper around the TwilioVideo SDK that uses RCT_EXPORT_METHOD to expose key API methods. The library initializes the audio and video tracks in its init method, which causes some annoying behaviour: event listeners do not receive callbacks when the application starts. So I moved this setup into a custom, publicly exposed RCT_EXPORT_METHOD called initialize, which I call from the specific view in the application that mounts the video and initializes the camera/microphone inputs.
My implementation of TWVideoModule.m is:
#import "TWVideoModule.h"
static NSString* roomDidConnect = @"roomDidConnect";
static NSString* roomDidDisconnect = @"roomDidDisconnect";
static NSString* roomDidFailToConnect = @"roomDidFailToConnect";
static NSString* roomParticipantDidConnect = @"roomParticipantDidConnect";
static NSString* roomParticipantDidDisconnect = @"roomParticipantDidDisconnect";
static NSString* participantAddedVideoTrack = @"participantAddedVideoTrack";
static NSString* participantRemovedVideoTrack = @"participantRemovedVideoTrack";
static NSString* participantAddedAudioTrack = @"participantAddedAudioTrack";
static NSString* participantRemovedAudioTrack = @"participantRemovedAudioTrack";
static NSString* participantEnabledTrack = @"participantEnabledTrack";
static NSString* participantDisabledTrack = @"participantDisabledTrack";
static NSString* cameraDidStart = @"cameraDidStart";
static NSString* cameraWasInterrupted = @"cameraWasInterrupted";
static NSString* cameraDidStopRunning = @"cameraDidStopRunning";
@interface TWVideoModule () <TVIParticipantDelegate, TVIRoomDelegate, TVIVideoTrackDelegate, TVICameraCapturerDelegate>
@end
@implementation TWVideoModule
@synthesize bridge = _bridge;
RCT_EXPORT_MODULE();
- (dispatch_queue_t)methodQueue
{
return dispatch_get_main_queue();
}
- (NSArray<NSString *> *)supportedEvents
{
return @[roomDidConnect,
roomDidDisconnect,
roomDidFailToConnect,
roomParticipantDidConnect,
roomParticipantDidDisconnect,
participantAddedVideoTrack,
participantRemovedVideoTrack,
participantAddedAudioTrack,
participantRemovedAudioTrack,
participantEnabledTrack,
participantDisabledTrack,
cameraDidStopRunning,
cameraDidStart,
cameraWasInterrupted];
}
- (instancetype)init
{
self = [super init];
if (self) {
UIView* remoteMediaView = [[UIView alloc] init];
//remoteMediaView.backgroundColor = [UIColor blueColor];
//remoteMediaView.translatesAutoresizingMaskIntoConstraints = NO;
self.remoteMediaView = remoteMediaView;
UIView* previewView = [[UIView alloc] init];
//previewView.backgroundColor = [UIColor yellowColor];
//previewView.translatesAutoresizingMaskIntoConstraints = NO;
self.previewView = previewView;
}
return self;
}
- (void)dealloc
{
[self.remoteMediaView removeFromSuperview];
self.remoteMediaView = nil;
[self.previewView removeFromSuperview];
self.previewView = nil;
self.participant = nil;
self.localMedia = nil;
self.camera = nil;
self.localVideoTrack = nil;
self.videoClient = nil;
self.room = nil;
}
RCT_EXPORT_METHOD(initialize) {
self.localMedia = [[TVILocalMedia alloc] init];
self.camera = [[TVICameraCapturer alloc] init];
NSLog(#"Camera %#", self.camera);
self.camera.delegate = self;
self.localVideoTrack = [self.localMedia addVideoTrack:YES
capturer:self.camera
constraints:[self videoConstraints]
error:nil];
self.localAudioTrack = [self.localMedia addAudioTrack:YES];
if (!self.localVideoTrack) {
NSLog(#"Failed to add video track");
} else {
// Attach view to video track for local preview
[self.localVideoTrack attach:self.previewView];
}
}
The rest of this file pertains to adding and removing tracks and joining/disconnecting from the Twilio channel, so I have not included it. I also have TWVideoPreviewManager and TWRemotePreviewManager, which simply provide UIViews for the media objects for local and remote video streams.
My TwilioVideoComponent.js component is:
import React, { Component, PropTypes } from 'react'
import {
NativeModules,
NativeEventEmitter
} from 'react-native';
import {
View,
} from 'native-base';
const {TWVideoModule} = NativeModules;
class TwilioVideoComponent extends Component {
state = {};
static propTypes = {
onRoomDidConnect: PropTypes.func,
onRoomDidDisconnect: PropTypes.func,
onRoomDidFailToConnect: PropTypes.func,
onRoomParticipantDidConnect: PropTypes.func,
onRoomParticipantDidDisconnect: PropTypes.func,
onParticipantAddedVideoTrack: PropTypes.func,
onParticipantRemovedVideoTrack: PropTypes.func,
onParticipantAddedAudioTrack: PropTypes.func,
onParticipantRemovedAudioTrack: PropTypes.func,
onParticipantEnabledTrack: PropTypes.func,
onParticipantDisabledTrack: PropTypes.func,
onCameraDidStart: PropTypes.func,
onCameraWasInterrupted: PropTypes.func,
onCameraDidStopRunning: PropTypes.func,
...View.propTypes,
};
_subscriptions = [];
constructor(props) {
super(props);
this.flipCamera = this.flipCamera.bind(this);
this.startCall = this.startCall.bind(this);
this.endCall = this.endCall.bind(this);
this._eventEmitter = new NativeEventEmitter(TWVideoModule)
}
//
// Methods
/**
* Initializes camera and microphone tracks
*/
initializeVideo() {
TWVideoModule.initialize();
}
flipCamera() {
TWVideoModule.flipCamera();
}
startCall({roomName, accessToken}) {
TWVideoModule.startCallWithAccessToken(accessToken, roomName);
}
endCall() {
TWVideoModule.disconnect();
}
toggleVideo() {
TWVideoModule.toggleVideo();
}
toggleAudio() {
TWVideoModule.toggleAudio();
}
_unregisterEvents() {
this._subscriptions.forEach(e => e.remove());
this._subscriptions = []
}
_registerEvents() {
this._subscriptions = [
this._eventEmitter.addListener('roomDidConnect', (data) => {
if (this.props.onRoomDidConnect) {
this.props.onRoomDidConnect(data)
}
}),
this._eventEmitter.addListener('roomDidDisconnect', (data) => {
if (this.props.onRoomDidDisconnect) {
this.props.onRoomDidDisconnect(data)
}
}),
this._eventEmitter.addListener('roomDidFailToConnect', (data) => {
if (this.props.onRoomDidFailToConnect) {
this.props.onRoomDidFailToConnect(data)
}
}),
this._eventEmitter.addListener('roomParticipantDidConnect', (data) => {
if (this.props.onRoomParticipantDidConnect) {
this.props.onRoomParticipantDidConnect(data)
}
}),
this._eventEmitter.addListener('roomParticipantDidDisconnect', (data) => {
if (this.props.onRoomParticipantDidDisconnect) {
this.props.onRoomParticipantDidDisconnect(data)
}
}),
this._eventEmitter.addListener('participantAddedVideoTrack', (data) => {
if (this.props.onParticipantAddedVideoTrack) {
this.props.onParticipantAddedVideoTrack(data)
}
}),
this._eventEmitter.addListener('participantRemovedVideoTrack', (data) => {
if (this.props.onParticipantRemovedVideoTrack) {
this.props.onParticipantRemovedVideoTrack(data)
}
}),
this._eventEmitter.addListener('participantAddedAudioTrack', (data) => {
if (this.props.onParticipantAddedAudioTrack) {
this.props.onParticipantAddedAudioTrack(data)
}
}),
this._eventEmitter.addListener('participantRemovedAudioTrack', (data) => {
if (this.props.onParticipantRemovedAudioTrack) {
this.props.onParticipantRemovedAudioTrack(data)
}
}),
this._eventEmitter.addListener('participantEnabledTrack', (data) => {
if (this.props.onParticipantEnabledTrack) {
this.props.onParticipantEnabledTrack(data)
}
}),
this._eventEmitter.addListener('participantDisabledTrack', (data) => {
if (this.props.onParticipantDisabledTrack) {
this.props.onParticipantDisabledTrack(data)
}
}),
this._eventEmitter.addListener('cameraDidStart', (data) => {
if (this.props.onCameraDidStart) {
this.props.onCameraDidStart(data)
}
}),
this._eventEmitter.addListener('cameraWasInterrupted', (data) => {
if (this.props.onCameraWasInterrupted) {
this.props.onCameraWasInterrupted(data)
}
}),
this._eventEmitter.addListener('cameraDidStopRunning', (data) => {
if (this.props.onCameraDidStopRunning) {
this.props.onCameraDidStopRunning(data)
}
})
]
}
componentWillMount() {
// register all listeners once; a second, untracked 'cameraDidStart'
// listener here would never be removed in _unregisterEvents
this._registerEvents()
}
componentWillUnmount() {
this._unregisterEvents()
}
render() {
return this.props.children || null
}
}
export default TwilioVideoComponent;
I'm not sure how to modify the Xcode project for compatibility with the TwilioVideo beta5 API. Any help would be appreciated.
In your Podfile, look for # use_frameworks! and remove the #.
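With the Podfile from the question, that would look like this (run pod install again afterwards):
source 'https://github.com/CocoaPods/Specs'
source 'https://github.com/twilio/cocoapod-specs'

target 'MyApp' do
  use_frameworks!

  pod 'TwilioVideo', '1.0.0-beta5'
  pod 'PureLayout', '~> 3.0'

  target 'MapleNativeProviderTests' do
    inherit! :search_paths
  end
end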
I am trying to add Instagram to the "Share To" functionality in my app. I have seen Instagram's iPhone Hooks documentation. I have created a custom UIActivity which works fine, but my question is: is there a way to add the "Import with Instagram" functionality as seen in iOS's Photos app (screenshot: iOS Photos app)?
For some reason my app does not show that "Import with Instagram" (screenshot: my app's share view).
I do not want to share only with Instagram, so no ".igo".
EDIT: All of this is specifically for iOS versions < 10. For some reason the Instagram share extension works fine (for my app) on devices with iOS >= 10.
EDIT: I am trying to share an image and a video in ".jpeg" and ".mov" format respectively.
I have seen/read that Instagram added a share extension in release 8.2, so technically all apps should show "Instagram" in the share tray; it can be seen in the Google Photos app, for example.
public void NativeShareImage(UIView sourceView, CGRect sourceRect,
UIImage image, string shareCaption, string emailSubject)
{
string filename = Path.Combine(FileSystemUtils.GetTemporaryDataPath(), "Image.jpg");
NSError err = null;
using(var imgData = image.AsJPEG(JpgImageQuality))
{
if(imgData.Save(filename, false, out err))
{
Logger.Information("Image saved before native share as {FileName}", filename);
}
else
{
Logger.Error("Image NOT saved before native share as to path {FileName}. {Error}", filename, err.Description);
return;
}
}
// these are the items that need to be shared
// Instagram ignores the caption; that is a known limitation
var activityItems = new List<NSObject>
{
new NSString(shareCaption),
new NSUrl(new Uri(filename).AbsoluteUri)
};
// Here i add the custom UIActivity for Instagram
UIActivity[] applicationActivities =
{
new InstagramActivity(image, sourceRect, sourceView),
};
var activityViewController = new UIActivityViewController(activityItems.ToArray(), applicationActivities);
activityViewController.SetValueForKey(new NSString(emailSubject), new NSString("subject"));
activityViewController.CompletionWithItemsHandler = (activityType, completed, returnedItems, error) =>
{
UserSharedTo(activityType, completed);
};
// Hide some of the less used activity types so that Instagram shows up in the list. Otherwise it's pushed off the activity view
// and the user has to scroll to see it.
activityViewController.ExcludedActivityTypes = new[] { UIActivityType.AssignToContact, UIActivityType.CopyToPasteboard, UIActivityType.Print };
if(UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Phone)
{
PresentViewController(activityViewController, true, null);
}
else
{
activityViewController.ModalPresentationStyle = UIModalPresentationStyle.Popover;
PresentViewController(activityViewController, true, null);
// Get the popover presentation controller and configure it.
UIPopoverPresentationController presentationController = activityViewController.PopoverPresentationController;
presentationController.PermittedArrowDirections = UIPopoverArrowDirection.Down;
presentationController.SourceRect = sourceRect;
presentationController.SourceView = sourceView;
}
}
// When opening the custom activity, use ".igo" so that only Instagram is offered
public class InstagramActivity : UIActivity
{
public InstagramActivity(UIImage imageToShare, CGRect frame, UIView view, string shareCaption = "")
{
_ImageToShare = imageToShare;
_Frame = frame;
_View = view;
ShareCaption = shareCaption;
}
public override UIImage Image { get { return UIImage.FromBundle("Instagram"); } }
public override string Title { get { return "Instagram"; } }
public override NSString Type { get { return new NSString("PostToInstagram"); } }
public string ShareCaption { get; set; }
public override bool CanPerform(NSObject[] activityItems)
{
return UIApplication.SharedApplication.CanOpenUrl(NSUrl.FromString("instagram://app"));
}
public override void Prepare(NSObject[] activityItems)
{
}
public override void Perform()
{
string filename = Path.Combine(FileSystemUtils.GetTemporaryDataPath(), "Image.igo");
NSError err = null;
using(var imgData = _ImageToShare.AsJPEG(JpgImageQuality))
{
if(imgData.Save(filename, false, out err))
{
Logger.Information("Instagram image saved as {FileName}", filename);
}
else
{
Logger.Error("Instagram image NOT saved as to path {FileName}. {Error}", filename, err.Description);
Finished(false);
return;
}
}
var url = NSUrl.FromFilename(filename);
_DocumentController = UIDocumentInteractionController.FromUrl(url);
_DocumentController.DidEndSendingToApplication += (o, e) => Finished(true);
_DocumentController.Uti = "com.instagram.exclusivegram";
if(!string.IsNullOrEmpty(ShareCaption))
{
_DocumentController.Annotation = NSDictionary.FromObjectAndKey(new NSString(ShareCaption), new NSString("InstagramCaption"));
}
_DocumentController.PresentOpenInMenu(_Frame, _View, true);
}
UIImage _ImageToShare;
CGRect _Frame;
UIView _View;
UIDocumentInteractionController _DocumentController;
}
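One more thing worth checking (my assumption, not something stated in the post above): on iOS 9 and later, CanOpenUrl returns false unless the scheme is declared in Info.plist, which would make CanPerform hide the Instagram activity. Declare the scheme like this:
<key>LSApplicationQueriesSchemes</key>
<array>
    <string>instagram</string>
</array>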
I am trying to present a UIImagePickerController from a button action. My project crashes with this error:
Got a SIGABRT while executing native code. This usually indicates
a fatal error in the mono runtime or one of the native libraries
used by your application.
I only have a ViewController embedded in a NavigationController in the storyboard. Code snippets below:
UIImagePickerController imagePicker;
public override void ViewDidLoad()
{
base.ViewDidLoad();
this.setupImagePicker();
CapturePhotoButton.TouchUpInside += delegate
{
this.AddMedia();
};
}
public void setupImagePicker()
{
imagePicker = new UIImagePickerController();
imagePicker.SourceType = UIImagePickerControllerSourceType.Camera;
imagePicker.ModalPresentationStyle = UIModalPresentationStyle.Popover;
imagePicker.MediaTypes = UIImagePickerController.AvailableMediaTypes(
UIImagePickerControllerSourceType.Camera);
imagePicker.FinishedPickingMedia += HandleFinishedPickingMedia;
imagePicker.Canceled += (sender, e) => {
imagePicker.DismissModalViewController(true);
};
}
public void HandleFinishedPickingMedia(object sender,
UIImagePickerMediaPickedEventArgs e)
{
bool isImage = false;
switch (e.Info[UIImagePickerController.MediaType].ToString())
{
case "public.image":
isImage = true;
break;
case "public.video":
break;
}
if (isImage)
{
UIImage originalImage = e.Info[UIImagePickerController.OriginalImage] as UIImage;
if (originalImage != null)
{
PreviewImageView.Image = originalImage;
imagePicker.DismissViewController(true, null);
}
}
}
public void AddMedia()
{
//Crashes on this line
this.NavigationController.PresentViewController(imagePicker, true, null);
}
Add Privacy - Camera Usage Description to your Info.plist; that resolved the issue.
Add the following to your Info.plist for camera use:
<key>NSCameraUsageDescription</key>
<string>This app needs access to the camera to take photos.</string>
And for photo library access:
<key>NSPhotoLibraryUsageDescription</key>
<string>This app needs access to photos.</string>
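A related guard worth adding (a sketch; the simulator has no camera, so the Camera source type is unavailable there and presenting the picker can crash for that reason as well):
public void AddMedia()
{
    if (!UIImagePickerController.IsSourceTypeAvailable(UIImagePickerControllerSourceType.Camera))
    {
        // No camera on this device (e.g. the simulator); fall back or bail out.
        return;
    }
    this.NavigationController.PresentViewController(imagePicker, true, null);
}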