No head tracking in demo scene - SDK

I have downloaded the SDK and tried to load up the demo scene, but there is no head tracking when I export an APK. Any suggestions?
On installing the APK, I get the stereo scene and the cube is rotating, but no head tracking. I can turn the distortion on and off, but I have not found where I can enable head tracking.
Any help would be much appreciated.

1.) Create a new C# script in Unity3D with this content:
using UnityEngine;
using System.Collections;

public class VROneRotateAround : MonoBehaviour {
    public Transform target;
    public float distance = 5f;
    public Vector3 offset = Vector3.zero;
    public bool useAngleX = true;
    public bool useAngleY = true;
    public bool useAngleZ = true;
    public bool useRealHorizontalAngle = true;
    public bool resetViewOnTouch = true;

    private Quaternion initialRotation = Quaternion.identity;
    private Quaternion currentRotation;
    private static Vector3 gyroAngles; // original angles from gyro
    private static Vector3 usedAngles; // converted into Unity world coordinates
    private int userSleepTimeOut;      // original device SleepTimeOut setting
    private bool gyroAvail = false;

    void Start() {
        Input.compensateSensors = true;
        transform.position = (target.position + offset) - transform.forward * distance;
    }

    void FixedUpdate() {
        if (gyroAvail == false) {
            if (Input.gyro.attitude.eulerAngles != Vector3.zero && Time.frameCount > 30) {
                gyroAvail = true;
                initialRotation = Input.gyro.attitude;
                target.gameObject.SendMessage("StartFlight");
            }
            return; // early out
        }
        // reset origin on touch or not-yet-set origin
        if (resetViewOnTouch && (Input.touchCount > 0))
            initialRotation = Input.gyro.attitude;
        // new rotation
        currentRotation = Quaternion.Inverse(initialRotation) * Input.gyro.attitude;
        gyroAngles = currentRotation.eulerAngles;
        //usedAngles = Quaternion.Inverse (currentRotation).eulerAngles;
        usedAngles = gyroAngles;
        // reset single angle values
        if (useAngleX == false)
            usedAngles.x = 0f;
        if (useAngleY == false)
            usedAngles.y = 0f;
        if (useAngleZ == false)
            usedAngles.z = 0f;
        if (useRealHorizontalAngle)
            usedAngles.y *= -1;
        transform.localRotation = Quaternion.Euler(new Vector3(-usedAngles.x, usedAngles.y, usedAngles.z));
        transform.position = (target.position + offset) - transform.forward * distance;
    }

    public static Vector3 GetUsedAngles() {
        return usedAngles;
    }

    public static Vector3 GetGyroAngles() {
        return gyroAngles;
    }

    public void ResetView() {
        initialRotation = Input.gyro.attitude;
    }

    void OnEnable() {
        // sensor on
        Input.gyro.enabled = true;
        initialRotation = Quaternion.identity;
        gyroAvail = false;
        // store device sleep timeout setting
        userSleepTimeOut = Screen.sleepTimeout;
        // disable sleep timeout while the app is running
        Screen.sleepTimeout = SleepTimeout.NeverSleep;
    }

    void OnDisable() {
        // restore original sleep timeout
        Screen.sleepTimeout = userSleepTimeOut;
        // sensor off
        Input.gyro.enabled = false;
    }
}
Open the demo scene and attach this script to the VROneSDK prefab.
In the property editor, select Cube as Target and enter 6 for Distance.
Build the app and test it on a device, or use Unity Remote to test the behaviour in the editor.
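If you prefer wiring the component up from code rather than the Inspector, a minimal sketch might look like this (the "VROneSDK" and "Cube" object names are assumptions based on the demo scene):

// Hypothetical programmatic equivalent of the Inspector setup above.
GameObject vrOneSdk = GameObject.Find("VROneSDK");
GameObject cube = GameObject.Find("Cube");
VROneRotateAround rotate = vrOneSdk.AddComponent<VROneRotateAround>();
rotate.target = cube.transform;
rotate.distance = 6f;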

The SDK seems to run only on the supported devices; you need to get an S5 to get it working. You also need the Android Pro plugin for Unity.
We actually did get the distortion going, but the screen is cropped by about 10 pixels on either side. That can't be right.

Same here: I see the demo scene (rotating cube on a grid backdrop and three light sources) in a left/right split-screen, but there is no head tracking. No pre-distortion either.
This is on a Samsung Galaxy S3 (Android 4.3) with Unity 4.5.3f3.

Related

How to create an AudioClip in WebGL on iOS

I'm trying to create an audio clip that automatically goes up in frequency.
This is already working in the editor and on Windows, but on iOS I get the error message
Creating stream audio clips is not supported in webgl. Creating clip as non streamed.
Is there a way to do this that is also allowed on iOS?
What we have is this:
async void Update()
{
    isPlaying = audioSource.isPlaying;
    if (audioSource.isPlaying)
    {
        this.frequency += 150 * Time.deltaTime;
        this.currentWaitTime += Time.deltaTime;
        this.currentSampleTimer += Time.deltaTime;
        var clip = CreateAudioClip();
        this.audioSource.clip = clip;
    }
}

private AudioClip CreateAudioClip()
{
    var myClip = AudioClip.Create("MySinoid", this.sampleRate * 2, 1, this.sampleRate, true, OnAudioRead, OnAudioSetPosition);
    return myClip;
}

private void OnAudioRead(float[] data)
{
    int count = 0;
    while (count < data.Length)
    {
        data[count] = Mathf.Sin(2 * Mathf.PI * frequency * position / 44100);
        position++;
        count++;
    }
}

private void OnAudioSetPosition(int newPosition)
{
    position = newPosition;
}
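Since the error says streamed clips are not supported on WebGL, one workaround is to create a non-streamed clip and fill its samples up front with SetData. This is a minimal sketch under that assumption; the frequency and sampleRate fields mirror the names used in the question's code:

// Sketch: build a non-streamed sine clip instead of a streamed one.
// Passing stream: false avoids the unsupported streaming path on WebGL.
private AudioClip CreateNonStreamedClip()
{
    int lengthSamples = sampleRate * 2;
    float[] samples = new float[lengthSamples];
    for (int i = 0; i < lengthSamples; i++)
    {
        samples[i] = Mathf.Sin(2f * Mathf.PI * frequency * i / sampleRate);
    }
    AudioClip clip = AudioClip.Create("MySinoid", lengthSamples, 1, sampleRate, false);
    clip.SetData(samples, 0);
    return clip;
}

To change the frequency over time you would regenerate the buffer (or call SetData again) at some lower rate, since rebuilding the clip every frame in Update() is expensive.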

How to show zoom scaling values in the camera page (Xamarin Android)

Currently I am implementing zoom-in and zoom-out in CameraPageRenderer, but the scaling value is not shown in the UI view while zooming. How can I achieve this on Android, as in the screenshot described above?
public override bool OnTouchEvent(MotionEvent e)
{
    switch (e.Action & MotionEventActions.Mask)
    {
        case MotionEventActions.Down:
            oldDist = getFingerSpacing(e);
            break;
        case MotionEventActions.Move:
            float newDist = getFingerSpacing(e);
            if (newDist > oldDist)
            {
                // mCamera is the Camera used to take the picture; it should already exist in your custom camera
                handleZoom(true, camera);
            }
            else if (newDist < oldDist)
            {
                handleZoom(false, camera);
            }
            oldDist = newDist;
            break;
    }
    return true;
}
private void handleZoom(bool isZoomIn, global::Android.Hardware.Camera camera)
{
    global::Android.Hardware.Camera.Parameters parameters = camera.GetParameters();
    if (parameters.IsZoomSupported)
    {
        int maxZoom = parameters.MaxZoom;
        int zoom = parameters.Zoom;
        if (isZoomIn && zoom < maxZoom)
        {
            zoom++;
        }
        else if (zoom > 0)
        {
            zoom--;
        }
        parameters.Zoom = zoom;
        camera.SetParameters(parameters); // apply the new zoom level
        camera.SetPreviewTexture(surfaceTexture);
        PrepareAndStartCamera();
    }
}
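As for showing the zoom value in the UI, one option is to update a label from handleZoom. This is a hypothetical sketch: zoomLabel is assumed to be a TextView you have added to the page's layout, and activity the host Activity:

// Sketch: surface the current zoom ratio in the UI.
// Camera.Parameters.ZoomRatios reports ratios in 1/100ths (e.g. 250 == 2.5x).
private void UpdateZoomLabel(global::Android.Hardware.Camera.Parameters parameters)
{
    float ratio = parameters.ZoomRatios[parameters.Zoom] / 100f;
    activity.RunOnUiThread(() => zoomLabel.Text = string.Format("{0:0.0}x", ratio));
}

Call UpdateZoomLabel(parameters) at the end of handleZoom, after setting parameters.Zoom.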

When we try to zoom in and zoom out on Xamarin Android, several colour shades occur (Android versions 7 and 8 only)

In CameraPageRenderer, we handle zoom-in and zoom-out in our application. When we try to zoom in and zoom out, several colour shades occur (please take a look at the attached image). We are currently working around this issue by calling PrepareAndStartCamera(). Please see the code snippet below.
public override bool OnTouchEvent(MotionEvent e)
{
    switch (e.Action & MotionEventActions.Mask)
    {
        case MotionEventActions.Down:
            oldDist = getFingerSpacing(e);
            break;
        case MotionEventActions.Move:
            float newDist = getFingerSpacing(e);
            if (newDist > oldDist)
            {
                // mCamera is the Camera used to take the picture; it should already exist in your custom camera
                handleZoom(true, camera);
            }
            else if (newDist < oldDist)
            {
                handleZoom(false, camera);
            }
            oldDist = newDist;
            break;
    }
    return true;
}
private static float getFingerSpacing(MotionEvent e)
{
    if (e.PointerCount == 2)
    {
        float x = e.GetX(0) - e.GetX(1);
        float y = e.GetY(0) - e.GetY(1);
        return (float)Math.Sqrt(x * x + y * y);
    }
    return 0;
}
private void handleZoom(bool isZoomIn, global::Android.Hardware.Camera camera)
{
    //camera.StopPreview();
    //camera.Release();
    //camera = global::Android.Hardware.Camera.Open((int)cameraType);
    global::Android.Hardware.Camera.Parameters parameters = camera.GetParameters();
    if (parameters.IsZoomSupported)
    {
        int maxZoom = parameters.MaxZoom;
        int zoom = parameters.Zoom;
        if (isZoomIn && zoom < maxZoom)
        {
            zoom++;
        }
        else if (zoom > 0)
        {
            zoom--;
        }
        parameters.Zoom = zoom;
        camera.SetParameters(parameters);
        camera.SetPreviewTexture(surfaceTexture);
        //PrepareAndStartCamera();
    }
    else
    {
        Android.Util.Log.Error("lv", "zoom not supported");
    }
}
After using PrepareAndStartCamera(), zooming in or out and then taking a picture returns a normal-size picture. If we do not use the PrepareAndStartCamera() method, the colour shades appear.
private void PrepareAndStartCamera()
{
    try
    {
        camera.StopPreview();
        var display = activity.WindowManager.DefaultDisplay;
        if (display.Rotation == SurfaceOrientation.Rotation0)
        {
            camera.SetDisplayOrientation(90);
        }
        if (display.Rotation == SurfaceOrientation.Rotation270)
        {
            camera.SetDisplayOrientation(180);
        }
        camera.StartPreview();
    }
    catch (Exception ex)
    {
        // log and swallow preview errors
        Android.Util.Log.Error("lv", ex.Message);
    }
}
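A possible middle ground, if PrepareAndStartCamera() resets too much state, is to restart only the preview after applying the zoom. This is a hypothetical sketch, not something from the original thread:

// Sketch: apply the zoom and bounce just the preview, leaving display
// orientation and the rest of the camera setup untouched.
private void RestartPreviewWithZoom(global::Android.Hardware.Camera camera,
                                    global::Android.Hardware.Camera.Parameters parameters)
{
    camera.StopPreview();
    camera.SetParameters(parameters);
    camera.SetPreviewTexture(surfaceTexture);
    camera.StartPreview();
}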

Unity - Disable AR HitTest after initial placement

I am using the ARKit plugin for Unity, leveraging UnityARHitTestExample.cs.
After I place my object in the world scene, I want to stop ARKit from trying to place the object again every time I touch the screen. Can someone please help?
There are a number of ways you can achieve this, although perhaps the simplest is creating a boolean to determine whether or not your model has been placed.
First of all, you would create a boolean as noted above, e.g.:
private bool modelPlaced = false;
Then you would set this to true within the HitTestWithResultType function once your model has been placed:
bool HitTestWithResultType (ARPoint point, ARHitTestResultType resultTypes)
{
    List<ARHitTestResult> hitResults = UnityARSessionNativeInterface.GetARSessionNativeInterface ().HitTest (point, resultTypes);
    if (hitResults.Count > 0) {
        foreach (var hitResult in hitResults) {
            //1. If our model hasn't been placed, set its transform from the hit test's worldTransform
            if (!modelPlaced) {
                m_HitTransform.position = UnityARMatrixOps.GetPosition (hitResult.worldTransform);
                m_HitTransform.rotation = UnityARMatrixOps.GetRotation (hitResult.worldTransform);
                Debug.Log (string.Format ("x:{0:0.######} y:{1:0.######} z:{2:0.######}", m_HitTransform.position.x, m_HitTransform.position.y, m_HitTransform.position.z));
                //2. Prevent our model from being positioned again
                modelPlaced = true;
            }
            return true;
        }
    }
    return false;
}
And then in the Update() function:
void Update () {
    // Only run the hit test if we haven't placed our model
    if (!modelPlaced) {
        if (Input.touchCount > 0 && m_HitTransform != null)
        {
            var touch = Input.GetTouch(0);
            if (touch.phase == TouchPhase.Began || touch.phase == TouchPhase.Moved)
            {
                var screenPosition = Camera.main.ScreenToViewportPoint(touch.position);
                ARPoint point = new ARPoint {
                    x = screenPosition.x,
                    y = screenPosition.y
                };
                ARHitTestResultType[] resultTypes = {
                    ARHitTestResultType.ARHitTestResultTypeExistingPlaneUsingExtent,
                };
                foreach (ARHitTestResultType resultType in resultTypes)
                {
                    if (HitTestWithResultType (point, resultType))
                    {
                        return;
                    }
                }
            }
        }
    }
}
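If you later want the user to be able to reposition the model (say, from a UI button), a small hypothetical helper that flips the flag back is enough:

// Sketch: re-enable placement on demand; wire this to a button's onClick.
public void AllowReplacement()
{
    modelPlaced = false;
}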
Hope it helps...

How to show more than one location in BlackBerry MapField?

I am able to show one location using coordinates (longitude and latitude), but I don't know how to show more than one location in the BlackBerry MapField. If it is possible, please share how to do this.
Same way as in "How to show our own icon in BlackBerry Map?".
Pass an array of Coordinates into a custom MapField, define a bitmap for the location point, and paint it for each Coordinate in the custom MapField's paint() method.
Remember to zoom the custom MapField in/out for the best fit of all location points.
Sample implementation
Let's display Liverpool, Sheffield, and London with custom bitmap icons (a yellow circle with a black border). Code for the custom MapField:
class MultyMapField extends MapField {
    Coordinates[] mPoints = new Coordinates[0];
    Bitmap mPoint;
    Bitmap mPointsBitmap;
    XYRect mDest;
    XYRect[] mPointDest;

    public void addCoordinates(Coordinates coordinates) {
        Arrays.add(mPoints, coordinates);
        zoomToFitPoints();
        repaintPoints();
    }

    protected void zoomToFitPoints() {
        // zoom to max
        setZoom(getMaxZoom());
        // get pixels of all points
        int minLeft = getWidth();
        int minUp = getHeight();
        int maxRight = 0;
        int maxDown = 0;
        Coordinates minLeftCoordinates = null;
        Coordinates minUpCoordinates = null;
        Coordinates maxRightCoordinates = null;
        Coordinates maxDownCoordinates = null;
        for (int i = 0; i < mPoints.length; i++) {
            XYPoint point = new XYPoint();
            convertWorldToField(mPoints[i], point);
            if (point.x <= minLeft) {
                minLeft = point.x;
                minLeftCoordinates = mPoints[i];
            }
            if (point.x >= maxRight) {
                maxRight = point.x;
                maxRightCoordinates = mPoints[i];
            }
            if (point.y <= minUp) {
                minUp = point.y;
                minUpCoordinates = mPoints[i];
            }
            if (point.y >= maxDown) {
                maxDown = point.y;
                maxDownCoordinates = mPoints[i];
            }
        }
        double moveToLat = maxDownCoordinates.getLatitude()
                + (minUpCoordinates.getLatitude() - maxDownCoordinates
                        .getLatitude()) / 2;
        double moveToLong = minLeftCoordinates.getLongitude()
                + (maxRightCoordinates.getLongitude() - minLeftCoordinates
                        .getLongitude()) / 2;
        Coordinates moveTo = new Coordinates(moveToLat, moveToLong, 0);
        moveTo(moveTo);
        // zoom out until the leftmost/topmost and rightmost/bottommost points fit, + 1
        int zoom = getZoom();
        boolean outOfBounds = false;
        while (!outOfBounds && zoom > getMinZoom()) {
            zoom--;
            setZoom(zoom);
            XYPoint point = new XYPoint();
            try {
                convertWorldToField(minLeftCoordinates, point);
                if (point.x < 0)
                    outOfBounds = true;
                convertWorldToField(minUpCoordinates, point);
                if (point.y < 0)
                    outOfBounds = true;
                convertWorldToField(maxRightCoordinates, point);
                if (point.x > getWidth())
                    outOfBounds = true;
                convertWorldToField(maxDownCoordinates, point);
                if (point.y > getHeight())
                    outOfBounds = true;
            } catch (IllegalArgumentException ex) {
                outOfBounds = true;
            }
        }
        zoom++;
        setZoom(zoom);
    }

    protected void repaintPoints() {
        mPointsBitmap = new Bitmap(getWidth(), getHeight());
        mPointsBitmap.createAlpha(Bitmap.ALPHA_BITDEPTH_8BPP);
        mDest = new XYRect(0, 0, mPointsBitmap.getWidth(), mPointsBitmap.getHeight());
        Graphics g = new Graphics(mPointsBitmap);
        if (null != mPoint) {
            mPointDest = new XYRect[mPoints.length];
            for (int i = 0; i < mPoints.length; i++) {
                if (null == mPointDest[i]) {
                    XYPoint fieldOut = new XYPoint();
                    convertWorldToField(mPoints[i], fieldOut);
                    int imgW = mPoint.getWidth();
                    int imgH = mPoint.getHeight();
                    mPointDest[i] = new XYRect(fieldOut.x - imgW / 2,
                            fieldOut.y - imgH, imgW, imgH);
                }
                g.drawBitmap(mPointDest[i], mPoint, 0, 0);
            }
        }
    }

    protected void paint(Graphics graphics) {
        super.paint(graphics);
        if (null != mPointsBitmap) {
            graphics.setGlobalAlpha(100);
            graphics.drawBitmap(mDest, mPointsBitmap, 0, 0);
        }
    }
}
Sample of use:
class Scr extends MainScreen {
    // test coordinates:
    // London: 51.507778, -0.128056
    Coordinates mLondonC = new Coordinates(51.507778, -0.128056, 0);
    // Liverpool: 53.4, -2.983333
    Coordinates mLiverpoolC = new Coordinates(53.4, -2.983333, 0);
    // Sheffield: 53.385833, -1.469444
    Coordinates mSheffieldC = new Coordinates(53.385833, -1.469444, 0);
    MultyMapField mMultyMapField;

    public Scr() {
        add(mMultyMapField = new MultyMapField());
        mMultyMapField.mPoint = createPointBitmap();
    }

    protected void onUiEngineAttached(boolean attached) {
        super.onUiEngineAttached(attached);
        if (attached) {
            mMultyMapField.addCoordinates(mLondonC);
            mMultyMapField.addCoordinates(mLiverpoolC);
            mMultyMapField.addCoordinates(mSheffieldC);
        }
    }

    private Bitmap createPointBitmap() {
        int r = 10;
        Bitmap result = new Bitmap(2 * r, 2 * r);
        result.createAlpha(Bitmap.ALPHA_BITDEPTH_8BPP);
        Graphics g = new Graphics(result);
        g.setColor(Color.BLACK);
        g.fillEllipse(r, r, 2 * r, r, r, 2 * r, 0, 360);
        g.setColor(Color.YELLOW);
        g.fillEllipse(r, r, r + (r - 2), r, r, r + (r - 2), 0, 360);
        return result;
    }
}
A valid and probably simpler option would be to use this open-source library by Monits: https://github.com/Monits/blackberry-commons
It contains several common pieces of functionality found in BB applications and is compatible with BB 4.6.1+.
Among other things, it provides a map field with the ability to add and display markers on top of it, with and without focus, and optionally "open" them. This makes for an API much more like those found on other smartphones such as iPhone or Android.
The documentation is pretty good, and the wiki even has a tutorial on how to achieve this: https://github.com/Monits/blackberry-commons/wiki/CustomMap
