This workshop will show you how to:

Final Result

To complete this workshop you will need:

Languages used:

Additional resources

multiple sensors on a mobile device. GPS location, for example, can be useful for triggering AR experiences when the user is in a specific place. Note that newer versions of Android and iOS require explicit user permissions to access various sensors.

Create a new Unity project using Unity Hub and select the AR Mobile template.

Once the project is ready, switch the platform to Android from the File -> Build Profiles.
Create a new empty scene named GPSNative.

Create a new MonoBehaviour Script and name it GPSManager

using UnityEngine;
using System.Collections;
using UnityEngine.Android;
using TMPro;

/// <summary>
/// Starts the device location service (requesting the Android runtime
/// permission first) and prints the latest GPS fix to the console and to an
/// optional TextMeshProUGUI label once per second.
/// </summary>
public class GPSManager : MonoBehaviour
{
    [Header("UI Output")]
    public TextMeshProUGUI gpsText; // Link TextMeshProUGUI in Inspector

    // Set to true once the location service is running; cleared in OnDisable
    // so the update coroutine stops cleanly.
    private bool isRunning = false;

    void Start()
    {
        StartCoroutine(StartLocationService());
    }

    /// <summary>
    /// Requests fine-location permission (Android), waits for the location
    /// service to initialise, then launches the periodic read-out coroutine.
    /// </summary>
    IEnumerator StartLocationService()
    {
        // Request permission for Android. Rather than a fixed 2-second wait
        // (the user may take longer to answer the dialog), poll until the
        // permission is granted or we give up after 30 seconds.
        if (!Permission.HasUserAuthorizedPermission(Permission.FineLocation))
        {
            Permission.RequestUserPermission(Permission.FineLocation);

            float permissionWait = 30f;
            while (!Permission.HasUserAuthorizedPermission(Permission.FineLocation) && permissionWait > 0f)
            {
                yield return new WaitForSeconds(0.5f);
                permissionWait -= 0.5f;
            }
        }

        // Check if location service is enabled in the device settings.
        if (!Input.location.isEnabledByUser)
        {
            Debug.Log("Location services not enabled by user.");
            yield break;
        }

        Input.location.Start(); // Start the GPS; this only works on devices with a GPS sensor

        // Wait (up to 30 s) for the service to leave the Initializing state.
        int maxWait = 30;
        while (Input.location.status == LocationServiceStatus.Initializing && maxWait > 0)
        {
            yield return new WaitForSeconds(1);
            maxWait--;
        }

        // Check the status rather than the counter: if the service started
        // running on the very last iteration, maxWait is 0 but initialisation
        // actually succeeded, so it must not be reported as a timeout.
        if (Input.location.status == LocationServiceStatus.Initializing)
        {
            Debug.Log("Timed out");
            yield break;
        }

        if (Input.location.status == LocationServiceStatus.Failed)
        {
            Debug.Log("Unable to determine device location");
            yield break;
        }

        isRunning = true;
        StartCoroutine(UpdateGPSData());
    }

    /// <summary>
    /// Reads the last GPS fix once per second and writes it to the console
    /// and to the linked TextMeshProUGUI label (if assigned).
    /// </summary>
    IEnumerator UpdateGPSData()
    {
        while (isRunning)
        {
            var data = Input.location.lastData;

            float latitude = data.latitude;
            float longitude = data.longitude;
            float altitude = data.altitude;
            float hAcc = data.horizontalAccuracy;
            float vAcc = data.verticalAccuracy;
            double timestamp = data.timestamp;

            // F sets the number of decimals in the formatted string.
            string info = $"Lat: {latitude:F6}\n" +
                          $"Lon: {longitude:F6}\n" +
                          $"Alt: {altitude:F2} m\n" +
                          $"H-Acc: {hAcc:F2} m\n" +
                          $"V-Acc: {vAcc:F2} m\n" +
                          $"Time: {timestamp:F0} s";

            Debug.Log(info);

            if (gpsText != null)
                gpsText.text = info;

            yield return new WaitForSeconds(1f); // update every second
        }
    }

    void OnDisable()
    {
        // Stop the read-out loop and release the location service.
        isRunning = false;
        Input.location.Stop();
    }
}

Attach the script to the GPSData GameObject and, in the Inspector window, assign the TextMeshProUGUI GameObject to the GPS Text field.

Save the Scene.

To test the script we need to build the project, because the location service is only available on physical Android or iOS devices with a GPS sensor.

GPS Location

The on board GPS sensor can be used with ARFoundation to create location-based Augmented Reality experiences.
However, using specialised packages such as ARDK provides a more straightforward development experience.

Setting up ARDK

Install the ARDK package (more details can be found in the documentation).

Unity will display a Project Validation window with warnings and errors that can be automatically fixed. Typically, there will be one warning asking you to enable XR Plug-In Management. Click Edit to open the Plug-In settings automatically, or navigate to Project Settings -> XR Plug-In Management manually.

From XR Plug-in Management menu, select Niantic Lightship SDK + Google ARCore.

In Player Settings:

Control that there aren't any issues in the Project Validation (it can be found in Edit -> Project Settings or Lightship -> Project Validation).

Create a new scene

Create a new Empty scene

Finally, create a new MonoBehaviour script named `AddWPSObjects`.

using UnityEngine;
using System;
using Niantic.Lightship.AR.WorldPositioning;

/// <summary>
/// Places a cube at a fixed geographic coordinate using Niantic WPS, and a
/// sphere at the position predicted from the raw GPS fix + compass heading,
/// so the two positioning methods can be compared side by side.
/// </summary>
public class AddWPSObjects : MonoBehaviour
{
    [SerializeField] ARWorldPositioningObjectHelper positioningHelper;
    [SerializeField] Camera trackingCamera;

    // Replace the coordinates here with your location
    double latitude = 51.538418391071;
    double longitude = -0.01290085376900409;
    double altitude = 115.0; // It is possible to change the reference altitude from the ARWorldPositioningObjectHelper attached on the XR Origin (Mobile AR)

    public GameObject Cube; // to be set in the Inspector
    public GameObject Sphere;

    // Sphere driven by raw GPS + compass; created lazily in Update().
    private GameObject gpsSphere = null;

    void Start()
    {
        // Input.compass.trueHeading only returns valid values while the
        // compass is enabled AND location services are running, so start both.
        Input.compass.enabled = true;
        Input.location.Start();

        // Instantiate a cube (scale it up for visibility if needed), then
        // place it at the target coordinate using Niantic WPS.
        GameObject cubeClone = Instantiate(Cube);
        positioningHelper.AddOrUpdateObject(cubeClone, latitude, longitude, altitude, Quaternion.identity);
    }

    void Update()
    {
        // Create the GPS-driven sphere once.
        if (gpsSphere == null)
        {
            gpsSphere = Instantiate(Sphere);
        }

        if (Input.location.isEnabledByUser)
        {
            double deviceLatitude = Input.location.lastData.latitude;
            double deviceLongitude = Input.location.lastData.longitude;

            Vector2 eastNorthOffsetMetres = EastNorthOffset(latitude, longitude, deviceLatitude, deviceLongitude);

            // Rotate the east/north offset into the AR tracking frame about
            // the *vertical* (Y) axis. The previous code rotated about Z
            // (roll), which tilts the horizontal plane and mixes the altitude
            // into the east component. -trueHeading converts the compass
            // bearing (clockwise from north) into tracking-frame yaw.
            // NOTE(review): the Y component uses the absolute reference
            // altitude (115 m) as a vertical offset from the camera — confirm
            // this is intended (the Lightship sample uses 0 here).
            Vector3 trackingOffsetMetres = Quaternion.Euler(0f, -Input.compass.trueHeading, 0f) * new Vector3(eastNorthOffsetMetres[0], (float)altitude, eastNorthOffsetMetres[1]);
            Vector3 trackingMetres = trackingCamera.transform.localPosition + trackingOffsetMetres;
            gpsSphere.transform.localPosition = trackingMetres;
        }
    }

    /// <summary>
    /// Approximate east/north distance in metres from point B to point A,
    /// using an equirectangular approximation (valid over short distances).
    /// </summary>
    public Vector2 EastNorthOffset(double latitudeDegreesA, double longitudeDegreesA, double latitudeDegreesB, double longitudeDegreesB)
    {
        const double DEGREES_TO_METRES = 111139.0; // metres per degree of latitude

        // Longitude degrees shrink with the cosine of latitude; use the mean
        // latitude of the two points for the correction.
        float lonDifferenceMetres = (float)(Math.Cos((latitudeDegreesA + latitudeDegreesB) * 0.5 * Math.PI / 180.0) * (longitudeDegreesA - longitudeDegreesB) * DEGREES_TO_METRES);
        float latDifferenceMetres = (float)((latitudeDegreesA - latitudeDegreesB) * DEGREES_TO_METRES);
        return new Vector2(lonDifferenceMetres, latDifferenceMetres);
    }
}

In the Hierarchy, create a new empty GameObject by right-clicking and selecting Create Empty. Name it WPSObjects and attach the script you just created. Fill the public field:

Build and run the app, then test it outdoors. The device's GPS usually acquires position faster, but WPS generally offers greater precision.

In order to add multiple elements at runtime, we can modify the script to import an external JSON object. First, we need to generate a GeoJSON file with the locations of the sensors or other elements that we want to visualize in our app. In this example, we are going to use the BatSensors located in the Queen Elizabeth Olympic Park.

If the GeoJSON file is not readily available, it is possible to create a new one quite easily using geojson.io. Through this online service, it is also possible to add additional properties to the locations that can be used in our application.

Bat sensors on geojson.io

Create a new Prefab GameObject to be used as the visualisation of the sensor. It could be a primitive such as a sphere (scale 2 2 2), or an actual 3D model. Inside the prefab, also add a TextMeshPro - Text with width = 0.2, height = 0.3 and Font Size = 0.2; place it on top of the primitive object and name it Info (this name is also used in the script below).

In order to provide a feedback to the user on the position of the closest sensor we are going to add a simple UI:

Create a new MonoBehaviour script named AddWPSObjectsList that is going to read the JSON file and instantiate a prefab for each location.

using UnityEngine;
using System.Collections.Generic;
using Niantic.Lightship.AR.WorldPositioning;
using TMPro;

/// <summary>
/// Reads a GeoJSON-style TextAsset, instantiates a prefab at every feature's
/// coordinates via Niantic WPS, and continuously reports the distance to the
/// sensor closest to the camera on a UI label.
/// </summary>
public class AddWPSObjectsList : MonoBehaviour
{
    [SerializeField] ARWorldPositioningObjectHelper positioningHelper;
    [SerializeField] GameObject prefab; // Assign your prefab in the Inspector
    [SerializeField] TextAsset jsonFilePath; // JSON TextAsset (not a path) — assign in the Inspector
    private List<GameObject> sensors = new List<GameObject>();

    public Camera mainCamera;
    public TextMeshProUGUI distanceText;
    private Vector3 previousCameraPosition;

    void Start()
    {
        // Initialize the previous camera position
        previousCameraPosition = mainCamera.transform.position;

        string jsonText = jsonFilePath.text;
        var json = JsonUtility.FromJson<batSensors.Root>(jsonText);

        foreach (var feature in json.features)
        {
            // GeoJSON stores coordinates as [longitude, latitude].
            double longitude = feature.geometry.coordinates[0];
            double latitude = feature.geometry.coordinates[1];
            double altitude = feature.properties.altitude;

            // Instantiate the prefab and place it at the feature's location.
            GameObject obj = Instantiate(prefab);
            obj.name = feature.properties.Name;
            positioningHelper.AddOrUpdateObject(obj, latitude, longitude, altitude, Quaternion.identity);

            Debug.Log("add " + obj.name);

            // The prefab contains a child named "Info" with a TextMeshPro label.
            obj.transform.Find("Info").GetComponent<TextMeshPro>().text = feature.properties.Name + "\n" + feature.properties.Habitat;

            sensors.Add(obj);
        }
    }

    void LateUpdate()
    {
        // Only recompute the closest sensor when the camera has moved.
        if (mainCamera.transform.position != previousCameraPosition)
        {
            // Update the previous camera position
            previousCameraPosition = mainCamera.transform.position;
            // Find the closest object and display the distance
            FindAndDisplayClosestObject();
        }
    }

    /// <summary>
    /// Finds the instantiated sensor closest to the camera and shows its name
    /// and distance on the UI label.
    /// </summary>
    void FindAndDisplayClosestObject()
    {
        GameObject closestObject = null;
        float closestDistance = Mathf.Infinity;

        // Iterate through all instantiated sensor objects.
        foreach (GameObject obj in sensors)
        {
            float distance = Vector3.Distance(mainCamera.transform.position, obj.transform.position);

            if (distance < closestDistance)
            {
                closestDistance = distance;
                closestObject = obj;
            }
        }

        if (closestObject != null)
        {
            // Display the distance in metres with two decimals. (The name is a
            // string, so it takes no numeric format specifier.)
            distanceText.text = $"Closest Sensor: {closestObject.name} | Distance: {closestDistance:F2} m";
        }
    }
}

/// <summary>
/// Serializable data-transfer classes mirroring the GeoJSON structure of the
/// bat-sensor file, for use with JsonUtility.FromJson&lt;batSensors.Root&gt;.
/// </summary>
public class batSensors
{
    /// <summary>Top-level GeoJSON object: a FeatureCollection.</summary>
    [System.Serializable]
    public class Root
    {
        public string type;
        public List<Feature> features;
    }

    /// <summary>A single GeoJSON feature: its properties plus its geometry.</summary>
    [System.Serializable]
    public class Feature
    {
        public string type;
        public Properties properties;
        public Geometry geometry;
    }

    /// <summary>Point geometry; coordinates are [longitude, latitude].</summary>
    [System.Serializable]
    public class Geometry
    {
        public string type;
        public List<double> coordinates;
    }

    /// <summary>Custom per-feature properties added in geojson.io.</summary>
    [System.Serializable]
    public class Properties
    {
        public string Name;
        public string Habitat;
        public double altitude;
    }
}

We can add the script to the same WPSObjects GameObject, paying attention to disable the old one (AddWPSObjects) and to fill the public variables

Public variable for the AddWPSObjectsList component

Build and test the app outdoors (it is possible to use it indoors, but the quality of the GPS signal might not be optimal).

Valve Steam Audio is a free, open-source spatial audio toolkit designed to create highly immersive soundscapes in VR and AR environments. It supports advanced features such as Sound Occlusion, to simulate how objects block or absorb sound, making audio behave realistically when sources are behind obstacles; Reflections and Reverb, how sound bounces off surfaces; HRTF-based Spatialization, Head-Related Transfer Functions to deliver accurate 3D positioning of sounds for headphones; Dynamic Geometry Handling, updating audio propagation in real time as environments change. Cross-Platform Integration works across Windows, Linux, and macOS.

Build and Run the application