first commit
This commit is contained in:
565
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToFarPlane.cs
Normal file
565
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToFarPlane.cs
Normal file
@@ -0,0 +1,565 @@
|
||||
using System;
|
||||
using UnityEngine;
|
||||
using UnityEngine.Rendering;
|
||||
using UnityEngine.UIElements;
|
||||
using UnityEngine.Video;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// Displays the video on the camera's far plane.
/// </summary>
// Note:
// - This will not work if the camera ClearFlag is set to Skybox because of how it is rendered.
//   The skybox is rendered at position 2000.5, between the Opaque and Transparent objects,
//   with a sphere scaled to the camera far plane, meaning that it will only render where
//   nothing has been written to the depth buffer. That is where the issue arises: we are
//   not writing to the depth buffer when rendering the video, so the skybox will think nothing
//   is there and draw over the top.
|
||||
|
||||
[AddComponentMenu("AVPro Video/Apply To Far Plane", 300)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public sealed class ApplyToFarPlane : ApplyToBase
|
||||
{
|
||||
[Header("Shader Options")]

[Tooltip("The color override to apply to the material")]
[SerializeField] Color _mainColor;

// Colour tint forwarded to the material's "_Color" property.
public Color MainColor
{
    get { return _mainColor; }
    set { if (!_material) CreateMaterial(); _material.SetColor("_Color", value); _mainColor = value; }
}

[Tooltip("The Main Texture that is being written to by the Media Player")]
[SerializeField] Texture _texture;

// Video texture forwarded to the material's "_MainTex" property.
public Texture Texture
{
    get { return _texture; }
    set { if (!_material) CreateMaterial(); _material.SetTexture("_MainTex", value); _texture = value; }
}

[Tooltip("The Chroma Texture to apply to the material")]
[SerializeField] Texture _chroma;

// Chroma plane texture forwarded to the material's "_ChromaTex" property.
public Texture Chroma
{
    get { return _chroma; }
    set { if (!_material) CreateMaterial(); _material.SetTexture("_ChromaTex", value); _chroma = value; }
}

[Tooltip("Alpha of the far plane that is drawn")]
[SerializeField] float _alpha = 1f;

// Opacity forwarded to the material's "_Alpha" property.
public float Alpha
{
    get { return _alpha; }
    set { if (!_material) CreateMaterial(); _material.SetFloat("_Alpha", value); _alpha = value; }
}

[Tooltip("The Camera far plane to draw to, if left empty main cam will be selected")]
[SerializeField] Camera _camera;

// Target camera; its instance ID is passed to the shader ("_TargetCamID") so the
// effect is only drawn for this camera.
// NOTE(review): the setter dereferences 'value' - assigning null will throw; confirm callers never pass null.
public Camera Camera
{
    get { return _camera; }
    set { _camera = value; if (!_material) CreateMaterial(); _material.SetFloat("_TargetCamID", value.GetInstanceID());
    }
}

[Tooltip("The aspect ratio of the video shown, not used when a custom scaling is set")]
[SerializeField] VideoAspectRatio _aspectRatio = VideoAspectRatio.Stretch;

// Aspect mode forwarded to the material's "_Aspect" property (as an int).
public VideoAspectRatio VideoAspectRatio
{
    get { return _aspectRatio; }
    set { if (!_material) CreateMaterial(); _material.SetFloat("_Aspect", (int)value); _aspectRatio = value; }
}

[Tooltip("How much to offset the image by")]
public Vector2 _drawOffset;

// Image offset forwarded to the material's "_DrawOffset" property.
public Vector2 DrawOffset
{
    get { return _drawOffset; }
    set { if (!_material) CreateMaterial(); _material.SetVector("_DrawOffset", value); _drawOffset = value; }
}

[Tooltip("Will replace the Aspect Ratio with custom scaling for the video, when both values are non-zero")]
public Vector2 _customScaling;

// Custom scale forwarded to the material's "_CustomScale" property; overrides the aspect mode when non-zero.
public Vector2 CustomScaling
{
    get { return _customScaling; }
    set { if (!_material) CreateMaterial(); _material.SetVector("_CustomScale", value); _customScaling = value; }
}

// The object that is kept active in front of the camera so the far-plane shader always runs.
private GameObject _renderedObject;
// True when OnEnable switched the camera's clear flags away from Skybox (restored in OnDisable).
private bool _changedSkybox;
|
||||
|
||||
/// <summary>
/// Falls back to the scene's main camera when none was assigned in the
/// inspector, and tells an already-created material which camera to target.
/// </summary>
public void Awake()
{
    // If no camera was assigned, use the main camera in the scene.
    if (!_camera)
        _camera = Camera.main;
    // Camera.main can be null (no camera tagged "MainCamera"), so guard the
    // dereference; the original code would throw a NullReferenceException here.
    if (_material && _camera)
        _material.SetFloat("_TargetCamID", _camera.GetInstanceID());
}
|
||||
|
||||
/// <summary>
/// Undoes the skybox override (if OnEnable made one) and hides the helper
/// object while this component is disabled.
/// </summary>
protected override void OnDisable()
{
    // Restore the camera background to skybox if we changed it on enable.
    if (_camera && _changedSkybox)
    {
        _camera.clearFlags = CameraClearFlags.Skybox;
    }

    base.OnDisable();

    // Deactivate (but keep) the helper object so OnEnable can re-use it.
    if (_renderedObject)
    {
        _renderedObject.SetActive(false);
    }
}
|
||||
|
||||
// Ensure the helper object we created in OnEnable is destroyed with this component.
private void OnDestroy()
{
    // ensure to destroy the created object
    Destroy(_renderedObject);
}
|
||||
|
||||
/// <summary>
/// Keeps the helper object in front of the camera every frame so its renderer
/// is never culled, guaranteeing the far-plane shader keeps running.
/// </summary>
public void Update()
{
    // The camera or helper object may have been destroyed externally; the
    // original code would throw a NullReferenceException every frame.
    if (!_renderedObject || !_camera)
        return;

    // NOTE(review): the near-clip offset is applied on the world Z axis rather than
    // along the camera's forward vector - confirm this is intentional.
    _renderedObject.transform.position = new Vector3(0, 0, _camera.nearClipPlane) + _camera.transform.position + _camera.transform.forward;
    _renderedObject.transform.rotation = _camera.transform.rotation;
}
|
||||
|
||||
|
||||
/// <summary>
/// Creates a unit quad mesh used as the rendering surface for the far-plane shader.
/// </summary>
/// <returns>The quad mesh that was created</returns>
public Mesh CreateQuadMesh()
{
    const int width = 1;
    const int height = 1;

    // All four corners share the same normal, facing back towards the camera.
    Vector3 normal = -_camera.transform.forward;

    Mesh quad = new Mesh
    {
        // Corner positions (bottom-left, bottom-right, top-left, top-right)
        vertices = new Vector3[]
        {
            new Vector3(0, 0, 0),
            new Vector3(width, 0, 0),
            new Vector3(0, height, 0),
            new Vector3(width, height, 0)
        },
        // Two triangles covering the quad
        triangles = new int[] { 0, 2, 1, 2, 3, 1 },
        normals = new Vector3[] { normal, normal, normal, normal },
        // Full 0..1 texture mapping
        uv = new Vector2[]
        {
            new Vector2(0, 0),
            new Vector2(1, 0),
            new Vector2(0, 1),
            new Vector2(1, 1)
        }
    };

    return quad;
}
|
||||
|
||||
/// <summary>
/// Creates the internal material using the dedicated far-plane shader and,
/// if the helper object already exists, points its camera applier at the
/// new material.
/// </summary>
public void CreateMaterial()
{
    Shader shader = Shader.Find("AVProVideo/Background/AVProVideo-ApplyToFarPlane");
    // Surface a clear diagnostic instead of silently constructing a broken
    // material when the shader was stripped from the build.
    if (shader == null)
    {
        Debug.LogError("[AVProVideo] Shader 'AVProVideo/Background/AVProVideo-ApplyToFarPlane' not found - make sure it is included in the build");
    }
    _material = new Material(shader);

    if (_renderedObject)
    {
        // Reuse an existing applier or add one, then hand it the new material
        // (the original duplicated the assignment across both branches).
        if (!_renderedObject.TryGetComponent(out ApplyToFarPlane_CameraApplier applier))
        {
            applier = _renderedObject.AddComponent<ApplyToFarPlane_CameraApplier>();
        }
        applier.Material = _material;
    }
}
|
||||
|
||||
|
||||
/*
    Below this point is basically the same as ApplyToMaterial, with a few unnecessary
    functions removed.
    This is because, other than the quad with its fancy shader, this component is just
    taking the video into a material and then applying it.
*/
|
||||
|
||||
[Header("Display")]

[Tooltip("Default texture to display when the video texture is preparing")]
[SerializeField]
Texture2D _defaultTexture = null;

// Fallback texture shown until the media produces frames.
public Texture2D DefaultTexture
{
    get { return _defaultTexture; }
    set
    {
        if (_defaultTexture != value)
        {
            _defaultTexture = value;
            _isDirty = true;
        }
    }
}

[Tooltip("The Material to use when rendering the video, if not set will use internal " +
"\n Note: Material must use the AVProVideo/Background/AVProVideo-ApplyToFarPlane shader")]
// This material must use the AVProVideo/Background/AVProVideo-ApplyToFarPlane shader,
// otherwise it will not render correctly.
[SerializeField] Material _material = null;

[SerializeField]
string _texturePropertyName = Helper.UnityBaseTextureName;

// Name of the material texture property the video is written to; changing it
// rebuilds the cached shader property IDs for both eyes.
public string TexturePropertyName
{
    get { return _texturePropertyName; }
    set
    {
        if (_texturePropertyName != value)
        {
            _texturePropertyName = value;
            _propTexture = new LazyShaderProperty(_texturePropertyName);
            _propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
            _isDirty = true;
        }
    }
}

[SerializeField]
Vector2 _offset = Vector2.zero;

// Texture coordinate offset applied when mapping the video.
public Vector2 Offset
{
    get { return _offset; }
    set
    {
        if (_offset != value)
        {
            _offset = value;
            _isDirty = true;
        }
    }
}

[SerializeField]
Vector2 _scale = Vector2.one;

// Texture coordinate scale applied when mapping the video.
public Vector2 Scale
{
    get { return _scale; }
    set
    {
        if (_scale != value)
        {
            _scale = value;
            _isDirty = true;
        }
    }
}

// Last texture pushed to the material (used to detect texture changes in Apply()).
private Texture _lastTextureApplied;
// Cached shader property for the main (left-eye) texture.
private LazyShaderProperty _propTexture;
// Cached shader property for the right-eye texture ("<name>_R").
private LazyShaderProperty _propTexture_R;

// Material state captured by SaveProperties() so RestoreProperties() can undo our changes.
private Texture _originalTexture;
private Vector2 _originalScale = Vector2.one;
private Vector2 _originalOffset = Vector2.zero;

// Video dimensions as reported by the media info.
private Vector2 ImageSize
{
    get { return new Vector2(_media.Info.GetVideoWidth(), _media.Info.GetVideoHeight()); }
}
|
||||
|
||||
/// <summary>
/// Creates (or re-activates) the helper object that keeps the far-plane
/// shader rendering, and switches the camera away from skybox clearing,
/// which the effect cannot draw over.
/// </summary>
protected override void OnEnable()
{
    base.OnEnable();
    if (!_material)
    {
        CreateMaterial();
    }
    // If the rendered object already exists just enable it, otherwise
    // create a new one and set it up to be used correctly.
    if (_renderedObject)
    {
        _renderedObject.SetActive(true);
    }
    else
    {
        _renderedObject = new GameObject("Display Background Object");
        var rend = _renderedObject.AddComponent<MeshRenderer>();
        var filt = _renderedObject.AddComponent<MeshFilter>();
        filt.sharedMesh = CreateQuadMesh();
        // The applier reports the currently rendering camera to the shader.
        var applier = _renderedObject.AddComponent<ApplyToFarPlane_CameraApplier>();
        if (_camera)
            _material.SetFloat("_TargetCamID", _camera.GetInstanceID());
        applier.Material = _material;
        rend.sharedMaterial = _material;
    }

    // ApplyToFarPlane does not work if the background clear mode is set to skybox,
    // so if it is then change it to color (restored in OnDisable).
    // Guard _camera here: the original only guarded it above, and would throw a
    // NullReferenceException when no main camera exists.
    if (_camera && _camera.clearFlags == CameraClearFlags.Skybox)
    {
        Debug.LogWarning("[AVProVideo] Warning: ApplyToFarPlane does not work with the background clear mode set to skybox, automatically changed to color, this will be undone when the object is disabled");
        _changedSkybox = true;
        _camera.clearFlags = CameraClearFlags.Color;
    }
}
|
||||
|
||||
// We do a LateUpdate() to allow for any changes in the texture that may have happened in Update()
private void LateUpdate()
{
    Apply();
}
|
||||
|
||||
/// <summary>
/// Called from the editor component so material properties are pushed only
/// when a value actually changes, rather than being updated every frame.
/// </summary>
/// <param name="target">Index of the material property that was affected</param>
public void UpdateMaterialProperties(int target)
{
    if (_material == null)
        CreateMaterial();

    if (target == 0)
    {
        _material.SetColor("_Color", _mainColor);
    }
    else if (target == 3)
    {
        _material.SetTexture("_MainTex", _texture);
    }
    else if (target == 4)
    {
        _material.SetTexture("_ChromaTex", _chroma);
    }
    else if (target == 5)
    {
        _material.SetFloat("_Alpha", _alpha);
    }
    else if (target == 7)
    {
        _material.SetFloat("_Aspect", (int)_aspectRatio);
    }
    else if (target == 8)
    {
        _material.SetVector("_DrawOffset", _drawOffset);
    }
    else if (target == 9)
    {
        _material.SetVector("_CustomScale", _customScaling);
    }
    // Any other index is intentionally ignored (matches the original switch default).
}
|
||||
|
||||
// Pushes the current video frame texture(s) from the MediaPlayer into the
// material, handling multi-plane (e.g. luma/chroma) and multiview stereo
// sources; falls back to the default texture when no media texture exists.
public override void Apply()
{
    bool applied = false;

    if (_media != null && _media.TextureProducer != null)
    {
        Texture resamplerTex = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[0];
        Texture texture = _media.UseResampler ? resamplerTex : _media.TextureProducer.GetTexture(0);
        if (texture != null)
        {
            // Check for changing texture
            if (texture != _lastTextureApplied)
            {
                _isDirty = true;
            }

            if (_isDirty)
            {
                bool requiresVerticalFlip = _media.TextureProducer.RequiresVerticalFlip();
                StereoPacking stereoPacking = _media.TextureProducer.GetTextureStereoPacking();
                bool isMultiview = stereoPacking == StereoPacking.MultiviewLeftPrimary || stereoPacking == StereoPacking.MultiviewRightPrimary;

                int planeCount = 1;
                if (!_media.UseResampler)
                {
                    // We're not using the resampler so the number of planes will be the texture count
                    planeCount = _media.TextureProducer.GetTextureCount();
                    if (isMultiview)
                    {
                        // Unless we're using two texture stereo in which case it'll be half the texture count
                        planeCount /= 2;
                    }
                }

                // Map each plane for the primary (left) eye
                for (int plane = 0; plane < planeCount; ++plane)
                {
                    Texture resamplerTexPlane = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[plane];
                    texture = _media.UseResampler ? resamplerTexPlane : _media.TextureProducer.GetTexture(plane);
                    if (texture != null)
                    {
                        ApplyMapping(texture, requiresVerticalFlip, plane);
                    }
                }

                // Handle the right eye if we're using two texture stereo packing
                if (isMultiview)
                {
                    for (int plane = 0; plane < planeCount; ++plane)
                    {
                        texture = _media.TextureProducer.GetTexture(planeCount + plane);
                        if (texture != null)
                        {
                            ApplyMapping(texture, requiresVerticalFlip, plane, Eye.Right);
                        }
                    }
                }
            }

            applied = true;
        }
    }

    // If the media didn't apply a texture, then try to apply the default texture
    if (!applied)
    {
        if (_defaultTexture != _lastTextureApplied)
        {
            _isDirty = true;
        }
        if (_isDirty)
        {
#if UNITY_PLATFORM_SUPPORTS_YPCBCR
            if (_material != null && _material.HasProperty(VideoRender.PropUseYpCbCr.Id))
            {
                _material.DisableKeyword(VideoRender.Keyword_UseYpCbCr);
            }
#endif
            ApplyMapping(_defaultTexture, false);
        }
    }
}
|
||||
|
||||
// Identifies which eye a texture plane is mapped for during stereo playback.
enum Eye
{
    Left,
    Right
}
|
||||
|
||||
/// <summary>
/// Binds a single texture plane to the material and applies the configured
/// scale/offset, flipping vertically via scale/offset when required.
/// </summary>
/// <param name="texture">Texture to bind</param>
/// <param name="requiresYFlip">True when the source texture is stored upside down</param>
/// <param name="plane">Texture plane index (0 = main/luma, 1 = chroma)</param>
/// <param name="eye">Which eye we're mapping, defaults to the left eye</param>
private void ApplyMapping(Texture texture, bool requiresYFlip, int plane = 0, Eye eye = Eye.Left)
{
    if (_material != null)
    {
        _isDirty = false;

        if (plane == 0)
        {
            int propTextureId = _propTexture.Id;
            if (eye == Eye.Left)
            {
                VideoRender.SetupMaterialForMedia(_material, _media, propTextureId, texture, texture == _defaultTexture);
                _lastTextureApplied = texture;
#if !UNITY_EDITOR && UNITY_ANDROID
                if (texture == _defaultTexture)
                {
                    _material.EnableKeyword("USING_DEFAULT_TEXTURE");
                }
                else
                {
                    _material.DisableKeyword("USING_DEFAULT_TEXTURE");
                }
#endif
            }
            else
            {
                // Right eye goes to the "<name>_R" shader property
                propTextureId = _propTexture_R.Id;
                _material.SetTexture(propTextureId, texture);
            }

            if (texture != null)
            {
                if (requiresYFlip)
                {
                    if (_material.HasProperty(propTextureId)) // editor error on not being initialised on first run
                    {
                        // Negative Y scale plus a +1 Y offset flips the texture vertically
                        _material.SetTextureScale(propTextureId, new Vector2(_scale.x, -_scale.y));
                        _material.SetTextureOffset(propTextureId, Vector2.up + _offset);
                    }
                }
                else
                {
                    _material.SetTextureScale(propTextureId, _scale);
                    _material.SetTextureOffset(propTextureId, _offset);
                }
            }
        }
        else if (plane == 1)
        {
            // Chroma plane
            if (texture != null)
            {
                if (requiresYFlip)
                {
                    _material.SetTextureScale(VideoRender.PropChromaTex.Id, new Vector2(_scale.x, -_scale.y));
                    _material.SetTextureOffset(VideoRender.PropChromaTex.Id, Vector2.up + _offset);
                }
                else
                {
                    _material.SetTextureScale(VideoRender.PropChromaTex.Id, _scale);
                    _material.SetTextureOffset(VideoRender.PropChromaTex.Id, _offset);
                }
            }
        }
    }
    else
        CreateMaterial();
}
|
||||
|
||||
// Captures the material's current texture/scale/offset so RestoreProperties()
// can undo this component's changes, then rebuilds the cached property IDs.
protected override void SaveProperties()
{
    if (_material != null)
    {
        if (string.IsNullOrEmpty(_texturePropertyName))
        {
            _originalTexture = _material.mainTexture;
            _originalScale = _material.mainTextureScale;
            _originalOffset = _material.mainTextureOffset;
        }
        else
        {
            _originalTexture = _material.GetTexture(_texturePropertyName);
            _originalScale = _material.GetTextureScale(_texturePropertyName);
            _originalOffset = _material.GetTextureOffset(_texturePropertyName);
        }
    }
    else
        CreateMaterial();
    // Cache the shader property IDs for the left- and right-eye textures.
    _propTexture = new LazyShaderProperty(_texturePropertyName);
    _propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
}
|
||||
|
||||
// Restores the texture/scale/offset captured by SaveProperties().
protected override void RestoreProperties()
{
    if (_material != null)
    {
        if (string.IsNullOrEmpty(_texturePropertyName))
        {
            _material.mainTexture = _originalTexture;
            _material.mainTextureScale = _originalScale;
            _material.mainTextureOffset = _originalOffset;
        }
        else
        {
            _material.SetTexture(_texturePropertyName, _originalTexture);
            _material.SetTextureScale(_texturePropertyName, _originalScale);
            _material.SetTextureOffset(_texturePropertyName, _originalOffset);
        }
    }
    else
        CreateMaterial();
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 98c7fe5a0f3343d45ad618b4612b65f1
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,27 @@
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
/// <summary>
/// Attached to the helper object created by ApplyToFarPlane. Reports the camera
/// that is about to render the object to the shader, so the effect is only
/// drawn for the intended camera.
/// </summary>
public class ApplyToFarPlane_CameraApplier : MonoBehaviour
{
    [SerializeField] private Material _material;

    // Material that receives the current camera's instance ID.
    public Material Material
    {
        get { return _material; }
        set { _material = value; }
    }

    // This is called before the rendering of the object, by a specific camera;
    // Camera.current is set to the camera currently rendering at the time.
    void OnWillRenderObject()
    {
        // Camera.current can be null (e.g. with scriptable render pipelines),
        // so guard it to avoid a NullReferenceException.
        if (_material && Camera.current)
        {
            _material.SetFloat("_CurrentCamID", Camera.current.GetInstanceID());
        }
    }
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 5b662008ef07b3b4aab4042e13a7ae8f
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
335
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToMaterial.cs
Normal file
335
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToMaterial.cs
Normal file
@@ -0,0 +1,335 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_TVOS || UNITY_VISIONOS
|
||||
#define UNITY_PLATFORM_SUPPORTS_YPCBCR
|
||||
#endif
|
||||
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Sets up a material to display the video from a MediaPlayer
|
||||
/// </summary>
|
||||
[AddComponentMenu("AVPro Video/Apply To Material", 300)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public sealed class ApplyToMaterial : ApplyToBase
|
||||
{
|
||||
[Header("Display")]

[Tooltip("Default texture to display when the video texture is preparing")]
[SerializeField]
Texture2D _defaultTexture = null;

// Fallback texture shown until the media produces frames.
public Texture2D DefaultTexture
{
    get
    {
        return _defaultTexture;
    }
    set
    {
        if (_defaultTexture != value)
        {
            _defaultTexture = value;
            _isDirty = true;
        }
    }
}

[Space(8f)]
[Header("Material Target")]

[SerializeField]
Material _material = null;

// Material whose texture properties are driven by the video.
public Material Material
{
    get
    {
        return _material;
    }
    set
    {
        if (_material != value)
        {
            _material = value;
            _isDirty = true;
        }
    }
}

[SerializeField]
string _texturePropertyName = Helper.UnityBaseTextureName;

// Name of the material texture property the video is written to; changing it
// rebuilds the cached shader property IDs for both eyes.
public string TexturePropertyName
{
    get
    {
        return _texturePropertyName;
    }
    set
    {
        if (_texturePropertyName != value)
        {
            _texturePropertyName = value;
            _propTexture = new LazyShaderProperty(_texturePropertyName);
            _propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
            _isDirty = true;
        }
    }
}

[SerializeField]
Vector2 _offset = Vector2.zero;

// Texture coordinate offset applied when mapping the video.
public Vector2 Offset
{
    get
    {
        return _offset;
    }
    set
    {
        if (_offset != value)
        {
            _offset = value;
            _isDirty = true;
        }
    }
}

[SerializeField]
Vector2 _scale = Vector2.one;

// Texture coordinate scale applied when mapping the video.
public Vector2 Scale
{
    get
    {
        return _scale;
    }
    set
    {
        if (_scale != value)
        {
            _scale = value;
            _isDirty = true;
        }
    }
}

// Last texture pushed to the material (used to detect texture changes in Apply()).
private Texture _lastTextureApplied;
// Cached shader property for the main (left-eye) texture.
private LazyShaderProperty _propTexture;
private LazyShaderProperty _propTexture_R; // Default property for the right-eye texture

// Material state captured by SaveProperties() so RestoreProperties() can undo our changes.
private Texture _originalTexture;
private Vector2 _originalScale = Vector2.one;
private Vector2 _originalOffset = Vector2.zero;
|
||||
|
||||
// We do a LateUpdate() to allow for any changes in the texture that may have happened in Update()
private void LateUpdate()
{
    Apply();
}
|
||||
|
||||
// Pushes the current video frame texture(s) from the MediaPlayer into the
// target material, handling multi-plane (e.g. luma/chroma) and multiview
// stereo sources; falls back to the default texture when no media texture exists.
public override void Apply()
{
    bool applied = false;

    if (_media != null && _media.TextureProducer != null)
    {
        Texture resamplerTex = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[0];
        Texture texture = _media.UseResampler ? resamplerTex : _media.TextureProducer.GetTexture(0);
        if (texture != null)
        {
            // Check for changing texture
            if (texture != _lastTextureApplied)
            {
                _isDirty = true;
            }

            if (_isDirty)
            {
                bool requiresVerticalFlip = _media.TextureProducer.RequiresVerticalFlip();
                StereoPacking stereoPacking = _media.TextureProducer.GetTextureStereoPacking();
                bool isMultiview = stereoPacking == StereoPacking.MultiviewLeftPrimary || stereoPacking == StereoPacking.MultiviewRightPrimary;

                int planeCount = 1;
                if (!_media.UseResampler)
                {
                    // We're not using the resampler so the number of planes will be the texture count
                    planeCount = _media.TextureProducer.GetTextureCount();
                    if (isMultiview)
                    {
                        // Unless we're using two texture stereo in which case it'll be half the texture count
                        planeCount /= 2;
                    }
                }

                // Map each plane for the primary (left) eye
                for (int plane = 0; plane < planeCount; ++plane)
                {
                    Texture resamplerTexPlane = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[plane];
                    texture = _media.UseResampler ? resamplerTexPlane : _media.TextureProducer.GetTexture(plane);
                    if (texture != null)
                    {
                        ApplyMapping(texture, requiresVerticalFlip, plane);
                    }
                }

                // Handle the right eye if we're using two texture stereo packing
                if (isMultiview)
                {
                    for (int plane = 0; plane < planeCount; ++plane)
                    {
                        texture = _media.TextureProducer.GetTexture(planeCount + plane);
                        if (texture != null)
                        {
                            ApplyMapping(texture, requiresVerticalFlip, plane, Eye.Right);
                        }
                    }
                }
            }

            applied = true;
        }
    }

    // If the media didn't apply a texture, then try to apply the default texture
    if (!applied)
    {
        if (_defaultTexture != _lastTextureApplied)
        {
            _isDirty = true;
        }
        if (_isDirty)
        {
#if UNITY_PLATFORM_SUPPORTS_YPCBCR
            if (_material != null && _material.HasProperty(VideoRender.PropUseYpCbCr.Id))
            {
                _material.DisableKeyword(VideoRender.Keyword_UseYpCbCr);
            }
#endif
            ApplyMapping(_defaultTexture, false);
        }
    }
}
|
||||
|
||||
// Identifies which eye a texture plane is mapped for during stereo playback.
enum Eye
{
    Left,
    Right
}
|
||||
|
||||
/// <summary>
/// Binds a single texture plane to the material and applies the configured
/// scale/offset, flipping vertically via scale/offset when required.
/// </summary>
/// <param name="texture">Texture to bind</param>
/// <param name="requiresYFlip">True when the source texture is stored upside down</param>
/// <param name="plane">Texture plane index (0 = main/luma, 1 = chroma)</param>
/// <param name="eye">Which eye we're mapping, defaults to the left eye</param>
private void ApplyMapping(Texture texture, bool requiresYFlip, int plane = 0, Eye eye = Eye.Left)
{
    if (_material != null)
    {
        _isDirty = false;

        if (plane == 0)
        {
            int propTextureId = _propTexture.Id;
            if (eye == Eye.Left)
            {
                VideoRender.SetupMaterialForMedia(_material, _media, propTextureId, texture, texture == _defaultTexture);
                _lastTextureApplied = texture;
#if !UNITY_EDITOR && UNITY_ANDROID
                if (texture == _defaultTexture)
                {
                    _material.EnableKeyword("USING_DEFAULT_TEXTURE");
                }
                else
                {
                    _material.DisableKeyword("USING_DEFAULT_TEXTURE");
                }
#endif
            }
            else
            {
                // Right eye goes to the "<name>_R" shader property
                propTextureId = _propTexture_R.Id;
                _material.SetTexture(propTextureId, texture);
            }

            if (texture != null)
            {
                if (requiresYFlip)
                {
                    // Guard added for consistency with ApplyToFarPlane.ApplyMapping,
                    // which avoids an editor error when the property is not yet initialised.
                    if (_material.HasProperty(propTextureId))
                    {
                        // Negative Y scale plus a +1 Y offset flips the texture vertically
                        _material.SetTextureScale(propTextureId, new Vector2(_scale.x, -_scale.y));
                        _material.SetTextureOffset(propTextureId, Vector2.up + _offset);
                    }
                }
                else
                {
                    _material.SetTextureScale(propTextureId, _scale);
                    _material.SetTextureOffset(propTextureId, _offset);
                }
            }
        }
        else if (plane == 1)
        {
            // Chroma plane
            if (texture != null)
            {
                if (requiresYFlip)
                {
                    _material.SetTextureScale(VideoRender.PropChromaTex.Id, new Vector2(_scale.x, -_scale.y));
                    _material.SetTextureOffset(VideoRender.PropChromaTex.Id, Vector2.up + _offset);
                }
                else
                {
                    _material.SetTextureScale(VideoRender.PropChromaTex.Id, _scale);
                    _material.SetTextureOffset(VideoRender.PropChromaTex.Id, _offset);
                }
            }
        }
    }
}
|
||||
|
||||
// Captures the material's current texture/scale/offset so RestoreProperties()
// can undo this component's changes, then rebuilds the cached property IDs.
protected override void SaveProperties()
{
    if (_material != null)
    {
        if (string.IsNullOrEmpty(_texturePropertyName))
        {
            _originalTexture = _material.mainTexture;
            _originalScale = _material.mainTextureScale;
            _originalOffset = _material.mainTextureOffset;
        }
        else
        {
            _originalTexture = _material.GetTexture(_texturePropertyName);
            _originalScale = _material.GetTextureScale(_texturePropertyName);
            _originalOffset = _material.GetTextureOffset(_texturePropertyName);
        }
    }
    // Cache the shader property IDs for the left- and right-eye textures.
    _propTexture = new LazyShaderProperty(_texturePropertyName);
    _propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
}
|
||||
|
||||
// Restores the texture/scale/offset captured by SaveProperties().
protected override void RestoreProperties()
{
    if (_material != null)
    {
        if (string.IsNullOrEmpty(_texturePropertyName))
        {
            _material.mainTexture = _originalTexture;
            _material.mainTextureScale = _originalScale;
            _material.mainTextureOffset = _originalOffset;
        }
        else
        {
            _material.SetTexture(_texturePropertyName, _originalTexture);
            _material.SetTextureScale(_texturePropertyName, _originalScale);
            _material.SetTextureOffset(_texturePropertyName, _originalOffset);
        }
    }
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: d2feedce2e2e63647b8f875ec0894a15
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
364
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToMesh.cs
Normal file
364
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToMesh.cs
Normal file
@@ -0,0 +1,364 @@
|
||||
using System;
|
||||
using UnityEngine;
|
||||
using UnityEngine.Serialization;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Sets up a mesh to display the video from a MediaPlayer
|
||||
/// </summary>
|
||||
[AddComponentMenu("AVPro Video/Apply To Mesh", 300)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public sealed class ApplyToMesh : ApplyToBase
|
||||
{
|
||||
// TODO: add specific material / material index to target in the mesh if there are multiple materials

[Space(8f)]
[Header("Display")]

[Tooltip("Default texture to display when the video texture is preparing")]
[SerializeField] Texture2D _defaultTexture = null;

// Fallback texture shown until the media produces frames.
public Texture2D DefaultTexture
{
    get
    {
        return _defaultTexture;
    }
    set
    {
        ChangeDefaultTexture(value);
    }
}

[Space(8f)]
[FormerlySerializedAs("_mesh")]
[Header("Renderer Target")]
[SerializeField] Renderer _renderer = null;

// Renderer whose material(s) receive the video texture.
public Renderer MeshRenderer
{
    get
    {
        return _renderer;
    }
    set
    {
        ChangeRenderer(value);
    }
}

[SerializeField]
int _materialIndex = -1;

// Index of the material on the renderer to target.
// NOTE(review): the meaning of -1 is not visible in this chunk - confirm against the rest of the class.
public int MaterialIndex
{
    get
    {
        return _materialIndex;
    }
    set
    {
        _materialIndex = value;
    }
}
|
||||
|
||||
private void ChangeDefaultTexture(Texture2D texture)
|
||||
{
|
||||
if (_defaultTexture != texture)
|
||||
{
|
||||
_defaultTexture = texture;
|
||||
ForceUpdate();
|
||||
}
|
||||
}
|
||||
|
||||
private void ChangeRenderer(Renderer renderer)
|
||||
{
|
||||
if (_renderer != renderer)
|
||||
{
|
||||
if (_renderer)
|
||||
{
|
||||
// TODO: Remove from renderer
|
||||
}
|
||||
_renderer = renderer;
|
||||
if (_renderer)
|
||||
{
|
||||
ForceUpdate();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
string _texturePropertyName = Helper.UnityBaseTextureName;
|
||||
|
||||
public string TexturePropertyName
|
||||
{
|
||||
get
|
||||
{
|
||||
return _texturePropertyName;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_texturePropertyName != value)
|
||||
{
|
||||
_texturePropertyName = value;
|
||||
_propTexture = new LazyShaderProperty(_texturePropertyName);
|
||||
_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
Vector2 _offset = Vector2.zero;
|
||||
|
||||
public Vector2 Offset
|
||||
{
|
||||
get
|
||||
{
|
||||
return _offset;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_offset != value)
|
||||
{
|
||||
_offset = value;
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
Vector2 _scale = Vector2.one;
|
||||
|
||||
public Vector2 Scale
|
||||
{
|
||||
get
|
||||
{
|
||||
return _scale;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_scale != value)
|
||||
{
|
||||
_scale = value;
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Texture _lastTextureApplied;
|
||||
private LazyShaderProperty _propTexture;
|
||||
private LazyShaderProperty _propTexture_R; // Default property for the right-eye texture
|
||||
|
||||
// We do a LateUpdate() to allow for any changes in the texture that may have happened in Update()
|
||||
private void LateUpdate()
|
||||
{
|
||||
Apply();
|
||||
}
|
||||
|
||||
public override void Apply()
|
||||
{
|
||||
bool applied = false;
|
||||
|
||||
// Try to apply texture from media
|
||||
if (_media != null && _media.TextureProducer != null)
|
||||
{
|
||||
Texture resamplerTex = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[0];
|
||||
Texture texture = _media.UseResampler ? resamplerTex : _media.TextureProducer.GetTexture(0);
|
||||
if (texture != null)
|
||||
{
|
||||
// Check for changing texture
|
||||
if (texture != _lastTextureApplied)
|
||||
{
|
||||
_isDirty = true;
|
||||
}
|
||||
|
||||
if (_isDirty)
|
||||
{
|
||||
bool requiresVerticalFlip = _media.TextureProducer.RequiresVerticalFlip();
|
||||
StereoPacking stereoPacking = _media.TextureProducer.GetTextureStereoPacking();
|
||||
bool isMultiview = stereoPacking == StereoPacking.MultiviewLeftPrimary || stereoPacking == StereoPacking.MultiviewRightPrimary;
|
||||
|
||||
int planeCount = 1;
|
||||
if (!_media.UseResampler)
|
||||
{
|
||||
// We're not using the resampler so the number of planes will be the texture count
|
||||
planeCount = _media.TextureProducer.GetTextureCount();
|
||||
if (isMultiview)
|
||||
{
|
||||
// Unless we're using two texture stereo in which case it'll be half the texture count
|
||||
planeCount /= 2;
|
||||
}
|
||||
}
|
||||
|
||||
for (int plane = 0; plane < planeCount; plane++)
|
||||
{
|
||||
Texture resamplerTexPlane = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[plane];
|
||||
texture = _media.UseResampler ? resamplerTexPlane : _media.TextureProducer.GetTexture(plane);
|
||||
if (texture != null)
|
||||
{
|
||||
ApplyMapping(texture, _media.TextureProducer.RequiresVerticalFlip(), plane, materialIndex: _materialIndex);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle the right eye if we're using two texture stereo packing
|
||||
if (isMultiview)
|
||||
{
|
||||
for (int plane = 0; plane < planeCount; ++plane)
|
||||
{
|
||||
texture = _media.TextureProducer.GetTexture(planeCount + plane);
|
||||
if (texture != null)
|
||||
{
|
||||
ApplyMapping(texture, requiresVerticalFlip, plane, Eye.Right);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
applied = true;
|
||||
}
|
||||
}
|
||||
|
||||
// If the media didn't apply a texture, then try to apply the default texture
|
||||
if (!applied)
|
||||
{
|
||||
if (_defaultTexture != _lastTextureApplied)
|
||||
{
|
||||
_isDirty = true;
|
||||
}
|
||||
if (_isDirty)
|
||||
{
|
||||
ApplyMapping(_defaultTexture, false, 0, materialIndex: _materialIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum Eye
|
||||
{
|
||||
Left,
|
||||
Right
|
||||
}
|
||||
|
||||
private void ApplyMapping(Texture texture, bool requiresYFlip, int plane, Eye eye = Eye.Left, int materialIndex = -1)
|
||||
{
|
||||
if (_renderer != null)
|
||||
{
|
||||
_isDirty = false;
|
||||
#if UNITY_EDITOR
|
||||
Material[] meshMaterials = _renderer.sharedMaterials;
|
||||
#else
|
||||
Material[] meshMaterials = _renderer.materials;
|
||||
#endif
|
||||
|
||||
if (meshMaterials != null)
|
||||
{
|
||||
for (int i = 0; i < meshMaterials.Length; i++)
|
||||
{
|
||||
if (_materialIndex < 0 || i == _materialIndex)
|
||||
{
|
||||
Material mat = meshMaterials[i];
|
||||
if (mat != null)
|
||||
{
|
||||
if (StereoRedGreenTint)
|
||||
{
|
||||
mat.EnableKeyword("STEREO_DEBUG");
|
||||
}
|
||||
else
|
||||
{
|
||||
mat.DisableKeyword("STEREO_DEBUG");
|
||||
}
|
||||
|
||||
if (plane == 0)
|
||||
{
|
||||
int propTextureId = _propTexture.Id;
|
||||
if (eye == Eye.Left)
|
||||
{
|
||||
VideoRender.SetupMaterialForMedia(mat, _media, _propTexture.Id, texture, texture == _defaultTexture);
|
||||
_lastTextureApplied = texture;
|
||||
|
||||
#if !UNITY_EDITOR && UNITY_ANDROID
|
||||
if (texture == _defaultTexture)
|
||||
{
|
||||
mat.EnableKeyword("USING_DEFAULT_TEXTURE");
|
||||
}
|
||||
else
|
||||
{
|
||||
mat.DisableKeyword("USING_DEFAULT_TEXTURE");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
propTextureId = _propTexture_R.Id;
|
||||
mat.SetTexture(propTextureId, texture);
|
||||
}
|
||||
|
||||
if (texture != null)
|
||||
{
|
||||
if (requiresYFlip)
|
||||
{
|
||||
mat.SetTextureScale(_propTexture.Id, new Vector2(_scale.x, -_scale.y));
|
||||
mat.SetTextureOffset(_propTexture.Id, Vector2.up + _offset);
|
||||
}
|
||||
else
|
||||
{
|
||||
mat.SetTextureScale(_propTexture.Id, _scale);
|
||||
mat.SetTextureOffset(_propTexture.Id, _offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (plane == 1)
|
||||
{
|
||||
if (texture != null)
|
||||
{
|
||||
if (requiresYFlip)
|
||||
{
|
||||
mat.SetTextureScale(VideoRender.PropChromaTex.Id, new Vector2(_scale.x, -_scale.y));
|
||||
mat.SetTextureOffset(VideoRender.PropChromaTex.Id, Vector2.up + _offset);
|
||||
}
|
||||
else
|
||||
{
|
||||
mat.SetTextureScale(VideoRender.PropChromaTex.Id, _scale);
|
||||
mat.SetTextureOffset(VideoRender.PropChromaTex.Id, _offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override void OnEnable()
|
||||
{
|
||||
if (_renderer == null)
|
||||
{
|
||||
_renderer = this.GetComponent<MeshRenderer>();
|
||||
if (_renderer == null)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo] No MeshRenderer set or found in gameobject");
|
||||
}
|
||||
}
|
||||
|
||||
_propTexture = new LazyShaderProperty(_texturePropertyName);
|
||||
|
||||
ForceUpdate();
|
||||
}
|
||||
|
||||
protected override void OnDisable()
|
||||
{
|
||||
ApplyMapping(_defaultTexture, false, 0, materialIndex: _materialIndex);
|
||||
}
|
||||
|
||||
protected override void SaveProperties()
|
||||
{
|
||||
_propTexture = new LazyShaderProperty(_texturePropertyName);
|
||||
_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: f6d1977a52888584496b1acc7e998011
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
@@ -0,0 +1,81 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2019-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// Allows per-channel volume control
|
||||
/// Currently supported on Windows and UWP (Media Foundation API only), macOS, iOS, tvOS and Android (ExoPlayer API only)
|
||||
[AddComponentMenu("AVPro Video/Audio Channel Mixer", 401)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public class AudioChannelMixer : MonoBehaviour
|
||||
{
|
||||
const int MaxChannels = 8;
|
||||
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField] float[] _channels = null;
|
||||
|
||||
/// Range 0.0 to 1.0
|
||||
public float[] Channel
|
||||
{
|
||||
get { return _channels; }
|
||||
set { _channels = value; }
|
||||
}
|
||||
|
||||
void Reset()
|
||||
{
|
||||
_channels = new float[MaxChannels];
|
||||
for (int i = 0; i < MaxChannels; i++)
|
||||
{
|
||||
_channels[i] = 1f;
|
||||
}
|
||||
}
|
||||
|
||||
void ChangeChannelCount(int numChannels)
|
||||
{
|
||||
float[] channels = new float[numChannels];
|
||||
if (_channels != null && _channels.Length != 0)
|
||||
{
|
||||
for (int i = 0; i < channels.Length; i++)
|
||||
{
|
||||
if (i < _channels.Length)
|
||||
{
|
||||
channels[i] = _channels[i];
|
||||
}
|
||||
else
|
||||
{
|
||||
channels[i] = 1f;
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int i = 0; i < numChannels; i++)
|
||||
{
|
||||
channels[i] = 1f;
|
||||
}
|
||||
}
|
||||
_channels = channels;
|
||||
}
|
||||
|
||||
void OnAudioFilterRead(float[] data, int channels)
|
||||
{
|
||||
if (channels != _channels.Length)
|
||||
{
|
||||
ChangeChannelCount(channels);
|
||||
}
|
||||
int k = 0;
|
||||
int numSamples = data.Length / channels;
|
||||
for (int j = 0; j < numSamples; j++)
|
||||
{
|
||||
for (int i = 0; i < channels; i++)
|
||||
{
|
||||
data[k] *= _channels[i];
|
||||
k++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 383a68f1e3e94be4b84df59dd26074db
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
180
Assets/AVProVideo/Runtime/Scripts/Components/AudioOutput.cs
Normal file
180
Assets/AVProVideo/Runtime/Scripts/Components/AudioOutput.cs
Normal file
@@ -0,0 +1,180 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2026 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Audio is grabbed from the MediaPlayer and rendered via Unity AudioSource
|
||||
/// This allows audio to have 3D spatial control, effects applied and to be spatialised for VR
|
||||
/// Currently supported on Windows and UWP (Media Foundation API only), macOS, iOS, tvOS and Android (ExoPlayer API only)
|
||||
/// </summary>
|
||||
[RequireComponent(typeof(AudioSource))]
|
||||
[AddComponentMenu("AVPro Video/Audio Output", 400)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public class AudioOutput : MonoBehaviour
|
||||
{
|
||||
public enum AudioOutputMode
|
||||
{
|
||||
OneToAllChannels,
|
||||
MultipleChannels
|
||||
}
|
||||
|
||||
[SerializeField] MediaPlayer _mediaPlayer = null;
|
||||
[SerializeField] AudioOutputMode _audioOutputMode = AudioOutputMode.MultipleChannels;
|
||||
[HideInInspector, SerializeField] int _channelMask = 0xffff;
|
||||
[SerializeField] bool _supportPositionalAudio = false;
|
||||
|
||||
private int _mediaPlayerInstanceID = 0;
|
||||
|
||||
public MediaPlayer Player
|
||||
{
|
||||
get { return _mediaPlayer; }
|
||||
set { ChangeMediaPlayer(value); }
|
||||
}
|
||||
|
||||
public AudioOutputMode OutputMode
|
||||
{
|
||||
get { return _audioOutputMode; }
|
||||
set { _audioOutputMode = value; }
|
||||
}
|
||||
|
||||
public int ChannelMask
|
||||
{
|
||||
get { return _channelMask; }
|
||||
set { _channelMask = value; }
|
||||
}
|
||||
|
||||
public bool SupportPositionalAudio
|
||||
{
|
||||
get { return _supportPositionalAudio; }
|
||||
set { _supportPositionalAudio = value; }
|
||||
}
|
||||
|
||||
private AudioSource _audioSource;
|
||||
|
||||
void Awake()
|
||||
{
|
||||
_audioSource = this.GetComponent<AudioSource>();
|
||||
Debug.Assert(_audioSource != null);
|
||||
}
|
||||
|
||||
void Start()
|
||||
{
|
||||
AudioSettings.OnAudioConfigurationChanged += OnAudioConfigurationChanged;
|
||||
ChangeMediaPlayer(_mediaPlayer);
|
||||
}
|
||||
|
||||
void OnAudioConfigurationChanged(bool deviceChanged)
|
||||
{
|
||||
if (_mediaPlayer == null || _mediaPlayer.Control == null)
|
||||
return;
|
||||
_mediaPlayer.Control.AudioConfigurationChanged(deviceChanged);
|
||||
}
|
||||
|
||||
void OnDestroy()
|
||||
{
|
||||
ChangeMediaPlayer(null);
|
||||
}
|
||||
|
||||
void Update()
|
||||
{
|
||||
if (_mediaPlayer != null && _mediaPlayer.Control != null && _mediaPlayer.Control.IsPlaying())
|
||||
{
|
||||
ApplyAudioSettings(_mediaPlayer, _audioSource);
|
||||
}
|
||||
}
|
||||
|
||||
public AudioSource GetAudioSource()
|
||||
{
|
||||
return _audioSource;
|
||||
}
|
||||
public void SetAudioSource(AudioSource source)
|
||||
{
|
||||
_audioSource = source;
|
||||
if (_mediaPlayer)
|
||||
_mediaPlayer.AudioSource = source;
|
||||
}
|
||||
|
||||
public void ChangeMediaPlayer(MediaPlayer newPlayer)
|
||||
{
|
||||
// When changing the media player, handle event subscriptions
|
||||
if (_mediaPlayer != null)
|
||||
{
|
||||
_mediaPlayer.AudioSource = null;
|
||||
_mediaPlayer.Events.RemoveListener(OnMediaPlayerEvent);
|
||||
AudioOutputManager.Instance.RemovePlayerInstance(_mediaPlayerInstanceID);
|
||||
_mediaPlayer = null;
|
||||
_mediaPlayerInstanceID = 0;
|
||||
}
|
||||
|
||||
_mediaPlayer = newPlayer;
|
||||
if (_mediaPlayer != null)
|
||||
{
|
||||
_mediaPlayer.Events.AddListener(OnMediaPlayerEvent);
|
||||
_mediaPlayer.AudioSource = _audioSource;
|
||||
_mediaPlayerInstanceID = _mediaPlayer.GetInstanceID();
|
||||
AudioOutputManager.Instance.AddPlayerInstance(_mediaPlayerInstanceID);
|
||||
}
|
||||
|
||||
if (_supportPositionalAudio)
|
||||
{
|
||||
if (_audioSource.clip == null)
|
||||
{
|
||||
// Position audio is implemented from hints found on this thread:
|
||||
// https://forum.unity.com/threads/onaudiofilterread-sound-spatialisation.362782/
|
||||
int frameCount = 2048 * 10;
|
||||
int sampleCount = frameCount * Helper.GetUnityAudioSpeakerCount();
|
||||
AudioClip clip = AudioClip.Create("dummy", frameCount, Helper.GetUnityAudioSpeakerCount(), Helper.GetUnityAudioSampleRate(), false);
|
||||
float[] samples = new float[sampleCount];
|
||||
for (int i = 0; i < samples.Length; i++) { samples[i] = 1f; }
|
||||
clip.SetData(samples, 0);
|
||||
_audioSource.clip = clip;
|
||||
_audioSource.loop = true;
|
||||
}
|
||||
}
|
||||
else if (_audioSource.clip != null)
|
||||
{
|
||||
_audioSource.clip = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Callback function to handle events
|
||||
private void OnMediaPlayerEvent(MediaPlayer mp, MediaPlayerEvent.EventType et, ErrorCode errorCode)
|
||||
{
|
||||
switch (et)
|
||||
{
|
||||
case MediaPlayerEvent.EventType.Closing:
|
||||
_audioSource.Stop();
|
||||
break;
|
||||
case MediaPlayerEvent.EventType.Started:
|
||||
ApplyAudioSettings(_mediaPlayer, _audioSource);
|
||||
_audioSource.Play();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private static void ApplyAudioSettings(MediaPlayer player, AudioSource audioSource)
|
||||
{
|
||||
// Apply volume and mute from the MediaPlayer to the AudioSource
|
||||
if (audioSource != null && player != null && player.Control != null)
|
||||
{
|
||||
float volume = player.Control.GetVolume();
|
||||
bool isMuted = player.Control.IsMuted();
|
||||
float rate = player.Control.GetPlaybackRate();
|
||||
audioSource.volume = volume;
|
||||
audioSource.mute = isMuted;
|
||||
audioSource.pitch = rate;
|
||||
}
|
||||
}
|
||||
|
||||
#if (UNITY_EDITOR_WIN || UNITY_EDITOR_OSX) || (!UNITY_EDITOR && (UNITY_STANDALONE_WIN || UNITY_WSA_10_0 || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_TVOS || UNITY_VISIONOS || UNITY_ANDROID))
|
||||
void OnAudioFilterRead(float[] audioData, int channelCount)
|
||||
{
|
||||
AudioOutputManager.Instance.RequestAudio(this, _mediaPlayer, _mediaPlayerInstanceID, audioData, channelCount, _channelMask, _audioOutputMode, _supportPositionalAudio);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 3b05a64a5de3f8546bf586f42e37b979
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
448
Assets/AVProVideo/Runtime/Scripts/Components/DisplayIMGUI.cs
Normal file
448
Assets/AVProVideo/Runtime/Scripts/Components/DisplayIMGUI.cs
Normal file
@@ -0,0 +1,448 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2025 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#define UNITY_PLATFORM_SUPPORTS_LINEAR
|
||||
|
||||
#if UNITY_EDITOR_WIN || (!UNITY_EDITOR && UNITY_STANDALONE_WIN)
|
||||
#define UNITY_PLATFORM_SUPPORTS_VIDEOASPECTRATIO
|
||||
#endif
|
||||
|
||||
using UnityEngine;
|
||||
using UnityEngine.Serialization;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Displays the video from MediaPlayer component using IMGUI
|
||||
/// </summary>
|
||||
[AddComponentMenu("AVPro Video/Display IMGUI", 200)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
[ExecuteInEditMode]
|
||||
public class DisplayIMGUI : MonoBehaviour
|
||||
{
|
||||
[SerializeField]
|
||||
MediaPlayer _mediaPlayer = null;
|
||||
public MediaPlayer Player
|
||||
{
|
||||
get
|
||||
{
|
||||
return _mediaPlayer;
|
||||
}
|
||||
set
|
||||
{
|
||||
_mediaPlayer = value;
|
||||
Update();
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
ScaleMode _scaleMode = ScaleMode.ScaleToFit;
|
||||
public ScaleMode ScaleMode
|
||||
{
|
||||
get
|
||||
{
|
||||
return _scaleMode;
|
||||
}
|
||||
set
|
||||
{
|
||||
_scaleMode = value;
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
Color _color = UnityEngine.Color.white;
|
||||
public Color Color
|
||||
{
|
||||
get
|
||||
{
|
||||
return _color;
|
||||
}
|
||||
set
|
||||
{
|
||||
_color = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_alphaBlend")]
|
||||
[SerializeField] bool _allowTransparency = false;
|
||||
public bool AllowTransparency
|
||||
{
|
||||
get
|
||||
{
|
||||
return _allowTransparency;
|
||||
}
|
||||
set
|
||||
{
|
||||
_allowTransparency = value;
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
bool _useDepth = false;
|
||||
public bool UseDepth
|
||||
{
|
||||
get
|
||||
{
|
||||
return _useDepth;
|
||||
}
|
||||
set
|
||||
{
|
||||
_useDepth = value;
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
int _depth = 0;
|
||||
public int Depth
|
||||
{
|
||||
get
|
||||
{
|
||||
return _depth;
|
||||
}
|
||||
set
|
||||
{
|
||||
_depth = value;
|
||||
}
|
||||
}
|
||||
|
||||
[Header("Area")]
|
||||
|
||||
[FormerlySerializedAs("_fullScreen")]
|
||||
[SerializeField]
|
||||
bool _isAreaFullScreen = true;
|
||||
public bool IsAreaFullScreen
|
||||
{
|
||||
get
|
||||
{
|
||||
return _isAreaFullScreen;
|
||||
}
|
||||
set
|
||||
{
|
||||
_isAreaFullScreen = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_x")]
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField]
|
||||
float _areaX = 0f;
|
||||
public float AreaX
|
||||
{
|
||||
get
|
||||
{
|
||||
return _areaX;
|
||||
}
|
||||
set
|
||||
{
|
||||
_areaX = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_y")]
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField]
|
||||
float _areaY = 0f;
|
||||
public float AreaY
|
||||
{
|
||||
get
|
||||
{
|
||||
return _areaY;
|
||||
}
|
||||
set
|
||||
{
|
||||
_areaY = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_width")]
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField] float _areaWidth = 1f;
|
||||
public float AreaWidth
|
||||
{
|
||||
get
|
||||
{
|
||||
return _areaWidth;
|
||||
}
|
||||
set
|
||||
{
|
||||
_areaWidth = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_height")]
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField] float _areaHeight = 1f;
|
||||
public float AreaHeight
|
||||
{
|
||||
get
|
||||
{
|
||||
return _areaHeight;
|
||||
}
|
||||
set
|
||||
{
|
||||
_areaHeight = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_displayInEditor")]
|
||||
[SerializeField] bool _showAreaInEditor = false;
|
||||
public bool ShowAreaInEditor
|
||||
{
|
||||
get
|
||||
{
|
||||
return _showAreaInEditor;
|
||||
}
|
||||
set
|
||||
{
|
||||
_showAreaInEditor = value;
|
||||
}
|
||||
}
|
||||
|
||||
private static Shader _shaderAlphaPacking;
|
||||
private Material _material;
|
||||
|
||||
void Start()
|
||||
{
|
||||
// Disabling useGUILayout lets you skip the GUI layout phase which helps performance, but this also breaks the GUI.depth usage.
|
||||
if (!_useDepth)
|
||||
{
|
||||
this.useGUILayout = false;
|
||||
}
|
||||
|
||||
if (!_shaderAlphaPacking)
|
||||
{
|
||||
_shaderAlphaPacking = Shader.Find("AVProVideo/Internal/IMGUI/Texture Transparent");
|
||||
if (!_shaderAlphaPacking)
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Missing shader 'AVProVideo/Internal/IMGUI/Texture Transparent'");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void Update()
|
||||
{
|
||||
if (_mediaPlayer != null)
|
||||
{
|
||||
SetupMaterial();
|
||||
}
|
||||
}
|
||||
|
||||
void OnDestroy()
|
||||
{
|
||||
// Destroy existing material
|
||||
if (_material != null)
|
||||
{
|
||||
#if UNITY_EDITOR
|
||||
Material.DestroyImmediate(_material);
|
||||
#else
|
||||
Material.Destroy(_material);
|
||||
#endif
|
||||
_material = null;
|
||||
}
|
||||
}
|
||||
|
||||
private Shader GetRequiredShader()
|
||||
{
|
||||
// [MOZ] Always default to the alpha packed shader for now to force using the material rendering path
|
||||
// in OnGUI. This fixed issues with incorrect colourisation and orientation/cropping for certain videos.
|
||||
Shader result = _shaderAlphaPacking;
|
||||
#if false
|
||||
if (result == null && _mediaPlayer.TextureProducer != null)
|
||||
{
|
||||
switch (_mediaPlayer.TextureProducer.GetTextureAlphaPacking())
|
||||
{
|
||||
case AlphaPacking.None:
|
||||
break;
|
||||
case AlphaPacking.LeftRight:
|
||||
case AlphaPacking.TopBottom:
|
||||
result = _shaderAlphaPacking;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
#if UNITY_PLATFORM_SUPPORTS_LINEAR
|
||||
if (result == null && _mediaPlayer.Info != null)
|
||||
{
|
||||
// If the player does support generating sRGB textures then we need to use a shader to convert them for display via IMGUI
|
||||
if (QualitySettings.activeColorSpace == ColorSpace.Linear && !_mediaPlayer.Info.PlayerSupportsLinearColorSpace())
|
||||
{
|
||||
result = _shaderAlphaPacking;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
if (result == null && _mediaPlayer.TextureProducer != null)
|
||||
{
|
||||
if (_mediaPlayer.TextureProducer.GetTextureCount() == 2)
|
||||
{
|
||||
result = _shaderAlphaPacking;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
return result;
|
||||
}
|
||||
|
||||
private void SetupMaterial()
|
||||
{
|
||||
// Get required shader
|
||||
Shader currentShader = null;
|
||||
if (_material != null)
|
||||
{
|
||||
currentShader = _material.shader;
|
||||
}
|
||||
Shader nextShader = GetRequiredShader();
|
||||
|
||||
// If the shader requirement has changed
|
||||
if (currentShader != nextShader)
|
||||
{
|
||||
// Destroy existing material
|
||||
if (_material != null)
|
||||
{
|
||||
#if UNITY_EDITOR
|
||||
Material.DestroyImmediate(_material);
|
||||
#else
|
||||
Material.Destroy(_material);
|
||||
#endif
|
||||
_material = null;
|
||||
}
|
||||
|
||||
// Create new material
|
||||
if (nextShader != null)
|
||||
{
|
||||
_material = new Material(nextShader);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#if UNITY_EDITOR
|
||||
private void DrawArea()
|
||||
{
|
||||
Rect rect = GetAreaRect();
|
||||
Rect uv = rect;
|
||||
uv.x /= Screen.width;
|
||||
uv.width /= Screen.width;
|
||||
uv.y /= Screen.height;
|
||||
uv.height /= Screen.height;
|
||||
uv.width *= 16f;
|
||||
uv.height *= 16f;
|
||||
uv.x += 0.5f;
|
||||
uv.y += 0.5f;
|
||||
Texture2D icon = Resources.Load<Texture2D>("AVProVideoIcon");
|
||||
GUI.depth = _depth;
|
||||
GUI.color = _color;
|
||||
GUI.DrawTextureWithTexCoords(rect, icon, uv);
|
||||
}
|
||||
#endif
|
||||
|
||||
void OnGUI()
|
||||
{
|
||||
#if UNITY_EDITOR
|
||||
if (_showAreaInEditor && !Application.isPlaying)
|
||||
{
|
||||
DrawArea();
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (_mediaPlayer == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
Texture texture = null;
|
||||
if (_showAreaInEditor)
|
||||
{
|
||||
#if UNITY_EDITOR
|
||||
texture = Texture2D.whiteTexture;
|
||||
#endif
|
||||
}
|
||||
texture = VideoRender.GetTexture(_mediaPlayer, 0);
|
||||
if (_mediaPlayer.Info != null && !_mediaPlayer.Info.HasVideo())
|
||||
{
|
||||
texture = null;
|
||||
}
|
||||
|
||||
if (texture != null)
|
||||
{
|
||||
bool isTextureVisible = (_color.a > 0f || !_allowTransparency);
|
||||
if (isTextureVisible)
|
||||
{
|
||||
GUI.depth = _depth;
|
||||
GUI.color = _color;
|
||||
|
||||
Rect rect = GetAreaRect();
|
||||
|
||||
// TODO: change this to a material-only path so we only have a single drawing path
|
||||
if (_material != null)
|
||||
{
|
||||
// TODO: Only setup material when needed
|
||||
VideoRender.SetupMaterialForMedia(_material, _mediaPlayer);
|
||||
|
||||
// NOTE: It seems that Graphics.DrawTexture() behaves differently than GUI.DrawTexture() when it comes to sRGB writing
|
||||
// on newer versions of Unity (at least 2018.2.19 and above), so now we have to force the conversion to sRGB on writing
|
||||
bool restoreSRGBWrite = false;
|
||||
#if UNITY_EDITOR_WIN || (!UNITY_EDITOR && UNITY_STANDALONE_WIN)
|
||||
if (QualitySettings.activeColorSpace == ColorSpace.Linear && !GL.sRGBWrite)
|
||||
{
|
||||
restoreSRGBWrite = true;
|
||||
}
|
||||
#endif
|
||||
if (restoreSRGBWrite)
|
||||
{
|
||||
GL.sRGBWrite = true;
|
||||
}
|
||||
|
||||
VideoRender.DrawTexture(rect, texture, _scaleMode, _mediaPlayer.TextureProducer.GetTextureAlphaPacking(), _mediaPlayer.TextureProducer.GetTexturePixelAspectRatio(), _material);
|
||||
|
||||
if (restoreSRGBWrite)
|
||||
{
|
||||
GL.sRGBWrite = false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
bool requiresVerticalFlip = false;
|
||||
if (_mediaPlayer.TextureProducer != null)
|
||||
{
|
||||
requiresVerticalFlip = _mediaPlayer.TextureProducer.RequiresVerticalFlip();
|
||||
}
|
||||
if (requiresVerticalFlip)
|
||||
{
|
||||
GUIUtility.ScaleAroundPivot(new Vector2(1f, -1f), new Vector2(0f, rect.y + (rect.height / 2f)));
|
||||
}
|
||||
#if UNITY_PLATFORM_SUPPORTS_VIDEOASPECTRATIO
|
||||
float par = _mediaPlayer.TextureProducer.GetTexturePixelAspectRatio();
|
||||
if (par > 0f)
|
||||
{
|
||||
if (par > 1f)
|
||||
{
|
||||
GUIUtility.ScaleAroundPivot(new Vector2(par, 1f), new Vector2(rect.x + (rect.width / 2f), rect.y + (rect.height / 2f)));
|
||||
}
|
||||
else
|
||||
{
|
||||
GUIUtility.ScaleAroundPivot(new Vector2(1f, 1f/par), new Vector2(rect.x + (rect.width / 2f), rect.y + (rect.height / 2f)));
|
||||
}
|
||||
}
|
||||
#endif
|
||||
GUI.DrawTexture(rect, texture, _scaleMode, _allowTransparency);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Rect GetAreaRect()
|
||||
{
|
||||
Rect rect;
|
||||
if (_isAreaFullScreen)
|
||||
{
|
||||
rect = new Rect(0.0f, 0.0f, Screen.width, Screen.height);
|
||||
}
|
||||
else
|
||||
{
|
||||
rect = new Rect(_areaX * (Screen.width - 1), _areaY * (Screen.height - 1), _areaWidth * Screen.width, _areaHeight * Screen.height);
|
||||
}
|
||||
|
||||
return rect;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 75f3b319d2d69934d8bf545ab45c918d
|
||||
timeCreated: 1544813301
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
1571
Assets/AVProVideo/Runtime/Scripts/Components/MediaPlayer.cs
Normal file
1571
Assets/AVProVideo/Runtime/Scripts/Components/MediaPlayer.cs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 638c870cac4da414fba921606d504407
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,65 @@
|
||||
#if !(UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_TVOS)
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
#region Application Focus and Pausing
|
||||
#if !UNITY_EDITOR
|
||||
void OnApplicationFocus(bool focusStatus)
|
||||
{
|
||||
#if !(UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN)
|
||||
// Debug.Log("OnApplicationFocus: focusStatus: " + focusStatus);
|
||||
|
||||
if (focusStatus && (isActiveAndEnabled && enabled))
|
||||
{
|
||||
if (Control != null && _wasPlayingOnPause)
|
||||
{
|
||||
_wasPlayingOnPause = false;
|
||||
Control.Play();
|
||||
|
||||
Helper.LogInfo("OnApplicationFocus: playing video again");
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void OnApplicationPause(bool pauseStatus)
|
||||
{
|
||||
#if !(UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN)
|
||||
// Debug.Log("OnApplicationPause: pauseStatus: " + pauseStatus);
|
||||
|
||||
if (pauseStatus)
|
||||
{
|
||||
if (_pauseMediaOnAppPause)
|
||||
{
|
||||
if (Control!= null && Control.IsPlaying())
|
||||
{
|
||||
_wasPlayingOnPause = true;
|
||||
#if !UNITY_IPHONE
|
||||
Control.Pause();
|
||||
#endif
|
||||
Helper.LogInfo("OnApplicationPause: pausing video");
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (_playMediaOnAppUnpause)
|
||||
{
|
||||
// Catch coming back from power off state when no lock screen
|
||||
OnApplicationFocus(true);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#endregion // Application Focus and Pausing
|
||||
}
|
||||
}
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 3a3464021ab2fb14a81d5d35b3097023
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,29 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
#region Audio Mute Support for Unity Editor
|
||||
#if UNITY_EDITOR
|
||||
private bool _unityAudioMasterMute = false;
|
||||
private void CheckEditorAudioMute()
|
||||
{
|
||||
// Detect a change
|
||||
if (UnityEditor.EditorUtility.audioMasterMute != _unityAudioMasterMute)
|
||||
{
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
_unityAudioMasterMute = UnityEditor.EditorUtility.audioMasterMute;
|
||||
_controlInterface.MuteAudio(_audioMuted || _unityAudioMasterMute);
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#endregion // Audio Mute Support for Unity Editor
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 16be519f584387149bd75947276c3a72
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,83 @@
|
||||
using UnityEngine;
|
||||
|
||||
#if UNITY_EDITOR
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
#region Play/Pause Support for Unity Editor
|
||||
// This code handles the pause/play buttons in the editor
|
||||
private static void SetupEditorPlayPauseSupport()
|
||||
{
|
||||
#if UNITY_2017_2_OR_NEWER
|
||||
UnityEditor.EditorApplication.pauseStateChanged -= OnUnityPauseModeChanged;
|
||||
UnityEditor.EditorApplication.pauseStateChanged += OnUnityPauseModeChanged;
|
||||
#else
|
||||
UnityEditor.EditorApplication.playmodeStateChanged -= OnUnityPlayModeChanged;
|
||||
UnityEditor.EditorApplication.playmodeStateChanged += OnUnityPlayModeChanged;
|
||||
#endif
|
||||
}
|
||||
|
||||
#if UNITY_2017_2_OR_NEWER
|
||||
private static void OnUnityPauseModeChanged(UnityEditor.PauseState state)
|
||||
{
|
||||
OnUnityPlayModeChanged();
|
||||
}
|
||||
#endif
|
||||
|
||||
private static void OnUnityPlayModeChanged()
|
||||
{
|
||||
if (UnityEditor.EditorApplication.isPlaying)
|
||||
{
|
||||
bool isPaused = UnityEditor.EditorApplication.isPaused;
|
||||
MediaPlayer[] players = Resources.FindObjectsOfTypeAll<MediaPlayer>();
|
||||
foreach (MediaPlayer player in players)
|
||||
{
|
||||
if (isPaused)
|
||||
{
|
||||
player.EditorPause();
|
||||
}
|
||||
else
|
||||
{
|
||||
player.EditorUnpause();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void EditorPause()
|
||||
{
|
||||
if (this.isActiveAndEnabled)
|
||||
{
|
||||
if (_controlInterface != null && _controlInterface.IsPlaying())
|
||||
{
|
||||
_wasPlayingOnPause = true;
|
||||
_controlInterface.Pause();
|
||||
}
|
||||
StopRenderCoroutine();
|
||||
}
|
||||
}
|
||||
|
||||
private void EditorUnpause()
|
||||
{
|
||||
if (this.isActiveAndEnabled)
|
||||
{
|
||||
if (_controlInterface != null && _wasPlayingOnPause)
|
||||
{
|
||||
_autoPlayOnStart = true;
|
||||
_wasPlayingOnPause = false;
|
||||
_autoPlayOnStartTriggered = false;
|
||||
}
|
||||
StartRenderCoroutine();
|
||||
}
|
||||
}
|
||||
#endregion // Play/Pause Support for Unity Editor
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 083c5ace9dbfda84cb8b4afaa19bdcde
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,323 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
// Event state
|
||||
private bool _eventFired_MetaDataReady = false;
|
||||
private bool _eventFired_ReadyToPlay = false;
|
||||
private bool _eventFired_Started = false;
|
||||
private bool _eventFired_FirstFrameReady = false;
|
||||
private bool _eventFired_FinishedPlaying = false;
|
||||
private bool _eventState_PlaybackBuffering = false;
|
||||
private bool _eventState_PlaybackSeeking = false;
|
||||
private bool _eventState_PlaybackStalled = false;
|
||||
private int _eventState_PreviousWidth = 0;
|
||||
private int _eventState_PreviousHeight = 0;
|
||||
private int _previousSubtitleIndex = -1;
|
||||
private bool _finishedFrameOpenCheck = false;
|
||||
private bool _eventState_Paused = false;
|
||||
|
||||
#if UNITY_EDITOR
|
||||
public static MediaPlayerLoadEvent InternalMediaLoadedEvent = new MediaPlayerLoadEvent();
|
||||
#endif
|
||||
|
||||
private void ResetEvents()
|
||||
{
|
||||
_eventFired_MetaDataReady = false;
|
||||
_eventFired_ReadyToPlay = false;
|
||||
_eventFired_Started = false;
|
||||
_eventFired_FirstFrameReady = false;
|
||||
_eventFired_FinishedPlaying = false;
|
||||
_eventState_PlaybackBuffering = false;
|
||||
_eventState_PlaybackSeeking = false;
|
||||
_eventState_PlaybackStalled = false;
|
||||
_eventState_PreviousWidth = 0;
|
||||
_eventState_PreviousHeight = 0;
|
||||
_previousSubtitleIndex = -1;
|
||||
_finishedFrameOpenCheck = false;
|
||||
}
|
||||
|
||||
private void CheckAndClearStartedAndFinishedEvents()
|
||||
{
|
||||
//NOTE: Fixes a bug where the event was being fired immediately, so when a file is opened, the finishedPlaying fired flag gets set but
|
||||
//is then set to true immediately afterwards due to the returned value
|
||||
_finishedFrameOpenCheck = false;
|
||||
if (IsHandleEvent(MediaPlayerEvent.EventType.FinishedPlaying))
|
||||
{
|
||||
if (FireEventIfPossible(MediaPlayerEvent.EventType.FinishedPlaying, _eventFired_FinishedPlaying))
|
||||
{
|
||||
_eventFired_FinishedPlaying = !_finishedFrameOpenCheck;
|
||||
}
|
||||
}
|
||||
|
||||
if (_eventFired_FinishedPlaying &&
|
||||
IsHandleEvent(MediaPlayerEvent.EventType.FinishedPlaying) &&
|
||||
_controlInterface.IsPlaying() &&
|
||||
!_controlInterface.IsFinished())
|
||||
{
|
||||
bool reset = true;
|
||||
// RJT NOTE: Commented out for now as seems over-aggressive and can lead to freeze conditions as seen in: https://github.com/RenderHeads/UnityPlugin-AVProVideo/issues/1692
|
||||
// - If we need to reinstate then we'd likely need considerably more tolerance, especially on slower machines
|
||||
#if false//UNITY_EDITOR_WIN || (!UNITY_EDITOR && (UNITY_STANDALONE_WIN || UNITY_WSA))
|
||||
reset = false;
|
||||
if (_infoInterface.HasVideo())
|
||||
{
|
||||
// Some streaming HLS/Dash content don't provide a frame rate
|
||||
if (_infoInterface.GetVideoFrameRate() > 0f)
|
||||
{
|
||||
// Don't reset if within a frame of the end of the video, important for time > duration workaround
|
||||
float secondsPerFrame = 1f / _infoInterface.GetVideoFrameRate();
|
||||
if (_infoInterface.GetDuration() - _controlInterface.GetCurrentTime() > secondsPerFrame)
|
||||
{
|
||||
reset = true;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Just check if we're not beyond the duration
|
||||
if (_controlInterface.GetCurrentTime() < _infoInterface.GetDuration())
|
||||
{
|
||||
reset = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// For audio only media just check if we're not beyond the duration
|
||||
if (_controlInterface.GetCurrentTime() < _infoInterface.GetDuration())
|
||||
{
|
||||
reset = true;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
if (reset)
|
||||
{
|
||||
//Debug.Log("Reset");
|
||||
_eventFired_FinishedPlaying = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void HandleOneShotEvents()
|
||||
{
|
||||
_eventFired_MetaDataReady = FireEventIfPossible(MediaPlayerEvent.EventType.MetaDataReady, _eventFired_MetaDataReady);
|
||||
_eventFired_ReadyToPlay = FireEventIfPossible(MediaPlayerEvent.EventType.ReadyToPlay, _eventFired_ReadyToPlay);
|
||||
_eventFired_Started = FireEventIfPossible(MediaPlayerEvent.EventType.Started, _eventFired_Started);
|
||||
_eventFired_FirstFrameReady = FireEventIfPossible(MediaPlayerEvent.EventType.FirstFrameReady, _eventFired_FirstFrameReady);
|
||||
}
|
||||
|
||||
private void HandleRecurringEvents()
|
||||
{
|
||||
// Subtitle changing
|
||||
if (FireEventIfPossible(MediaPlayerEvent.EventType.SubtitleChange, false))
|
||||
{
|
||||
_previousSubtitleIndex = _subtitlesInterface.GetSubtitleIndex();
|
||||
}
|
||||
|
||||
// Resolution changing
|
||||
if (FireEventIfPossible(MediaPlayerEvent.EventType.ResolutionChanged, false))
|
||||
{
|
||||
_eventState_PreviousWidth = _infoInterface.GetVideoWidth();
|
||||
_eventState_PreviousHeight = _infoInterface.GetVideoHeight();
|
||||
}
|
||||
|
||||
// Timed Metadata
|
||||
if (FireEventIfPossible(MediaPlayerEvent.EventType.TimedMetadataChanged, false))
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Stalling
|
||||
if (IsHandleEvent(MediaPlayerEvent.EventType.Stalled))
|
||||
{
|
||||
bool newState = _infoInterface.IsPlaybackStalled();
|
||||
if (newState != _eventState_PlaybackStalled)
|
||||
{
|
||||
_eventState_PlaybackStalled = newState;
|
||||
|
||||
var newEvent = _eventState_PlaybackStalled ? MediaPlayerEvent.EventType.Stalled : MediaPlayerEvent.EventType.Unstalled;
|
||||
FireEventIfPossible(newEvent, false);
|
||||
}
|
||||
}
|
||||
|
||||
// Seeking
|
||||
if (IsHandleEvent(MediaPlayerEvent.EventType.StartedSeeking))
|
||||
{
|
||||
bool newState = _controlInterface.IsSeeking();
|
||||
if (newState != _eventState_PlaybackSeeking)
|
||||
{
|
||||
_eventState_PlaybackSeeking = newState;
|
||||
|
||||
var newEvent = _eventState_PlaybackSeeking ? MediaPlayerEvent.EventType.StartedSeeking : MediaPlayerEvent.EventType.FinishedSeeking;
|
||||
FireEventIfPossible(newEvent, false);
|
||||
}
|
||||
}
|
||||
|
||||
// Buffering
|
||||
if (IsHandleEvent(MediaPlayerEvent.EventType.StartedBuffering))
|
||||
{
|
||||
bool newState = _controlInterface.IsBuffering();
|
||||
if (newState != _eventState_PlaybackBuffering)
|
||||
{
|
||||
_eventState_PlaybackBuffering = newState;
|
||||
|
||||
var newEvent = _eventState_PlaybackBuffering ? MediaPlayerEvent.EventType.StartedBuffering : MediaPlayerEvent.EventType.FinishedBuffering;
|
||||
FireEventIfPossible(newEvent, false);
|
||||
}
|
||||
}
|
||||
|
||||
// Pausing
|
||||
if (IsHandleEvent(MediaPlayerEvent.EventType.Paused))
|
||||
{
|
||||
bool newState = _controlInterface.IsPaused();
|
||||
if (newState != _eventState_Paused)
|
||||
{
|
||||
_eventState_Paused = newState;
|
||||
var newEvent = _eventState_Paused ? MediaPlayerEvent.EventType.Paused : MediaPlayerEvent.EventType.Unpaused;
|
||||
FireEventIfPossible(newEvent, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void UpdateEvents()
|
||||
{
|
||||
if (_controlInterface == null)
|
||||
return;
|
||||
if (_events == null || !_events.HasListeners())
|
||||
return;
|
||||
|
||||
// Reset some event states that can reset during playback
|
||||
CheckAndClearStartedAndFinishedEvents();
|
||||
|
||||
// Events that can only fire once
|
||||
HandleOneShotEvents();
|
||||
|
||||
// Events that can fire multiple times
|
||||
HandleRecurringEvents();
|
||||
}
|
||||
|
||||
protected bool IsHandleEvent(MediaPlayerEvent.EventType eventType)
|
||||
{
|
||||
return ((uint)_eventMask & (1 << (int)eventType)) != 0;
|
||||
}
|
||||
|
||||
private bool FireEventIfPossible(MediaPlayerEvent.EventType eventType, bool hasFired)
|
||||
{
|
||||
if (CanFireEvent(eventType, hasFired))
|
||||
{
|
||||
#if UNITY_EDITOR
|
||||
// Special internal global event, called when media is loaded
|
||||
// Currently used by the RecentItem class
|
||||
if (eventType == MediaPlayerEvent.EventType.Started)
|
||||
{
|
||||
string fullPath = GetResolvedFilePath(_mediaPath.Path, _mediaPath.PathType);
|
||||
InternalMediaLoadedEvent.Invoke(fullPath);
|
||||
}
|
||||
#endif
|
||||
|
||||
hasFired = true;
|
||||
_events.Invoke(this, eventType, ErrorCode.None);
|
||||
}
|
||||
return hasFired;
|
||||
}
|
||||
|
||||
private bool CanFireEvent(MediaPlayerEvent.EventType et, bool hasFired)
|
||||
{
|
||||
if (_controlInterface == null)
|
||||
return false;
|
||||
if (_events == null)
|
||||
return false;
|
||||
if (hasFired)
|
||||
return false;
|
||||
if (!IsHandleEvent(et))
|
||||
return false;
|
||||
|
||||
bool result = false;
|
||||
switch (et)
|
||||
{
|
||||
case MediaPlayerEvent.EventType.FinishedPlaying:
|
||||
result = (!_controlInterface.IsLooping() && _controlInterface.CanPlay() && _controlInterface.IsFinished());
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.MetaDataReady:
|
||||
result = (_controlInterface.HasMetaData());
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.FirstFrameReady:
|
||||
// [MOZ 20/1/21] Removed HasMetaData check as preventing the event from being triggered on (i|mac|tv)OS
|
||||
result = (_textureInterface != null && _controlInterface.CanPlay() /*&& _controlInterface.HasMetaData()*/ && _textureInterface.GetTextureFrameCount() > 0);
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.ReadyToPlay:
|
||||
result = (!_controlInterface.IsPlaying() && _controlInterface.CanPlay() && !_autoPlayOnStart);
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.Started:
|
||||
result = (_controlInterface.IsPlaying());
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.SubtitleChange:
|
||||
{
|
||||
result = _previousSubtitleIndex != _subtitlesInterface.GetSubtitleIndex();
|
||||
if (!result)
|
||||
{
|
||||
result = _baseMediaPlayer.InternalIsChangedTextCue();
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case MediaPlayerEvent.EventType.Stalled:
|
||||
result = _infoInterface.IsPlaybackStalled();
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.Unstalled:
|
||||
result = !_infoInterface.IsPlaybackStalled();
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.StartedSeeking:
|
||||
result = _controlInterface.IsSeeking();
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.FinishedSeeking:
|
||||
result = !_controlInterface.IsSeeking();
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.StartedBuffering:
|
||||
result = _controlInterface.IsBuffering();
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.FinishedBuffering:
|
||||
result = !_controlInterface.IsBuffering();
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.ResolutionChanged:
|
||||
result = (_infoInterface != null && (_eventState_PreviousWidth != _infoInterface.GetVideoWidth() || _eventState_PreviousHeight != _infoInterface.GetVideoHeight()));
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.Paused:
|
||||
result = _controlInterface.IsPaused();
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.Unpaused:
|
||||
result = !_controlInterface.IsPaused();
|
||||
break;
|
||||
|
||||
case MediaPlayerEvent.EventType.TimedMetadataChanged:
|
||||
result = _baseMediaPlayer.HasNewTimedMetadataItem();
|
||||
break;
|
||||
|
||||
default:
|
||||
Debug.LogWarning("[AVProVideo] Unhandled event type");
|
||||
break;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace RenderHeads.Media.AVProVideo
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 6be886b3f1f953843bda70e505701ee3
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,211 @@
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
#region Extract Frame
|
||||
|
||||
private bool ForceWaitForNewFrame(int lastFrameCount, float timeoutMs)
|
||||
{
|
||||
bool result = false;
|
||||
// Wait for the frame to change, or timeout to happen (for the case that there is no new frame for this time)
|
||||
System.DateTime startTime = System.DateTime.Now;
|
||||
int iterationCount = 0;
|
||||
while (Control != null && (System.DateTime.Now - startTime).TotalMilliseconds < (double)timeoutMs)
|
||||
{
|
||||
_playerInterface.Update();
|
||||
|
||||
// TODO: check if Seeking has completed! Then we don't have to wait
|
||||
|
||||
// If frame has changed we can continue
|
||||
// NOTE: this will never happen because GL.IssuePlugin.Event is never called in this loop
|
||||
if (lastFrameCount != TextureProducer.GetTextureFrameCount())
|
||||
{
|
||||
result = true;
|
||||
break;
|
||||
}
|
||||
|
||||
iterationCount++;
|
||||
|
||||
// NOTE: we tried to add Sleep for 1ms but it was very slow, so switched to this time based method which burns more CPU but about double the speed
|
||||
// NOTE: had to add the Sleep back in as after too many iterations (over 1000000) of GL.IssuePluginEvent Unity seems to lock up
|
||||
// NOTE: seems that GL.IssuePluginEvent can't be called if we're stuck in a while loop and they just stack up
|
||||
//System.Threading.Thread.Sleep(0);
|
||||
}
|
||||
|
||||
_playerInterface.Render();
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create or return (if cached) a camera that is inactive and renders nothing
|
||||
/// This camera is used to call .Render() on which causes the render thread to run
|
||||
/// This is useful for forcing GL.IssuePluginEvent() to run and is used for
|
||||
/// wait for frames to render for ExtractFrame() and UpdateTimeScale()
|
||||
/// </summary>
|
||||
private static Camera GetDummyCamera()
|
||||
{
|
||||
if (_dummyCamera == null)
|
||||
{
|
||||
const string goName = "AVPro Video Dummy Camera";
|
||||
GameObject go = GameObject.Find(goName);
|
||||
if (go == null)
|
||||
{
|
||||
go = new GameObject(goName);
|
||||
go.hideFlags = HideFlags.HideInHierarchy | HideFlags.DontSave;
|
||||
go.SetActive(false);
|
||||
Object.DontDestroyOnLoad(go);
|
||||
|
||||
_dummyCamera = go.AddComponent<Camera>();
|
||||
_dummyCamera.hideFlags = HideFlags.HideInInspector | HideFlags.DontSave;
|
||||
_dummyCamera.cullingMask = 0;
|
||||
_dummyCamera.clearFlags = CameraClearFlags.Nothing;
|
||||
_dummyCamera.enabled = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
_dummyCamera = go.GetComponent<Camera>();
|
||||
}
|
||||
}
|
||||
//Debug.Assert(_dummyCamera != null);
|
||||
return _dummyCamera;
|
||||
}
|
||||
|
||||
private IEnumerator ExtractFrameCoroutine(Texture2D target, ProcessExtractedFrame callback, double timeSeconds = -1.0, bool accurateSeek = true, int timeoutMs = 1000, int timeThresholdMs = 100)
|
||||
{
|
||||
#if (!UNITY_EDITOR && UNITY_ANDROID) || UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN || UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX || UNITY_IOS || UNITY_TVOS
|
||||
Texture2D result = target;
|
||||
|
||||
Texture frame = null;
|
||||
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
if (timeSeconds >= 0f)
|
||||
{
|
||||
Pause();
|
||||
|
||||
// If the right frame is already available (or close enough) just grab it
|
||||
if (TextureProducer.GetTexture() != null && (System.Math.Abs(_controlInterface.GetCurrentTime() - timeSeconds) < (timeThresholdMs / 1000.0)))
|
||||
{
|
||||
frame = TextureProducer.GetTexture();
|
||||
}
|
||||
else
|
||||
{
|
||||
int preSeekFrameCount = _textureInterface.GetTextureFrameCount();
|
||||
|
||||
// Seek to the frame
|
||||
if (accurateSeek)
|
||||
{
|
||||
_controlInterface.Seek(timeSeconds);
|
||||
}
|
||||
else
|
||||
{
|
||||
_controlInterface.SeekFast(timeSeconds);
|
||||
}
|
||||
|
||||
// Wait for the new frame to arrive
|
||||
if (!_controlInterface.WaitForNextFrame(GetDummyCamera(), preSeekFrameCount))
|
||||
{
|
||||
// If WaitForNextFrame fails (e.g. in android single threaded), we run the below code to asynchronously wait for the frame
|
||||
int currFc = TextureProducer.GetTextureFrameCount();
|
||||
int iterations = 0;
|
||||
int maxIterations = 50;
|
||||
|
||||
//+1 as often there will be an extra frame produced after pause (so we need to wait for the second frame instead)
|
||||
while((currFc + 1) >= TextureProducer.GetTextureFrameCount() && iterations++ < maxIterations)
|
||||
{
|
||||
yield return null;
|
||||
}
|
||||
}
|
||||
frame = TextureProducer.GetTexture();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
frame = TextureProducer.GetTexture();
|
||||
}
|
||||
}
|
||||
if (frame != null)
|
||||
{
|
||||
result = Helper.GetReadableTexture(frame, TextureProducer.RequiresVerticalFlip(), Helper.GetOrientation(Info.GetTextureTransform()), target);
|
||||
}
|
||||
#else
|
||||
Texture2D result = ExtractFrame(target, timeSeconds, accurateSeek, timeoutMs, timeThresholdMs);
|
||||
#endif
|
||||
callback(result);
|
||||
|
||||
yield return null;
|
||||
}
|
||||
|
||||
public void ExtractFrameAsync(Texture2D target, ProcessExtractedFrame callback, double timeSeconds = -1.0, bool accurateSeek = true, int timeoutMs = 1000, int timeThresholdMs = 100)
|
||||
{
|
||||
StartCoroutine(ExtractFrameCoroutine(target, callback, timeSeconds, accurateSeek, timeoutMs, timeThresholdMs));
|
||||
}
|
||||
|
||||
// "target" can be null or you can pass in an existing texture.
|
||||
public Texture2D ExtractFrame(Texture2D target, double timeSeconds = -1.0, bool accurateSeek = true, int timeoutMs = 1000, int timeThresholdMs = 100)
|
||||
{
|
||||
Texture2D result = target;
|
||||
|
||||
// Extract frames returns the internal frame of the video player
|
||||
Texture frame = ExtractFrame(timeSeconds, accurateSeek, timeoutMs, timeThresholdMs);
|
||||
if (frame != null)
|
||||
{
|
||||
result = Helper.GetReadableTexture(frame, TextureProducer.RequiresVerticalFlip(), Helper.GetOrientation(Info.GetTextureTransform()), target);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private Texture ExtractFrame(double timeSeconds = -1.0, bool accurateSeek = true, int timeoutMs = 1000, int timeThresholdMs = 100)
|
||||
{
|
||||
Texture result = null;
|
||||
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
if (timeSeconds >= 0f)
|
||||
{
|
||||
Pause();
|
||||
|
||||
// If the right frame is already available (or close enough) just grab it
|
||||
if (TextureProducer.GetTexture() != null && (System.Math.Abs(_controlInterface.GetCurrentTime() - timeSeconds) < (timeThresholdMs / 1000.0)))
|
||||
{
|
||||
result = TextureProducer.GetTexture();
|
||||
}
|
||||
else
|
||||
{
|
||||
// Store frame count before seek
|
||||
int frameCount = TextureProducer.GetTextureFrameCount();
|
||||
|
||||
// Seek to the frame
|
||||
if (accurateSeek)
|
||||
{
|
||||
_controlInterface.Seek(timeSeconds);
|
||||
}
|
||||
else
|
||||
{
|
||||
_controlInterface.SeekFast(timeSeconds);
|
||||
}
|
||||
|
||||
// Wait for frame to change
|
||||
ForceWaitForNewFrame(frameCount, timeoutMs);
|
||||
result = TextureProducer.GetTexture();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
result = TextureProducer.GetTexture();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
#endregion // Extract Frame
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 810d3ce69a3b01f409c733c7cfbd119c
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,129 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
public bool OpenMediaFromBuffer(byte[] buffer, bool autoPlay = true)
|
||||
{
|
||||
_mediaPath = new MediaPath("buffer", MediaPathType.AbsolutePathOrURL);
|
||||
_autoPlayOnStart = autoPlay;
|
||||
|
||||
if (_controlInterface == null)
|
||||
{
|
||||
Initialise();
|
||||
}
|
||||
|
||||
return OpenMediaFromBufferInternal(buffer);
|
||||
}
|
||||
|
||||
public bool StartOpenChunkedMediaFromBuffer(ulong length, bool autoPlay = true)
|
||||
{
|
||||
_mediaPath = new MediaPath("buffer", MediaPathType.AbsolutePathOrURL);
|
||||
_autoPlayOnStart = autoPlay;
|
||||
|
||||
if (_controlInterface == null)
|
||||
{
|
||||
Initialise();
|
||||
}
|
||||
|
||||
return StartOpenMediaFromBufferInternal(length);
|
||||
}
|
||||
|
||||
public bool AddChunkToVideoBuffer(byte[] chunk, ulong offset, ulong chunkSize)
|
||||
{
|
||||
return AddChunkToBufferInternal(chunk, offset, chunkSize);
|
||||
}
|
||||
|
||||
public bool EndOpenChunkedVideoFromBuffer()
|
||||
{
|
||||
return EndOpenMediaFromBufferInternal();
|
||||
}
|
||||
|
||||
private bool OpenMediaFromBufferInternal(byte[] buffer)
|
||||
{
|
||||
bool result = false;
|
||||
// Open the video file
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
CloseMedia();
|
||||
|
||||
_isMediaOpened = true;
|
||||
_autoPlayOnStartTriggered = !_autoPlayOnStart;
|
||||
|
||||
Helper.LogInfo("Opening buffer of length " + buffer.Length, this);
|
||||
|
||||
if (!_controlInterface.OpenMediaFromBuffer(buffer))
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to open buffer", this);
|
||||
if (GetCurrentPlatformOptions() != PlatformOptionsWindows || PlatformOptionsWindows.videoApi != Windows.VideoApi.DirectShow)
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Loading from buffer is currently only supported in Windows when using the DirectShow API");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
SetPlaybackOptions();
|
||||
result = true;
|
||||
StartRenderCoroutine();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private bool StartOpenMediaFromBufferInternal(ulong length)
|
||||
{
|
||||
bool result = false;
|
||||
// Open the video file
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
CloseMedia();
|
||||
|
||||
_isMediaOpened = true;
|
||||
_autoPlayOnStartTriggered = !_autoPlayOnStart;
|
||||
|
||||
Helper.LogInfo("Starting Opening buffer of length " + length, this);
|
||||
|
||||
if (!_controlInterface.StartOpenMediaFromBuffer(length))
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to start open video from buffer", this);
|
||||
if (GetCurrentPlatformOptions() != PlatformOptionsWindows || PlatformOptionsWindows.videoApi != Windows.VideoApi.DirectShow)
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Loading from buffer is currently only supported in Windows when using the DirectShow API");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
SetPlaybackOptions();
|
||||
result = true;
|
||||
StartRenderCoroutine();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private bool AddChunkToBufferInternal(byte[] chunk, ulong offset, ulong chunkSize)
|
||||
{
|
||||
if (Control != null)
|
||||
{
|
||||
return Control.AddChunkToMediaBuffer(chunk, offset, chunkSize);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private bool EndOpenMediaFromBufferInternal()
|
||||
{
|
||||
if (Control != null)
|
||||
{
|
||||
return Control.EndOpenMediaFromBuffer();
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: bd1bd18da7d2dc7468c9799e5b02caea
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,60 @@
|
||||
using UnityEngine;
|
||||
#if NETFX_CORE
|
||||
using Windows.Storage.Streams;
|
||||
#endif
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
|
||||
#if NETFX_CORE
|
||||
public bool OpenVideoFromStream(IRandomAccessStream ras, string path, bool autoPlay = true)
|
||||
{
|
||||
_videoLocation = FileLocation.AbsolutePathOrURL;
|
||||
_videoPath = path;
|
||||
_autoPlayOnStart = autoPlay;
|
||||
|
||||
if (_controlInterface == null)
|
||||
{
|
||||
Initialise();
|
||||
}
|
||||
|
||||
return OpenVideoFromStream(ras);
|
||||
}
|
||||
|
||||
private bool OpenVideoFromStream(IRandomAccessStream ras)
|
||||
{
|
||||
bool result = false;
|
||||
// Open the video file
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
CloseVideo();
|
||||
|
||||
_isVideoOpened = true;
|
||||
_autoPlayOnStartTriggered = !_autoPlayOnStart;
|
||||
|
||||
// Potentially override the file location
|
||||
long fileOffset = GetPlatformFileOffset();
|
||||
|
||||
if (!Control.OpenVideoFromFile(ras, _videoPath, fileOffset, null, _manuallySetAudioSourceProperties ? _sourceAudioSampleRate : 0,
|
||||
_manuallySetAudioSourceProperties ? _sourceAudioChannels : 0))
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to open " + _videoPath, this);
|
||||
}
|
||||
else
|
||||
{
|
||||
SetPlaybackOptions();
|
||||
result = true;
|
||||
StartRenderCoroutine();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 4e6c8c5399247d0478ed7ecf17b7d87f
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 1d9536a1e758279489d9add3e1ba26ad
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,150 @@
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
public bool EnableSubtitles(MediaPath mediaPath)
|
||||
{
|
||||
bool result = false;
|
||||
if (_subtitlesInterface != null)
|
||||
{
|
||||
if (mediaPath != null && !string.IsNullOrEmpty(mediaPath.Path))
|
||||
{
|
||||
string fullPath = mediaPath.GetResolvedFullPath();
|
||||
|
||||
bool checkForFileExist = true;
|
||||
if (fullPath.Contains("://"))
|
||||
{
|
||||
checkForFileExist = false;
|
||||
}
|
||||
#if (!UNITY_EDITOR && UNITY_ANDROID)
|
||||
checkForFileExist = false;
|
||||
#endif
|
||||
|
||||
if (checkForFileExist && !System.IO.File.Exists(fullPath))
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Subtitle file not found: " + fullPath, this);
|
||||
}
|
||||
else
|
||||
{
|
||||
Helper.LogInfo("Opening subtitles " + fullPath, this);
|
||||
|
||||
_previousSubtitleIndex = -1;
|
||||
|
||||
try
|
||||
{
|
||||
if (fullPath.Contains("://"))
|
||||
{
|
||||
// Use coroutine and WWW class for loading
|
||||
if (_loadSubtitlesRoutine != null)
|
||||
{
|
||||
StopCoroutine(_loadSubtitlesRoutine);
|
||||
_loadSubtitlesRoutine = null;
|
||||
}
|
||||
_loadSubtitlesRoutine = StartCoroutine(LoadSubtitlesCoroutine(fullPath, mediaPath));
|
||||
}
|
||||
else
|
||||
{
|
||||
// Load directly from file
|
||||
string subtitleData = System.IO.File.ReadAllText(fullPath);
|
||||
if (_subtitlesInterface.LoadSubtitlesSRT(subtitleData))
|
||||
{
|
||||
_subtitlePath = mediaPath;
|
||||
_sideloadSubtitles = false;
|
||||
result = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to load subtitles" + fullPath, this);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
catch (System.Exception e)
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to load subtitles " + fullPath, this);
|
||||
Debug.LogException(e, this);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[AVProVideo] No subtitle file path specified", this);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_queueSubtitlePath = mediaPath;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private IEnumerator LoadSubtitlesCoroutine(string url, MediaPath mediaPath)
|
||||
{
|
||||
UnityEngine.Networking.UnityWebRequest www = UnityEngine.Networking.UnityWebRequest.Get(url);
|
||||
#if UNITY_2017_2_OR_NEWER
|
||||
yield return www.SendWebRequest();
|
||||
#else
|
||||
yield return www.Send();
|
||||
#endif
|
||||
|
||||
string subtitleData = string.Empty;
|
||||
|
||||
#if UNITY_2020_1_OR_NEWER
|
||||
if (www.result == UnityEngine.Networking.UnityWebRequest.Result.Success)
|
||||
#elif UNITY_2017_1_OR_NEWER
|
||||
if (!www.isNetworkError)
|
||||
#else
|
||||
if (!www.isError)
|
||||
#endif
|
||||
{
|
||||
subtitleData = ((UnityEngine.Networking.DownloadHandler)www.downloadHandler).text;
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Error loading subtitles '" + www.error + "' from " + url);
|
||||
}
|
||||
|
||||
if (_subtitlesInterface.LoadSubtitlesSRT(subtitleData))
|
||||
{
|
||||
_subtitlePath = mediaPath;
|
||||
_sideloadSubtitles = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to load subtitles" + url, this);
|
||||
}
|
||||
|
||||
_loadSubtitlesRoutine = null;
|
||||
|
||||
www.Dispose();
|
||||
}
|
||||
|
||||
public void DisableSubtitles()
|
||||
{
|
||||
if (_loadSubtitlesRoutine != null)
|
||||
{
|
||||
StopCoroutine(_loadSubtitlesRoutine);
|
||||
_loadSubtitlesRoutine = null;
|
||||
}
|
||||
|
||||
if (_subtitlesInterface != null)
|
||||
{
|
||||
_previousSubtitleIndex = -1;
|
||||
_sideloadSubtitles = false;
|
||||
_subtitlesInterface.LoadSubtitlesSRT(string.Empty);
|
||||
}
|
||||
else
|
||||
{
|
||||
_queueSubtitlePath = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: f4ed2744d6ff80845bbbd59e8f6c732b
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,93 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
|
||||
#region Support for Time Scale
|
||||
#if AVPROVIDEO_BETA_SUPPORT_TIMESCALE
|
||||
// Adjust this value to get faster performance but may drop frames.
|
||||
// Wait longer to ensure there is enough time for frames to process
|
||||
private const float TimeScaleTimeoutMs = 20f;
|
||||
private bool _timeScaleIsControlling;
|
||||
private double _timeScaleVideoTime;
|
||||
|
||||
private void UpdateTimeScale()
|
||||
{
|
||||
if (Time.timeScale != 1f || Time.captureFramerate != 0)
|
||||
{
|
||||
if (_controlInterface.IsPlaying())
|
||||
{
|
||||
_controlInterface.Pause();
|
||||
_timeScaleIsControlling = true;
|
||||
_timeScaleVideoTime = _controlInterface.GetCurrentTime();
|
||||
}
|
||||
|
||||
if (_timeScaleIsControlling)
|
||||
{
|
||||
// Progress time
|
||||
_timeScaleVideoTime += Time.deltaTime;
|
||||
|
||||
// Handle looping
|
||||
if (_controlInterface.IsLooping() && _timeScaleVideoTime >= Info.GetDuration())
|
||||
{
|
||||
// TODO: really we should seek to (_timeScaleVideoTime % Info.GetDuration())
|
||||
_timeScaleVideoTime = 0.0;
|
||||
}
|
||||
|
||||
int preSeekFrameCount = TextureProducer.GetTextureFrameCount();
|
||||
|
||||
// Seek to the new time
|
||||
{
|
||||
double preSeekTime = Control.GetCurrentTime();
|
||||
|
||||
// Seek
|
||||
_controlInterface.Seek(_timeScaleVideoTime);
|
||||
|
||||
// Early out, if after the seek the time hasn't changed, the seek was probably too small to go to the next frame.
|
||||
// TODO: This behaviour may be different on other platforms (not Windows) and needs more testing.
|
||||
if (Mathf.Approximately((float)preSeekTime, (float)_controlInterface.GetCurrentTime()))
|
||||
{
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for the new frame to arrive
|
||||
if (!_controlInterface.WaitForNextFrame(GetDummyCamera(), preSeekFrameCount))
|
||||
{
|
||||
// If WaitForNextFrame fails (e.g. in android single threaded), we run the below code to asynchronously wait for the frame
|
||||
System.DateTime startTime = System.DateTime.Now;
|
||||
int lastFrameCount = TextureProducer.GetTextureFrameCount();
|
||||
|
||||
while (_controlInterface != null && (System.DateTime.Now - startTime).TotalMilliseconds < (double)TimeScaleTimeoutMs)
|
||||
{
|
||||
_playerInterface.Update();
|
||||
_playerInterface.Render();
|
||||
GetDummyCamera().Render();
|
||||
if (lastFrameCount != TextureProducer.GetTextureFrameCount())
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Restore playback when timeScale becomes 1
|
||||
if (_timeScaleIsControlling)
|
||||
{
|
||||
_controlInterface.Play();
|
||||
_timeScaleIsControlling = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#endregion // Support for Time Scale
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: cdb92d6bab7106944bcd3cd7a034df6e
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,83 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour, ISerializationCallbackReceiver
|
||||
{
|
||||
#region Upgrade from Version 1.x
|
||||
[SerializeField, HideInInspector]
|
||||
private string m_VideoPath;
|
||||
[SerializeField, HideInInspector]
|
||||
private FileLocation m_VideoLocation = FileLocation.RelativeToStreamingAssetsFolder;
|
||||
|
||||
private enum FileLocation
|
||||
{
|
||||
AbsolutePathOrURL,
|
||||
RelativeToProjectFolder,
|
||||
RelativeToStreamingAssetsFolder,
|
||||
RelativeToDataFolder,
|
||||
RelativeToPersistentDataFolder,
|
||||
}
|
||||
|
||||
/*
|
||||
[SerializeField, HideInInspector]
|
||||
private StereoPacking m_StereoPacking;
|
||||
[SerializeField, HideInInspector]
|
||||
private AlphaPacking m_AlphaPacking;
|
||||
*/
|
||||
|
||||
void ISerializationCallbackReceiver.OnBeforeSerialize()
|
||||
{
|
||||
/*
|
||||
m_StereoPacking = _fallbackMediaHints.stereoPacking;
|
||||
m_AlphaPacking = _fallbackMediaHints.alphaPacking;
|
||||
*/
|
||||
}
|
||||
|
||||
void ISerializationCallbackReceiver.OnAfterDeserialize()
|
||||
{
|
||||
if (!string.IsNullOrEmpty(m_VideoPath))
|
||||
{
|
||||
MediaPathType mediaPathType = MediaPathType.AbsolutePathOrURL;
|
||||
switch (m_VideoLocation)
|
||||
{
|
||||
default:
|
||||
case FileLocation.AbsolutePathOrURL:
|
||||
mediaPathType = MediaPathType.AbsolutePathOrURL;
|
||||
break;
|
||||
case FileLocation.RelativeToProjectFolder:
|
||||
mediaPathType = MediaPathType.RelativeToProjectFolder;
|
||||
break;
|
||||
case FileLocation.RelativeToStreamingAssetsFolder:
|
||||
mediaPathType = MediaPathType.RelativeToStreamingAssetsFolder;
|
||||
break;
|
||||
case FileLocation.RelativeToDataFolder:
|
||||
mediaPathType = MediaPathType.RelativeToDataFolder;
|
||||
break;
|
||||
case FileLocation.RelativeToPersistentDataFolder:
|
||||
mediaPathType = MediaPathType.RelativeToPersistentDataFolder;
|
||||
break;
|
||||
}
|
||||
_mediaPath = new MediaPath(m_VideoPath, mediaPathType);
|
||||
_mediaSource = MediaSource.Path;
|
||||
m_VideoPath = null;
|
||||
}
|
||||
|
||||
/*
|
||||
if (m_StereoPacking != _fallbackMediaHints.stereoPacking)
|
||||
{
|
||||
_fallbackMediaHints.stereoPacking = m_StereoPacking;
|
||||
}
|
||||
if (m_AlphaPacking != _fallbackMediaHints.alphaPacking)
|
||||
{
|
||||
_fallbackMediaHints.alphaPacking = m_AlphaPacking;
|
||||
}
|
||||
*/
|
||||
}
|
||||
#endregion // Upgrade from Version 1.x
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 2e1421b74b1861b42ba7287d322c2f19
|
||||
timeCreated: 1614963169
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
1015
Assets/AVProVideo/Runtime/Scripts/Components/PlaylistMediaPlayer.cs
Normal file
1015
Assets/AVProVideo/Runtime/Scripts/Components/PlaylistMediaPlayer.cs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,17 @@
|
||||
fileFormatVersion: 2
|
||||
guid: e9ea31f33222f4b418e4e051a8a5ed24
|
||||
timeCreated: 1588679963
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences:
|
||||
- m_AudioHeadTransform: {instanceID: 0}
|
||||
- m_AudioFocusTransform: {instanceID: 0}
|
||||
- _transitionShader: {fileID: 4800000, guid: 73f378cafe7b4a745907b70e76bb3259, type: 3}
|
||||
- _playerA: {instanceID: 0}
|
||||
- _playerB: {instanceID: 0}
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,235 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2019-2023 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// Renders the video texture to a RenderTexture - either one provided by the user (external) or to an internal one.
|
||||
/// The video frames can optionally be "resolved" to unpack packed alpha, display a single stereo eye, generate mip maps, and apply colorspace conversions
|
||||
[AddComponentMenu("AVPro Video/Resolve To RenderTexture", 330)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public class ResolveToRenderTexture : MonoBehaviour
|
||||
{
|
||||
[SerializeField] MediaPlayer _mediaPlayer = null;
|
||||
[SerializeField] VideoResolveOptions _options = VideoResolveOptions.Create();
|
||||
[SerializeField] VideoRender.ResolveFlags _resolveFlags = (VideoRender.ResolveFlags.ColorspaceSRGB | VideoRender.ResolveFlags.Mipmaps | VideoRender.ResolveFlags.PackedAlpha | VideoRender.ResolveFlags.StereoLeft);
|
||||
[SerializeField] RenderTexture _externalTexture = null;
|
||||
|
||||
private Material _materialResolve;
|
||||
private bool _isMaterialSetup;
|
||||
private bool _isMaterialDirty;
|
||||
private bool _isMaterialOES;
|
||||
private RenderTexture _internalTexture;
|
||||
private int _textureFrameCount = -1;
|
||||
|
||||
// Material used for blitting the texture as we need a shader to provide clamp to border colour style texture sampling
|
||||
private Material _materialBlit;
|
||||
private int _srcTexId;
|
||||
|
||||
public MediaPlayer MediaPlayer
|
||||
{
|
||||
get
|
||||
{
|
||||
return _mediaPlayer;
|
||||
}
|
||||
set
|
||||
{
|
||||
ChangeMediaPlayer(value);
|
||||
}
|
||||
}
|
||||
|
||||
public VideoResolveOptions VideoResolveOptions
|
||||
{
|
||||
get
|
||||
{
|
||||
return _options;
|
||||
}
|
||||
set
|
||||
{
|
||||
_options = value;
|
||||
_isMaterialDirty = true;
|
||||
}
|
||||
}
|
||||
|
||||
public RenderTexture ExternalTexture
|
||||
{
|
||||
get
|
||||
{
|
||||
return _externalTexture;
|
||||
}
|
||||
set
|
||||
{
|
||||
_externalTexture = value;
|
||||
}
|
||||
}
|
||||
|
||||
public RenderTexture TargetTexture
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_externalTexture == null)
|
||||
return _internalTexture;
|
||||
return _externalTexture;
|
||||
}
|
||||
}
|
||||
|
||||
public void SetMaterialDirty()
|
||||
{
|
||||
_isMaterialDirty = true;
|
||||
}
|
||||
|
||||
private void ChangeMediaPlayer(MediaPlayer mediaPlayer)
|
||||
{
|
||||
if (_mediaPlayer != mediaPlayer)
|
||||
{
|
||||
_mediaPlayer = mediaPlayer;
|
||||
_textureFrameCount = -1;
|
||||
_isMaterialSetup = false;
|
||||
_isMaterialDirty = true;
|
||||
Resolve();
|
||||
}
|
||||
}
|
||||
|
||||
void Start()
|
||||
{
|
||||
_isMaterialOES = _mediaPlayer != null ? _mediaPlayer.IsUsingAndroidOESPath() : false;
|
||||
_materialResolve = VideoRender.CreateResolveMaterial(_isMaterialOES);
|
||||
VideoRender.SetupMaterialForMedia(_materialResolve, _mediaPlayer, -1);
|
||||
|
||||
_materialBlit = new Material(Shader.Find("AVProVideo/Internal/Blit"));
|
||||
_srcTexId = Shader.PropertyToID("_SrcTex");
|
||||
}
|
||||
|
||||
void LateUpdate()
|
||||
{
|
||||
Resolve();
|
||||
}
|
||||
|
||||
public void Resolve()
|
||||
{
|
||||
ITextureProducer textureProducer = _mediaPlayer != null ? _mediaPlayer.TextureProducer : null;
|
||||
if (textureProducer == null)
|
||||
return;
|
||||
|
||||
if (textureProducer.GetTexture())
|
||||
{
|
||||
// Check for a swap between OES and none-OES
|
||||
bool playerIsOES = _mediaPlayer.IsUsingAndroidOESPath();
|
||||
if (_isMaterialOES != playerIsOES)
|
||||
{
|
||||
_isMaterialOES = playerIsOES;
|
||||
_materialResolve = VideoRender.CreateResolveMaterial(playerIsOES);
|
||||
}
|
||||
|
||||
if (!_isMaterialSetup)
|
||||
{
|
||||
VideoRender.SetupMaterialForMedia(_materialResolve, _mediaPlayer, -1);
|
||||
_isMaterialSetup = true;
|
||||
_isMaterialDirty = true;
|
||||
}
|
||||
|
||||
if (_isMaterialDirty)
|
||||
{
|
||||
VideoRender.SetupResolveMaterial(_materialResolve, _options);
|
||||
_isMaterialDirty = false;
|
||||
}
|
||||
|
||||
int textureFrameCount = textureProducer.GetTextureFrameCount();
|
||||
if (textureFrameCount != _textureFrameCount)
|
||||
{
|
||||
_internalTexture = VideoRender.ResolveVideoToRenderTexture(_materialResolve, _internalTexture, textureProducer, _resolveFlags);
|
||||
_textureFrameCount = textureFrameCount;
|
||||
|
||||
if (_internalTexture && _externalTexture)
|
||||
{
|
||||
float srcAspectRatio = (float)_internalTexture.width / (float)_internalTexture.height;
|
||||
float dstAspectRatio = (float)_externalTexture.width / (float)_externalTexture.height;
|
||||
|
||||
Vector2 offset = Vector2.zero;
|
||||
Vector2 scale = new Vector2(1.0f, 1.0f);
|
||||
|
||||
// No point in handling the aspect ratio if the textures dimension's are the same
|
||||
if (srcAspectRatio != dstAspectRatio)
|
||||
{
|
||||
switch (_options.aspectRatio)
|
||||
{
|
||||
case VideoResolveOptions.AspectRatio.NoScaling:
|
||||
scale.x = (float)_externalTexture.width / (float)_internalTexture.width;
|
||||
scale.y = (float)_externalTexture.height / (float)_internalTexture.height;
|
||||
offset.x = (1.0f - scale.x) * 0.5f;
|
||||
offset.y = (1.0f - scale.y) * 0.5f;
|
||||
break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.FitVertically:
|
||||
scale.x = (float)_internalTexture.height / (float)_internalTexture.width * dstAspectRatio;
|
||||
offset.x = (1.0f - scale.x) * 0.5f;
|
||||
break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.FitHorizontally:
|
||||
scale.y = (float)_externalTexture.height / (float)_externalTexture.width * srcAspectRatio;
|
||||
offset.y = (1.0f - scale.y) * 0.5f;
|
||||
break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.FitInside:
|
||||
{
|
||||
if (srcAspectRatio > dstAspectRatio)
|
||||
goto case VideoResolveOptions.AspectRatio.FitHorizontally;
|
||||
else if (srcAspectRatio < dstAspectRatio)
|
||||
goto case VideoResolveOptions.AspectRatio.FitVertically;
|
||||
} break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.FitOutside:
|
||||
{
|
||||
if (srcAspectRatio > dstAspectRatio)
|
||||
goto case VideoResolveOptions.AspectRatio.FitVertically;
|
||||
else if (srcAspectRatio < dstAspectRatio)
|
||||
goto case VideoResolveOptions.AspectRatio.FitHorizontally;
|
||||
} break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.Stretch:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: This blit can be removed once we can ResolveVideoToRenderTexture is made not to recreate textures
|
||||
// NOTE: This blit probably doesn't do correct linear/srgb conversion if the colorspace settings differ, may have to use GL.sRGBWrite
|
||||
// NOTE: Cannot use _MainTex as Graphics.Blit replaces the texture offset and scale when using a material
|
||||
_materialBlit.SetTexture(_srcTexId, _internalTexture);
|
||||
_materialBlit.SetTextureOffset(_srcTexId, offset);
|
||||
_materialBlit.SetTextureScale(_srcTexId, scale);
|
||||
Graphics.Blit(null, _externalTexture, _materialBlit, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void OnDisable()
|
||||
{
|
||||
if (_internalTexture)
|
||||
{
|
||||
RenderTexture.ReleaseTemporary(_internalTexture);
|
||||
_internalTexture = null;
|
||||
}
|
||||
}
|
||||
|
||||
void OnDestroy()
|
||||
{
|
||||
if (_materialResolve)
|
||||
{
|
||||
Destroy(_materialResolve);
|
||||
_materialResolve = null;
|
||||
}
|
||||
}
|
||||
#if false
|
||||
void OnGUI()
|
||||
{
|
||||
if (TargetTexture)
|
||||
{
|
||||
GUI.DrawTexture(new Rect(0f, 0f, Screen.width * 0.8f, Screen.height * 0.8f), TargetTexture, ScaleMode.ScaleToFit, true);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 448e5e4039505584c852da1a7cc5c361
|
||||
timeCreated: 1654790987
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,182 @@
|
||||
#if !UNITY_OPENHARMONY
|
||||
|
||||
#if UNITY_ANDROID
|
||||
#if USING_URP
|
||||
#define ANDROID_URP
|
||||
#endif
|
||||
#endif
|
||||
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.Rendering;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// This script is needed to send the camera position to the stereo shader so that
|
||||
/// it can determine which eye it is rendering. This is only needed for multi-pass
|
||||
/// rendering, as single pass has a built-in shader variable
|
||||
/// </summary>
|
||||
[AddComponentMenu("AVPro Video/Update Multi-Pass Stereo", 320)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public class UpdateMultiPassStereo : MonoBehaviour
|
||||
{
|
||||
[Header("Stereo camera")]
|
||||
[SerializeField] Camera _camera = null;
|
||||
|
||||
public Camera Camera
|
||||
{
|
||||
get { return _camera; }
|
||||
set { _camera = value; }
|
||||
}
|
||||
|
||||
private static readonly LazyShaderProperty PropWorldCameraPosition = new LazyShaderProperty("_WorldCameraPosition");
|
||||
private static readonly LazyShaderProperty PropWorldCameraRight = new LazyShaderProperty("_WorldCameraRight");
|
||||
|
||||
// State
|
||||
|
||||
private Camera _foundCamera;
|
||||
|
||||
void Awake()
|
||||
{
|
||||
if (_camera == null)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo] No camera set for UpdateMultiPassStereo component. If you are rendering in multi-pass stereo then it is recommended to set this.");
|
||||
}
|
||||
}
|
||||
|
||||
void Start()
|
||||
{
|
||||
LogXRDeviceDetails();
|
||||
|
||||
#if ANDROID_URP
|
||||
if( GetComponent<Camera>() == null )
|
||||
{
|
||||
throw new MissingComponentException("[AVProVideo] When using URP the UpdateMultiPassStereo component must be on the Camera gameobject. This component is not required on all VR devices, but if it is then stereo eye rendering may not work correctly.");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
private void LogXRDeviceDetails()
|
||||
{
|
||||
#if UNITY_2019_1_OR_NEWER && !UNITY_TVOS
|
||||
string logOutput = "[AVProVideo] XR Device details: UnityEngine.XR.XRSettings.loadedDeviceName = " + UnityEngine.XR.XRSettings.loadedDeviceName + " | supportedDevices = ";
|
||||
|
||||
string[] aSupportedDevices = UnityEngine.XR.XRSettings.supportedDevices;
|
||||
int supportedDeviceCount = aSupportedDevices.Length;
|
||||
for (int i = 0; i < supportedDeviceCount; i++)
|
||||
{
|
||||
logOutput += aSupportedDevices[i];
|
||||
if( i < (supportedDeviceCount - 1 ))
|
||||
{
|
||||
logOutput += ", ";
|
||||
}
|
||||
}
|
||||
|
||||
List<UnityEngine.XR.InputDevice> inputDevices = new List<UnityEngine.XR.InputDevice>();
|
||||
UnityEngine.XR.InputDevices.GetDevices(inputDevices);
|
||||
int deviceCount = inputDevices.Count;
|
||||
if (deviceCount > 0)
|
||||
{
|
||||
logOutput += " | XR Devices = ";
|
||||
|
||||
for (int i = 0; i < deviceCount; i++)
|
||||
{
|
||||
logOutput += inputDevices[i].name;
|
||||
if( i < (deviceCount -1 ))
|
||||
{
|
||||
logOutput += ", ";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
UnityEngine.XR.InputDevice headDevice = UnityEngine.XR.InputDevices.GetDeviceAtXRNode(UnityEngine.XR.XRNode.Head);
|
||||
if( headDevice != null )
|
||||
{
|
||||
logOutput += " | headDevice name = " + headDevice.name + ", manufacturer = " + headDevice.manufacturer;
|
||||
}
|
||||
|
||||
Debug.Log(logOutput);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
#if ANDROID_URP
|
||||
void OnEnable()
|
||||
{
|
||||
RenderPipelineManager.beginCameraRendering += RenderPipelineManager_beginCameraRendering;
|
||||
}
|
||||
void OnDisable()
|
||||
{
|
||||
RenderPipelineManager.beginCameraRendering -= RenderPipelineManager_beginCameraRendering;
|
||||
}
|
||||
#endif
|
||||
|
||||
private static bool IsMultiPassVrEnabled()
|
||||
{
|
||||
#if UNITY_TVOS
|
||||
return false;
|
||||
#else
|
||||
#if UNITY_2017_2_OR_NEWER
|
||||
if (!UnityEngine.XR.XRSettings.enabled) return false;
|
||||
#endif
|
||||
#if UNITY_2018_3_OR_NEWER
|
||||
if (UnityEngine.XR.XRSettings.stereoRenderingMode != UnityEngine.XR.XRSettings.StereoRenderingMode.MultiPass) return false;
|
||||
#endif
|
||||
return true;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
// We do a LateUpdate() to allow for any changes in the camera position that may have happened in Update()
|
||||
#if ANDROID_URP
|
||||
// Android URP
|
||||
private void RenderPipelineManager_beginCameraRendering(ScriptableRenderContext context, Camera camera)
|
||||
#else
|
||||
// Normal render pipeline
|
||||
private void LateUpdate()
|
||||
#endif
|
||||
{
|
||||
if (!IsMultiPassVrEnabled())
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (_camera != null && _foundCamera != _camera)
|
||||
{
|
||||
_foundCamera = _camera;
|
||||
}
|
||||
if (_foundCamera == null)
|
||||
{
|
||||
_foundCamera = Camera.main;
|
||||
if (_foundCamera == null)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo] Cannot find main camera for UpdateMultiPassStereo, this can lead to eyes flickering");
|
||||
if (Camera.allCameras.Length > 0)
|
||||
{
|
||||
_foundCamera = Camera.allCameras[0];
|
||||
Debug.LogWarning("[AVProVideo] UpdateMultiPassStereo using camera " + _foundCamera.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_foundCamera != null)
|
||||
{
|
||||
#if ANDROID_URP
|
||||
Shader.EnableKeyword("USING_URP");
|
||||
#else
|
||||
Shader.DisableKeyword("USING_URP");
|
||||
#endif
|
||||
|
||||
Shader.SetGlobalVector(PropWorldCameraPosition.Id, _foundCamera.transform.position);
|
||||
Shader.SetGlobalVector(PropWorldCameraRight.Id, _foundCamera.transform.right);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 8b2366b5575fcba46a0f97038fb6c5fb
|
||||
timeCreated: 1611065944
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
Reference in New Issue
Block a user