first commit
This commit is contained in:
9
Assets/AVProVideo/Runtime/Scripts/AssetTypes.meta
Normal file
9
Assets/AVProVideo/Runtime/Scripts/AssetTypes.meta
Normal file
@@ -0,0 +1,9 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 9c5f6cc7a68822c4c906fad89505801a
|
||||
folderAsset: yes
|
||||
timeCreated: 1592333515
|
||||
licenseType: Store
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
124
Assets/AVProVideo/Runtime/Scripts/AssetTypes/MediaReference.cs
Normal file
124
Assets/AVProVideo/Runtime/Scripts/AssetTypes/MediaReference.cs
Normal file
@@ -0,0 +1,124 @@
|
||||
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

namespace RenderHeads.Media.AVProVideo
{
	/// <summary>
	/// A ScriptableObject asset that stores a path to a media file, playback hints,
	/// and optional per-platform override references.
	/// </summary>
	[System.Serializable]
	[CreateAssetMenu(fileName = "MediaReference", menuName = "AVPro Video/Media Reference", order = 51)]
	public class MediaReference : ScriptableObject
	{
		[SerializeField] string _alias = string.Empty;

		/// <summary>Human-readable name for this media reference.</summary>
		public string Alias { get => _alias; set => _alias = value; }

		[SerializeField] MediaPath _mediaPath = new MediaPath();

		/// <summary>The path of the media this reference points at.</summary>
		public MediaPath MediaPath { get => _mediaPath; set => _mediaPath = value; }

		[Header("Media Hints")]

		[SerializeField] MediaHints _hints = MediaHints.Default;

		/// <summary>Hints describing the media.</summary>
		public MediaHints Hints { get => _hints; set => _hints = value; }

		[Header("Platform Overrides")]

		[SerializeField] MediaReference _macOS = null;
		[SerializeField] MediaReference _windows = null;
		[SerializeField] MediaReference _android = null;
		[SerializeField] MediaReference _openharmony = null;
		[SerializeField] MediaReference _iOS = null;
		[SerializeField] MediaReference _tvOS = null;
		[SerializeField] MediaReference _windowsUWP = null;
		[SerializeField] MediaReference _webGL = null;

#if UNITY_EDITOR
		// Raw pixel data for the inspector preview thumbnail (editor only).
		[SerializeField, HideInInspector] byte[] _preview = null;

		/// <summary>
		/// Stores the raw pixel data of the given texture as this asset's preview
		/// and marks the asset dirty so the preview is persisted.
		/// </summary>
		/// <param name="texture">Texture to capture; may be null to clear the preview.</param>
		/// <returns>The texture that was passed in.</returns>
		public Texture2D GeneratePreview(Texture2D texture)
		{
			_preview = null;
			if (texture)
			{
				texture.Apply(true, false);
				_preview = texture.GetRawTextureData();
			}
			UnityEditor.EditorUtility.SetDirty(this);
			return texture;
		}

		/// <summary>
		/// Loads the stored preview pixel data into the given texture.
		/// </summary>
		/// <returns>True when preview data was present and applied.</returns>
		public bool GetPreview(Texture2D texture)
		{
			// NOTE(review): requires strictly more than 128*128*4 bytes — presumably
			// because GetRawTextureData included mipmap levels; confirm against the writer.
			bool hasPreview = (_preview != null) && (_preview.Length > 0) && (_preview.Length > 128*128*4);
			if (!hasPreview)
			{
				return false;
			}
			texture.LoadRawTextureData(_preview);
			texture.Apply(true, false);
			return true;
		}
#endif

		/// <summary>
		/// Resolves the media reference to use for the platform currently running
		/// (or being targeted in the editor). Falls back to this reference when no
		/// platform override is assigned.
		/// </summary>
		public MediaReference GetCurrentPlatformMediaReference()
		{
			MediaReference platformRef = null;

#if (UNITY_EDITOR_OSX && UNITY_IOS) || (!UNITY_EDITOR && UNITY_IOS)
			platformRef = GetPlatformMediaReference(Platform.iOS);
#elif (UNITY_EDITOR_OSX && UNITY_TVOS) || (!UNITY_EDITOR && UNITY_TVOS)
			platformRef = GetPlatformMediaReference(Platform.tvOS);
#elif (UNITY_EDITOR_OSX || (!UNITY_EDITOR && UNITY_STANDALONE_OSX))
			platformRef = GetPlatformMediaReference(Platform.macOS);
#elif (UNITY_EDITOR_WIN) || (!UNITY_EDITOR && UNITY_STANDALONE_WIN)
			platformRef = GetPlatformMediaReference(Platform.Windows);
#elif (!UNITY_EDITOR && UNITY_WSA_10_0)
			platformRef = GetPlatformMediaReference(Platform.WindowsUWP);
#elif (!UNITY_EDITOR && UNITY_ANDROID)
			platformRef = GetPlatformMediaReference(Platform.Android);
#elif (!UNITY_EDITOR && UNITY_OPENHARMONY)
			platformRef = GetPlatformMediaReference(Platform.OpenHarmony);
#elif (!UNITY_EDITOR && UNITY_WEBGL)
			platformRef = GetPlatformMediaReference(Platform.WebGL);
#endif

			// Deliberately uses Unity's overloaded == so a destroyed override
			// also falls back to this reference.
			return (platformRef == null) ? this : platformRef;
		}

		/// <summary>
		/// Returns the override reference assigned for the given platform, or null
		/// when none is assigned (or the platform is unknown).
		/// </summary>
		public MediaReference GetPlatformMediaReference(Platform platform)
		{
			switch (platform)
			{
				case Platform.iOS:			return _iOS;
				case Platform.tvOS:			return _tvOS;
				case Platform.macOS:		return _macOS;
				case Platform.Windows:		return _windows;
				case Platform.WindowsUWP:	return _windowsUWP;
				case Platform.Android:		return _android;
				case Platform.OpenHarmony:	return _openharmony;
				case Platform.WebGL:		return _webGL;
			}
			return null;
		}
	}
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 8b1c70b7e7502564e93d418de9017d1f
|
||||
timeCreated: 1592337480
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
9
Assets/AVProVideo/Runtime/Scripts/Components.meta
Normal file
9
Assets/AVProVideo/Runtime/Scripts/Components.meta
Normal file
@@ -0,0 +1,9 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 40d7664ce355730488a96ff5305f1b5d
|
||||
folderAsset: yes
|
||||
timeCreated: 1438698284
|
||||
licenseType: Store
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
565
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToFarPlane.cs
Normal file
565
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToFarPlane.cs
Normal file
@@ -0,0 +1,565 @@
|
||||
using System;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.UIElements;
using UnityEngine.Video;

//-----------------------------------------------------------------------------
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
//-----------------------------------------------------------------------------

namespace RenderHeads.Media.AVProVideo
{
	/// <summary>
	/// Displays the video on the far plane of a camera.
	/// </summary>
	// Note:
	// - This will not work if the camera ClearFlag is set to Skybox because of how it is rendered.
	//   The skybox is rendered at position 2000.5, between the Opaque and Transparent objects,
	//   with a unique sphere scaled to the camera far plane, meaning that it will only render where
	//   nothing has been written to the depth buffer. That is where the issue arises: we are
	//   not writing to the depth buffer when rendering the video, so the skybox will think nothing
	//   is there and draw over the top.

	[AddComponentMenu("AVPro Video/Apply To Far Plane", 300)]
	[HelpURL("https://www.renderheads.com/products/avpro-video/")]
	public sealed class ApplyToFarPlane : ApplyToBase
	{
		[Header("Shader Options")]

		[Tooltip("The color override to apply to the material")]
		[SerializeField] Color _mainColor;

		/// <summary>Color override applied to the material ("_Color").</summary>
		public Color MainColor
		{
			get { return _mainColor; }
			set { if (!_material) CreateMaterial(); _material.SetColor("_Color", value); _mainColor = value; }
		}

		[Tooltip("The Main Texture that is being written to by the Media Player")]
		[SerializeField] Texture _texture;

		/// <summary>Main texture pushed to the material ("_MainTex").</summary>
		public Texture Texture
		{
			get { return _texture; }
			set { if (!_material) CreateMaterial(); _material.SetTexture("_MainTex", value); _texture = value; }
		}

		[Tooltip("The Chroma Texture to apply to the material")]
		[SerializeField] Texture _chroma;

		/// <summary>Chroma texture pushed to the material ("_ChromaTex").</summary>
		public Texture Chroma
		{
			get { return _chroma; }
			set { if (!_material) CreateMaterial(); _material.SetTexture("_ChromaTex", value); _chroma = value; }
		}

		[Tooltip("Alpha of the far plane that is drawn")]
		[SerializeField] float _alpha = 1f;

		/// <summary>Alpha of the far plane that is drawn ("_Alpha").</summary>
		public float Alpha
		{
			get { return _alpha; }
			set { if (!_material) CreateMaterial(); _material.SetFloat("_Alpha", value); _alpha = value; }
		}

		[Tooltip("The Camera far plane to draw to, if left empty main cam will be selected")]
		[SerializeField] Camera _camera;

		/// <summary>
		/// Target camera whose far plane is drawn to. Setting this also updates the
		/// material's "_TargetCamID" so the shader only renders for this camera.
		/// </summary>
		public Camera Camera
		{
			get { return _camera; }
			set { _camera = value; if (!_material) CreateMaterial(); _material.SetFloat("_TargetCamID", value.GetInstanceID()); }
		}

		[Tooltip("The aspect ratio of the video shown, not used when a custom scaling is set")]
		[SerializeField] VideoAspectRatio _aspectRatio = VideoAspectRatio.Stretch;

		/// <summary>Aspect ratio mode, forwarded to the material as "_Aspect".</summary>
		public VideoAspectRatio VideoAspectRatio
		{
			get { return _aspectRatio; }
			set { if (!_material) CreateMaterial(); _material.SetFloat("_Aspect", (int)value); _aspectRatio = value; }
		}

		[Tooltip("How much to offset the image by")]
		public Vector2 _drawOffset;

		/// <summary>Image offset, forwarded to the material as "_DrawOffset".</summary>
		public Vector2 DrawOffset
		{
			get { return _drawOffset; }
			set { if (!_material) CreateMaterial(); _material.SetVector("_DrawOffset", value); _drawOffset = value; }
		}

		[Tooltip("Will replace the Aspect Ratio with custom scaling for the video, when both values are non-zero")]
		public Vector2 _customScaling;

		/// <summary>Custom scaling, forwarded to the material as "_CustomScale".</summary>
		public Vector2 CustomScaling
		{
			get { return _customScaling; }
			set { if (!_material) CreateMaterial(); _material.SetVector("_CustomScale", value); _customScaling = value; }
		}

		// The runtime-created object that holds the quad rendered on the camera far plane.
		private GameObject _renderedObject;
		// True when OnEnable switched the camera clear flags away from Skybox (restored in OnDisable).
		private bool _changedSkybox;

		public void Awake()
		{
			// If no camera was assigned then fall back to the main camera in the scene.
			// NOTE(review): if there is no main camera, _camera stays null and the call
			// below would throw when _material is already set — confirm intended.
			if (!_camera)
				_camera = Camera.main;
			if (_material)
				_material.SetFloat("_TargetCamID", _camera.GetInstanceID());
		}

		protected override void OnDisable()
		{
			// Restore the background to skybox if we changed it in OnEnable.
			if (_changedSkybox && _camera)
				_camera.clearFlags = CameraClearFlags.Skybox;

			base.OnDisable();
			if (_renderedObject)
				_renderedObject.SetActive(false);
		}

		private void OnDestroy()
		{
			// Ensure the runtime-created holder object is destroyed with this component.
			Destroy(_renderedObject);
		}

		public void Update()
		{
			// Move the rendered object so it will always be rendered by the camera,
			// ensuring that the shader is always running to display the output on the
			// far plane of the camera.
			// NOTE(review): assumes _renderedObject (created in OnEnable) and _camera
			// (resolved in Awake) are both non-null — confirm the no-main-camera case.
			_renderedObject.transform.position = new Vector3(0, 0, _camera.nearClipPlane) + _camera.transform.position + _camera.transform.forward;
			_renderedObject.transform.rotation = _camera.transform.rotation;
		}


		/// <summary>
		/// Creates a unit quad mesh used for basic rendering; normals face back
		/// towards the target camera.
		/// </summary>
		/// <returns>Quad created</returns>
		public Mesh CreateQuadMesh()
		{
			var width = 1;
			var height = 1;
			Mesh mesh = new Mesh();
			// verts
			Vector3[] vertices = new Vector3[4]
			{
				new Vector3(0, 0, 0),
				new Vector3(width, 0, 0),
				new Vector3(0, height, 0),
				new Vector3(width, height, 0)
			};
			mesh.vertices = vertices;
			// tris
			int[] tris = new int[6]
			{
				0, 2, 1,
				2, 3, 1
			};
			mesh.triangles = tris;
			// normals — all face back towards the camera
			Vector3[] normals = new Vector3[4]
			{
				-_camera.transform.forward,
				-_camera.transform.forward,
				-_camera.transform.forward,
				-_camera.transform.forward
			};
			mesh.normals = normals;
			// uv's
			Vector2[] uv = new Vector2[4]
			{
				new Vector2(0, 0),
				new Vector2(1, 0),
				new Vector2(0, 1),
				new Vector2(1, 1)
			};
			mesh.uv = uv;
			return mesh;
		}

		/// <summary>
		/// Creates the internal far-plane material and pushes it to the camera-applier
		/// helper component on the rendered object (if the object exists yet).
		/// </summary>
		public void CreateMaterial()
		{
			// NOTE(review): Shader.Find returns null if the shader is not included in
			// the build — confirm the shader is always shipped with the plugin.
			_material = new Material(Shader.Find("AVProVideo/Background/AVProVideo-ApplyToFarPlane"));
			if (_renderedObject)
			{
				if (_renderedObject.TryGetComponent(out ApplyToFarPlane_CameraApplier applier))
					applier.Material = _material;
				else
				{
					var applier2 = _renderedObject.AddComponent<ApplyToFarPlane_CameraApplier>();
					applier2.Material = _material;
				}
			}
		}


		/*
			Below this point is basically the same as ApplyToMaterial, with a few
			unnecessary functions removed.
			This is because, other than the quad with the fancy shader, this is just
			taking the video to a material, then applying it.
		*/

		[Header("Display")]
		[Tooltip("Default texture to display when the video texture is preparing")]
		[SerializeField]
		Texture2D _defaultTexture = null;

		/// <summary>Texture shown while the video texture is still preparing.</summary>
		public Texture2D DefaultTexture
		{
			get { return _defaultTexture; }
			set
			{
				if (_defaultTexture != value)
				{
					_defaultTexture = value;
					_isDirty = true;
				}
			}
		}

		[Tooltip("The Material to use when rendering the video, if not set will use internal " +
			"\n Note: Material must use the AVProVideo/Background/AVProVideo-ApplyToFarPlane shader")]
		// this material must use the AVProVideo/Background/AVProVideo-ApplyToFarPlane shader
		// otherwise it will not render correctly
		[SerializeField] Material _material = null;

		[SerializeField]
		string _texturePropertyName = Helper.UnityBaseTextureName;

		/// <summary>
		/// Shader property name the video texture is bound to. Changing it rebuilds
		/// the cached shader property lookups.
		/// </summary>
		public string TexturePropertyName
		{
			get { return _texturePropertyName; }
			set
			{
				if (_texturePropertyName != value)
				{
					_texturePropertyName = value;
					_propTexture = new LazyShaderProperty(_texturePropertyName);
					_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
					_isDirty = true;
				}
			}
		}

		[SerializeField]
		Vector2 _offset = Vector2.zero;

		/// <summary>Texture offset applied to the material.</summary>
		public Vector2 Offset
		{
			get { return _offset; }
			set
			{
				if (_offset != value)
				{
					_offset = value;
					_isDirty = true;
				}
			}
		}

		[SerializeField]
		Vector2 _scale = Vector2.one;

		/// <summary>Texture scale applied to the material.</summary>
		public Vector2 Scale
		{
			get { return _scale; }
			set
			{
				if (_scale != value)
				{
					_scale = value;
					_isDirty = true;
				}
			}
		}

		// Last texture pushed to the material, used to detect texture changes.
		private Texture _lastTextureApplied;
		// Cached shader property id for the main (left-eye) texture.
		private LazyShaderProperty _propTexture;
		// Cached shader property id for the right-eye texture ("<name>_R").
		private LazyShaderProperty _propTexture_R;

		// Material state captured by SaveProperties and restored by RestoreProperties.
		private Texture _originalTexture;
		private Vector2 _originalScale = Vector2.one;
		private Vector2 _originalOffset = Vector2.zero;


		// Current video dimensions as reported by the media info.
		private Vector2 ImageSize
		{
			get { return new Vector2(_media.Info.GetVideoWidth(), _media.Info.GetVideoHeight()); }
		}

		protected override void OnEnable()
		{
			base.OnEnable();
			if (!_material)
			{
				CreateMaterial();
			}
			// If the rendered object already exists just enable it, otherwise
			// create a new one and set it up to be used correctly.
			if (_renderedObject)
				_renderedObject.SetActive(true);
			else
			{
				_renderedObject = new GameObject("Display Background Object");
				//_renderedObject.hideFlags = HideFlags.HideAndDontSave;
				var rend = _renderedObject.AddComponent<MeshRenderer>();
				var filt = _renderedObject.AddComponent<MeshFilter>();
				Mesh mesh = CreateQuadMesh();
				filt.sharedMesh = mesh;
				//rend.sharedMaterial = _material;
				var applier = _renderedObject.AddComponent<ApplyToFarPlane_CameraApplier>();
				if (_camera)
					_material.SetFloat("_TargetCamID", _camera.GetInstanceID());
				applier.Material = _material;
				rend.sharedMaterial = _material;
			}

			// ApplyToFarPlane does not work if the background clear mode is set to
			// skybox, so if it is then change it to solid color (undone in OnDisable).
			// NOTE(review): _camera is dereferenced without a null check here — Awake
			// assigns Camera.main, but confirm the no-main-camera case.
			if (_camera.clearFlags == CameraClearFlags.Skybox)
			{
				Debug.LogWarning("[AVProVideo] Warning: ApplyToFarPlane does not work with the background clear mode set to skybox, automatically changed to color, this will be undone when the object is disabled");
				_changedSkybox = true;
				_camera.clearFlags = CameraClearFlags.Color;
			}
		}

		// We do a LateUpdate() to allow for any changes in the texture that may have happened in Update()
		private void LateUpdate()
		{
			Apply();
		}

		/// <summary>
		/// Called via the Editor component; this allows updating of the material
		/// properties when they are changed rather than updating them each frame.
		/// </summary>
		/// <param name="target">Which material property was affected — indices
		/// presumably match the custom inspector's field order (the editor script is
		/// not visible here); TODO confirm</param>
		public void UpdateMaterialProperties(int target)
		{
			if (_material == null)
				CreateMaterial();
			switch (target)
			{
				case 0: // MainColor
					_material.SetColor("_Color", _mainColor);
					break;
				case 3: // Texture
					_material.SetTexture("_MainTex", _texture);
					break;
				case 4: // Chroma
					_material.SetTexture("_ChromaTex", _chroma);
					break;
				case 5: // Alpha
					_material.SetFloat("_Alpha", _alpha);
					break;
				case 7: // VideoAspectRatio
					_material.SetFloat("_Aspect", (int)_aspectRatio);
					break;
				case 8: // DrawOffset
					_material.SetVector("_DrawOffset", _drawOffset);
					break;
				case 9: // CustomScaling
					_material.SetVector("_CustomScale", _customScaling);
					break;
				default:
					break;
			}
		}

		/// <summary>
		/// Pushes the current media texture(s) to the material, falling back to the
		/// default texture when no media texture is available.
		/// </summary>
		public override void Apply()
		{
			bool applied = false;

			if (_media != null && _media.TextureProducer != null)
			{
				Texture resamplerTex = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[0];
				Texture texture = _media.UseResampler ? resamplerTex : _media.TextureProducer.GetTexture(0);
				if (texture != null)
				{
					// Check for changing texture
					if (texture != _lastTextureApplied)
					{
						_isDirty = true;
					}

					if (_isDirty)
					{
						bool requiresVerticalFlip = _media.TextureProducer.RequiresVerticalFlip();
						StereoPacking stereoPacking = _media.TextureProducer.GetTextureStereoPacking();
						bool isMultiview = stereoPacking == StereoPacking.MultiviewLeftPrimary || stereoPacking == StereoPacking.MultiviewRightPrimary;

						int planeCount = 1;
						if (!_media.UseResampler)
						{
							// We're not using the resampler so the number of planes will be the texture count
							planeCount = _media.TextureProducer.GetTextureCount();
							if (isMultiview)
							{
								// Unless we're using two texture stereo in which case it'll be half the texture count
								planeCount /= 2;
							}
						}

						for (int plane = 0; plane < planeCount; ++plane)
						{
							Texture resamplerTexPlane = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[plane];
							texture = _media.UseResampler ? resamplerTexPlane : _media.TextureProducer.GetTexture(plane);
							if (texture != null)
							{
								ApplyMapping(texture, requiresVerticalFlip, plane);
							}
						}

						// Handle the right eye if we're using two texture stereo packing
						if (isMultiview)
						{
							for (int plane = 0; plane < planeCount; ++plane)
							{
								texture = _media.TextureProducer.GetTexture(planeCount + plane);
								if (texture != null)
								{
									ApplyMapping(texture, requiresVerticalFlip, plane, Eye.Right);
								}
							}
						}
					}

					applied = true;
				}
			}

			// If the media didn't apply a texture, then try to apply the default texture
			if (!applied)
			{
				if (_defaultTexture != _lastTextureApplied)
				{
					_isDirty = true;
				}
				if (_isDirty)
				{
					// NOTE(review): UNITY_PLATFORM_SUPPORTS_YPCBCR is not #define'd in
					// this file (ApplyToMaterial.cs defines it locally), so this block
					// appears to always be inactive here — confirm this is intended.
#if UNITY_PLATFORM_SUPPORTS_YPCBCR
					if (_material != null && _material.HasProperty(VideoRender.PropUseYpCbCr.Id))
					{
						_material.DisableKeyword(VideoRender.Keyword_UseYpCbCr);
					}
#endif
					ApplyMapping(_defaultTexture, false);
				}
			}
		}

		// Which eye a texture plane is being mapped for.
		enum Eye
		{
			Left,
			Right
		}

		/// <summary>
		/// Binds one texture plane to the material and applies scale/offset
		/// (with a vertical flip when the producer requires it).
		/// </summary>
		/// <param name="texture">Texture plane to bind.</param>
		/// <param name="requiresYFlip">True when the texture must be flipped vertically.</param>
		/// <param name="plane">Plane index (0 = main, 1 = chroma).</param>
		/// <param name="eye">Which eye we're mapping, defaults to the left eye</param>
		private void ApplyMapping(Texture texture, bool requiresYFlip, int plane = 0, Eye eye = Eye.Left)
		{
			if (_material != null)
			{
				_isDirty = false;

				if (plane == 0)
				{
					int propTextureId = _propTexture.Id;
					if (eye == Eye.Left)
					{
						VideoRender.SetupMaterialForMedia(_material, _media, propTextureId, texture, texture == _defaultTexture);
						_lastTextureApplied = texture;
#if !UNITY_EDITOR && UNITY_ANDROID
						if (texture == _defaultTexture)
						{
							_material.EnableKeyword("USING_DEFAULT_TEXTURE");
						}
						else
						{
							_material.DisableKeyword("USING_DEFAULT_TEXTURE");
						}
#endif
					}
					else
					{
						propTextureId = _propTexture_R.Id;
						_material.SetTexture(propTextureId, texture);
					}

					if (texture != null)
					{
						if (requiresYFlip)
						{
							if (_material.HasProperty(propTextureId)) // editor error on not being initialised on first run
							{
								_material.SetTextureScale(propTextureId, new Vector2(_scale.x, -_scale.y));
								_material.SetTextureOffset(propTextureId, Vector2.up + _offset);
							}
						}
						else
						{
							_material.SetTextureScale(propTextureId, _scale);
							_material.SetTextureOffset(propTextureId, _offset);
						}
					}
				}
				else if (plane == 1)
				{
					if (texture != null)
					{
						if (requiresYFlip)
						{
							_material.SetTextureScale(VideoRender.PropChromaTex.Id, new Vector2(_scale.x, -_scale.y));
							_material.SetTextureOffset(VideoRender.PropChromaTex.Id, Vector2.up + _offset);
						}
						else
						{
							_material.SetTextureScale(VideoRender.PropChromaTex.Id, _scale);
							_material.SetTextureOffset(VideoRender.PropChromaTex.Id, _offset);
						}
					}
				}
			}
			else
				CreateMaterial();
		}

		/// <summary>
		/// Captures the material's current texture/scale/offset so RestoreProperties
		/// can put them back later.
		/// </summary>
		protected override void SaveProperties()
		{
			if (_material != null)
			{
				if (string.IsNullOrEmpty(_texturePropertyName))
				{
					_originalTexture = _material.mainTexture;
					_originalScale = _material.mainTextureScale;
					_originalOffset = _material.mainTextureOffset;
				}
				else
				{
					_originalTexture = _material.GetTexture(_texturePropertyName);
					_originalScale = _material.GetTextureScale(_texturePropertyName);
					_originalOffset = _material.GetTextureOffset(_texturePropertyName);
				}
			}
			else
				CreateMaterial();
			_propTexture = new LazyShaderProperty(_texturePropertyName);
			_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
		}

		/// <summary>
		/// Restores the material's texture/scale/offset captured by SaveProperties.
		/// </summary>
		protected override void RestoreProperties()
		{
			if (_material != null)
			{
				if (string.IsNullOrEmpty(_texturePropertyName))
				{
					_material.mainTexture = _originalTexture;
					_material.mainTextureScale = _originalScale;
					_material.mainTextureOffset = _originalOffset;
				}
				else
				{
					_material.SetTexture(_texturePropertyName, _originalTexture);
					_material.SetTextureScale(_texturePropertyName, _originalScale);
					_material.SetTextureOffset(_texturePropertyName, _originalOffset);
				}
			}
			else
				CreateMaterial();
		}
	}
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 98c7fe5a0f3343d45ad618b4612b65f1
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,27 @@
|
||||
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Attached to the object created by ApplyToFarPlane to register the camera that is
/// currently trying to render it. This is needed so that the shader only renders
/// the video for the correct camera.
/// </summary>
public class ApplyToFarPlane_CameraApplier : MonoBehaviour
{
	[SerializeField] private Material _material;

	/// <summary>The material whose "_CurrentCamID" property is kept up to date.</summary>
	public Material Material
	{
		get { return _material; }
		set { _material = value; }
	}

	// This is called before the rendering of the object by a specific camera;
	// Camera.current is also changed to be the camera currently rendering at the time.
	void OnWillRenderObject()
	{
		// FIX: Camera.current can be null (e.g. when rendering with a scriptable
		// render pipeline), which previously caused a NullReferenceException here.
		Camera renderingCamera = Camera.current;
		if (_material && renderingCamera)
		{
			_material.SetFloat("_CurrentCamID", renderingCamera.GetInstanceID());
		}
	}
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 5b662008ef07b3b4aab4042e13a7ae8f
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
335
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToMaterial.cs
Normal file
335
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToMaterial.cs
Normal file
@@ -0,0 +1,335 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_TVOS || UNITY_VISIONOS
|
||||
#define UNITY_PLATFORM_SUPPORTS_YPCBCR
|
||||
#endif
|
||||
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Sets up a material to display the video from a MediaPlayer
|
||||
/// </summary>
|
||||
[AddComponentMenu("AVPro Video/Apply To Material", 300)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public sealed class ApplyToMaterial : ApplyToBase
|
||||
{
|
||||
[Header("Display")]
|
||||
|
||||
[Tooltip("Default texture to display when the video texture is preparing")]
|
||||
[SerializeField]
|
||||
Texture2D _defaultTexture = null;
|
||||
|
||||
public Texture2D DefaultTexture
|
||||
{
|
||||
get
|
||||
{
|
||||
return _defaultTexture;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_defaultTexture != value)
|
||||
{
|
||||
_defaultTexture = value;
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Space(8f)]
|
||||
[Header("Material Target")]
|
||||
|
||||
[SerializeField]
|
||||
Material _material = null;
|
||||
|
||||
public Material Material
|
||||
{
|
||||
get
|
||||
{
|
||||
return _material;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_material != value)
|
||||
{
|
||||
_material = value;
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
string _texturePropertyName = Helper.UnityBaseTextureName;
|
||||
|
||||
public string TexturePropertyName
|
||||
{
|
||||
get
|
||||
{
|
||||
return _texturePropertyName;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_texturePropertyName != value)
|
||||
{
|
||||
_texturePropertyName = value;
|
||||
_propTexture = new LazyShaderProperty(_texturePropertyName);
|
||||
_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
Vector2 _offset = Vector2.zero;
|
||||
|
||||
public Vector2 Offset
|
||||
{
|
||||
get
|
||||
{
|
||||
return _offset;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_offset != value)
|
||||
{
|
||||
_offset = value;
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
Vector2 _scale = Vector2.one;
|
||||
|
||||
public Vector2 Scale
|
||||
{
|
||||
get
|
||||
{
|
||||
return _scale;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_scale != value)
|
||||
{
|
||||
_scale = value;
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Texture _lastTextureApplied;
|
||||
private LazyShaderProperty _propTexture;
|
||||
private LazyShaderProperty _propTexture_R; // Default property for the right-eye texture
|
||||
|
||||
private Texture _originalTexture;
|
||||
private Vector2 _originalScale = Vector2.one;
|
||||
private Vector2 _originalOffset = Vector2.zero;
|
||||
|
||||
// We do a LateUpdate() to allow for any changes in the texture that may have happened in Update()
|
||||
private void LateUpdate()
|
||||
{
|
||||
Apply();
|
||||
}
|
||||
|
||||
public override void Apply()
|
||||
{
|
||||
bool applied = false;
|
||||
|
||||
if (_media != null && _media.TextureProducer != null)
|
||||
{
|
||||
Texture resamplerTex = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[0];
|
||||
Texture texture = _media.UseResampler ? resamplerTex : _media.TextureProducer.GetTexture(0);
|
||||
if (texture != null)
|
||||
{
|
||||
// Check for changing texture
|
||||
if (texture != _lastTextureApplied)
|
||||
{
|
||||
_isDirty = true;
|
||||
}
|
||||
|
||||
if (_isDirty)
|
||||
{
|
||||
bool requiresVerticalFlip = _media.TextureProducer.RequiresVerticalFlip();
|
||||
StereoPacking stereoPacking = _media.TextureProducer.GetTextureStereoPacking();
|
||||
bool isMultiview = stereoPacking == StereoPacking.MultiviewLeftPrimary || stereoPacking == StereoPacking.MultiviewRightPrimary;
|
||||
|
||||
int planeCount = 1;
|
||||
if (!_media.UseResampler)
|
||||
{
|
||||
// We're not using the resampler so the number of planes will be the texture count
|
||||
planeCount = _media.TextureProducer.GetTextureCount();
|
||||
if (isMultiview)
|
||||
{
|
||||
// Unless we're using two texture stereo in which case it'll be half the texture count
|
||||
planeCount /= 2;
|
||||
}
|
||||
}
|
||||
|
||||
for (int plane = 0; plane < planeCount; ++plane)
|
||||
{
|
||||
Texture resamplerTexPlane = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[plane];
|
||||
texture = _media.UseResampler ? resamplerTexPlane : _media.TextureProducer.GetTexture(plane);
|
||||
if (texture != null)
|
||||
{
|
||||
ApplyMapping(texture, requiresVerticalFlip, plane);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle the right eye if we're using two texture stereo packing
|
||||
if (isMultiview)
|
||||
{
|
||||
for (int plane = 0; plane < planeCount; ++plane)
|
||||
{
|
||||
texture = _media.TextureProducer.GetTexture(planeCount + plane);
|
||||
if (texture != null)
|
||||
{
|
||||
ApplyMapping(texture, requiresVerticalFlip, plane, Eye.Right);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
applied = true;
|
||||
}
|
||||
}
|
||||
|
||||
// If the media didn't apply a texture, then try to apply the default texture
|
||||
if (!applied)
|
||||
{
|
||||
if (_defaultTexture != _lastTextureApplied)
|
||||
{
|
||||
_isDirty = true;
|
||||
}
|
||||
if (_isDirty)
|
||||
{
|
||||
#if UNITY_PLATFORM_SUPPORTS_YPCBCR
|
||||
if (_material != null && _material.HasProperty(VideoRender.PropUseYpCbCr.Id))
|
||||
{
|
||||
_material.DisableKeyword(VideoRender.Keyword_UseYpCbCr);
|
||||
}
|
||||
#endif
|
||||
ApplyMapping(_defaultTexture, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Identifies which stereo eye a texture plane belongs to when the media
// uses two-texture (multiview) stereo packing.
enum Eye
{
	Left,
	Right
}
|
||||
|
||||
/// <summary>
|
||||
///
|
||||
/// </summary>
|
||||
/// <param name="texture"></param>
|
||||
/// <param name="requiresYFlip"></param>
|
||||
/// <param name="plane"></param>
|
||||
/// <param name="eye">Which eye we're mapping, defaults to the left eye</param>
|
||||
/// <summary>
/// Writes the given texture and its scale/offset transform into the target material.
/// Plane 0 is the main texture (left or right eye); plane 1 is the chroma plane
/// used by packed YCbCr formats.
/// </summary>
/// <param name="texture">Texture to bind (may be the default texture)</param>
/// <param name="requiresYFlip">True when the texture must be shown flipped vertically (negative Y scale plus offset)</param>
/// <param name="plane">0 = main/luma plane, 1 = chroma plane</param>
/// <param name="eye">Which eye we're mapping, defaults to the left eye</param>
private void ApplyMapping(Texture texture, bool requiresYFlip, int plane = 0, Eye eye = Eye.Left)
{
	if (_material == null)
	{
		return;
	}

	_isDirty = false;

	if (plane == 0)
	{
		int texPropId = _propTexture.Id;
		if (eye == Eye.Left)
		{
			VideoRender.SetupMaterialForMedia(_material, _media, texPropId, texture, texture == _defaultTexture);
			_lastTextureApplied = texture;
#if !UNITY_EDITOR && UNITY_ANDROID
			// The Android shader needs to know when the placeholder texture is bound
			if (texture == _defaultTexture)
			{
				_material.EnableKeyword("USING_DEFAULT_TEXTURE");
			}
			else
			{
				_material.DisableKeyword("USING_DEFAULT_TEXTURE");
			}
#endif
		}
		else
		{
			// Right eye goes to the "_R" variant of the texture property
			texPropId = _propTexture_R.Id;
			_material.SetTexture(texPropId, texture);
		}

		if (texture != null)
		{
			// A vertical flip is expressed by negating Y scale and shifting up by one
			if (requiresYFlip)
			{
				_material.SetTextureScale(texPropId, new Vector2(_scale.x, -_scale.y));
				_material.SetTextureOffset(texPropId, Vector2.up + _offset);
			}
			else
			{
				_material.SetTextureScale(texPropId, _scale);
				_material.SetTextureOffset(texPropId, _offset);
			}
		}
	}
	else if (plane == 1)
	{
		// Chroma plane for packed YCbCr formats
		if (texture != null)
		{
			if (requiresYFlip)
			{
				_material.SetTextureScale(VideoRender.PropChromaTex.Id, new Vector2(_scale.x, -_scale.y));
				_material.SetTextureOffset(VideoRender.PropChromaTex.Id, Vector2.up + _offset);
			}
			else
			{
				_material.SetTextureScale(VideoRender.PropChromaTex.Id, _scale);
				_material.SetTextureOffset(VideoRender.PropChromaTex.Id, _offset);
			}
		}
	}
}
|
||||
|
||||
/// <summary>
/// Snapshots the material's current texture, scale and offset so they can be
/// restored by RestoreProperties(), then (re)builds the lazy shader property lookups.
/// </summary>
protected override void SaveProperties()
{
	if (_material != null)
	{
		// An empty property name means we target the material's main texture slot
		bool useMainTexture = string.IsNullOrEmpty(_texturePropertyName);
		if (useMainTexture)
		{
			_originalTexture = _material.mainTexture;
			_originalScale = _material.mainTextureScale;
			_originalOffset = _material.mainTextureOffset;
		}
		else
		{
			_originalTexture = _material.GetTexture(_texturePropertyName);
			_originalScale = _material.GetTextureScale(_texturePropertyName);
			_originalOffset = _material.GetTextureOffset(_texturePropertyName);
		}
	}

	// Refresh the cached shader property ids ("_R" is the right-eye variant)
	_propTexture = new LazyShaderProperty(_texturePropertyName);
	_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
}
|
||||
|
||||
/// <summary>
/// Puts back the texture, scale and offset that SaveProperties() captured,
/// undoing any changes this component made to the material.
/// </summary>
protected override void RestoreProperties()
{
	if (_material == null)
	{
		return;
	}

	// An empty property name means we target the material's main texture slot
	if (string.IsNullOrEmpty(_texturePropertyName))
	{
		_material.mainTexture = _originalTexture;
		_material.mainTextureScale = _originalScale;
		_material.mainTextureOffset = _originalOffset;
	}
	else
	{
		_material.SetTexture(_texturePropertyName, _originalTexture);
		_material.SetTextureScale(_texturePropertyName, _originalScale);
		_material.SetTextureOffset(_texturePropertyName, _originalOffset);
	}
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: d2feedce2e2e63647b8f875ec0894a15
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
364
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToMesh.cs
Normal file
364
Assets/AVProVideo/Runtime/Scripts/Components/ApplyToMesh.cs
Normal file
@@ -0,0 +1,364 @@
|
||||
using System;
|
||||
using UnityEngine;
|
||||
using UnityEngine.Serialization;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Sets up a mesh to display the video from a MediaPlayer
|
||||
/// </summary>
|
||||
[AddComponentMenu("AVPro Video/Apply To Mesh", 300)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public sealed class ApplyToMesh : ApplyToBase
|
||||
{
|
||||
// TODO: add specific material / material index to target in the mesh if there are multiple materials
|
||||
|
||||
[Space(8f)]
|
||||
[Header("Display")]
|
||||
|
||||
[Tooltip("Default texture to display when the video texture is preparing")]
|
||||
[SerializeField] Texture2D _defaultTexture = null;
|
||||
|
||||
public Texture2D DefaultTexture
|
||||
{
|
||||
get
|
||||
{
|
||||
return _defaultTexture;
|
||||
}
|
||||
set
|
||||
{
|
||||
ChangeDefaultTexture(value);
|
||||
}
|
||||
}
|
||||
|
||||
[Space(8f)]
|
||||
[FormerlySerializedAs("_mesh")]
|
||||
[Header("Renderer Target")]
|
||||
[SerializeField] Renderer _renderer = null;
|
||||
|
||||
public Renderer MeshRenderer
|
||||
{
|
||||
get
|
||||
{
|
||||
return _renderer;
|
||||
}
|
||||
set
|
||||
{
|
||||
ChangeRenderer(value);
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
int _materialIndex = -1;
|
||||
|
||||
public int MaterialIndex
|
||||
{
|
||||
get
|
||||
{
|
||||
return _materialIndex;
|
||||
}
|
||||
set
|
||||
{
|
||||
_materialIndex = value;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Swaps the placeholder texture shown while the video is preparing and
/// forces a refresh so the change is picked up immediately.
/// </summary>
private void ChangeDefaultTexture(Texture2D texture)
{
	if (_defaultTexture == texture)
	{
		return;
	}
	_defaultTexture = texture;
	ForceUpdate();
}
|
||||
|
||||
/// <summary>
/// Retargets this component at a different Renderer and forces a refresh
/// when the new target is valid.
/// </summary>
private void ChangeRenderer(Renderer renderer)
{
	if (_renderer == renderer)
	{
		return;
	}

	if (_renderer)
	{
		// TODO: Remove from renderer
	}

	_renderer = renderer;

	if (_renderer)
	{
		ForceUpdate();
	}
}
|
||||
|
||||
[SerializeField]
|
||||
string _texturePropertyName = Helper.UnityBaseTextureName;
|
||||
|
||||
public string TexturePropertyName
|
||||
{
|
||||
get
|
||||
{
|
||||
return _texturePropertyName;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_texturePropertyName != value)
|
||||
{
|
||||
_texturePropertyName = value;
|
||||
_propTexture = new LazyShaderProperty(_texturePropertyName);
|
||||
_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
Vector2 _offset = Vector2.zero;
|
||||
|
||||
public Vector2 Offset
|
||||
{
|
||||
get
|
||||
{
|
||||
return _offset;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_offset != value)
|
||||
{
|
||||
_offset = value;
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
Vector2 _scale = Vector2.one;
|
||||
|
||||
public Vector2 Scale
|
||||
{
|
||||
get
|
||||
{
|
||||
return _scale;
|
||||
}
|
||||
set
|
||||
{
|
||||
if (_scale != value)
|
||||
{
|
||||
_scale = value;
|
||||
_isDirty = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Texture _lastTextureApplied;
|
||||
private LazyShaderProperty _propTexture;
|
||||
private LazyShaderProperty _propTexture_R; // Default property for the right-eye texture
|
||||
|
||||
// We do a LateUpdate() to allow for any changes in the texture that may have happened in Update()
|
||||
private void LateUpdate()
|
||||
{
|
||||
Apply();
|
||||
}
|
||||
|
||||
/// <summary>
/// Pushes the current video texture(s) from the MediaPlayer onto the target
/// renderer's materials, falling back to the default texture when no media
/// texture is available. Only re-applies when the texture changed or the
/// component is flagged dirty.
/// </summary>
public override void Apply()
{
	bool applied = false;

	// Try to apply texture from media
	if (_media != null && _media.TextureProducer != null)
	{
		// Plane 0 texture: either the resampler output or the producer's raw texture
		Texture resamplerTex = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[0];
		Texture texture = _media.UseResampler ? resamplerTex : _media.TextureProducer.GetTexture(0);
		if (texture != null)
		{
			// Check for changing texture
			if (texture != _lastTextureApplied)
			{
				_isDirty = true;
			}

			if (_isDirty)
			{
				bool requiresVerticalFlip = _media.TextureProducer.RequiresVerticalFlip();
				StereoPacking stereoPacking = _media.TextureProducer.GetTextureStereoPacking();
				// Multiview packing stores each eye as its own set of texture planes
				bool isMultiview = stereoPacking == StereoPacking.MultiviewLeftPrimary || stereoPacking == StereoPacking.MultiviewRightPrimary;

				int planeCount = 1;
				if (!_media.UseResampler)
				{
					// We're not using the resampler so the number of planes will be the texture count
					planeCount = _media.TextureProducer.GetTextureCount();
					if (isMultiview)
					{
						// Unless we're using two texture stereo in which case it'll be half the texture count
						planeCount /= 2;
					}
				}

				// Apply each plane (left eye / primary view)
				for (int plane = 0; plane < planeCount; plane++)
				{
					Texture resamplerTexPlane = _media.FrameResampler == null || _media.FrameResampler.OutputTexture == null ? null : _media.FrameResampler.OutputTexture[plane];
					texture = _media.UseResampler ? resamplerTexPlane : _media.TextureProducer.GetTexture(plane);
					if (texture != null)
					{
						ApplyMapping(texture, _media.TextureProducer.RequiresVerticalFlip(), plane, materialIndex: _materialIndex);
					}
				}

				// Handle the right eye if we're using two texture stereo packing
				if (isMultiview)
				{
					// Right-eye planes are stored after the left-eye planes
					for (int plane = 0; plane < planeCount; ++plane)
					{
						texture = _media.TextureProducer.GetTexture(planeCount + plane);
						if (texture != null)
						{
							ApplyMapping(texture, requiresVerticalFlip, plane, Eye.Right);
						}
					}
				}
			}
			applied = true;
		}
	}

	// If the media didn't apply a texture, then try to apply the default texture
	if (!applied)
	{
		if (_defaultTexture != _lastTextureApplied)
		{
			_isDirty = true;
		}
		if (_isDirty)
		{
			ApplyMapping(_defaultTexture, false, 0, materialIndex: _materialIndex);
		}
	}
}
|
||||
|
||||
// Identifies which stereo eye a texture plane belongs to when the media
// uses two-texture (multiview) stereo packing.
enum Eye
{
	Left,
	Right
}
|
||||
|
||||
/// <summary>
/// Binds the texture (and its scale/offset transform) to the materials of the
/// target renderer. Plane 0 is the main texture; plane 1 is the chroma plane
/// used by packed YCbCr formats.
/// </summary>
/// <param name="texture">Texture to bind (may be the default texture)</param>
/// <param name="requiresYFlip">True when the texture must be shown flipped vertically (negative Y scale plus offset)</param>
/// <param name="plane">0 = main/luma plane, 1 = chroma plane</param>
/// <param name="eye">Which eye we're mapping, defaults to the left eye</param>
/// <param name="materialIndex">NOTE(review): currently unused - the material filter
/// below reads the _materialIndex field instead; parameter kept for call-site compatibility</param>
private void ApplyMapping(Texture texture, bool requiresYFlip, int plane, Eye eye = Eye.Left, int materialIndex = -1)
{
	if (_renderer != null)
	{
		_isDirty = false;
#if UNITY_EDITOR
		Material[] meshMaterials = _renderer.sharedMaterials;
#else
		Material[] meshMaterials = _renderer.materials;
#endif

		if (meshMaterials != null)
		{
			for (int i = 0; i < meshMaterials.Length; i++)
			{
				// A negative material index targets every material on the renderer
				if (_materialIndex < 0 || i == _materialIndex)
				{
					Material mat = meshMaterials[i];
					if (mat != null)
					{
						if (StereoRedGreenTint)
						{
							mat.EnableKeyword("STEREO_DEBUG");
						}
						else
						{
							mat.DisableKeyword("STEREO_DEBUG");
						}

						if (plane == 0)
						{
							// Left eye writes to the main property, right eye to the "_R" variant
							int propTextureId = _propTexture.Id;
							if (eye == Eye.Left)
							{
								VideoRender.SetupMaterialForMedia(mat, _media, propTextureId, texture, texture == _defaultTexture);
								_lastTextureApplied = texture;

#if !UNITY_EDITOR && UNITY_ANDROID
								// The Android shader needs to know when the placeholder texture is bound
								if (texture == _defaultTexture)
								{
									mat.EnableKeyword("USING_DEFAULT_TEXTURE");
								}
								else
								{
									mat.DisableKeyword("USING_DEFAULT_TEXTURE");
								}
#endif
							}
							else
							{
								propTextureId = _propTexture_R.Id;
								mat.SetTexture(propTextureId, texture);
							}

							if (texture != null)
							{
								// BUGFIX: the scale/offset must target the same property the texture
								// was bound to (propTextureId). Previously this used _propTexture.Id,
								// so the right-eye ("_R") texture had its transform written to the
								// left-eye property (the sibling material-based implementation
								// already does this correctly).
								if (requiresYFlip)
								{
									mat.SetTextureScale(propTextureId, new Vector2(_scale.x, -_scale.y));
									mat.SetTextureOffset(propTextureId, Vector2.up + _offset);
								}
								else
								{
									mat.SetTextureScale(propTextureId, _scale);
									mat.SetTextureOffset(propTextureId, _offset);
								}
							}
						}
						else if (plane == 1)
						{
							// Chroma plane for packed YCbCr formats
							if (texture != null)
							{
								if (requiresYFlip)
								{
									mat.SetTextureScale(VideoRender.PropChromaTex.Id, new Vector2(_scale.x, -_scale.y));
									mat.SetTextureOffset(VideoRender.PropChromaTex.Id, Vector2.up + _offset);
								}
								else
								{
									mat.SetTextureScale(VideoRender.PropChromaTex.Id, _scale);
									mat.SetTextureOffset(VideoRender.PropChromaTex.Id, _offset);
								}
							}
						}
					}
				}
			}
		}
	}
}
|
||||
|
||||
/// <summary>
/// Resolves the target renderer (falling back to a MeshRenderer on this GameObject),
/// rebuilds the cached shader property lookups and forces a texture refresh.
/// </summary>
protected override void OnEnable()
{
	if (_renderer == null)
	{
		_renderer = this.GetComponent<MeshRenderer>();
		if (_renderer == null)
		{
			Debug.LogWarning("[AVProVideo] No MeshRenderer set or found in gameobject");
		}
	}

	_propTexture = new LazyShaderProperty(_texturePropertyName);
	// BUGFIX: also rebuild the right-eye property lookup here. Previously only
	// SaveProperties() and the TexturePropertyName setter created _propTexture_R,
	// so ApplyMapping() could read a stale/null _propTexture_R when rendering
	// multiview stereo after a re-enable.
	_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");

	ForceUpdate();
}
|
||||
|
||||
/// <summary>
/// Restores the default texture to the renderer's materials when the component
/// is disabled, so the last video frame isn't left on screen.
/// </summary>
protected override void OnDisable()
{
	ApplyMapping(_defaultTexture, false, 0, materialIndex: _materialIndex);
}
|
||||
|
||||
/// <summary>
/// Rebuilds the lazy shader property lookups for the configured texture
/// property name ("_R" is the right-eye variant used for multiview stereo).
/// </summary>
protected override void SaveProperties()
{
	_propTexture = new LazyShaderProperty(_texturePropertyName);
	_propTexture_R = new LazyShaderProperty(_texturePropertyName + "_R");
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: f6d1977a52888584496b1acc7e998011
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
@@ -0,0 +1,81 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2019-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// Allows per-channel volume control
|
||||
/// Currently supported on Windows and UWP (Media Foundation API only), macOS, iOS, tvOS and Android (ExoPlayer API only)
|
||||
[AddComponentMenu("AVPro Video/Audio Channel Mixer", 401)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
/// <summary>
/// Allows per-channel volume control. Audio passing through the attached
/// AudioSource is scaled per-channel in OnAudioFilterRead using the gain array.
/// </summary>
public class AudioChannelMixer : MonoBehaviour
{
	// Maximum number of speaker channels we pre-allocate gains for
	const int MaxChannels = 8;

	// Per-channel gains in the range 0..1
	[Range(0f, 1f)]
	[SerializeField] float[] _channels = null;

	/// Range 0.0 to 1.0
	public float[] Channel
	{
		get { return _channels; }
		set { _channels = value; }
	}

	void Reset()
	{
		// Default every channel to full volume
		_channels = new float[MaxChannels];
		for (int i = 0; i < MaxChannels; i++)
		{
			_channels[i] = 1f;
		}
	}

	// Resizes the gain array to match the device channel count, preserving
	// existing gains and defaulting any new channels to full volume
	void ChangeChannelCount(int numChannels)
	{
		float[] channels = new float[numChannels];
		if (_channels != null && _channels.Length != 0)
		{
			for (int i = 0; i < channels.Length; i++)
			{
				channels[i] = (i < _channels.Length) ? _channels[i] : 1f;
			}
		}
		else
		{
			for (int i = 0; i < numChannels; i++)
			{
				channels[i] = 1f;
			}
		}
		_channels = channels;
	}

	void OnAudioFilterRead(float[] data, int channels)
	{
		// BUGFIX: also rebuild when _channels is null (e.g. the component was added
		// at runtime so Reset() never ran) - previously this dereferenced
		// _channels.Length and threw a NullReferenceException on the audio thread.
		if (_channels == null || channels != _channels.Length)
		{
			ChangeChannelCount(channels);
		}
		// Samples are interleaved: scale each one by its channel's gain
		int k = 0;
		int numSamples = data.Length / channels;
		for (int j = 0; j < numSamples; j++)
		{
			for (int i = 0; i < channels; i++)
			{
				data[k] *= _channels[i];
				k++;
			}
		}
	}
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 383a68f1e3e94be4b84df59dd26074db
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
180
Assets/AVProVideo/Runtime/Scripts/Components/AudioOutput.cs
Normal file
180
Assets/AVProVideo/Runtime/Scripts/Components/AudioOutput.cs
Normal file
@@ -0,0 +1,180 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2026 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Audio is grabbed from the MediaPlayer and rendered via Unity AudioSource
|
||||
/// This allows audio to have 3D spatial control, effects applied and to be spatialised for VR
|
||||
/// Currently supported on Windows and UWP (Media Foundation API only), macOS, iOS, tvOS and Android (ExoPlayer API only)
|
||||
/// </summary>
|
||||
[RequireComponent(typeof(AudioSource))]
|
||||
[AddComponentMenu("AVPro Video/Audio Output", 400)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public class AudioOutput : MonoBehaviour
{
	// How source audio channels are routed to the Unity output channels
	public enum AudioOutputMode
	{
		OneToAllChannels,
		MultipleChannels
	}

	[SerializeField] MediaPlayer _mediaPlayer = null;
	[SerializeField] AudioOutputMode _audioOutputMode = AudioOutputMode.MultipleChannels;
	// Bitmask selecting which source channels are used (default: all 16)
	[HideInInspector, SerializeField] int _channelMask = 0xffff;
	[SerializeField] bool _supportPositionalAudio = false;

	// Cached GetInstanceID() of the current media player; 0 when none is assigned
	private int _mediaPlayerInstanceID = 0;

	/// <summary>The MediaPlayer whose audio is routed through this component.
	/// Setting this re-wires event subscriptions via ChangeMediaPlayer().</summary>
	public MediaPlayer Player
	{
		get { return _mediaPlayer; }
		set { ChangeMediaPlayer(value); }
	}

	public AudioOutputMode OutputMode
	{
		get { return _audioOutputMode; }
		set { _audioOutputMode = value; }
	}

	public int ChannelMask
	{
		get { return _channelMask; }
		set { _channelMask = value; }
	}

	public bool SupportPositionalAudio
	{
		get { return _supportPositionalAudio; }
		set { _supportPositionalAudio = value; }
	}

	private AudioSource _audioSource;

	void Awake()
	{
		// Guaranteed by [RequireComponent(typeof(AudioSource))] on this class
		_audioSource = this.GetComponent<AudioSource>();
		Debug.Assert(_audioSource != null);
	}

	void Start()
	{
		AudioSettings.OnAudioConfigurationChanged += OnAudioConfigurationChanged;
		// Hook up the serialized media player (subscribes events, assigns the AudioSource)
		ChangeMediaPlayer(_mediaPlayer);
	}

	// Forwards Unity audio device/config changes to the media player backend
	void OnAudioConfigurationChanged(bool deviceChanged)
	{
		if (_mediaPlayer == null || _mediaPlayer.Control == null)
			return;
		_mediaPlayer.Control.AudioConfigurationChanged(deviceChanged);
	}

	void OnDestroy()
	{
		// Unsubscribes events and unregisters from the AudioOutputManager
		ChangeMediaPlayer(null);
	}

	void Update()
	{
		// Keep volume/mute/pitch in sync with the player while it is playing
		if (_mediaPlayer != null && _mediaPlayer.Control != null && _mediaPlayer.Control.IsPlaying())
		{
			ApplyAudioSettings(_mediaPlayer, _audioSource);
		}
	}

	public AudioSource GetAudioSource()
	{
		return _audioSource;
	}
	public void SetAudioSource(AudioSource source)
	{
		_audioSource = source;
		if (_mediaPlayer)
			_mediaPlayer.AudioSource = source;
	}

	/// <summary>
	/// Swaps the media player this output is attached to: detaches the old player
	/// (events, manager registration), attaches the new one, and sets up or clears
	/// the dummy looping clip needed for positional audio.
	/// </summary>
	public void ChangeMediaPlayer(MediaPlayer newPlayer)
	{
		// When changing the media player, handle event subscriptions
		if (_mediaPlayer != null)
		{
			_mediaPlayer.AudioSource = null;
			_mediaPlayer.Events.RemoveListener(OnMediaPlayerEvent);
			AudioOutputManager.Instance.RemovePlayerInstance(_mediaPlayerInstanceID);
			_mediaPlayer = null;
			_mediaPlayerInstanceID = 0;
		}

		_mediaPlayer = newPlayer;
		if (_mediaPlayer != null)
		{
			_mediaPlayer.Events.AddListener(OnMediaPlayerEvent);
			_mediaPlayer.AudioSource = _audioSource;
			_mediaPlayerInstanceID = _mediaPlayer.GetInstanceID();
			AudioOutputManager.Instance.AddPlayerInstance(_mediaPlayerInstanceID);
		}

		if (_supportPositionalAudio)
		{
			if (_audioSource.clip == null)
			{
				// Position audio is implemented from hints found on this thread:
				// https://forum.unity.com/threads/onaudiofilterread-sound-spatialisation.362782/
				// A looping clip of all 1s makes Unity spatialise the filter output.
				int frameCount = 2048 * 10;
				int sampleCount = frameCount * Helper.GetUnityAudioSpeakerCount();
				AudioClip clip = AudioClip.Create("dummy", frameCount, Helper.GetUnityAudioSpeakerCount(), Helper.GetUnityAudioSampleRate(), false);
				float[] samples = new float[sampleCount];
				for (int i = 0; i < samples.Length; i++) { samples[i] = 1f; }
				clip.SetData(samples, 0);
				_audioSource.clip = clip;
				_audioSource.loop = true;
			}
		}
		else if (_audioSource.clip != null)
		{
			// Positional audio disabled: drop the dummy clip
			_audioSource.clip = null;
		}
	}

	// Callback function to handle events
	private void OnMediaPlayerEvent(MediaPlayer mp, MediaPlayerEvent.EventType et, ErrorCode errorCode)
	{
		switch (et)
		{
			case MediaPlayerEvent.EventType.Closing:
				_audioSource.Stop();
				break;
			case MediaPlayerEvent.EventType.Started:
				ApplyAudioSettings(_mediaPlayer, _audioSource);
				_audioSource.Play();
				break;
		}
	}

	private static void ApplyAudioSettings(MediaPlayer player, AudioSource audioSource)
	{
		// Apply volume and mute from the MediaPlayer to the AudioSource
		if (audioSource != null && player != null && player.Control != null)
		{
			float volume = player.Control.GetVolume();
			bool isMuted = player.Control.IsMuted();
			float rate = player.Control.GetPlaybackRate();
			audioSource.volume = volume;
			audioSource.mute = isMuted;
			// Playback rate is mirrored as pitch so the AudioSource consumes
			// samples at the same speed the player produces them
			audioSource.pitch = rate;
		}
	}

#if (UNITY_EDITOR_WIN || UNITY_EDITOR_OSX) || (!UNITY_EDITOR && (UNITY_STANDALONE_WIN || UNITY_WSA_10_0 || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_TVOS || UNITY_VISIONOS || UNITY_ANDROID))
	// Runs on Unity's audio thread; pulls decoded samples from the shared manager
	void OnAudioFilterRead(float[] audioData, int channelCount)
	{
		AudioOutputManager.Instance.RequestAudio(this, _mediaPlayer, _mediaPlayerInstanceID, audioData, channelCount, _channelMask, _audioOutputMode, _supportPositionalAudio);
	}
#endif
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 3b05a64a5de3f8546bf586f42e37b979
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
448
Assets/AVProVideo/Runtime/Scripts/Components/DisplayIMGUI.cs
Normal file
448
Assets/AVProVideo/Runtime/Scripts/Components/DisplayIMGUI.cs
Normal file
@@ -0,0 +1,448 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2025 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#define UNITY_PLATFORM_SUPPORTS_LINEAR
|
||||
|
||||
#if UNITY_EDITOR_WIN || (!UNITY_EDITOR && UNITY_STANDALONE_WIN)
|
||||
#define UNITY_PLATFORM_SUPPORTS_VIDEOASPECTRATIO
|
||||
#endif
|
||||
|
||||
using UnityEngine;
|
||||
using UnityEngine.Serialization;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Displays the video from MediaPlayer component using IMGUI
|
||||
/// </summary>
|
||||
[AddComponentMenu("AVPro Video/Display IMGUI", 200)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
[ExecuteInEditMode]
|
||||
public class DisplayIMGUI : MonoBehaviour
|
||||
{
|
||||
[SerializeField]
|
||||
MediaPlayer _mediaPlayer = null;
|
||||
public MediaPlayer Player
|
||||
{
|
||||
get
|
||||
{
|
||||
return _mediaPlayer;
|
||||
}
|
||||
set
|
||||
{
|
||||
_mediaPlayer = value;
|
||||
Update();
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
ScaleMode _scaleMode = ScaleMode.ScaleToFit;
|
||||
public ScaleMode ScaleMode
|
||||
{
|
||||
get
|
||||
{
|
||||
return _scaleMode;
|
||||
}
|
||||
set
|
||||
{
|
||||
_scaleMode = value;
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
Color _color = UnityEngine.Color.white;
|
||||
public Color Color
|
||||
{
|
||||
get
|
||||
{
|
||||
return _color;
|
||||
}
|
||||
set
|
||||
{
|
||||
_color = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_alphaBlend")]
|
||||
[SerializeField] bool _allowTransparency = false;
|
||||
public bool AllowTransparency
|
||||
{
|
||||
get
|
||||
{
|
||||
return _allowTransparency;
|
||||
}
|
||||
set
|
||||
{
|
||||
_allowTransparency = value;
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
bool _useDepth = false;
|
||||
public bool UseDepth
|
||||
{
|
||||
get
|
||||
{
|
||||
return _useDepth;
|
||||
}
|
||||
set
|
||||
{
|
||||
_useDepth = value;
|
||||
}
|
||||
}
|
||||
|
||||
[SerializeField]
|
||||
int _depth = 0;
|
||||
public int Depth
|
||||
{
|
||||
get
|
||||
{
|
||||
return _depth;
|
||||
}
|
||||
set
|
||||
{
|
||||
_depth = value;
|
||||
}
|
||||
}
|
||||
|
||||
[Header("Area")]
|
||||
|
||||
[FormerlySerializedAs("_fullScreen")]
|
||||
[SerializeField]
|
||||
bool _isAreaFullScreen = true;
|
||||
public bool IsAreaFullScreen
|
||||
{
|
||||
get
|
||||
{
|
||||
return _isAreaFullScreen;
|
||||
}
|
||||
set
|
||||
{
|
||||
_isAreaFullScreen = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_x")]
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField]
|
||||
float _areaX = 0f;
|
||||
public float AreaX
|
||||
{
|
||||
get
|
||||
{
|
||||
return _areaX;
|
||||
}
|
||||
set
|
||||
{
|
||||
_areaX = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_y")]
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField]
|
||||
float _areaY = 0f;
|
||||
public float AreaY
|
||||
{
|
||||
get
|
||||
{
|
||||
return _areaY;
|
||||
}
|
||||
set
|
||||
{
|
||||
_areaY = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_width")]
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField] float _areaWidth = 1f;
|
||||
public float AreaWidth
|
||||
{
|
||||
get
|
||||
{
|
||||
return _areaWidth;
|
||||
}
|
||||
set
|
||||
{
|
||||
_areaWidth = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_height")]
|
||||
[Range(0f, 1f)]
|
||||
[SerializeField] float _areaHeight = 1f;
|
||||
public float AreaHeight
|
||||
{
|
||||
get
|
||||
{
|
||||
return _areaHeight;
|
||||
}
|
||||
set
|
||||
{
|
||||
_areaHeight = value;
|
||||
}
|
||||
}
|
||||
|
||||
[FormerlySerializedAs("_displayInEditor")]
|
||||
[SerializeField] bool _showAreaInEditor = false;
|
||||
public bool ShowAreaInEditor
|
||||
{
|
||||
get
|
||||
{
|
||||
return _showAreaInEditor;
|
||||
}
|
||||
set
|
||||
{
|
||||
_showAreaInEditor = value;
|
||||
}
|
||||
}
|
||||
|
||||
private static Shader _shaderAlphaPacking;
|
||||
private Material _material;
|
||||
|
||||
void Start()
{
	// Disabling useGUILayout lets you skip the GUI layout phase which helps
	// performance, but it also breaks GUI.depth - so only skip it when depth
	// sorting isn't requested.
	if (!_useDepth)
	{
		this.useGUILayout = false;
	}

	// Lazily locate the shared IMGUI shader (static, so shared between instances)
	if (!_shaderAlphaPacking)
	{
		_shaderAlphaPacking = Shader.Find("AVProVideo/Internal/IMGUI/Texture Transparent");
		if (!_shaderAlphaPacking)
		{
			Debug.LogError("[AVProVideo] Missing shader 'AVProVideo/Internal/IMGUI/Texture Transparent'");
		}
	}
}
|
||||
|
||||
/// <summary>
/// Keeps the display material in sync with the current media player's shader
/// requirements. Public so the Player property setter can refresh immediately.
/// </summary>
public void Update()
{
	if (_mediaPlayer != null)
	{
		SetupMaterial();
	}
}
|
||||
|
||||
void OnDestroy()
{
	// Release the material instance created in SetupMaterial()
	if (_material == null)
	{
		return;
	}
#if UNITY_EDITOR
	Material.DestroyImmediate(_material);
#else
	Material.Destroy(_material);
#endif
	_material = null;
}
|
||||
|
||||
/// <summary>
/// Returns the shader the display material should use. Currently this always
/// returns the alpha-packing shader; the conditional selection logic below is
/// intentionally compiled out (#if false) - kept for reference.
/// </summary>
private Shader GetRequiredShader()
{
	// [MOZ] Always default to the alpha packed shader for now to force using the material rendering path
	// in OnGUI. This fixed issues with incorrect colourisation and orientation/cropping for certain videos.
	Shader result = _shaderAlphaPacking;
#if false
	// Legacy selection logic: only use the special shader when alpha packing,
	// linear colour conversion or two-plane textures require it.
	if (result == null && _mediaPlayer.TextureProducer != null)
	{
		switch (_mediaPlayer.TextureProducer.GetTextureAlphaPacking())
		{
			case AlphaPacking.None:
				break;
			case AlphaPacking.LeftRight:
			case AlphaPacking.TopBottom:
				result = _shaderAlphaPacking;
				break;
		}
	}

#if UNITY_PLATFORM_SUPPORTS_LINEAR
	if (result == null && _mediaPlayer.Info != null)
	{
		// If the player does support generating sRGB textures then we need to use a shader to convert them for display via IMGUI
		if (QualitySettings.activeColorSpace == ColorSpace.Linear && !_mediaPlayer.Info.PlayerSupportsLinearColorSpace())
		{
			result = _shaderAlphaPacking;
		}
	}
#endif
	if (result == null && _mediaPlayer.TextureProducer != null)
	{
		if (_mediaPlayer.TextureProducer.GetTextureCount() == 2)
		{
			result = _shaderAlphaPacking;
		}
	}
#endif
	return result;
}
|
||||
|
||||
/// <summary>
/// Ensures the display material uses the shader the current media requires,
/// destroying and recreating the material only when the shader changes.
/// </summary>
private void SetupMaterial()
{
	// Get required shader
	Shader activeShader = (_material != null) ? _material.shader : null;
	Shader requiredShader = GetRequiredShader();

	// Nothing to do while the material already uses the required shader
	if (activeShader == requiredShader)
	{
		return;
	}

	// Destroy existing material
	if (_material != null)
	{
#if UNITY_EDITOR
		Material.DestroyImmediate(_material);
#else
		Material.Destroy(_material);
#endif
		_material = null;
	}

	// Create new material
	if (requiredShader != null)
	{
		_material = new Material(requiredShader);
	}
}
|
||||
|
||||
#if UNITY_EDITOR
|
||||
/// <summary>
/// Editor-only preview: draws a tiled AVPro icon over the configured display
/// area so the region can be positioned without a playing video.
/// </summary>
private void DrawArea()
{
	Rect rect = GetAreaRect();

	// Build tiling texture coordinates from the screen-space rect:
	// normalise to 0..1, tile 16x, and centre on the icon
	Rect texCoords = rect;
	texCoords.x /= Screen.width;
	texCoords.width /= Screen.width;
	texCoords.y /= Screen.height;
	texCoords.height /= Screen.height;
	texCoords.width *= 16f;
	texCoords.height *= 16f;
	texCoords.x += 0.5f;
	texCoords.y += 0.5f;

	Texture2D icon = Resources.Load<Texture2D>("AVProVideoIcon");
	GUI.depth = _depth;
	GUI.color = _color;
	GUI.DrawTextureWithTexCoords(rect, icon, texCoords);
}
|
||||
#endif
|
||||
|
||||
void OnGUI()
|
||||
{
|
||||
#if UNITY_EDITOR
|
||||
if (_showAreaInEditor && !Application.isPlaying)
|
||||
{
|
||||
DrawArea();
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (_mediaPlayer == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
Texture texture = null;
|
||||
if (_showAreaInEditor)
|
||||
{
|
||||
#if UNITY_EDITOR
|
||||
texture = Texture2D.whiteTexture;
|
||||
#endif
|
||||
}
|
||||
texture = VideoRender.GetTexture(_mediaPlayer, 0);
|
||||
if (_mediaPlayer.Info != null && !_mediaPlayer.Info.HasVideo())
|
||||
{
|
||||
texture = null;
|
||||
}
|
||||
|
||||
if (texture != null)
|
||||
{
|
||||
bool isTextureVisible = (_color.a > 0f || !_allowTransparency);
|
||||
if (isTextureVisible)
|
||||
{
|
||||
GUI.depth = _depth;
|
||||
GUI.color = _color;
|
||||
|
||||
Rect rect = GetAreaRect();
|
||||
|
||||
// TODO: change this to a material-only path so we only have a single drawing path
|
||||
if (_material != null)
|
||||
{
|
||||
// TODO: Only setup material when needed
|
||||
VideoRender.SetupMaterialForMedia(_material, _mediaPlayer);
|
||||
|
||||
// NOTE: It seems that Graphics.DrawTexture() behaves differently than GUI.DrawTexture() when it comes to sRGB writing
|
||||
// on newer versions of Unity (at least 2018.2.19 and above), so now we have to force the conversion to sRGB on writing
|
||||
bool restoreSRGBWrite = false;
|
||||
#if UNITY_EDITOR_WIN || (!UNITY_EDITOR && UNITY_STANDALONE_WIN)
|
||||
if (QualitySettings.activeColorSpace == ColorSpace.Linear && !GL.sRGBWrite)
|
||||
{
|
||||
restoreSRGBWrite = true;
|
||||
}
|
||||
#endif
|
||||
if (restoreSRGBWrite)
|
||||
{
|
||||
GL.sRGBWrite = true;
|
||||
}
|
||||
|
||||
VideoRender.DrawTexture(rect, texture, _scaleMode, _mediaPlayer.TextureProducer.GetTextureAlphaPacking(), _mediaPlayer.TextureProducer.GetTexturePixelAspectRatio(), _material);
|
||||
|
||||
if (restoreSRGBWrite)
|
||||
{
|
||||
GL.sRGBWrite = false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
bool requiresVerticalFlip = false;
|
||||
if (_mediaPlayer.TextureProducer != null)
|
||||
{
|
||||
requiresVerticalFlip = _mediaPlayer.TextureProducer.RequiresVerticalFlip();
|
||||
}
|
||||
if (requiresVerticalFlip)
|
||||
{
|
||||
GUIUtility.ScaleAroundPivot(new Vector2(1f, -1f), new Vector2(0f, rect.y + (rect.height / 2f)));
|
||||
}
|
||||
#if UNITY_PLATFORM_SUPPORTS_VIDEOASPECTRATIO
|
||||
float par = _mediaPlayer.TextureProducer.GetTexturePixelAspectRatio();
|
||||
if (par > 0f)
|
||||
{
|
||||
if (par > 1f)
|
||||
{
|
||||
GUIUtility.ScaleAroundPivot(new Vector2(par, 1f), new Vector2(rect.x + (rect.width / 2f), rect.y + (rect.height / 2f)));
|
||||
}
|
||||
else
|
||||
{
|
||||
GUIUtility.ScaleAroundPivot(new Vector2(1f, 1f/par), new Vector2(rect.x + (rect.width / 2f), rect.y + (rect.height / 2f)));
|
||||
}
|
||||
}
|
||||
#endif
|
||||
GUI.DrawTexture(rect, texture, _scaleMode, _allowTransparency);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Rect GetAreaRect()
|
||||
{
|
||||
Rect rect;
|
||||
if (_isAreaFullScreen)
|
||||
{
|
||||
rect = new Rect(0.0f, 0.0f, Screen.width, Screen.height);
|
||||
}
|
||||
else
|
||||
{
|
||||
rect = new Rect(_areaX * (Screen.width - 1), _areaY * (Screen.height - 1), _areaWidth * Screen.width, _areaHeight * Screen.height);
|
||||
}
|
||||
|
||||
return rect;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 75f3b319d2d69934d8bf545ab45c918d
|
||||
timeCreated: 1544813301
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
1571
Assets/AVProVideo/Runtime/Scripts/Components/MediaPlayer.cs
Normal file
1571
Assets/AVProVideo/Runtime/Scripts/Components/MediaPlayer.cs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 638c870cac4da414fba921606d504407
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,65 @@
|
||||
#if !(UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_TVOS)
using UnityEngine;

//-----------------------------------------------------------------------------
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
//-----------------------------------------------------------------------------

namespace RenderHeads.Media.AVProVideo
{
	public partial class MediaPlayer : MonoBehaviour
	{
		#region Application Focus and Pausing
#if !UNITY_EDITOR
		// Resumes playback when the application regains focus, provided the media
		// was playing at the point it was paused by a focus-loss/app-pause.
		void OnApplicationFocus(bool focusStatus)
		{
#if !(UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN)
			// Debug.Log("OnApplicationFocus: focusStatus: " + focusStatus);

			bool canResume = focusStatus && isActiveAndEnabled && enabled;
			if (canResume && Control != null && _wasPlayingOnPause)
			{
				_wasPlayingOnPause = false;
				Control.Play();

				Helper.LogInfo("OnApplicationFocus: playing video again");
			}
#endif
		}

		// Pauses playback when the app is backgrounded (if _pauseMediaOnAppPause
		// is set) and optionally resumes it again on unpause.
		void OnApplicationPause(bool pauseStatus)
		{
#if !(UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN)
			// Debug.Log("OnApplicationPause: pauseStatus: " + pauseStatus);

			if (pauseStatus)
			{
				if (_pauseMediaOnAppPause && Control != null && Control.IsPlaying())
				{
					// Remember that we were playing so focus regain can resume
					_wasPlayingOnPause = true;
#if !UNITY_IPHONE
					Control.Pause();
#endif
					Helper.LogInfo("OnApplicationPause: pausing video");
				}
			}
			else if (_playMediaOnAppUnpause)
			{
				// Catch coming back from power off state when no lock screen
				OnApplicationFocus(true);
			}
#endif
		}
#endif
		#endregion // Application Focus and Pausing
	}
}
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 3a3464021ab2fb14a81d5d35b3097023
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,29 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
#region Audio Mute Support for Unity Editor
|
||||
#if UNITY_EDITOR
|
||||
private bool _unityAudioMasterMute = false;
|
||||
private void CheckEditorAudioMute()
|
||||
{
|
||||
// Detect a change
|
||||
if (UnityEditor.EditorUtility.audioMasterMute != _unityAudioMasterMute)
|
||||
{
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
_unityAudioMasterMute = UnityEditor.EditorUtility.audioMasterMute;
|
||||
_controlInterface.MuteAudio(_audioMuted || _unityAudioMasterMute);
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#endregion // Audio Mute Support for Unity Editor
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 16be519f584387149bd75947276c3a72
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,83 @@
|
||||
using UnityEngine;
|
||||
|
||||
#if UNITY_EDITOR
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
#region Play/Pause Support for Unity Editor
|
||||
// This code handles the pause/play buttons in the editor
|
||||
private static void SetupEditorPlayPauseSupport()
|
||||
{
|
||||
#if UNITY_2017_2_OR_NEWER
|
||||
UnityEditor.EditorApplication.pauseStateChanged -= OnUnityPauseModeChanged;
|
||||
UnityEditor.EditorApplication.pauseStateChanged += OnUnityPauseModeChanged;
|
||||
#else
|
||||
UnityEditor.EditorApplication.playmodeStateChanged -= OnUnityPlayModeChanged;
|
||||
UnityEditor.EditorApplication.playmodeStateChanged += OnUnityPlayModeChanged;
|
||||
#endif
|
||||
}
|
||||
|
||||
#if UNITY_2017_2_OR_NEWER
|
||||
private static void OnUnityPauseModeChanged(UnityEditor.PauseState state)
|
||||
{
|
||||
OnUnityPlayModeChanged();
|
||||
}
|
||||
#endif
|
||||
|
||||
private static void OnUnityPlayModeChanged()
|
||||
{
|
||||
if (UnityEditor.EditorApplication.isPlaying)
|
||||
{
|
||||
bool isPaused = UnityEditor.EditorApplication.isPaused;
|
||||
MediaPlayer[] players = Resources.FindObjectsOfTypeAll<MediaPlayer>();
|
||||
foreach (MediaPlayer player in players)
|
||||
{
|
||||
if (isPaused)
|
||||
{
|
||||
player.EditorPause();
|
||||
}
|
||||
else
|
||||
{
|
||||
player.EditorUnpause();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void EditorPause()
|
||||
{
|
||||
if (this.isActiveAndEnabled)
|
||||
{
|
||||
if (_controlInterface != null && _controlInterface.IsPlaying())
|
||||
{
|
||||
_wasPlayingOnPause = true;
|
||||
_controlInterface.Pause();
|
||||
}
|
||||
StopRenderCoroutine();
|
||||
}
|
||||
}
|
||||
|
||||
private void EditorUnpause()
|
||||
{
|
||||
if (this.isActiveAndEnabled)
|
||||
{
|
||||
if (_controlInterface != null && _wasPlayingOnPause)
|
||||
{
|
||||
_autoPlayOnStart = true;
|
||||
_wasPlayingOnPause = false;
|
||||
_autoPlayOnStartTriggered = false;
|
||||
}
|
||||
StartRenderCoroutine();
|
||||
}
|
||||
}
|
||||
#endregion // Play/Pause Support for Unity Editor
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 083c5ace9dbfda84cb8b4afaa19bdcde
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,323 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
	public partial class MediaPlayer : MonoBehaviour
	{
		// Event state
		// One-shot flags: set once the corresponding event has fired for the current media
		private bool _eventFired_MetaDataReady = false;
		private bool _eventFired_ReadyToPlay = false;
		private bool _eventFired_Started = false;
		private bool _eventFired_FirstFrameReady = false;
		private bool _eventFired_FinishedPlaying = false;
		// Edge-detection state for paired start/finish (recurring) events
		private bool _eventState_PlaybackBuffering = false;
		private bool _eventState_PlaybackSeeking = false;
		private bool _eventState_PlaybackStalled = false;
		private int _eventState_PreviousWidth = 0;
		private int _eventState_PreviousHeight = 0;
		private int _previousSubtitleIndex = -1;
		// Cleared before firing FinishedPlaying; presumably set elsewhere when new
		// media is opened during the event callback - TODO confirm against callers
		private bool _finishedFrameOpenCheck = false;
		private bool _eventState_Paused = false;

#if UNITY_EDITOR
		public static MediaPlayerLoadEvent InternalMediaLoadedEvent = new MediaPlayerLoadEvent();
#endif

		// Resets per-media event state so events can fire again for newly opened media.
		// NOTE(review): _eventState_Paused is not reset here - confirm whether intentional.
		private void ResetEvents()
		{
			_eventFired_MetaDataReady = false;
			_eventFired_ReadyToPlay = false;
			_eventFired_Started = false;
			_eventFired_FirstFrameReady = false;
			_eventFired_FinishedPlaying = false;
			_eventState_PlaybackBuffering = false;
			_eventState_PlaybackSeeking = false;
			_eventState_PlaybackStalled = false;
			_eventState_PreviousWidth = 0;
			_eventState_PreviousHeight = 0;
			_previousSubtitleIndex = -1;
			_finishedFrameOpenCheck = false;
		}

		// Fires FinishedPlaying when appropriate and clears its fired flag again if
		// playback continues (e.g. after a loop/seek), so it can re-fire later.
		private void CheckAndClearStartedAndFinishedEvents()
		{
			//NOTE: Fixes a bug where the event was being fired immediately, so when a file is opened, the finishedPlaying fired flag gets set but
			//is then set to true immediately afterwards due to the returned value
			_finishedFrameOpenCheck = false;
			if (IsHandleEvent(MediaPlayerEvent.EventType.FinishedPlaying))
			{
				if (FireEventIfPossible(MediaPlayerEvent.EventType.FinishedPlaying, _eventFired_FinishedPlaying))
				{
					// If the event callback opened new media, _finishedFrameOpenCheck
					// will have changed and the flag stays clear
					_eventFired_FinishedPlaying = !_finishedFrameOpenCheck;
				}
			}

			// If FinishedPlaying already fired but playback is running again and is
			// not at the end, allow the event to fire once more
			if (_eventFired_FinishedPlaying &&
				IsHandleEvent(MediaPlayerEvent.EventType.FinishedPlaying) &&
				_controlInterface.IsPlaying() &&
				!_controlInterface.IsFinished())
			{
				bool reset = true;
				// RJT NOTE: Commented out for now as seems over-aggressive and can lead to freeze conditions as seen in: https://github.com/RenderHeads/UnityPlugin-AVProVideo/issues/1692
				// - If we need to reinstate then we'd likely need considerably more tolerance, especially on slower machines
#if false//UNITY_EDITOR_WIN || (!UNITY_EDITOR && (UNITY_STANDALONE_WIN || UNITY_WSA))
				reset = false;
				if (_infoInterface.HasVideo())
				{
					// Some streaming HLS/Dash content don't provide a frame rate
					if (_infoInterface.GetVideoFrameRate() > 0f)
					{
						// Don't reset if within a frame of the end of the video, important for time > duration workaround
						float secondsPerFrame = 1f / _infoInterface.GetVideoFrameRate();
						if (_infoInterface.GetDuration() - _controlInterface.GetCurrentTime() > secondsPerFrame)
						{
							reset = true;
						}
					}
					else
					{
						// Just check if we're not beyond the duration
						if (_controlInterface.GetCurrentTime() < _infoInterface.GetDuration())
						{
							reset = true;
						}
					}
				}
				else
				{
					// For audio only media just check if we're not beyond the duration
					if (_controlInterface.GetCurrentTime() < _infoInterface.GetDuration())
					{
						reset = true;
					}
				}
#endif
				if (reset)
				{
					//Debug.Log("Reset");
					_eventFired_FinishedPlaying = false;
				}
			}
		}

		// Fires each one-shot event at most once per media, tracking via the
		// _eventFired_* flags (FireEventIfPossible returns the updated flag).
		private void HandleOneShotEvents()
		{
			_eventFired_MetaDataReady = FireEventIfPossible(MediaPlayerEvent.EventType.MetaDataReady, _eventFired_MetaDataReady);
			_eventFired_ReadyToPlay = FireEventIfPossible(MediaPlayerEvent.EventType.ReadyToPlay, _eventFired_ReadyToPlay);
			_eventFired_Started = FireEventIfPossible(MediaPlayerEvent.EventType.Started, _eventFired_Started);
			_eventFired_FirstFrameReady = FireEventIfPossible(MediaPlayerEvent.EventType.FirstFrameReady, _eventFired_FirstFrameReady);
		}

		// Fires events that may occur repeatedly during playback, using
		// edge-detection against the cached _eventState_* values.
		private void HandleRecurringEvents()
		{
			// Subtitle changing
			if (FireEventIfPossible(MediaPlayerEvent.EventType.SubtitleChange, false))
			{
				_previousSubtitleIndex = _subtitlesInterface.GetSubtitleIndex();
			}

			// Resolution changing
			if (FireEventIfPossible(MediaPlayerEvent.EventType.ResolutionChanged, false))
			{
				_eventState_PreviousWidth = _infoInterface.GetVideoWidth();
				_eventState_PreviousHeight = _infoInterface.GetVideoHeight();
			}

			// Timed Metadata
			if (FireEventIfPossible(MediaPlayerEvent.EventType.TimedMetadataChanged, false))
			{

			}

			// Stalling - fire Stalled/Unstalled on state transitions
			if (IsHandleEvent(MediaPlayerEvent.EventType.Stalled))
			{
				bool newState = _infoInterface.IsPlaybackStalled();
				if (newState != _eventState_PlaybackStalled)
				{
					_eventState_PlaybackStalled = newState;

					var newEvent = _eventState_PlaybackStalled ? MediaPlayerEvent.EventType.Stalled : MediaPlayerEvent.EventType.Unstalled;
					FireEventIfPossible(newEvent, false);
				}
			}

			// Seeking - fire StartedSeeking/FinishedSeeking on state transitions
			if (IsHandleEvent(MediaPlayerEvent.EventType.StartedSeeking))
			{
				bool newState = _controlInterface.IsSeeking();
				if (newState != _eventState_PlaybackSeeking)
				{
					_eventState_PlaybackSeeking = newState;

					var newEvent = _eventState_PlaybackSeeking ? MediaPlayerEvent.EventType.StartedSeeking : MediaPlayerEvent.EventType.FinishedSeeking;
					FireEventIfPossible(newEvent, false);
				}
			}

			// Buffering - fire StartedBuffering/FinishedBuffering on state transitions
			if (IsHandleEvent(MediaPlayerEvent.EventType.StartedBuffering))
			{
				bool newState = _controlInterface.IsBuffering();
				if (newState != _eventState_PlaybackBuffering)
				{
					_eventState_PlaybackBuffering = newState;

					var newEvent = _eventState_PlaybackBuffering ? MediaPlayerEvent.EventType.StartedBuffering : MediaPlayerEvent.EventType.FinishedBuffering;
					FireEventIfPossible(newEvent, false);
				}
			}

			// Pausing - fire Paused/Unpaused on state transitions
			if (IsHandleEvent(MediaPlayerEvent.EventType.Paused))
			{
				bool newState = _controlInterface.IsPaused();
				if (newState != _eventState_Paused)
				{
					_eventState_Paused = newState;
					var newEvent = _eventState_Paused ? MediaPlayerEvent.EventType.Paused : MediaPlayerEvent.EventType.Unpaused;
					FireEventIfPossible(newEvent, false);
				}
			}
		}

		// Per-frame event pump: evaluates and dispatches all event types.
		// Skips all work when there is no player or no listeners.
		private void UpdateEvents()
		{
			if (_controlInterface == null)
				return;
			if (_events == null || !_events.HasListeners())
				return;

			// Reset some event states that can reset during playback
			CheckAndClearStartedAndFinishedEvents();

			// Events that can only fire once
			HandleOneShotEvents();

			// Events that can fire multiple times
			HandleRecurringEvents();
		}

		// Returns true when the given event type is enabled in the event mask.
		// NOTE(review): the shift is int-width, so event types >= 32 would wrap - confirm enum size.
		protected bool IsHandleEvent(MediaPlayerEvent.EventType eventType)
		{
			return ((uint)_eventMask & (1 << (int)eventType)) != 0;
		}

		// Fires the event if its preconditions hold and it has not already fired.
		// Returns the (possibly updated) fired flag for one-shot bookkeeping.
		private bool FireEventIfPossible(MediaPlayerEvent.EventType eventType, bool hasFired)
		{
			if (CanFireEvent(eventType, hasFired))
			{
#if UNITY_EDITOR
				// Special internal global event, called when media is loaded
				// Currently used by the RecentItem class
				if (eventType == MediaPlayerEvent.EventType.Started)
				{
					string fullPath = GetResolvedFilePath(_mediaPath.Path, _mediaPath.PathType);
					InternalMediaLoadedEvent.Invoke(fullPath);
				}
#endif

				hasFired = true;
				_events.Invoke(this, eventType, ErrorCode.None);
			}
			return hasFired;
		}

		// Evaluates the precondition for each event type against the current
		// player state. Returns false if already fired or the type is masked out.
		private bool CanFireEvent(MediaPlayerEvent.EventType et, bool hasFired)
		{
			if (_controlInterface == null)
				return false;
			if (_events == null)
				return false;
			if (hasFired)
				return false;
			if (!IsHandleEvent(et))
				return false;

			bool result = false;
			switch (et)
			{
				case MediaPlayerEvent.EventType.FinishedPlaying:
					result = (!_controlInterface.IsLooping() && _controlInterface.CanPlay() && _controlInterface.IsFinished());
					break;

				case MediaPlayerEvent.EventType.MetaDataReady:
					result = (_controlInterface.HasMetaData());
					break;

				case MediaPlayerEvent.EventType.FirstFrameReady:
					// [MOZ 20/1/21] Removed HasMetaData check as preventing the event from being triggered on (i|mac|tv)OS
					result = (_textureInterface != null && _controlInterface.CanPlay() /*&& _controlInterface.HasMetaData()*/ && _textureInterface.GetTextureFrameCount() > 0);
					break;

				case MediaPlayerEvent.EventType.ReadyToPlay:
					result = (!_controlInterface.IsPlaying() && _controlInterface.CanPlay() && !_autoPlayOnStart);
					break;

				case MediaPlayerEvent.EventType.Started:
					result = (_controlInterface.IsPlaying());
					break;

				case MediaPlayerEvent.EventType.SubtitleChange:
				{
					// Index change, or same cue index with changed text
					result = _previousSubtitleIndex != _subtitlesInterface.GetSubtitleIndex();
					if (!result)
					{
						result = _baseMediaPlayer.InternalIsChangedTextCue();
					}
					break;
				}

				case MediaPlayerEvent.EventType.Stalled:
					result = _infoInterface.IsPlaybackStalled();
					break;

				case MediaPlayerEvent.EventType.Unstalled:
					result = !_infoInterface.IsPlaybackStalled();
					break;

				case MediaPlayerEvent.EventType.StartedSeeking:
					result = _controlInterface.IsSeeking();
					break;

				case MediaPlayerEvent.EventType.FinishedSeeking:
					result = !_controlInterface.IsSeeking();
					break;

				case MediaPlayerEvent.EventType.StartedBuffering:
					result = _controlInterface.IsBuffering();
					break;

				case MediaPlayerEvent.EventType.FinishedBuffering:
					result = !_controlInterface.IsBuffering();
					break;

				case MediaPlayerEvent.EventType.ResolutionChanged:
					result = (_infoInterface != null && (_eventState_PreviousWidth != _infoInterface.GetVideoWidth() || _eventState_PreviousHeight != _infoInterface.GetVideoHeight()));
					break;

				case MediaPlayerEvent.EventType.Paused:
					result = _controlInterface.IsPaused();
					break;

				case MediaPlayerEvent.EventType.Unpaused:
					result = !_controlInterface.IsPaused();
					break;

				case MediaPlayerEvent.EventType.TimedMetadataChanged:
					result = _baseMediaPlayer.HasNewTimedMetadataItem();
					break;

				default:
					Debug.LogWarning("[AVProVideo] Unhandled event type");
					break;
			}
			return result;
		}
	}
|
||||
|
||||
} // namespace RenderHeads.Media.AVProVideo
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 6be886b3f1f953843bda70e505701ee3
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,211 @@
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
	public partial class MediaPlayer : MonoBehaviour
	{
		#region Extract Frame

		// Busy-waits (up to timeoutMs) for the texture frame counter to advance past
		// lastFrameCount, pumping the player each iteration. Returns true if a new
		// frame arrived in time. Always issues a final Render() before returning.
		private bool ForceWaitForNewFrame(int lastFrameCount, float timeoutMs)
		{
			bool result = false;
			// Wait for the frame to change, or timeout to happen (for the case that there is no new frame for this time)
			System.DateTime startTime = System.DateTime.Now;
			int iterationCount = 0;
			while (Control != null && (System.DateTime.Now - startTime).TotalMilliseconds < (double)timeoutMs)
			{
				_playerInterface.Update();

				// TODO: check if Seeking has completed! Then we don't have to wait

				// If frame has changed we can continue
				// NOTE: this will never happen because GL.IssuePlugin.Event is never called in this loop
				if (lastFrameCount != TextureProducer.GetTextureFrameCount())
				{
					result = true;
					break;
				}

				iterationCount++;

				// NOTE: we tried to add Sleep for 1ms but it was very slow, so switched to this time based method which burns more CPU but about double the speed
				// NOTE: had to add the Sleep back in as after too many iterations (over 1000000) of GL.IssuePluginEvent Unity seems to lock up
				// NOTE: seems that GL.IssuePluginEvent can't be called if we're stuck in a while loop and they just stack up
				//System.Threading.Thread.Sleep(0);
			}

			_playerInterface.Render();

			return result;
		}

		/// <summary>
		/// Create or return (if cached) a camera that is inactive and renders nothing
		/// This camera is used to call .Render() on which causes the render thread to run
		/// This is useful for forcing GL.IssuePluginEvent() to run and is used for
		/// wait for frames to render for ExtractFrame() and UpdateTimeScale()
		/// </summary>
		private static Camera GetDummyCamera()
		{
			if (_dummyCamera == null)
			{
				const string goName = "AVPro Video Dummy Camera";
				// Reuse an existing hidden GameObject if one survives from a previous run
				GameObject go = GameObject.Find(goName);
				if (go == null)
				{
					go = new GameObject(goName);
					go.hideFlags = HideFlags.HideInHierarchy | HideFlags.DontSave;
					go.SetActive(false);
					Object.DontDestroyOnLoad(go);

					_dummyCamera = go.AddComponent<Camera>();
					_dummyCamera.hideFlags = HideFlags.HideInInspector | HideFlags.DontSave;
					// Render nothing: no layers, no clearing
					_dummyCamera.cullingMask = 0;
					_dummyCamera.clearFlags = CameraClearFlags.Nothing;
					_dummyCamera.enabled = false;
				}
				else
				{
					_dummyCamera = go.GetComponent<Camera>();
				}
			}
			//Debug.Assert(_dummyCamera != null);
			return _dummyCamera;
		}

		// Coroutine body for ExtractFrameAsync(): optionally seeks (accurate or fast)
		// to timeSeconds, waits asynchronously for the new frame, then converts it to
		// a readable Texture2D and invokes callback with the result (or with the
		// unchanged target on failure).
		private IEnumerator ExtractFrameCoroutine(Texture2D target, ProcessExtractedFrame callback, double timeSeconds = -1.0, bool accurateSeek = true, int timeoutMs = 1000, int timeThresholdMs = 100)
		{
#if (!UNITY_EDITOR && UNITY_ANDROID) || UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN || UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX || UNITY_IOS || UNITY_TVOS
			Texture2D result = target;

			Texture frame = null;

			if (_controlInterface != null)
			{
				if (timeSeconds >= 0f)
				{
					Pause();

					// If the right frame is already available (or close enough) just grab it
					if (TextureProducer.GetTexture() != null && (System.Math.Abs(_controlInterface.GetCurrentTime() - timeSeconds) < (timeThresholdMs / 1000.0)))
					{
						frame = TextureProducer.GetTexture();
					}
					else
					{
						int preSeekFrameCount = _textureInterface.GetTextureFrameCount();

						// Seek to the frame
						if (accurateSeek)
						{
							_controlInterface.Seek(timeSeconds);
						}
						else
						{
							_controlInterface.SeekFast(timeSeconds);
						}

						// Wait for the new frame to arrive
						if (!_controlInterface.WaitForNextFrame(GetDummyCamera(), preSeekFrameCount))
						{
							// If WaitForNextFrame fails (e.g. in android single threaded), we run the below code to asynchronously wait for the frame
							int currFc = TextureProducer.GetTextureFrameCount();
							int iterations = 0;
							int maxIterations = 50;

							//+1 as often there will be an extra frame produced after pause (so we need to wait for the second frame instead)
							while((currFc + 1) >= TextureProducer.GetTextureFrameCount() && iterations++ < maxIterations)
							{
								yield return null;
							}
						}
						frame = TextureProducer.GetTexture();
					}
				}
				else
				{
					// No seek requested - just grab the current frame
					frame = TextureProducer.GetTexture();
				}
			}
			if (frame != null)
			{
				// Convert the (potentially GPU-only) frame into a readable Texture2D
				result = Helper.GetReadableTexture(frame, TextureProducer.RequiresVerticalFlip(), Helper.GetOrientation(Info.GetTextureTransform()), target);
			}
#else
			// Other platforms: fall back to the synchronous implementation
			Texture2D result = ExtractFrame(target, timeSeconds, accurateSeek, timeoutMs, timeThresholdMs);
#endif
			callback(result);

			yield return null;
		}

		// Asynchronously extracts a frame; callback receives the readable Texture2D.
		// "target" can be null or an existing texture to reuse.
		public void ExtractFrameAsync(Texture2D target, ProcessExtractedFrame callback, double timeSeconds = -1.0, bool accurateSeek = true, int timeoutMs = 1000, int timeThresholdMs = 100)
		{
			StartCoroutine(ExtractFrameCoroutine(target, callback, timeSeconds, accurateSeek, timeoutMs, timeThresholdMs));
		}

		// "target" can be null or you can pass in an existing texture.
		// Synchronously extracts a frame and returns it as a readable Texture2D
		// (returns target unchanged if no frame was available).
		public Texture2D ExtractFrame(Texture2D target, double timeSeconds = -1.0, bool accurateSeek = true, int timeoutMs = 1000, int timeThresholdMs = 100)
		{
			Texture2D result = target;

			// Extract frames returns the internal frame of the video player
			Texture frame = ExtractFrame(timeSeconds, accurateSeek, timeoutMs, timeThresholdMs);
			if (frame != null)
			{
				result = Helper.GetReadableTexture(frame, TextureProducer.RequiresVerticalFlip(), Helper.GetOrientation(Info.GetTextureTransform()), target);
			}

			return result;
		}

		// Returns the player's internal frame texture, optionally after seeking to
		// timeSeconds (within timeThresholdMs tolerance) and blocking up to timeoutMs
		// for the new frame. May return null.
		private Texture ExtractFrame(double timeSeconds = -1.0, bool accurateSeek = true, int timeoutMs = 1000, int timeThresholdMs = 100)
		{
			Texture result = null;

			if (_controlInterface != null)
			{
				if (timeSeconds >= 0f)
				{
					Pause();

					// If the right frame is already available (or close enough) just grab it
					if (TextureProducer.GetTexture() != null && (System.Math.Abs(_controlInterface.GetCurrentTime() - timeSeconds) < (timeThresholdMs / 1000.0)))
					{
						result = TextureProducer.GetTexture();
					}
					else
					{
						// Store frame count before seek
						int frameCount = TextureProducer.GetTextureFrameCount();

						// Seek to the frame
						if (accurateSeek)
						{
							_controlInterface.Seek(timeSeconds);
						}
						else
						{
							_controlInterface.SeekFast(timeSeconds);
						}

						// Wait for frame to change
						ForceWaitForNewFrame(frameCount, timeoutMs);
						result = TextureProducer.GetTexture();
					}
				}
				else
				{
					// No seek requested - just grab the current frame
					result = TextureProducer.GetTexture();
				}
			}
			return result;
		}
		#endregion // Extract Frame
	}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 810d3ce69a3b01f409c733c7cfbd119c
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,129 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
public bool OpenMediaFromBuffer(byte[] buffer, bool autoPlay = true)
|
||||
{
|
||||
_mediaPath = new MediaPath("buffer", MediaPathType.AbsolutePathOrURL);
|
||||
_autoPlayOnStart = autoPlay;
|
||||
|
||||
if (_controlInterface == null)
|
||||
{
|
||||
Initialise();
|
||||
}
|
||||
|
||||
return OpenMediaFromBufferInternal(buffer);
|
||||
}
|
||||
|
||||
public bool StartOpenChunkedMediaFromBuffer(ulong length, bool autoPlay = true)
|
||||
{
|
||||
_mediaPath = new MediaPath("buffer", MediaPathType.AbsolutePathOrURL);
|
||||
_autoPlayOnStart = autoPlay;
|
||||
|
||||
if (_controlInterface == null)
|
||||
{
|
||||
Initialise();
|
||||
}
|
||||
|
||||
return StartOpenMediaFromBufferInternal(length);
|
||||
}
|
||||
|
||||
public bool AddChunkToVideoBuffer(byte[] chunk, ulong offset, ulong chunkSize)
|
||||
{
|
||||
return AddChunkToBufferInternal(chunk, offset, chunkSize);
|
||||
}
|
||||
|
||||
public bool EndOpenChunkedVideoFromBuffer()
|
||||
{
|
||||
return EndOpenMediaFromBufferInternal();
|
||||
}
|
||||
|
||||
private bool OpenMediaFromBufferInternal(byte[] buffer)
|
||||
{
|
||||
bool result = false;
|
||||
// Open the video file
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
CloseMedia();
|
||||
|
||||
_isMediaOpened = true;
|
||||
_autoPlayOnStartTriggered = !_autoPlayOnStart;
|
||||
|
||||
Helper.LogInfo("Opening buffer of length " + buffer.Length, this);
|
||||
|
||||
if (!_controlInterface.OpenMediaFromBuffer(buffer))
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to open buffer", this);
|
||||
if (GetCurrentPlatformOptions() != PlatformOptionsWindows || PlatformOptionsWindows.videoApi != Windows.VideoApi.DirectShow)
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Loading from buffer is currently only supported in Windows when using the DirectShow API");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
SetPlaybackOptions();
|
||||
result = true;
|
||||
StartRenderCoroutine();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private bool StartOpenMediaFromBufferInternal(ulong length)
|
||||
{
|
||||
bool result = false;
|
||||
// Open the video file
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
CloseMedia();
|
||||
|
||||
_isMediaOpened = true;
|
||||
_autoPlayOnStartTriggered = !_autoPlayOnStart;
|
||||
|
||||
Helper.LogInfo("Starting Opening buffer of length " + length, this);
|
||||
|
||||
if (!_controlInterface.StartOpenMediaFromBuffer(length))
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to start open video from buffer", this);
|
||||
if (GetCurrentPlatformOptions() != PlatformOptionsWindows || PlatformOptionsWindows.videoApi != Windows.VideoApi.DirectShow)
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Loading from buffer is currently only supported in Windows when using the DirectShow API");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
SetPlaybackOptions();
|
||||
result = true;
|
||||
StartRenderCoroutine();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private bool AddChunkToBufferInternal(byte[] chunk, ulong offset, ulong chunkSize)
|
||||
{
|
||||
if (Control != null)
|
||||
{
|
||||
return Control.AddChunkToMediaBuffer(chunk, offset, chunkSize);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private bool EndOpenMediaFromBufferInternal()
|
||||
{
|
||||
if (Control != null)
|
||||
{
|
||||
return Control.EndOpenMediaFromBuffer();
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: bd1bd18da7d2dc7468c9799e5b02caea
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,60 @@
|
||||
using UnityEngine;
|
||||
#if NETFX_CORE
|
||||
using Windows.Storage.Streams;
|
||||
#endif
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
|
||||
#if NETFX_CORE
|
||||
public bool OpenVideoFromStream(IRandomAccessStream ras, string path, bool autoPlay = true)
|
||||
{
|
||||
_videoLocation = FileLocation.AbsolutePathOrURL;
|
||||
_videoPath = path;
|
||||
_autoPlayOnStart = autoPlay;
|
||||
|
||||
if (_controlInterface == null)
|
||||
{
|
||||
Initialise();
|
||||
}
|
||||
|
||||
return OpenVideoFromStream(ras);
|
||||
}
|
||||
|
||||
private bool OpenVideoFromStream(IRandomAccessStream ras)
|
||||
{
|
||||
bool result = false;
|
||||
// Open the video file
|
||||
if (_controlInterface != null)
|
||||
{
|
||||
CloseVideo();
|
||||
|
||||
_isVideoOpened = true;
|
||||
_autoPlayOnStartTriggered = !_autoPlayOnStart;
|
||||
|
||||
// Potentially override the file location
|
||||
long fileOffset = GetPlatformFileOffset();
|
||||
|
||||
if (!Control.OpenVideoFromFile(ras, _videoPath, fileOffset, null, _manuallySetAudioSourceProperties ? _sourceAudioSampleRate : 0,
|
||||
_manuallySetAudioSourceProperties ? _sourceAudioChannels : 0))
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to open " + _videoPath, this);
|
||||
}
|
||||
else
|
||||
{
|
||||
SetPlaybackOptions();
|
||||
result = true;
|
||||
StartRenderCoroutine();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 4e6c8c5399247d0478ed7ecf17b7d87f
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 1d9536a1e758279489d9add3e1ba26ad
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,150 @@
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
public bool EnableSubtitles(MediaPath mediaPath)
|
||||
{
|
||||
bool result = false;
|
||||
if (_subtitlesInterface != null)
|
||||
{
|
||||
if (mediaPath != null && !string.IsNullOrEmpty(mediaPath.Path))
|
||||
{
|
||||
string fullPath = mediaPath.GetResolvedFullPath();
|
||||
|
||||
bool checkForFileExist = true;
|
||||
if (fullPath.Contains("://"))
|
||||
{
|
||||
checkForFileExist = false;
|
||||
}
|
||||
#if (!UNITY_EDITOR && UNITY_ANDROID)
|
||||
checkForFileExist = false;
|
||||
#endif
|
||||
|
||||
if (checkForFileExist && !System.IO.File.Exists(fullPath))
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Subtitle file not found: " + fullPath, this);
|
||||
}
|
||||
else
|
||||
{
|
||||
Helper.LogInfo("Opening subtitles " + fullPath, this);
|
||||
|
||||
_previousSubtitleIndex = -1;
|
||||
|
||||
try
|
||||
{
|
||||
if (fullPath.Contains("://"))
|
||||
{
|
||||
// Use coroutine and WWW class for loading
|
||||
if (_loadSubtitlesRoutine != null)
|
||||
{
|
||||
StopCoroutine(_loadSubtitlesRoutine);
|
||||
_loadSubtitlesRoutine = null;
|
||||
}
|
||||
_loadSubtitlesRoutine = StartCoroutine(LoadSubtitlesCoroutine(fullPath, mediaPath));
|
||||
}
|
||||
else
|
||||
{
|
||||
// Load directly from file
|
||||
string subtitleData = System.IO.File.ReadAllText(fullPath);
|
||||
if (_subtitlesInterface.LoadSubtitlesSRT(subtitleData))
|
||||
{
|
||||
_subtitlePath = mediaPath;
|
||||
_sideloadSubtitles = false;
|
||||
result = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to load subtitles" + fullPath, this);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
catch (System.Exception e)
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to load subtitles " + fullPath, this);
|
||||
Debug.LogException(e, this);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[AVProVideo] No subtitle file path specified", this);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_queueSubtitlePath = mediaPath;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private IEnumerator LoadSubtitlesCoroutine(string url, MediaPath mediaPath)
|
||||
{
|
||||
UnityEngine.Networking.UnityWebRequest www = UnityEngine.Networking.UnityWebRequest.Get(url);
|
||||
#if UNITY_2017_2_OR_NEWER
|
||||
yield return www.SendWebRequest();
|
||||
#else
|
||||
yield return www.Send();
|
||||
#endif
|
||||
|
||||
string subtitleData = string.Empty;
|
||||
|
||||
#if UNITY_2020_1_OR_NEWER
|
||||
if (www.result == UnityEngine.Networking.UnityWebRequest.Result.Success)
|
||||
#elif UNITY_2017_1_OR_NEWER
|
||||
if (!www.isNetworkError)
|
||||
#else
|
||||
if (!www.isError)
|
||||
#endif
|
||||
{
|
||||
subtitleData = ((UnityEngine.Networking.DownloadHandler)www.downloadHandler).text;
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Error loading subtitles '" + www.error + "' from " + url);
|
||||
}
|
||||
|
||||
if (_subtitlesInterface.LoadSubtitlesSRT(subtitleData))
|
||||
{
|
||||
_subtitlePath = mediaPath;
|
||||
_sideloadSubtitles = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[AVProVideo] Failed to load subtitles" + url, this);
|
||||
}
|
||||
|
||||
_loadSubtitlesRoutine = null;
|
||||
|
||||
www.Dispose();
|
||||
}
|
||||
|
||||
public void DisableSubtitles()
|
||||
{
|
||||
if (_loadSubtitlesRoutine != null)
|
||||
{
|
||||
StopCoroutine(_loadSubtitlesRoutine);
|
||||
_loadSubtitlesRoutine = null;
|
||||
}
|
||||
|
||||
if (_subtitlesInterface != null)
|
||||
{
|
||||
_previousSubtitleIndex = -1;
|
||||
_sideloadSubtitles = false;
|
||||
_subtitlesInterface.LoadSubtitlesSRT(string.Empty);
|
||||
}
|
||||
else
|
||||
{
|
||||
_queueSubtitlePath = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: f4ed2744d6ff80845bbbd59e8f6c732b
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,93 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour
|
||||
{
|
||||
|
||||
#region Support for Time Scale
|
||||
#if AVPROVIDEO_BETA_SUPPORT_TIMESCALE
|
||||
// Adjust this value to get faster performance but may drop frames.
|
||||
// Wait longer to ensure there is enough time for frames to process
|
||||
private const float TimeScaleTimeoutMs = 20f;
|
||||
private bool _timeScaleIsControlling;
|
||||
private double _timeScaleVideoTime;
|
||||
|
||||
private void UpdateTimeScale()
|
||||
{
|
||||
if (Time.timeScale != 1f || Time.captureFramerate != 0)
|
||||
{
|
||||
if (_controlInterface.IsPlaying())
|
||||
{
|
||||
_controlInterface.Pause();
|
||||
_timeScaleIsControlling = true;
|
||||
_timeScaleVideoTime = _controlInterface.GetCurrentTime();
|
||||
}
|
||||
|
||||
if (_timeScaleIsControlling)
|
||||
{
|
||||
// Progress time
|
||||
_timeScaleVideoTime += Time.deltaTime;
|
||||
|
||||
// Handle looping
|
||||
if (_controlInterface.IsLooping() && _timeScaleVideoTime >= Info.GetDuration())
|
||||
{
|
||||
// TODO: really we should seek to (_timeScaleVideoTime % Info.GetDuration())
|
||||
_timeScaleVideoTime = 0.0;
|
||||
}
|
||||
|
||||
int preSeekFrameCount = TextureProducer.GetTextureFrameCount();
|
||||
|
||||
// Seek to the new time
|
||||
{
|
||||
double preSeekTime = Control.GetCurrentTime();
|
||||
|
||||
// Seek
|
||||
_controlInterface.Seek(_timeScaleVideoTime);
|
||||
|
||||
// Early out, if after the seek the time hasn't changed, the seek was probably too small to go to the next frame.
|
||||
// TODO: This behaviour may be different on other platforms (not Windows) and needs more testing.
|
||||
if (Mathf.Approximately((float)preSeekTime, (float)_controlInterface.GetCurrentTime()))
|
||||
{
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for the new frame to arrive
|
||||
if (!_controlInterface.WaitForNextFrame(GetDummyCamera(), preSeekFrameCount))
|
||||
{
|
||||
// If WaitForNextFrame fails (e.g. in android single threaded), we run the below code to asynchronously wait for the frame
|
||||
System.DateTime startTime = System.DateTime.Now;
|
||||
int lastFrameCount = TextureProducer.GetTextureFrameCount();
|
||||
|
||||
while (_controlInterface != null && (System.DateTime.Now - startTime).TotalMilliseconds < (double)TimeScaleTimeoutMs)
|
||||
{
|
||||
_playerInterface.Update();
|
||||
_playerInterface.Render();
|
||||
GetDummyCamera().Render();
|
||||
if (lastFrameCount != TextureProducer.GetTextureFrameCount())
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Restore playback when timeScale becomes 1
|
||||
if (_timeScaleIsControlling)
|
||||
{
|
||||
_controlInterface.Play();
|
||||
_timeScaleIsControlling = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#endregion // Support for Time Scale
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: cdb92d6bab7106944bcd3cd7a034df6e
|
||||
timeCreated: 1544813302
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,83 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public partial class MediaPlayer : MonoBehaviour, ISerializationCallbackReceiver
|
||||
{
|
||||
#region Upgrade from Version 1.x
|
||||
[SerializeField, HideInInspector]
|
||||
private string m_VideoPath;
|
||||
[SerializeField, HideInInspector]
|
||||
private FileLocation m_VideoLocation = FileLocation.RelativeToStreamingAssetsFolder;
|
||||
|
||||
private enum FileLocation
|
||||
{
|
||||
AbsolutePathOrURL,
|
||||
RelativeToProjectFolder,
|
||||
RelativeToStreamingAssetsFolder,
|
||||
RelativeToDataFolder,
|
||||
RelativeToPersistentDataFolder,
|
||||
}
|
||||
|
||||
/*
|
||||
[SerializeField, HideInInspector]
|
||||
private StereoPacking m_StereoPacking;
|
||||
[SerializeField, HideInInspector]
|
||||
private AlphaPacking m_AlphaPacking;
|
||||
*/
|
||||
|
||||
void ISerializationCallbackReceiver.OnBeforeSerialize()
|
||||
{
|
||||
/*
|
||||
m_StereoPacking = _fallbackMediaHints.stereoPacking;
|
||||
m_AlphaPacking = _fallbackMediaHints.alphaPacking;
|
||||
*/
|
||||
}
|
||||
|
||||
void ISerializationCallbackReceiver.OnAfterDeserialize()
|
||||
{
|
||||
if (!string.IsNullOrEmpty(m_VideoPath))
|
||||
{
|
||||
MediaPathType mediaPathType = MediaPathType.AbsolutePathOrURL;
|
||||
switch (m_VideoLocation)
|
||||
{
|
||||
default:
|
||||
case FileLocation.AbsolutePathOrURL:
|
||||
mediaPathType = MediaPathType.AbsolutePathOrURL;
|
||||
break;
|
||||
case FileLocation.RelativeToProjectFolder:
|
||||
mediaPathType = MediaPathType.RelativeToProjectFolder;
|
||||
break;
|
||||
case FileLocation.RelativeToStreamingAssetsFolder:
|
||||
mediaPathType = MediaPathType.RelativeToStreamingAssetsFolder;
|
||||
break;
|
||||
case FileLocation.RelativeToDataFolder:
|
||||
mediaPathType = MediaPathType.RelativeToDataFolder;
|
||||
break;
|
||||
case FileLocation.RelativeToPersistentDataFolder:
|
||||
mediaPathType = MediaPathType.RelativeToPersistentDataFolder;
|
||||
break;
|
||||
}
|
||||
_mediaPath = new MediaPath(m_VideoPath, mediaPathType);
|
||||
_mediaSource = MediaSource.Path;
|
||||
m_VideoPath = null;
|
||||
}
|
||||
|
||||
/*
|
||||
if (m_StereoPacking != _fallbackMediaHints.stereoPacking)
|
||||
{
|
||||
_fallbackMediaHints.stereoPacking = m_StereoPacking;
|
||||
}
|
||||
if (m_AlphaPacking != _fallbackMediaHints.alphaPacking)
|
||||
{
|
||||
_fallbackMediaHints.alphaPacking = m_AlphaPacking;
|
||||
}
|
||||
*/
|
||||
}
|
||||
#endregion // Upgrade from Version 1.x
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 2e1421b74b1861b42ba7287d322c2f19
|
||||
timeCreated: 1614963169
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
1015
Assets/AVProVideo/Runtime/Scripts/Components/PlaylistMediaPlayer.cs
Normal file
1015
Assets/AVProVideo/Runtime/Scripts/Components/PlaylistMediaPlayer.cs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,17 @@
|
||||
fileFormatVersion: 2
|
||||
guid: e9ea31f33222f4b418e4e051a8a5ed24
|
||||
timeCreated: 1588679963
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences:
|
||||
- m_AudioHeadTransform: {instanceID: 0}
|
||||
- m_AudioFocusTransform: {instanceID: 0}
|
||||
- _transitionShader: {fileID: 4800000, guid: 73f378cafe7b4a745907b70e76bb3259, type: 3}
|
||||
- _playerA: {instanceID: 0}
|
||||
- _playerB: {instanceID: 0}
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,235 @@
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2019-2023 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// Renders the video texture to a RenderTexture - either one provided by the user (external) or to an internal one.
|
||||
/// The video frames can optionally be "resolved" to unpack packed alpha, display a single stereo eye, generate mip maps, and apply colorspace conversions
|
||||
[AddComponentMenu("AVPro Video/Resolve To RenderTexture", 330)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public class ResolveToRenderTexture : MonoBehaviour
|
||||
{
|
||||
[SerializeField] MediaPlayer _mediaPlayer = null;
|
||||
[SerializeField] VideoResolveOptions _options = VideoResolveOptions.Create();
|
||||
[SerializeField] VideoRender.ResolveFlags _resolveFlags = (VideoRender.ResolveFlags.ColorspaceSRGB | VideoRender.ResolveFlags.Mipmaps | VideoRender.ResolveFlags.PackedAlpha | VideoRender.ResolveFlags.StereoLeft);
|
||||
[SerializeField] RenderTexture _externalTexture = null;
|
||||
|
||||
private Material _materialResolve;
|
||||
private bool _isMaterialSetup;
|
||||
private bool _isMaterialDirty;
|
||||
private bool _isMaterialOES;
|
||||
private RenderTexture _internalTexture;
|
||||
private int _textureFrameCount = -1;
|
||||
|
||||
// Material used for blitting the texture as we need a shader to provide clamp to border colour style texture sampling
|
||||
private Material _materialBlit;
|
||||
private int _srcTexId;
|
||||
|
||||
public MediaPlayer MediaPlayer
|
||||
{
|
||||
get
|
||||
{
|
||||
return _mediaPlayer;
|
||||
}
|
||||
set
|
||||
{
|
||||
ChangeMediaPlayer(value);
|
||||
}
|
||||
}
|
||||
|
||||
public VideoResolveOptions VideoResolveOptions
|
||||
{
|
||||
get
|
||||
{
|
||||
return _options;
|
||||
}
|
||||
set
|
||||
{
|
||||
_options = value;
|
||||
_isMaterialDirty = true;
|
||||
}
|
||||
}
|
||||
|
||||
public RenderTexture ExternalTexture
|
||||
{
|
||||
get
|
||||
{
|
||||
return _externalTexture;
|
||||
}
|
||||
set
|
||||
{
|
||||
_externalTexture = value;
|
||||
}
|
||||
}
|
||||
|
||||
public RenderTexture TargetTexture
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_externalTexture == null)
|
||||
return _internalTexture;
|
||||
return _externalTexture;
|
||||
}
|
||||
}
|
||||
|
||||
public void SetMaterialDirty()
|
||||
{
|
||||
_isMaterialDirty = true;
|
||||
}
|
||||
|
||||
private void ChangeMediaPlayer(MediaPlayer mediaPlayer)
|
||||
{
|
||||
if (_mediaPlayer != mediaPlayer)
|
||||
{
|
||||
_mediaPlayer = mediaPlayer;
|
||||
_textureFrameCount = -1;
|
||||
_isMaterialSetup = false;
|
||||
_isMaterialDirty = true;
|
||||
Resolve();
|
||||
}
|
||||
}
|
||||
|
||||
void Start()
|
||||
{
|
||||
_isMaterialOES = _mediaPlayer != null ? _mediaPlayer.IsUsingAndroidOESPath() : false;
|
||||
_materialResolve = VideoRender.CreateResolveMaterial(_isMaterialOES);
|
||||
VideoRender.SetupMaterialForMedia(_materialResolve, _mediaPlayer, -1);
|
||||
|
||||
_materialBlit = new Material(Shader.Find("AVProVideo/Internal/Blit"));
|
||||
_srcTexId = Shader.PropertyToID("_SrcTex");
|
||||
}
|
||||
|
||||
void LateUpdate()
|
||||
{
|
||||
Resolve();
|
||||
}
|
||||
|
||||
public void Resolve()
|
||||
{
|
||||
ITextureProducer textureProducer = _mediaPlayer != null ? _mediaPlayer.TextureProducer : null;
|
||||
if (textureProducer == null)
|
||||
return;
|
||||
|
||||
if (textureProducer.GetTexture())
|
||||
{
|
||||
// Check for a swap between OES and none-OES
|
||||
bool playerIsOES = _mediaPlayer.IsUsingAndroidOESPath();
|
||||
if (_isMaterialOES != playerIsOES)
|
||||
{
|
||||
_isMaterialOES = playerIsOES;
|
||||
_materialResolve = VideoRender.CreateResolveMaterial(playerIsOES);
|
||||
}
|
||||
|
||||
if (!_isMaterialSetup)
|
||||
{
|
||||
VideoRender.SetupMaterialForMedia(_materialResolve, _mediaPlayer, -1);
|
||||
_isMaterialSetup = true;
|
||||
_isMaterialDirty = true;
|
||||
}
|
||||
|
||||
if (_isMaterialDirty)
|
||||
{
|
||||
VideoRender.SetupResolveMaterial(_materialResolve, _options);
|
||||
_isMaterialDirty = false;
|
||||
}
|
||||
|
||||
int textureFrameCount = textureProducer.GetTextureFrameCount();
|
||||
if (textureFrameCount != _textureFrameCount)
|
||||
{
|
||||
_internalTexture = VideoRender.ResolveVideoToRenderTexture(_materialResolve, _internalTexture, textureProducer, _resolveFlags);
|
||||
_textureFrameCount = textureFrameCount;
|
||||
|
||||
if (_internalTexture && _externalTexture)
|
||||
{
|
||||
float srcAspectRatio = (float)_internalTexture.width / (float)_internalTexture.height;
|
||||
float dstAspectRatio = (float)_externalTexture.width / (float)_externalTexture.height;
|
||||
|
||||
Vector2 offset = Vector2.zero;
|
||||
Vector2 scale = new Vector2(1.0f, 1.0f);
|
||||
|
||||
// No point in handling the aspect ratio if the textures dimension's are the same
|
||||
if (srcAspectRatio != dstAspectRatio)
|
||||
{
|
||||
switch (_options.aspectRatio)
|
||||
{
|
||||
case VideoResolveOptions.AspectRatio.NoScaling:
|
||||
scale.x = (float)_externalTexture.width / (float)_internalTexture.width;
|
||||
scale.y = (float)_externalTexture.height / (float)_internalTexture.height;
|
||||
offset.x = (1.0f - scale.x) * 0.5f;
|
||||
offset.y = (1.0f - scale.y) * 0.5f;
|
||||
break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.FitVertically:
|
||||
scale.x = (float)_internalTexture.height / (float)_internalTexture.width * dstAspectRatio;
|
||||
offset.x = (1.0f - scale.x) * 0.5f;
|
||||
break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.FitHorizontally:
|
||||
scale.y = (float)_externalTexture.height / (float)_externalTexture.width * srcAspectRatio;
|
||||
offset.y = (1.0f - scale.y) * 0.5f;
|
||||
break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.FitInside:
|
||||
{
|
||||
if (srcAspectRatio > dstAspectRatio)
|
||||
goto case VideoResolveOptions.AspectRatio.FitHorizontally;
|
||||
else if (srcAspectRatio < dstAspectRatio)
|
||||
goto case VideoResolveOptions.AspectRatio.FitVertically;
|
||||
} break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.FitOutside:
|
||||
{
|
||||
if (srcAspectRatio > dstAspectRatio)
|
||||
goto case VideoResolveOptions.AspectRatio.FitVertically;
|
||||
else if (srcAspectRatio < dstAspectRatio)
|
||||
goto case VideoResolveOptions.AspectRatio.FitHorizontally;
|
||||
} break;
|
||||
|
||||
case VideoResolveOptions.AspectRatio.Stretch:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: This blit can be removed once we can ResolveVideoToRenderTexture is made not to recreate textures
|
||||
// NOTE: This blit probably doesn't do correct linear/srgb conversion if the colorspace settings differ, may have to use GL.sRGBWrite
|
||||
// NOTE: Cannot use _MainTex as Graphics.Blit replaces the texture offset and scale when using a material
|
||||
_materialBlit.SetTexture(_srcTexId, _internalTexture);
|
||||
_materialBlit.SetTextureOffset(_srcTexId, offset);
|
||||
_materialBlit.SetTextureScale(_srcTexId, scale);
|
||||
Graphics.Blit(null, _externalTexture, _materialBlit, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void OnDisable()
|
||||
{
|
||||
if (_internalTexture)
|
||||
{
|
||||
RenderTexture.ReleaseTemporary(_internalTexture);
|
||||
_internalTexture = null;
|
||||
}
|
||||
}
|
||||
|
||||
void OnDestroy()
|
||||
{
|
||||
if (_materialResolve)
|
||||
{
|
||||
Destroy(_materialResolve);
|
||||
_materialResolve = null;
|
||||
}
|
||||
}
|
||||
#if false
|
||||
void OnGUI()
|
||||
{
|
||||
if (TargetTexture)
|
||||
{
|
||||
GUI.DrawTexture(new Rect(0f, 0f, Screen.width * 0.8f, Screen.height * 0.8f), TargetTexture, ScaleMode.ScaleToFit, true);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 448e5e4039505584c852da1a7cc5c361
|
||||
timeCreated: 1654790987
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,182 @@
|
||||
#if !UNITY_OPENHARMONY
|
||||
|
||||
#if UNITY_ANDROID
|
||||
#if USING_URP
|
||||
#define ANDROID_URP
|
||||
#endif
|
||||
#endif
|
||||
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.Rendering;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2022 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// This script is needed to send the camera position to the stereo shader so that
|
||||
/// it can determine which eye it is rendering. This is only needed for multi-pass
|
||||
/// rendering, as single pass has a built-in shader variable
|
||||
/// </summary>
|
||||
[AddComponentMenu("AVPro Video/Update Multi-Pass Stereo", 320)]
|
||||
[HelpURL("https://www.renderheads.com/products/avpro-video/")]
|
||||
public class UpdateMultiPassStereo : MonoBehaviour
|
||||
{
|
||||
[Header("Stereo camera")]
|
||||
[SerializeField] Camera _camera = null;
|
||||
|
||||
public Camera Camera
|
||||
{
|
||||
get { return _camera; }
|
||||
set { _camera = value; }
|
||||
}
|
||||
|
||||
private static readonly LazyShaderProperty PropWorldCameraPosition = new LazyShaderProperty("_WorldCameraPosition");
|
||||
private static readonly LazyShaderProperty PropWorldCameraRight = new LazyShaderProperty("_WorldCameraRight");
|
||||
|
||||
// State
|
||||
|
||||
private Camera _foundCamera;
|
||||
|
||||
void Awake()
|
||||
{
|
||||
if (_camera == null)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo] No camera set for UpdateMultiPassStereo component. If you are rendering in multi-pass stereo then it is recommended to set this.");
|
||||
}
|
||||
}
|
||||
|
||||
void Start()
|
||||
{
|
||||
LogXRDeviceDetails();
|
||||
|
||||
#if ANDROID_URP
|
||||
if( GetComponent<Camera>() == null )
|
||||
{
|
||||
throw new MissingComponentException("[AVProVideo] When using URP the UpdateMultiPassStereo component must be on the Camera gameobject. This component is not required on all VR devices, but if it is then stereo eye rendering may not work correctly.");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
private void LogXRDeviceDetails()
|
||||
{
|
||||
#if UNITY_2019_1_OR_NEWER && !UNITY_TVOS
|
||||
string logOutput = "[AVProVideo] XR Device details: UnityEngine.XR.XRSettings.loadedDeviceName = " + UnityEngine.XR.XRSettings.loadedDeviceName + " | supportedDevices = ";
|
||||
|
||||
string[] aSupportedDevices = UnityEngine.XR.XRSettings.supportedDevices;
|
||||
int supportedDeviceCount = aSupportedDevices.Length;
|
||||
for (int i = 0; i < supportedDeviceCount; i++)
|
||||
{
|
||||
logOutput += aSupportedDevices[i];
|
||||
if( i < (supportedDeviceCount - 1 ))
|
||||
{
|
||||
logOutput += ", ";
|
||||
}
|
||||
}
|
||||
|
||||
List<UnityEngine.XR.InputDevice> inputDevices = new List<UnityEngine.XR.InputDevice>();
|
||||
UnityEngine.XR.InputDevices.GetDevices(inputDevices);
|
||||
int deviceCount = inputDevices.Count;
|
||||
if (deviceCount > 0)
|
||||
{
|
||||
logOutput += " | XR Devices = ";
|
||||
|
||||
for (int i = 0; i < deviceCount; i++)
|
||||
{
|
||||
logOutput += inputDevices[i].name;
|
||||
if( i < (deviceCount -1 ))
|
||||
{
|
||||
logOutput += ", ";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
UnityEngine.XR.InputDevice headDevice = UnityEngine.XR.InputDevices.GetDeviceAtXRNode(UnityEngine.XR.XRNode.Head);
|
||||
if( headDevice != null )
|
||||
{
|
||||
logOutput += " | headDevice name = " + headDevice.name + ", manufacturer = " + headDevice.manufacturer;
|
||||
}
|
||||
|
||||
Debug.Log(logOutput);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
#if ANDROID_URP
|
||||
void OnEnable()
|
||||
{
|
||||
RenderPipelineManager.beginCameraRendering += RenderPipelineManager_beginCameraRendering;
|
||||
}
|
||||
void OnDisable()
|
||||
{
|
||||
RenderPipelineManager.beginCameraRendering -= RenderPipelineManager_beginCameraRendering;
|
||||
}
|
||||
#endif
|
||||
|
||||
private static bool IsMultiPassVrEnabled()
|
||||
{
|
||||
#if UNITY_TVOS
|
||||
return false;
|
||||
#else
|
||||
#if UNITY_2017_2_OR_NEWER
|
||||
if (!UnityEngine.XR.XRSettings.enabled) return false;
|
||||
#endif
|
||||
#if UNITY_2018_3_OR_NEWER
|
||||
if (UnityEngine.XR.XRSettings.stereoRenderingMode != UnityEngine.XR.XRSettings.StereoRenderingMode.MultiPass) return false;
|
||||
#endif
|
||||
return true;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
// We do a LateUpdate() to allow for any changes in the camera position that may have happened in Update()
|
||||
#if ANDROID_URP
|
||||
// Android URP
|
||||
private void RenderPipelineManager_beginCameraRendering(ScriptableRenderContext context, Camera camera)
|
||||
#else
|
||||
// Normal render pipeline
|
||||
private void LateUpdate()
|
||||
#endif
|
||||
{
|
||||
if (!IsMultiPassVrEnabled())
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (_camera != null && _foundCamera != _camera)
|
||||
{
|
||||
_foundCamera = _camera;
|
||||
}
|
||||
if (_foundCamera == null)
|
||||
{
|
||||
_foundCamera = Camera.main;
|
||||
if (_foundCamera == null)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo] Cannot find main camera for UpdateMultiPassStereo, this can lead to eyes flickering");
|
||||
if (Camera.allCameras.Length > 0)
|
||||
{
|
||||
_foundCamera = Camera.allCameras[0];
|
||||
Debug.LogWarning("[AVProVideo] UpdateMultiPassStereo using camera " + _foundCamera.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_foundCamera != null)
|
||||
{
|
||||
#if ANDROID_URP
|
||||
Shader.EnableKeyword("USING_URP");
|
||||
#else
|
||||
Shader.DisableKeyword("USING_URP");
|
||||
#endif
|
||||
|
||||
Shader.SetGlobalVector(PropWorldCameraPosition.Id, _foundCamera.transform.position);
|
||||
Shader.SetGlobalVector(PropWorldCameraRight.Id, _foundCamera.transform.right);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 8b2366b5575fcba46a0f97038fb6c5fb
|
||||
timeCreated: 1611065944
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {fileID: 2800000, guid: bb83b41b53a59874692b83eab5873998, type: 3}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
5
Assets/AVProVideo/Runtime/Scripts/Internal.meta
Normal file
5
Assets/AVProVideo/Runtime/Scripts/Internal.meta
Normal file
@@ -0,0 +1,5 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 1bb8f28c4529a1343b4430d732bb5f2a
|
||||
folderAsset: yes
|
||||
DefaultImporter:
|
||||
userData:
|
||||
170
Assets/AVProVideo/Runtime/Scripts/Internal/ApplyToBase.cs
Normal file
170
Assets/AVProVideo/Runtime/Scripts/Internal/ApplyToBase.cs
Normal file
@@ -0,0 +1,170 @@
|
||||
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_TVOS
|
||||
#define UNITY_PLATFORM_SUPPORTS_YPCBCR
|
||||
#endif
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Base class to apply texture from MediaPlayer
|
||||
/// </summary>
|
||||
/// <summary>
/// Base class for components that apply the texture from a MediaPlayer to a
/// display target.  Subclasses implement Apply() to do the actual assignment.
/// This class owns the MediaPlayer reference, keeps an event listener
/// registered on it so the target refreshes when new media loads, and tracks
/// a dirty flag for the stereo/tint display options.
/// </summary>
public abstract class ApplyToBase : MonoBehaviour
{
	[Header("Media Source")]

	[SerializeField]
	protected MediaPlayer _media = null;

	// The MediaPlayer supplying the texture.  Assigning re-registers the
	// event listener via ChangeMediaPlayer().
	public MediaPlayer Player
	{
		get
		{
			return _media;
		}
		set
		{
			ChangeMediaPlayer(value);
		}
	}

	[Space(8f)]
	[Header("Display")]
	[SerializeField]
	bool _automaticStereoPacking = true;

	// When changed, only marks the state dirty — subclasses read the flag.
	// NOTE(review): the flag is not read in this class; presumably subclasses
	// use it to choose between the media's own packing and the override.
	public bool AutomaticStereoPacking
	{
		get
		{
			return _automaticStereoPacking;
		}
		set
		{
			if (_automaticStereoPacking != value)
			{
				_automaticStereoPacking = value;
				_isDirty = true;
			}
		}
	}

	[SerializeField]
	StereoPacking _overrideStereoPacking = StereoPacking.Monoscopic;

	// Stereo packing to use when automatic detection is disabled
	public StereoPacking OverrideStereoPacking
	{
		get
		{
			return _overrideStereoPacking;
		}
		set
		{
			if (_overrideStereoPacking != value)
			{
				_overrideStereoPacking = value;
				_isDirty = true;
			}
		}
	}

	[SerializeField]
	bool _stereoRedGreenTint = false;

	// Debug option — tint for stereo eye identification (read by subclasses)
	public bool StereoRedGreenTint
	{
		get
		{
			return _stereoRedGreenTint;
		}
		set
		{
			if (_stereoRedGreenTint != value)
			{
				_stereoRedGreenTint = value;
				_isDirty = true;
			}
		}
	}

	// Set when the player or a display option changes; subclasses are
	// expected to act on it in Apply()
	protected bool _isDirty = false;

	void Awake()
	{
		// Force-register the event listener for the serialised player
		// (force is needed because _media is already assigned)
		ChangeMediaPlayer(_media, force: true);
	}

	// Swaps the MediaPlayer reference, moving the event subscription from
	// the old player to the new one.  'force' re-subscribes even when the
	// reference is unchanged.
	private void ChangeMediaPlayer(MediaPlayer player, bool force = false)
	{
		if (_media != player || force)
		{
			if (_media != null)
			{
				_media.Events.RemoveListener(OnMediaPlayerEvent);
			}
			_media = player;
			if (_media != null)
			{
				_media.Events.AddListener(OnMediaPlayerEvent);
			}
			_isDirty = true;
		}
	}

	// Callback function to handle events
	private void OnMediaPlayerEvent(MediaPlayer mp, MediaPlayerEvent.EventType et, ErrorCode errorCode)
	{
		switch (et)
		{
			// Refresh the target whenever frames become available or the
			// media properties change
			case MediaPlayerEvent.EventType.FirstFrameReady:
			case MediaPlayerEvent.EventType.PropertiesChanged:
				ForceUpdate();
				break;
		}
	}

	// Marks the state dirty and, if the component is active, immediately
	// re-applies it to the target
	public void ForceUpdate()
	{
		_isDirty = true;
		if (this.isActiveAndEnabled)
		{
			Apply();
		}
	}

	private void Start()
	{
		SaveProperties();
		Apply();
	}

	protected virtual void OnEnable()
	{
		SaveProperties();
		ForceUpdate();
	}

	protected virtual void OnDisable()
	{
		// Return the target to its pre-apply state
		RestoreProperties();
	}

	private void OnDestroy()
	{
		// Passing null unsubscribes from the current player's events
		ChangeMediaPlayer(null);
	}

	// Subclasses capture the target's original state here so that
	// RestoreProperties() can undo the apply on disable
	protected virtual void SaveProperties()
	{

	}

	protected virtual void RestoreProperties()
	{

	}

	// Applies the player's current texture/state to the target
	public abstract void Apply();
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 33aa9dfec55e1f6438ee868d02dcabe2
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
233
Assets/AVProVideo/Runtime/Scripts/Internal/AudioOutputManager.cs
Normal file
233
Assets/AVProVideo/Runtime/Scripts/Internal/AudioOutputManager.cs
Normal file
@@ -0,0 +1,233 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using System;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// A singleton to handle multiple instances of the AudioOutput component
|
||||
/// </summary>
|
||||
/// <summary>
/// A singleton to handle multiple instances of the AudioOutput component.
/// PCM data for a given MediaPlayer is grabbed once per cycle and shared
/// between all AudioOutput components requesting from that player.
/// NOTE(review): _instances is written from the main thread
/// (Add/RemovePlayerInstance) and read/written from the audio thread
/// (RequestAudio) without locking — confirm this is safe for the host app.
/// </summary>
public class AudioOutputManager
{
	private static AudioOutputManager _instance = null;

	// Lazily-created singleton accessor
	public static AudioOutputManager Instance
	{
		get
		{
			if (_instance == null)
			{
				_instance = new AudioOutputManager();
			}
			return _instance;
		}
	}

	// Per-MediaPlayer bookkeeping
	protected class PlayerInstance
	{
		public HashSet<AudioOutput> outputs;	// Outputs serviced in the current grab cycle
		public float[] pcmData;					// Most recently grabbed interleaved PCM samples
		public bool isPcmDataReady;				// True when pcmData holds valid samples
	}

	// Keyed by the MediaPlayer's instance ID
	private Dictionary<int, PlayerInstance> _instances;

	private AudioOutputManager()
	{
		_instances = new Dictionary<int, PlayerInstance>();
	}

	// Creates an empty per-player record; the PCM buffer is allocated on
	// first request once the required size is known.  (Shared by
	// AddPlayerInstance and RequestAudio to avoid duplicated construction.)
	private static PlayerInstance CreatePlayerInstance()
	{
		return new PlayerInstance()
		{
			outputs = new HashSet<AudioOutput>(),
			pcmData = null
		};
	}

	public void AddPlayerInstance(int mediaPlayerInstanceID)
	{
		_instances[mediaPlayerInstanceID] = CreatePlayerInstance();
	}

	public void RemovePlayerInstance(int mediaPlayerInstanceID)
	{
		// Dictionary.Remove is a no-op when the key is absent, so no
		// ContainsKey pre-check is needed (avoids a double lookup)
		_instances.Remove(mediaPlayerInstanceID);
	}

	// [MOZ] mediaPlayerInstanceID is the value returned by mediaPlayer.GetInstanceID() which we cannot call as this method is not called on the main thread.
	/// <summary>
	/// Fills (or, for positional audio, modulates) audioData with samples
	/// from the given MediaPlayer.  audioData is interleaved with
	/// audioChannelCount channels; channelMask selects source channels.
	/// When supportPositionalAudio is true any region that cannot be filled
	/// is zeroed to avoid audible pops.
	/// </summary>
	public void RequestAudio(AudioOutput outputComponent, MediaPlayer mediaPlayer, int mediaPlayerInstanceID, float[] audioData, int audioChannelCount, int channelMask, AudioOutput.AudioOutputMode audioOutputMode, bool supportPositionalAudio)
	{
		if (mediaPlayer == null || mediaPlayer.Control == null)
		{
			if (supportPositionalAudio)
			{
				ZeroAudio(audioData, 0);
			}
			return;
		}

		int channels = mediaPlayer.Control.GetAudioChannelCount();
		if (channels <= 0)
		{
			if (supportPositionalAudio)
			{
				ZeroAudio(audioData, 0);
			}
			return;
		}

		// total samples requested should be multiple of channels
		Debug.Assert(audioData.Length % audioChannelCount == 0);

		// Find or create an instance
		PlayerInstance instance = null;
		if (!_instances.TryGetValue(mediaPlayerInstanceID, out instance))
		{
			instance = _instances[mediaPlayerInstanceID] = CreatePlayerInstance();
		}

		// Grab fresh PCM data if it hasn't been requested yet for the
		// current cycle (the same output requesting again starts a new cycle)
		if (instance.outputs.Count == 0 || instance.outputs.Contains(outputComponent) || instance.pcmData == null)
		{
			instance.outputs.Clear();

			// Size the shared buffer for the source channel count
			int actualDataRequired = (audioData.Length * channels) / audioChannelCount;
			if (instance.pcmData == null || actualDataRequired != instance.pcmData.Length)
			{
				instance.pcmData = new float[actualDataRequired];
			}

			instance.isPcmDataReady = GrabAudio(mediaPlayer, instance.pcmData, channels);

			instance.outputs.Add(outputComponent);
		}

		if (instance.isPcmDataReady)
		{
			// calculate how many samples and what channels are needed and then copy over the data
			int samples = Math.Min(audioData.Length / audioChannelCount, instance.pcmData.Length / channels);
			int storedPos = 0;
			int requestedPos = 0;

			// multiple mode, copies over audio from desired channels into the same channels on the audiosource
			if (audioOutputMode == AudioOutput.AudioOutputMode.MultipleChannels)
			{
				int lesserChannels = Math.Min(channels, audioChannelCount);

				if (!supportPositionalAudio)
				{
					// Overwrite the channels selected by the mask
					for (int i = 0; i < samples; ++i)
					{
						for (int j = 0; j < lesserChannels; ++j)
						{
							if ((1 << j & channelMask) > 0)
							{
								audioData[requestedPos + j] = instance.pcmData[storedPos + j];
							}
						}

						storedPos += channels;
						requestedPos += audioChannelCount;
					}
				}
				else
				{
					// Positional: multiply into the existing samples
					for (int i = 0; i < samples; ++i)
					{
						for (int j = 0; j < lesserChannels; ++j)
						{
							if ((1 << j & channelMask) > 0)
							{
								audioData[requestedPos + j] *= instance.pcmData[storedPos + j];
							}
						}

						storedPos += channels;
						requestedPos += audioChannelCount;
					}
				}
			}
			//Mono mode, copies over single channel to all output channels
			else if (audioOutputMode == AudioOutput.AudioOutputMode.OneToAllChannels)
			{
				// The lowest set bit of the mask selects the source channel
				int desiredChannel = 0;

				for (int i = 0; i < 8; ++i)
				{
					if ((channelMask & (1 << i)) > 0)
					{
						desiredChannel = i;
						break;
					}
				}

				if (desiredChannel < channels)
				{
					if (!supportPositionalAudio)
					{
						for (int i = 0; i < samples; ++i)
						{
							for (int j = 0; j < audioChannelCount; ++j)
							{
								audioData[requestedPos + j] = instance.pcmData[storedPos + desiredChannel];
							}

							storedPos += channels;
							requestedPos += audioChannelCount;
						}
					}
					else
					{
						for (int i = 0; i < samples; ++i)
						{
							for (int j = 0; j < audioChannelCount; ++j)
							{
								audioData[requestedPos + j] *= instance.pcmData[storedPos + desiredChannel];
							}

							storedPos += channels;
							requestedPos += audioChannelCount;
						}
					}
				}
			}

			// If there is left over audio
			if (supportPositionalAudio && requestedPos != audioData.Length)
			{
				// Zero the remaining audio data otherwise there are pops
				ZeroAudio(audioData, requestedPos);
			}
		}
		else
		{
			if (supportPositionalAudio)
			{
				// Zero the remaining audio data otherwise there are pops
				ZeroAudio(audioData, 0);
			}
		}
	}

	// Fills audioData with zeroes from startPosition to the end
	private void ZeroAudio(float[] audioData, int startPosition)
	{
		for (int i = startPosition; i < audioData.Length; i++)
		{
			audioData[i] = 0f;
		}
	}

	// Returns true when the player produced any samples
	private bool GrabAudio(MediaPlayer player, float[] audioData, int channelCount)
	{
		return (0 != player.Control.GrabAudio(audioData, audioData.Length, channelCount));
	}
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 714026a371bd2d64c86edb3dab5607d9
|
||||
timeCreated: 1495699104
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
636
Assets/AVProVideo/Runtime/Scripts/Internal/BaseMediaPlayer.cs
Normal file
636
Assets/AVProVideo/Runtime/Scripts/Internal/BaseMediaPlayer.cs
Normal file
@@ -0,0 +1,636 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2025 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#define UNITY_PLATFORM_SUPPORTS_LINEAR
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Base class for all platform specific MediaPlayers
|
||||
/// </summary>
|
||||
public abstract partial class BaseMediaPlayer
|
||||
: IMediaPlayer
|
||||
, IMediaControl
|
||||
, IMediaInfo
|
||||
, IMediaCache
|
||||
, ITextureProducer
|
||||
, IMediaSubtitles
|
||||
, IVideoTracks
|
||||
, IAudioTracks
|
||||
, ITextTracks
|
||||
, ITimedMetadata
|
||||
, IVariants
|
||||
, System.IDisposable
|
||||
{
|
||||
public BaseMediaPlayer()
|
||||
{
|
||||
InitTracks();
|
||||
}
|
||||
|
||||
public abstract string GetVersion();
|
||||
public abstract string GetExpectedVersion();
|
||||
|
||||
/// <inheritdoc/>
|
||||
public abstract bool OpenMedia(string path, long offset, string customHttpHeaders, MediaHints mediaHints, int forceFileFormat = 0, bool startWithHighestBitrate = false);
|
||||
|
||||
#if NETFX_CORE
|
||||
/// <inheritdoc/>
|
||||
public virtual bool OpenMedia(Windows.Storage.Streams.IRandomAccessStream ras, string path, long offset, string customHttpHeaders) { return false; }
|
||||
#endif
|
||||
|
||||
/// <inheritdoc/>
|
||||
public virtual bool OpenMediaFromBuffer(byte[] buffer) { return false; }
|
||||
/// <inheritdoc/>
|
||||
public virtual bool StartOpenMediaFromBuffer(ulong length) { return false; }
|
||||
/// <inheritdoc/>
|
||||
public virtual bool AddChunkToMediaBuffer(byte[] chunk, ulong offset, ulong length) { return false; }
|
||||
/// <inheritdoc/>
|
||||
public virtual bool EndOpenMediaFromBuffer() { return false; }
|
||||
|
||||
/// <inheritdoc/>
|
||||
public virtual void CloseMedia()
{
	// Reset the display-rate statistics
#if UNITY_EDITOR
	_displayRateLastRealTime = 0f;
#endif
	_displayRateTimer = 0f;
	_displayRateLastFrameCount = 0;
	_displayRate = 0f;

	// Reset stall detection and clear any pending (unread) error
	_stallDetectionTimer = 0f;
	_stallDetectionFrame = 0;
	_lastError = ErrorCode.None;

	// Clear per-media track/cue state and revert to default hints
	_textTracks.Clear();
	_audioTracks.Clear();
	_videoTracks.Clear();
	_currentTextCue = null;
	_mediaHints = new MediaHints();
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public abstract void SetLooping(bool looping);
|
||||
/// <inheritdoc/>
|
||||
public abstract bool IsLooping();
|
||||
|
||||
/// <inheritdoc/>
|
||||
public abstract bool HasMetaData();
|
||||
/// <inheritdoc/>
|
||||
public abstract bool CanPlay();
|
||||
/// <inheritdoc/>
|
||||
public abstract void Play();
|
||||
/// <inheritdoc/>
|
||||
public abstract void Pause();
|
||||
/// <inheritdoc/>
|
||||
public abstract void Stop();
|
||||
/// <inheritdoc/>
|
||||
public virtual void Rewind() { SeekFast(0.0); }
|
||||
|
||||
/// <inheritdoc/>
|
||||
public abstract void Seek(double time);
|
||||
/// <inheritdoc/>
|
||||
public abstract void SeekFast(double time);
|
||||
/// <inheritdoc/>
|
||||
public virtual void SeekWithTolerance(double time, double timeDeltaBefore, double timeDeltaAfter) { Seek(time); }
|
||||
/// <inheritdoc/>
|
||||
public abstract double GetCurrentTime();
|
||||
/// <inheritdoc/>
|
||||
public virtual DateTime GetProgramDateTime() { return DateTime.MinValue; }
|
||||
/// <inheritdoc/>
|
||||
public abstract float GetPlaybackRate();
|
||||
/// <inheritdoc/>
|
||||
public abstract void SetPlaybackRate(float rate);
|
||||
|
||||
// Basic Properties
|
||||
/// <inheritdoc/>
|
||||
public abstract double GetDuration();
|
||||
/// <inheritdoc/>
|
||||
public abstract int GetVideoWidth();
|
||||
/// <inheritdoc/>
|
||||
public abstract int GetVideoHeight();
|
||||
/// <inheritdoc/>
|
||||
public abstract float GetVideoFrameRate();
|
||||
/// <inheritdoc/>
|
||||
public virtual float GetVideoDisplayRate() { return _displayRate; }
|
||||
/// <inheritdoc/>
|
||||
public abstract bool HasAudio();
|
||||
/// <inheritdoc/>
|
||||
public abstract bool HasVideo();
|
||||
/// <inheritdoc/>
|
||||
public bool IsVideoStereo() { return GetTextureStereoPacking() != StereoPacking.Monoscopic; }
|
||||
|
||||
// Basic State
|
||||
/// <inheritdoc/>
|
||||
public abstract bool IsSeeking();
|
||||
/// <inheritdoc/>
|
||||
public abstract bool IsPlaying();
|
||||
/// <inheritdoc/>
|
||||
public abstract bool IsPaused();
|
||||
/// <inheritdoc/>
|
||||
public abstract bool IsFinished();
|
||||
/// <inheritdoc/>
|
||||
public abstract bool IsBuffering();
|
||||
/// <inheritdoc/>
|
||||
public virtual bool WaitForNextFrame(Camera dummyCamera, int previousFrameCount) { return false; }
|
||||
|
||||
// Textures
|
||||
/// <inheritdoc/>
|
||||
public virtual int GetTextureCount() { return 1; }
|
||||
/// <inheritdoc/>
|
||||
public abstract Texture GetTexture(int index = 0);
|
||||
/// <inheritdoc/>
|
||||
public abstract int GetTextureFrameCount();
|
||||
/// <inheritdoc/>
|
||||
public virtual bool SupportsTextureFrameCount() { return true; }
|
||||
/// <inheritdoc/>
|
||||
public virtual long GetTextureTimeStamp() { return long.MinValue; }
|
||||
/// <inheritdoc/>
|
||||
public abstract bool RequiresVerticalFlip();
|
||||
/// <inheritdoc/>
|
||||
public virtual float GetTexturePixelAspectRatio() { return 1f; }
|
||||
/// <inheritdoc/>
|
||||
public virtual Matrix4x4 GetYpCbCrTransform() { return Matrix4x4.identity; }
|
||||
/// <inheritdoc/>
|
||||
public virtual float[] GetAffineTransform() { return new float[] { 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f }; }
|
||||
/// <inheritdoc/>
|
||||
public virtual float[] GetTextureTransform() { return GetAffineTransform(); }
|
||||
/// <inheritdoc/>
|
||||
public virtual Matrix4x4 GetTextureMatrix()
{
	// Promote the 2x3 affine transform [a b c d tx ty] to a 4x4 matrix;
	// identity when no valid transform is available.
	float[] affine = GetAffineTransform();
	if (affine == null || affine.Length != 6)
	{
		return Matrix4x4.identity;
	}

	return new Matrix4x4(
		new Vector4(affine[0], affine[1], 0f, 0f),
		new Vector4(affine[2], affine[3], 0f, 0f),
		new Vector4(0f, 0f, 1f, 0f),
		new Vector4(affine[4], affine[5], 0f, 1f));
}
|
||||
/// <inheritdoc/>
|
||||
public virtual RenderTextureFormat GetCompatibleRenderTextureFormat(GetCompatibleRenderTextureFormatOptions options, int plane)
|
||||
{
|
||||
// Just return the default
|
||||
return RenderTextureFormat.Default;
|
||||
}
|
||||
|
||||
// Resolved stereo packing: the platform player's answer, or — when that is
// Unknown — the packing suggested by the media hints.
public StereoPacking GetTextureStereoPacking()
{
	StereoPacking packing = InternalGetTextureStereoPacking();
	return (packing == StereoPacking.Unknown) ? _mediaHints.stereoPacking : packing;
}
|
||||
internal abstract StereoPacking InternalGetTextureStereoPacking();
|
||||
|
||||
// Transparency mode of the current media; the base implementation simply
// reports the hint supplied when the media was opened.
public virtual TransparencyMode GetTextureTransparency()
{
	return _mediaHints.transparency;
}
|
||||
|
||||
// Alpha packing only applies when the media is transparent; otherwise None.
public AlphaPacking GetTextureAlphaPacking()
{
	bool isTransparent = (GetTextureTransparency() == TransparencyMode.Transparent);
	return isTransparent ? _mediaHints.alphaPacking : AlphaPacking.None;
}
|
||||
|
||||
// Audio General
|
||||
/// <inheritdoc/>
|
||||
public abstract void MuteAudio(bool bMuted);
|
||||
/// <inheritdoc/>
|
||||
public abstract bool IsMuted();
|
||||
/// <inheritdoc/>
|
||||
public abstract void SetVolume(float volume);
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetBalance(float balance) { }
|
||||
/// <inheritdoc/>
|
||||
public abstract float GetVolume();
|
||||
/// <inheritdoc/>
|
||||
public virtual float GetBalance() { return 0f; }
|
||||
|
||||
// Audio Grabbing
|
||||
/// <inheritdoc/>
|
||||
public virtual int GetAudioChannelCount() { return -1; }
|
||||
/// <inheritdoc/>
|
||||
public virtual AudioChannelMaskFlags GetAudioChannelMask() { return 0; }
|
||||
/// <inheritdoc/>
|
||||
public virtual int GrabAudio(float[] audioData, int audioDataFloatCount, int channelCount) { return 0; }
|
||||
/// <inheritdoc/>
|
||||
public virtual int GetAudioBufferedSampleCount() { return 0; }
|
||||
|
||||
/// <inheritdoc/>
|
||||
public virtual void AudioConfigurationChanged(bool deviceChanged) { }
|
||||
|
||||
// 360 Audio
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetAudioHeadRotation(Quaternion q) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void ResetAudioHeadRotation() { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetAudioChannelMode(Audio360ChannelMode channelMode) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetAudioFocusEnabled(bool enabled) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetAudioFocusProperties(float offFocusLevel, float widthDegrees) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetAudioFocusRotation(Quaternion q) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void ResetAudioFocus() { }
|
||||
|
||||
// Streaming
|
||||
/// <inheritdoc/>
|
||||
public virtual long GetEstimatedTotalBandwidthUsed() { return -1; }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetPlayWithoutBuffering(bool playWithoutBuffering) { }
|
||||
|
||||
// Caching
|
||||
/// <inheritdoc/>
|
||||
public virtual bool IsMediaCachingSupported() { return false; }
|
||||
/// <inheritdoc/>
|
||||
public virtual void AddMediaToCache(string url, string headers, MediaCachingOptions options) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void CancelDownloadOfMediaToCache(string url) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void PauseDownloadOfMediaToCache(string url) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void ResumeDownloadOfMediaToCache(string url) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void RemoveMediaFromCache(string url) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual CachedMediaStatus GetCachedMediaStatus(string url, ref float progress) { return CachedMediaStatus.NotCached; }
|
||||
// /// <inheritdoc/>
|
||||
// public virtual bool IsMediaCached() { return false; }
|
||||
|
||||
// External playback
|
||||
/// <inheritdoc/>
|
||||
public virtual bool IsExternalPlaybackSupported() { return false; }
|
||||
/// <inheritdoc/>
|
||||
public virtual bool IsExternalPlaybackActive() { return false; }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetAllowsExternalPlayback(bool enable) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetExternalPlaybackVideoGravity(ExternalPlaybackVideoGravity gravity) { }
|
||||
|
||||
// Authentication
|
||||
//public virtual void SetKeyServerURL(string url) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetKeyServerAuthToken(string token) { }
|
||||
/// <inheritdoc/>
|
||||
public virtual void SetOverrideDecryptionKey(byte[] key) { }
|
||||
|
||||
// General
|
||||
/// <inheritdoc/>
|
||||
public abstract void Update();
|
||||
/// <inheritdoc/>
|
||||
public /*abstract*/virtual void BeginRender() { }
|
||||
/// <inheritdoc/>
|
||||
public abstract void Render();
|
||||
/// <inheritdoc/>
|
||||
public abstract void Dispose();
|
||||
|
||||
// Internal method
|
||||
public virtual bool GetDecoderPerformance(ref int activeDecodeThreadCount, ref int decodedFrameCount, ref int droppedFrameCount) { return false; }
|
||||
|
||||
public virtual void EndUpdate() { }
|
||||
|
||||
public virtual IntPtr GetNativePlayerHandle() { return IntPtr.Zero; }
|
||||
|
||||
public ErrorCode GetLastError()
{
	// Destructive read: returns the last error and resets it to None so
	// each error is only ever reported once.
	ErrorCode errorCode = _lastError;
	_lastError = ErrorCode.None;
	return errorCode;
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public virtual long GetLastExtendedErrorCode()
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
public string GetPlayerDescription()
|
||||
{
|
||||
return _playerDescription;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public virtual bool PlayerSupportsLinearColorSpace()
|
||||
{
|
||||
#if UNITY_PLATFORM_SUPPORTS_LINEAR
|
||||
return true;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
protected string _playerDescription = string.Empty;
|
||||
protected ErrorCode _lastError = ErrorCode.None;
|
||||
protected FilterMode _defaultTextureFilterMode = FilterMode.Bilinear;
|
||||
protected TextureWrapMode _defaultTextureWrapMode = TextureWrapMode.Clamp;
|
||||
protected int _defaultTextureAnisoLevel = 1;
|
||||
protected MediaHints _mediaHints;
|
||||
protected TimeRanges _seekableTimes = new TimeRanges();
|
||||
protected TimeRanges _bufferedTimes = new TimeRanges();
|
||||
|
||||
public TimeRanges GetSeekableTimes() { return _seekableTimes; }
|
||||
public TimeRanges GetBufferedTimes() { return _bufferedTimes; }
|
||||
|
||||
// Copies the currently configured default texture sampling properties into
// the out parameters.
public void GetTextureProperties(out FilterMode filterMode, out TextureWrapMode wrapMode, out int anisoLevel)
{
	filterMode = _defaultTextureFilterMode;
	wrapMode = _defaultTextureWrapMode;
	anisoLevel = _defaultTextureAnisoLevel;
}
|
||||
|
||||
// Stores the texture sampling properties and immediately applies them to
// every texture currently exposed by the player.
// NOTE(review): the anisoLevel parameter default is 0 while the backing
// field _defaultTextureAnisoLevel defaults to 1 — confirm intentional.
public void SetTextureProperties(FilterMode filterMode = FilterMode.Bilinear, TextureWrapMode wrapMode = TextureWrapMode.Clamp, int anisoLevel = 0)
{
	_defaultTextureFilterMode = filterMode;
	_defaultTextureWrapMode = wrapMode;
	_defaultTextureAnisoLevel = anisoLevel;
	for (int i = 0; i < GetTextureCount(); ++i)
	{
		ApplyTextureProperties(GetTexture(i));
	}
}
|
||||
|
||||
// Pushes the stored default sampling settings onto a single texture.
// Null textures (eg planes not yet created) are ignored.
protected virtual void ApplyTextureProperties(Texture texture)
{
	if (texture == null) return;

	texture.filterMode = _defaultTextureFilterMode;
	texture.wrapMode = _defaultTextureWrapMode;
	texture.anisoLevel = _defaultTextureAnisoLevel;
}
|
||||
|
||||
#region Video Display Rate
|
||||
#if UNITY_EDITOR
|
||||
private float _displayRateLastRealTime = 0f;
|
||||
#endif
|
||||
private float _displayRateTimer;
|
||||
private int _displayRateLastFrameCount;
|
||||
private float _displayRate = 1f;
|
||||
|
||||
// Samples GetTextureFrameCount() roughly every IntervalSeconds and derives
// the achieved display rate (frames displayed per second) from the delta.
protected void UpdateDisplayFrameRate()
{
	const float IntervalSeconds = 0.5f;
	if (_displayRateTimer >= IntervalSeconds)
	{
		int frameCount = GetTextureFrameCount();
		int frameDelta = (frameCount - _displayRateLastFrameCount);
		_displayRate = (float)frameDelta / _displayRateTimer;
		// Keep the remainder so the cadence stays stable; if we've fallen
		// more than two intervals behind, reset the timer to zero
		_displayRateTimer -= IntervalSeconds;
		if (_displayRateTimer >= IntervalSeconds) _displayRateTimer -= IntervalSeconds;
		if (_displayRateTimer >= IntervalSeconds) _displayRateTimer = 0f;
		_displayRateLastFrameCount = frameCount;
	}

	float deltaTime = Time.deltaTime;
#if UNITY_EDITOR
	if (!Application.isPlaying)
	{
		// When not playing Time.deltaTime isn't valid so we have to derive it
		deltaTime = (Time.realtimeSinceStartup - _displayRateLastRealTime);
		_displayRateLastRealTime = Time.realtimeSinceStartup;
	}
#endif
	_displayRateTimer += deltaTime;
}
|
||||
#endregion // Video Display Rate
|
||||
|
||||
#region Stall Detection
|
||||
// Returns true when the playback state implies a new video frame should be
// arriving (used by the stall detector below).  False when there is no
// video, playback is paused/finished/zero-rate, or the media is a single
// frame (so no further frames can ever arrive).
protected bool IsExpectingNewVideoFrame()
{
	if (HasVideo())
	{
		// If we're playing then we expect a new frame
		if (!IsFinished() && (!IsPaused() && IsPlaying() && GetPlaybackRate() != 0.0f))
		{
			// Check that the video is not a single frame and therefore there is no other frame to display
			bool isSingleFrame = (GetTextureFrameCount() > 0 && GetDurationFrames() == 1);
			if (!isSingleFrame)
			{
				// NOTE: if a new frame isn't available then we could either be seeking or stalled
				return true;
			}
		}
	}
	return false;
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
// Heuristic stall detector for platforms without native support: while a
// new frame is expected, measure how long the displayed frame count has
// stayed unchanged and report a stall once that exceeds a rate- and
// FPS-aware threshold.
public virtual bool IsPlaybackStalled()
{
	const float StallDetectionDuration = 0.5f;

	// Manually detect stalled video if the platform doesn't have native support to detect it
	if (SupportsTextureFrameCount() && IsExpectingNewVideoFrame())
	{
		// Detect a new video frame
		int frameCount = GetTextureFrameCount();
		if (frameCount != _stallDetectionFrame)
		{
			// Frame advanced — restart the timer
			_stallDetectionTimer = 0f;
			_stallDetectionFrame = frameCount;
		}
		else
		{
			// Update the detection timer, but never more than once a Unity frame
			// (this method may be called multiple times per frame)
			if (_stallDetectionGuard != Time.frameCount)
			{
				_stallDetectionTimer += Time.deltaTime;
			}
		}
		_stallDetectionGuard = Time.frameCount;

		float thresholdDuration = StallDetectionDuration;

		// Scale by the playback rate, but should be at least StallDetectionDuration
		thresholdDuration = Mathf.Max(thresholdDuration / Mathf.Abs(GetPlaybackRate()), StallDetectionDuration);

		// If a valid FPS is available then make sure the thresholdDuration
		// is at least double that. This is mainly for very low FPS
		// content (eg 1 or 2 FPS)
		float fps = GetVideoFrameRate();
		if (fps > 0f && !float.IsNaN(fps))
		{
			thresholdDuration = Mathf.Max(thresholdDuration, 2f / fps);
		}

		return (_stallDetectionTimer > thresholdDuration);
	}
	else
	{
		// No frame is expected, so we can't be stalled
		_stallDetectionTimer = 0f;
	}
	return false;
}
|
||||
|
||||
private float _stallDetectionTimer;
|
||||
private int _stallDetectionFrame;
|
||||
private int _stallDetectionGuard;
|
||||
#endregion // Stall Detection
|
||||
|
||||
protected List<Subtitle> _subtitles;
|
||||
protected Subtitle _currentSubtitle;
|
||||
|
||||
/// <inheritdoc/>
|
||||
// Loads subtitles from SRT-formatted text, or disables subtitles when the
// string is null or empty.  Returns true when subtitles were parsed.
public bool LoadSubtitlesSRT(string data)
{
	_currentSubtitle = null;
	_subtitles = string.IsNullOrEmpty(data) ? null : SubtitleUtils.ParseSubtitlesSRT(data);
	return (_subtitles != null);
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
// Advances the current subtitle to match the playback time.  Uses the
// previous subtitle's index to narrow the forward search when time has
// moved past it; otherwise scans from the start.
public virtual void UpdateSubtitles()
{
	if (_subtitles != null)
	{
		double time = GetCurrentTime();

		// TODO: implement a more efficient subtitle index searcher
		int searchIndex = 0;
		if (_currentSubtitle != null)
		{
			if (!_currentSubtitle.IsTime(time))
			{
				// Moving forward: start the search just after the old cue.
				// (A backwards seek falls back to scanning from index 0.)
				if (time > _currentSubtitle.timeEnd)
				{
					searchIndex = _currentSubtitle.index + 1;
				}
				_currentSubtitle = null;
			}
		}

		if (_currentSubtitle == null)
		{
			// Linear scan for the first cue covering the current time
			for (int i = searchIndex; i < _subtitles.Count; i++)
			{
				if (_subtitles[i].IsTime(time))
				{
					_currentSubtitle = _subtitles[i];
					break;
				}
			}
		}
	}
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
// Index of the active subtitle cue, or -1 when none is showing.
public virtual int GetSubtitleIndex()
{
	return (_currentSubtitle != null) ? _currentSubtitle.index : -1;
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
// Text of the active subtitle.  External SRT subtitles take precedence
// over embedded text cues; empty string when neither is active.
public virtual string GetSubtitleText()
{
	if (_currentSubtitle != null)
	{
		return _currentSubtitle.text;
	}
	if (_currentTextCue != null)
	{
		return _currentTextCue.Text;
	}
	return string.Empty;
}
|
||||
|
||||
public virtual void OnEnable()
|
||||
{
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
/// <summary>
/// Returns the current playback position as a zero-based frame number,
/// clamped to [0, GetMaxFrameNumber()]. Returns 0 when no frame rate is available.
/// </summary>
public int GetCurrentTimeFrames(float overrideFrameRate = 0f)
{
	float frameRate = (overrideFrameRate > 0f) ? overrideFrameRate : GetVideoFrameRate();
	if (frameRate <= 0f)
	{
		return 0;
	}
	int frame = Helper.ConvertTimeSecondsToFrame(GetCurrentTime(), frameRate);
	// Clamp so a time at/after the very end never reports a frame past the last one
	return Mathf.Min(frame, GetMaxFrameNumber(overrideFrameRate));
}
|
||||
|
||||
/// <inheritdoc/>
/// <summary>
/// Returns the media duration expressed as a frame count, or 0 when no frame rate is available.
/// </summary>
public int GetDurationFrames(float overrideFrameRate = 0f)
{
	float frameRate = (overrideFrameRate > 0f) ? overrideFrameRate : GetVideoFrameRate();
	return (frameRate > 0f) ? Helper.ConvertTimeSecondsToFrame(GetDuration(), frameRate) : 0;
}
|
||||
|
||||
/// <inheritdoc/>
/// <summary>
/// Returns the highest addressable zero-based frame index (duration in frames minus one), never negative.
/// </summary>
public int GetMaxFrameNumber(float overrideFrameRate = 0f)
{
	return Mathf.Max(0, GetDurationFrames(overrideFrameRate) - 1);
}
|
||||
|
||||
/// <inheritdoc/>
/// <summary>
/// Seeks forwards/backwards by a number of frames relative to the current frame.
/// Does nothing when no frame rate is available.
/// </summary>
public void SeekToFrameRelative(int frameOffset, float overrideFrameRate = 0f)
{
	float frameRate = (overrideFrameRate > 0f) ? overrideFrameRate : GetVideoFrameRate();
	if (frameRate <= 0f)
	{
		return;
	}
	int targetFrame = Helper.ConvertTimeSecondsToFrame(GetCurrentTime(), frameRate) + frameOffset;
	targetFrame = Mathf.Clamp(targetFrame, 0, GetMaxFrameNumber(frameRate));
	Seek(Helper.ConvertFrameToTimeSeconds(targetFrame, frameRate));
}
|
||||
|
||||
/// <inheritdoc/>
/// <summary>
/// Seeks to an absolute frame number, clamped to [0, GetMaxFrameNumber()].
/// Does nothing when no frame rate is available.
/// </summary>
public void SeekToFrame(int frame, float overrideFrameRate = 0f)
{
	float frameRate = (overrideFrameRate > 0f) ? overrideFrameRate : GetVideoFrameRate();
	if (frameRate <= 0f)
	{
		return;
	}
	int clampedFrame = Mathf.Clamp(frame, 0, GetMaxFrameNumber(frameRate));
	Seek(Helper.ConvertFrameToTimeSeconds(clampedFrame, frameRate));
}
|
||||
|
||||
// Playback quality statistics (eg dropped/stalled frame counters) accumulated during playback
protected PlaybackQualityStats _playbackQualityStats = new PlaybackQualityStats();

// Returns the quality statistics object for the current playback session.
// NOTE(review): returns the live internal instance, not a copy — callers should treat it as read-only.
public PlaybackQualityStats GetPlaybackQualityStats()
{
	return _playbackQualityStats;
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 4f59504ca098e7d41b036917f4764ee0
|
||||
timeCreated: 1447782861
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
111
Assets/AVProVideo/Runtime/Scripts/Internal/Events.cs
Normal file
111
Assets/AVProVideo/Runtime/Scripts/Internal/Events.cs
Normal file
@@ -0,0 +1,111 @@
|
||||
using UnityEngine.Events;
|
||||
using System.Collections.Generic;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
// UnityEvent fired in relation to media loading; the string argument carries the media path
[System.Serializable]
public class MediaPlayerLoadEvent : UnityEvent<string> {}
|
||||
|
||||
/// <summary>
/// UnityEvent raised by MediaPlayer for all playback lifecycle notifications.
/// In addition to Unity's persistent (inspector) listeners, it tracks runtime
/// listeners in a private list so duplicates can be rejected and HasListeners()
/// can answer cheaply.
/// </summary>
[System.Serializable]
public class MediaPlayerEvent : UnityEvent<MediaPlayer, MediaPlayerEvent.EventType, ErrorCode>
{
	public enum EventType
	{
		MetaDataReady,		// Triggered when meta data(width, duration etc) is available
		ReadyToPlay,		// Triggered when the video is loaded and ready to play
		Started,			// Triggered when the playback starts
		FirstFrameReady,	// Triggered when the first frame has been rendered
		FinishedPlaying,	// Triggered when a non-looping video has finished playing
		Closing,			// Triggered when the media is closed
		Error,				// Triggered when an error occurs
		SubtitleChange,		// Triggered when the subtitles change
		Stalled,			// Triggered when media is stalled (eg. when lost connection to media stream)
		Unstalled,			// Triggered when media is resumed form a stalled state (eg. when lost connection is re-established)
		ResolutionChanged,	// Triggered when the resolution of the video has changed (including the load) Useful for adaptive streams
		StartedSeeking,		// Triggered when seeking begins
		FinishedSeeking,	// Triggered when seeking has finished
		StartedBuffering,	// Triggered when buffering begins
		FinishedBuffering,	// Triggered when buffering has finished
		PropertiesChanged,	// Triggered when any properties (eg stereo packing are changed) - this has to be triggered manually
		PlaylistItemChanged,	// Triggered when the new item is played in the playlist
		PlaylistFinished,	// Triggered when the playlist reaches the end

		TextTracksChanged,	// Triggered when the text tracks are added or removed
		Paused,				// Triggered when the player is paused
		Unpaused,			// Triggered when the player resumes playing

		TimedMetadataChanged,	// Triggered when the timed metadata changes

		// TODO:
		//StartLoop,		// Triggered when the video starts and is in loop mode
		//EndLoop,			// Triggered when the video ends and is in loop mode
		//NewFrame			// Trigger when a new video frame is available

		TextCueChanged = SubtitleChange,	// Triggered when the text to display changes (alias of SubtitleChange)
	}

	// Runtime listeners added via AddListener(); mirrors the base UnityEvent list
	// so duplicate registrations can be detected (UnityEvent itself allows duplicates).
	private List<UnityAction<MediaPlayer, MediaPlayerEvent.EventType, ErrorCode>> _listeners = new List<UnityAction<MediaPlayer, EventType, ErrorCode>>(4);

	// True when at least one runtime or persistent (inspector-wired) listener is registered
	public bool HasListeners()
	{
		return (_listeners.Count > 0) || (GetPersistentEventCount() > 0);
	}

	// Hides UnityEvent.AddListener: ignores the call if this delegate is already registered
	new public void AddListener(UnityAction<MediaPlayer, MediaPlayerEvent.EventType, ErrorCode> call)
	{
		if (!_listeners.Contains(call))
		{
			_listeners.Add(call);
			base.AddListener(call);
		}
	}

	// Hides UnityEvent.RemoveListener: keeps the tracking list in sync with the base event
	new public void RemoveListener(UnityAction<MediaPlayer, MediaPlayerEvent.EventType, ErrorCode> call)
	{
		int index = _listeners.IndexOf(call);
		if (index >= 0)
		{
			_listeners.RemoveAt(index);
			base.RemoveListener(call);
		}
	}

	// Hides UnityEvent.RemoveAllListeners: clears both the tracking list and the base event
	new public void RemoveAllListeners()
	{
		_listeners.Clear();
		base.RemoveAllListeners();
	}
}
|
||||
|
||||
// NOTE(review): dead code — compiled out by '#if false'. Appears to be a sketch of an
// alternative event-listener API; kept for reference only.
#if false
public interface IMediaEvents
{
	void AddEventListener(UnityAction<MediaPlayer, MediaPlayerEvent.EventType, ErrorCode> call);
	void RemoveListener(UnityAction<MediaPlayer, MediaPlayerEvent.EventType, ErrorCode> call);
	void RemoveAllEventListeners();
}

public partial class BaseMediaPlayer
{
	void AddEventListener(UnityAction<MediaPlayer, MediaPlayerEvent.EventType, ErrorCode> call)
	{

	}
	void RemoveListener(UnityAction<MediaPlayer, MediaPlayerEvent.EventType, ErrorCode> call)
	{

	}
	void RemoveAllEventListeners()
	{

	}

	private MediaPlayerEvent _eventHandler;

}
#endif
|
||||
}
|
||||
12
Assets/AVProVideo/Runtime/Scripts/Internal/Events.cs.meta
Normal file
12
Assets/AVProVideo/Runtime/Scripts/Internal/Events.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 16a5efbe992a09144ac89dde2b3e0898
|
||||
timeCreated: 1438695622
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
537
Assets/AVProVideo/Runtime/Scripts/Internal/Helper.cs
Normal file
537
Assets/AVProVideo/Runtime/Scripts/Internal/Helper.cs
Normal file
@@ -0,0 +1,537 @@
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2025 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public static class Helper
|
||||
{
|
||||
// Version string of this AVPro Video package (script side)
public const string AVProVideoVersion = "3.3.5";

// Native plugin versions this script version expects, per platform.
// NOTE(review): presumably compared against the loaded plugin at startup to detect mismatches — confirm against caller.
public sealed class ExpectedPluginVersion
{
	public const string Windows = "3.2.6";
	public const string WinRT = "3.2.6";
	public const string Android = "3.3.5";
	public const string Apple = "3.3.5";
	public const string OpenHarmony = "3.3.5";
}

// Main texture property names for the built-in, URP and HDRP render pipelines respectively
public const string UnityBaseTextureName = "_MainTex";
public const string UnityBaseTextureName_URP = "_BaseMap";
public const string UnityBaseTextureName_HDRP = "_BaseColorMap";
|
||||
|
||||
/// <summary>
/// Resolves a MediaPathType to its base folder path.
/// Returns an empty string for AbsolutePathOrURL (nothing to prepend).
/// </summary>
public static string GetPath(MediaPathType location)
{
	string result = string.Empty;
	switch (location)
	{
		case MediaPathType.AbsolutePathOrURL:
			// Absolute paths/URLs need no base folder
			break;
		case MediaPathType.RelativeToDataFolder:
			result = Application.dataPath;
			break;
		case MediaPathType.RelativeToPersistentDataFolder:
			result = Application.persistentDataPath;
			break;
		case MediaPathType.RelativeToProjectFolder:
// WinRT 8.1 has no usable project folder, so result stays empty there
#if !UNITY_WINRT_8_1
			string path = "..";
#if UNITY_STANDALONE_OSX && !UNITY_EDITOR_OSX
			// On macOS standalone builds dataPath is deeper inside the .app bundle
			path += "/..";
#endif
			result = System.IO.Path.GetFullPath(System.IO.Path.Combine(Application.dataPath, path));
			// Normalise to forward slashes for consistency across platforms
			result = result.Replace('\\', '/');
#endif
			break;
		case MediaPathType.RelativeToStreamingAssetsFolder:
			result = Application.streamingAssetsPath;
#if UNITY_OPENHARMONY && !UNITY_EDITOR
			// It has been seen that some versions of Tuanjie do not include the 'jar:' prefix on the streamingAssetsPath so add it in here if it is missing
			string jarUrlPrefix = "jar:";
			result = result.StartsWith( jarUrlPrefix ) ? result : ( jarUrlPrefix + result );
#endif
			break;
	}
	return result;
}
|
||||
|
||||
/// <summary>
/// Combines a (possibly relative) media path with the base folder implied by 'location'.
/// Returns an empty string when 'path' is null or empty.
/// </summary>
public static string GetFilePath(string path, MediaPathType location)
{
	if (string.IsNullOrEmpty(path))
	{
		return string.Empty;
	}

	switch (location)
	{
		case MediaPathType.AbsolutePathOrURL:
			// Already absolute - use as-is
			return path;
		case MediaPathType.RelativeToDataFolder:
		case MediaPathType.RelativeToPersistentDataFolder:
		case MediaPathType.RelativeToProjectFolder:
		case MediaPathType.RelativeToStreamingAssetsFolder:
			return System.IO.Path.Combine(GetPath(location), path);
	}

	return string.Empty;
}
|
||||
|
||||
/// <summary>
/// Returns a friendly name ("1080p", "4K", ...) for the resolution closest in pixel
/// area to width*height, with the frame rate appended when valid (eg "1080p59.94").
/// </summary>
public static string GetFriendlyResolutionName(int width, int height, float fps)
{
	// Common 16:9 resolutions by pixel area, paired index-for-index with their names
	int[] areas = { 0, 7680 * 4320, 3840 * 2160, 2560 * 1440, 1920 * 1080, 1280 * 720, 853 * 480, 640 * 360, 426 * 240, 256 * 144 };
	string[] names = { "Unknown", "8K", "4K", "1440p", "1080p", "720p", "480p", "360p", "240p", "144p" };

	Debug.Assert(areas.Length == names.Length);

	// Find the entry whose area is closest; stop early on an exact match
	// TODO: add a maximum threshold to ignore differences that are too high
	int area = width * height;
	int bestIndex = 0;
	int bestDelta = int.MaxValue;
	for (int i = 0; i < areas.Length && bestDelta != 0; i++)
	{
		int delta = Mathf.Abs(areas[i] - area);
		if (delta < bestDelta)
		{
			bestIndex = i;
			bestDelta = delta;
		}
	}

	string result = names[bestIndex];

	// Append frame rate if valid
	if (fps > 0f && !float.IsNaN(fps))
	{
		result += fps.ToString("0.##");
	}

	return result;
}
|
||||
|
||||
/// <summary>
/// Returns a human-readable message for an ErrorCode, with extra platform-specific
/// guidance appended where relevant. Unhandled codes return an empty string.
/// </summary>
public static string GetErrorMessage(ErrorCode code)
{
	string result = string.Empty;
	switch (code)
	{
		case ErrorCode.None:
			result = "No Error";
			break;
		case ErrorCode.LoadFailed:
			result = "Loading failed. File not found, codec not supported, video resolution too high or insufficient system resources.";
#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
			// Add extra information for older Windows versions that don't have support for modern codecs
			if (SystemInfo.operatingSystem.StartsWith("Windows XP") ||
				SystemInfo.operatingSystem.StartsWith("Windows Vista"))
			{
				result += " NOTE: Windows XP and Vista don't have native support for H.264 codec. Consider using an older codec such as DivX or installing 3rd party codecs such as LAV Filters.";
			}
#endif
			break;
		case ErrorCode.DecodeFailed:
			result = "Decode failed. Possible codec not supported, video resolution/bit-depth too high, or insufficient system resources.";
#if UNITY_ANDROID
			result += " On Android this is generally due to the hardware not having enough resources to decode the video. Most Android devices can only handle a maximum of one 4K video at once.";
#endif
			break;
	}
	return result;
}
|
||||
|
||||
/// <summary>
/// Returns the display name for a Platform enum value.
/// Only WindowsUWP differs from its identifier; all other values use ToString().
/// </summary>
public static string GetPlatformName(Platform platform)
{
	if (platform == Platform.WindowsUWP)
	{
		return "Windows UWP";
	}
	return platform.ToString();
}
|
||||
|
||||
// Returns the display names for all supported platforms.
// NOTE(review): the order looks significant (presumably matches the Platform enum
// order for UI/indexing purposes) — confirm before reordering.
public static string[] GetPlatformNames()
{
	return new string[] {
		GetPlatformName(Platform.Windows),
		GetPlatformName(Platform.macOS),
		GetPlatformName(Platform.iOS),
		GetPlatformName(Platform.tvOS),
		GetPlatformName(Platform.visionOS),
		GetPlatformName(Platform.Android),
		GetPlatformName(Platform.WindowsUWP),
		GetPlatformName(Platform.WebGL),
		GetPlatformName(Platform.OpenHarmony)
	};
}
|
||||
|
||||
// Logs an informational message with the "[AVProVideo]" prefix.
// When AVPROVIDEO_DISABLE_LOGGING is defined the Conditional attribute (with a symbol
// that is never defined) makes the compiler strip all call sites entirely.
#if AVPROVIDEO_DISABLE_LOGGING
[System.Diagnostics.Conditional("ALWAYS_FALSE")]
#endif
public static void LogInfo(string message, Object context = null)
{
	if (context == null)
	{
		Debug.Log("[AVProVideo] " + message);
	}
	else
	{
		// Pass the context object so clicking the log entry pings it in the editor
		Debug.Log("[AVProVideo] " + message, context);
	}
}
|
||||
|
||||
// Returns Unity's audio output sample rate, or 0 when Unity audio is disabled.
public static int GetUnityAudioSampleRate()
{
	// For standalone builds (not in the editor):
	// In Unity 4.6, 5.0, 5.1 when audio is disabled there is no indication from the API.
	// But in 5.2.0 and above, it logs an error when trying to call
	// AudioSettings.GetDSPBufferSize() or AudioSettings.outputSampleRate
	// So to prevent the error, check if AudioSettings.GetConfiguration().sampleRate == 0
	return (AudioSettings.GetConfiguration().sampleRate == 0) ? 0 : AudioSettings.outputSampleRate;
}
|
||||
|
||||
/// <summary>
/// Returns the channel count for Unity's configured speaker mode,
/// or 0 for unrecognised modes.
/// </summary>
public static int GetUnityAudioSpeakerCount()
{
	AudioSpeakerMode mode = AudioSettings.GetConfiguration().speakerMode;
	if (mode == AudioSpeakerMode.Mono) { return 1; }
	// Prologic is matrix-encoded over two physical channels
	if (mode == AudioSpeakerMode.Stereo || mode == AudioSpeakerMode.Prologic) { return 2; }
	if (mode == AudioSpeakerMode.Quad) { return 4; }
	if (mode == AudioSpeakerMode.Surround) { return 5; }
	if (mode == AudioSpeakerMode.Mode5point1) { return 6; }
	if (mode == AudioSpeakerMode.Mode7point1) { return 8; }
	return 0;
}
|
||||
|
||||
// Returns a valid range to use for a timeline display
// Either it will return the range 0..duration, or
// for live streams it will return first seekable..last seekable time
public static TimeRange GetTimelineRange(double duration, TimeRanges seekable)
{
	TimeRange result = new TimeRange();
	// NOTE(review): 2e10 seconds (~634 years) is the sanity cap used to detect the
	// bogus "infinite" durations some backends report for live streams — confirm.
	if (duration >= 0.0 && duration < 2e10)
	{
		// Duration is valid
		result.startTime = 0f;
		result.duration = duration;
	}
	else
	{
		// Duration is invalid, so it could be a live stream, so derive from seekable range
		result.startTime = seekable.MinTime;
		result.duration = seekable.Duration;
	}
	return result;
}
|
||||
|
||||
// Conversion factors to hundred-nanosecond (HNS) units, the tick unit used by
// Windows Media Foundation timestamps (1 HNS = 100ns; 10,000,000 HNS per second)
public const double SecondsToHNS = 10000000.0;
public const double MilliSecondsToHNS = 10000.0;
|
||||
|
||||
/// <summary>
/// Formats a time in seconds as "MM:SS" (or "H:MM:SS" once the time reaches an hour),
/// optionally with a ":mmm" milliseconds suffix.
/// Fixed: all arithmetic is now done in double precision. The previous implementation
/// cast to float first, which loses sub-second accuracy for long durations (a float
/// has too few mantissa bits once the value exceeds a few hours). Negative inputs are
/// now clamped to zero instead of producing garbage like "-1:59:55".
/// </summary>
/// <param name="timeSeconds">Time in seconds; negative values are treated as 0.</param>
/// <param name="showMilliseconds">When true, appends the millisecond component.</param>
public static string GetTimeString(double timeSeconds, bool showMilliseconds = false)
{
	double totalSeconds = System.Math.Max(0.0, timeSeconds);

	int hours = (int)(totalSeconds / (60.0 * 60.0));
	double remaining = totalSeconds - (hours * 60.0 * 60.0);

	int minutes = (int)(remaining / 60.0);
	remaining -= minutes * 60.0;

	int seconds = (int)remaining;

	string result;
	if (hours <= 0)
	{
		if (showMilliseconds)
		{
			int milliSeconds = (int)((totalSeconds - System.Math.Floor(totalSeconds)) * 1000.0);
			result = string.Format("{0:00}:{1:00}:{2:000}", minutes, seconds, milliSeconds);
		}
		else
		{
			result = string.Format("{0:00}:{1:00}", minutes, seconds);
		}
	}
	else
	{
		if (showMilliseconds)
		{
			int milliSeconds = (int)((totalSeconds - System.Math.Floor(totalSeconds)) * 1000.0);
			result = string.Format("{2}:{0:00}:{1:00}:{3:000}", minutes, seconds, hours, milliSeconds);
		}
		else
		{
			result = string.Format("{2}:{0:00}:{1:00}", minutes, seconds, hours);
		}
	}

	return result;
}
|
||||
|
||||
/// <summary>
/// Convert texture transform matrix to an enum of orientation types.
/// 't' is the 2x2 rotation part of the transform ([m00, m01, m10, m11]); exact
/// float comparison is used because the values come through as exact constants
/// (0/±1) — NOTE(review): confirm the plugin never emits near-but-not-exact values.
/// Defaults to Landscape for null or unrecognised input.
/// </summary>
public static Orientation GetOrientation(float[] t)
{
	Orientation result = Orientation.Landscape;
	if (t != null)
	{
		// TODO: check that the Portrait and PortraitFlipped are the right way around
		if (t[0] == 0f && t[1]== 1f && t[2] == -1f && t[3] == 0f)
		{
			// 90 degree rotation
			result = Orientation.Portrait;
		} else
		if (t[0] == 0f && t[1] == -1f && t[2] == 1f && t[3] == 0f)
		{
			// -90 degree rotation
			result = Orientation.PortraitFlipped;
		} else
		if (t[0]== 1f && t[1] == 0f && t[2] == 0f && t[3] == 1f)
		{
			// Identity
			result = Orientation.Landscape;
		} else
		if (t[0] == -1f && t[1] == 0f && t[2] == 0f && t[3] == -1f)
		{
			// 180 degree rotation
			result = Orientation.LandscapeFlipped;
		}
		else
		if (t[0] == 0f && t[1] == 1f && t[2] == 1f && t[3] == 0f)
		{
			// Transpose (rotation + mirror)
			result = Orientation.PortraitHorizontalMirror;
		}
	}
	return result;
}
|
||||
|
||||
// Precomputed UV-space transforms for the fixed orientations (translate so the
// rotated unit square lands back in [0,1] UV space)
private static Matrix4x4 PortraitMatrix = Matrix4x4.TRS(new Vector3(0f, 1f, 0f), Quaternion.Euler(0f, 0f, -90f), Vector3.one);
private static Matrix4x4 PortraitFlippedMatrix = Matrix4x4.TRS(new Vector3(1f, 0f, 0f), Quaternion.Euler(0f, 0f, 90f), Vector3.one);
private static Matrix4x4 LandscapeFlippedMatrix = Matrix4x4.TRS(new Vector3(1f, 1f, 0f), Quaternion.Euler(0f, 0f, -180f), Vector3.one);

/// <summary>
/// Returns the UV transform matrix corresponding to an Orientation value.
/// Throws for unknown enum values so new orientations cannot be silently ignored.
/// </summary>
public static Matrix4x4 GetMatrixForOrientation(Orientation ori)
{
	Matrix4x4 result;
	switch (ori)
	{
		case Orientation.Landscape:
			result = Matrix4x4.identity;
			break;
		case Orientation.LandscapeFlipped:
			result = LandscapeFlippedMatrix;
			break;
		case Orientation.Portrait:
			result = PortraitMatrix;
			break;
		case Orientation.PortraitFlipped:
			result = PortraitFlippedMatrix;
			break;
		case Orientation.PortraitHorizontalMirror:
			// Swap-axes (transpose) matrix: mirrors across the diagonal
			result = new Matrix4x4();
			result.SetColumn(0, new Vector4(0f, 1f, 0f, 0f));
			result.SetColumn(1, new Vector4(1f, 0f, 0f, 0f));
			result.SetColumn(2, new Vector4(0f, 0f, 1f, 0f));
			result.SetColumn(3, new Vector4(0f, 0f, 0f, 1f));
			break;
		default:
			throw new System.Exception("Unknown Orientation type");
	}
	return result;
}
|
||||
|
||||
/// <summary>
/// Builds a Matrix4x4 from a 2D affine transform given as 6 floats
/// [a, b, c, d, tx, ty]: linear part in columns 0/1, translation in column 3.
/// NOTE(review): this matches the layout of CoreGraphics/CoreAnimation
/// CGAffineTransform — confirm against the native plugin that supplies it.
/// </summary>
public static Matrix4x4 Matrix4x4FromAffineTransform(float[] affineXfrm)
{
	Vector4 v0 = new Vector4(affineXfrm[0], affineXfrm[1], 0, 0);
	Vector4 v1 = new Vector4(affineXfrm[2], affineXfrm[3], 0, 0);
	Vector4 v2 = new Vector4( 0, 0, 1, 0);
	Vector4 v3 = new Vector4(affineXfrm[4], affineXfrm[5], 0, 1);
	return new Matrix4x4(v0, v1, v2, v3);
}
|
||||
|
||||
/// <summary>
/// Converts a time in seconds to a zero-based frame number.
/// Negative inputs are clamped to zero.
/// </summary>
public static int ConvertTimeSecondsToFrame(double seconds, float frameRate)
{
	// NOTE: Generally you should use RountToInt when converting from time to frame number
	// but because we're adding a half frame offset (which seems to be the safer thing to do) we need to FloorToInt
	double clampedSeconds = System.Math.Max(0.0, seconds);
	float clampedRate = Mathf.Max(0f, frameRate);
	return (int)System.Math.Floor(clampedRate * clampedSeconds);
}
|
||||
|
||||
/// <summary>
/// Converts a zero-based frame number to a time in seconds, biased into the
/// interior of the frame's time range so seeks land on the intended frame.
/// NOTE: returns Infinity when frameRate is 0 (division by zero is not guarded).
/// </summary>
public static double ConvertFrameToTimeSeconds(int frame, float frameRate)
{
	frame = Mathf.Max(0, frame);
	frameRate = Mathf.Max(0f, frameRate);
	double frameDurationSeconds = 1.0 / frameRate;
#if !UNITY_EDITOR && UNITY_ANDROID
	return ((double)frame * frameDurationSeconds) + (frameDurationSeconds * 0.01);	// #1999 : Need to bump on the value a little, but not a whole half frame time, to avoid float inaccuracy error
#else
	return ((double)frame * frameDurationSeconds) + (frameDurationSeconds * 0.5);	// Add half a frame we that the time lands in the middle of the frame range and not at the edges
#endif
}
|
||||
|
||||
/// <summary>
/// Returns the time (in seconds) of the next keyframe strictly after 'seconds',
/// assuming keyframes occur every 'keyFrameInterval' frames.
/// Fixed: keyFrameInterval is now clamped to a minimum of 1 instead of 0 — an
/// interval of 0 previously caused a float division by zero (Infinity), and
/// Mathf.CeilToInt(Infinity) overflows to a meaningless frame number.
/// </summary>
public static double FindNextKeyFrameTimeSeconds(double seconds, float frameRate, int keyFrameInterval)
{
	seconds = System.Math.Max(0.0, seconds);
	frameRate = Mathf.Max(0f, frameRate);
	// Must be at least 1 to avoid division by zero below
	keyFrameInterval = Mathf.Max(1, keyFrameInterval);
	int currentFrame = Helper.ConvertTimeSecondsToFrame(seconds, frameRate);
	// TODO: allow specifying a minimum number of frames so that if currentFrame is too close to nextKeyFrame, it will calculate the next-next keyframe
	// Round (currentFrame + 1) up to the next multiple of keyFrameInterval
	int nextKeyFrame = keyFrameInterval * Mathf.CeilToInt((float)(currentFrame + 1) / (float)keyFrameInterval);
	return Helper.ConvertFrameToTimeSeconds(nextKeyFrame, frameRate);
}
|
||||
|
||||
public static System.DateTime ConvertSecondsSince1970ToDateTime(double secondsSince1970)
|
||||
{
|
||||
System.TimeSpan time = System.TimeSpan.FromSeconds(secondsSince1970);
|
||||
return new System.DateTime(1970, 1, 1).Add(time);
|
||||
}
|
||||
|
||||
#if (UNITY_EDITOR_WIN || (!UNITY_EDITOR && UNITY_STANDALONE_WIN))
// Win32 GetShortPathNameW: converts a long path to its DOS 8.3 short form.
// First call with a null buffer returns the required buffer length (in chars).
[System.Runtime.InteropServices.DllImport("kernel32.dll", CharSet = System.Runtime.InteropServices.CharSet.Unicode, EntryPoint = "GetShortPathNameW", SetLastError=true)]
private static extern int GetShortPathName([System.Runtime.InteropServices.MarshalAs(System.Runtime.InteropServices.UnmanagedType.LPWStr)] string pathName,
											[System.Runtime.InteropServices.MarshalAs(System.Runtime.InteropServices.UnmanagedType.LPWStr)] System.Text.StringBuilder shortName,
											int cbShortName);

// Handle very long file paths by converting to DOS 8.3 format
// Prefixing with \\?\ lifts the MAX_PATH limit for the Win32 call itself;
// on failure the (prefixed, backslashed) input path is returned unchanged.
internal static string ConvertLongPathToShortDOS83Path(string path)
{
	const string pathToken = @"\\?\";
	string result = pathToken + path.Replace("/","\\");
	// First call sizes the buffer; zero means the conversion is unavailable/failed
	int length = GetShortPathName(result, null, 0);
	if (length > 0)
	{
		System.Text.StringBuilder sb = new System.Text.StringBuilder(length);
		if (0 != GetShortPathName(result, sb, length))
		{
			result = sb.ToString().Replace(pathToken, "");
			Debug.LogWarning("[AVProVideo] Long path detected. Changing to DOS 8.3 format");
		}
	}
	return result;
}
#endif
|
||||
|
||||
// Converts a non-readable texture to a readable Texture2D.
// "targetTexture" can be null or you can pass in an existing texture.
// Remember to Destroy() the returned texture after finished with it.
// The input is blitted/drawn into a temporary ARGB32 RenderTexture (applying the
// requested vertical flip and orientation), then copied back with ReadPixels.
public static Texture2D GetReadableTexture(Texture inputTexture, bool requiresVerticalFlip, Orientation ori, Texture2D targetTexture = null)
{
	Texture2D resultTexture = targetTexture;

	// Preserve the caller's active render target and restore it at the end
	RenderTexture prevRT = RenderTexture.active;

	int textureWidth = inputTexture.width;
	int textureHeight = inputTexture.height;
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_IPHONE || UNITY_IOS || UNITY_TVOS
	// Portrait orientations rotate 90 degrees, so the output dimensions swap
	if (ori == Orientation.Portrait || ori == Orientation.PortraitFlipped)
	{
		textureWidth = inputTexture.height;
		textureHeight = inputTexture.width;
	}
#endif

	// Blit the texture to a temporary RenderTexture
	// This handles any format conversion that is required and allows us to use ReadPixels to copy texture from RT to readable texture
	RenderTexture tempRT = RenderTexture.GetTemporary(textureWidth, textureHeight, 0, RenderTextureFormat.ARGB32);

	if (ori == Orientation.Landscape)
	{
		if (!requiresVerticalFlip)
		{
			Graphics.Blit(inputTexture, tempRT);
		}
		else
		{
			// The above Blit can't flip unless using a material, so we use Graphics.DrawTexture instead
			GL.PushMatrix();
			RenderTexture.active = tempRT;
			GL.LoadPixelMatrix(0f, tempRT.width, 0f, tempRT.height);
			Rect sourceRect = new Rect(0f, 0f, 1f, 1f);
			// NOTE: not sure why we need to set y to -1, without this there is a 1px gap at the bottom
			Rect destRect = new Rect(0f, -1f, tempRT.width, tempRT.height);

			Graphics.DrawTexture(destRect, inputTexture, sourceRect, 0, 0, 0, 0);
			GL.PopMatrix();
			GL.InvalidateState();
		}
	}
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_IPHONE || UNITY_IOS || UNITY_TVOS
	else
	{
		// Build the pixel-space transform that bakes the orientation into the copy
		Matrix4x4 m = Matrix4x4.identity;
		switch (ori)
		{
			case Orientation.Portrait:
				m = Matrix4x4.TRS(new Vector3(0f, inputTexture.width, 0f), Quaternion.Euler(0f, 0f, -90f), Vector3.one);
				break;
			case Orientation.PortraitFlipped:
				m = Matrix4x4.TRS(new Vector3(inputTexture.height, 0f, 0f), Quaternion.Euler(0f, 0f, 90f), Vector3.one);
				break;
			case Orientation.LandscapeFlipped:
				m = Matrix4x4.TRS(new Vector3(inputTexture.width, inputTexture.height, 0f), Quaternion.identity, new Vector3(-1f, -1f, 1f));
				break;
		}

		// The above Blit can't flip unless using a material, so we use Graphics.DrawTexture instead
		GL.InvalidateState();
		RenderTexture.active = tempRT;
		GL.Clear(false, true, Color.black);
		GL.PushMatrix();
		GL.LoadPixelMatrix(0f, tempRT.width, 0f, tempRT.height);
		Rect sourceRect = new Rect(0f, 0f, 1f, 1f);
		// NOTE: not sure why we need to set y to -1, without this there is a 1px gap at the bottom
		Rect destRect = new Rect(0f, -1f, inputTexture.width, inputTexture.height);
		GL.MultMatrix(m);

		Graphics.DrawTexture(destRect, inputTexture, sourceRect, 0, 0, 0, 0);
		GL.PopMatrix();
		GL.InvalidateState();
	}
#endif

	if (resultTexture == null)
	{
		// No target supplied - allocate one (caller owns it and must Destroy it)
		resultTexture = new Texture2D(textureWidth, textureHeight, TextureFormat.ARGB32, false);
	}

	// Copy from the temporary RT into CPU-readable texture memory
	RenderTexture.active = tempRT;
	resultTexture.ReadPixels(new Rect(0f, 0f, textureWidth, textureHeight), 0, 0, false);
	resultTexture.Apply(false, false);
	RenderTexture.ReleaseTemporary(tempRT);

	RenderTexture.active = prevRT;

	return resultTexture;
}
|
||||
|
||||
// Converts a non-readable texture to a readable Texture2D.
// "targetTexture" can be null or you can pass in an existing texture.
// Remember to Destroy() the returned texture after finished with it.
// Simpler overload for RenderTexture sources: no flip/orientation handling,
// just a direct ReadPixels copy into an ARGB32 texture.
public static Texture2D GetReadableTexture(RenderTexture inputTexture, Texture2D targetTexture = null)
{
	if (targetTexture == null)
	{
		// Caller owns the allocated texture and must Destroy() it
		targetTexture = new Texture2D(inputTexture.width, inputTexture.height, TextureFormat.ARGB32, false);
	}

	// Preserve and restore the caller's active render target
	RenderTexture prevRT = RenderTexture.active;
	RenderTexture.active = inputTexture;
	targetTexture.ReadPixels(new Rect(0f, 0f, inputTexture.width, inputTexture.height), 0, 0, false);
	targetTexture.Apply(false, false);
	RenderTexture.active = prevRT;

	return targetTexture;
}
|
||||
}
|
||||
}
|
||||
12
Assets/AVProVideo/Runtime/Scripts/Internal/Helper.cs.meta
Normal file
12
Assets/AVProVideo/Runtime/Scripts/Internal/Helper.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 79e446998599e1647804321292c80f42
|
||||
timeCreated: 1600887818
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
985
Assets/AVProVideo/Runtime/Scripts/Internal/Interfaces.cs
Normal file
985
Assets/AVProVideo/Runtime/Scripts/Internal/Interfaces.cs
Normal file
@@ -0,0 +1,985 @@
|
||||
using UnityEngine;
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// Core lifecycle interface implemented by each platform's media player:
/// per-frame update/render hooks plus access to the native player handle.
/// </summary>
public interface IMediaPlayer
{
	void OnEnable();
	void Update();
	void EndUpdate();
	void BeginRender();
	void Render();
	// Pointer to the underlying native plugin player instance
	IntPtr GetNativePlayerHandle();
}
|
||||
|
||||
/// <summary>
/// Interface for side loading of subtitles in SRT format
/// </summary>
public interface IMediaSubtitles
{
	// Loads subtitles from SRT text; null/empty disables them. Returns true when loaded.
	bool LoadSubtitlesSRT(string data);
	// Index of the active subtitle, or -1 when none is showing
	int GetSubtitleIndex();
	// Text of the active subtitle (empty string when none)
	string GetSubtitleText();
}
|
||||
|
||||
/// <summary>
/// Playback control surface implemented by each platform's media player.
/// </summary>
public interface IMediaControl
{
	/// <summary>
	/// Be careful using this method directly. It is best to instead use the OpenMedia() method in the MediaPlayer component as this will set up the events correctly and also perform other checks
	/// customHttpHeaders is in the format "key1:value1\r\nkey2:value2\r\n"
	/// </summary>
	bool OpenMedia(string path, long offset, string customHttpHeaders, MediaHints mediahints, int forceFileFormat = 0, bool startWithHighestBitrate = false);

	/// <summary>Opens media from a complete in-memory buffer.</summary>
	bool OpenMediaFromBuffer(byte[] buffer);

	// Chunked variant of buffer loading: declare the total length, append
	// chunks, then finalise. Each step returns a bool (presumably success).
	bool StartOpenMediaFromBuffer(ulong length);
	bool AddChunkToMediaBuffer(byte[] chunk, ulong offset, ulong length);
	bool EndOpenMediaFromBuffer();

#if NETFX_CORE
	// UWP-only overload that opens media from a WinRT random access stream
	bool OpenMedia(Windows.Storage.Streams.IRandomAccessStream ras, string path, long offset, string customHttpHeaders);
#endif

	/// <summary>Closes the currently open media.</summary>
	void CloseMedia();

	void SetLooping(bool bLooping);
	bool IsLooping();

	// Playback state queries
	bool HasMetaData();
	bool CanPlay();
	bool IsPlaying();
	bool IsSeeking();
	bool IsPaused();
	bool IsFinished();
	bool IsBuffering();

	// Transport controls
	void Play();
	void Pause();
	void Stop();
	void Rewind();

	/// <summary>
	/// The time in seconds seeked will be to the exact time
	/// This can take a long time if the keyframes are far apart
	/// Some platforms don't support this and instead seek to the closest keyframe
	/// </summary>
	void Seek(double time);

	/// <summary>
	/// The time in seconds seeked will be to the closest keyframe
	/// </summary>
	void SeekFast(double time);

	/// <summary>
	/// The time in seconds seeked to will be within the range [time-timeDeltaBefore, time+timeDeltaAfter] for efficiency.
	/// Only supported on macOS, iOS and tvOS.
	/// Other platforms will automatically pass through to Seek()
	/// </summary>
	void SeekWithTolerance(double time, double timeDeltaBefore, double timeDeltaAfter);

	/// <summary>
	/// Seek to a specific frame, range is [0, GetMaxFrameNumber()]
	/// NOTE: For best results the video should be encoded as keyframes only
	/// and have no audio track, or an audio track with the same length as the video track
	/// </summary>
	void SeekToFrame(int frame, float overrideFrameRate = 0f);

	/// <summary>
	/// Seek forwards or backwards relative to the current frame
	/// NOTE: For best results the video should be encoded as keyframes only
	/// and have no audio track, or an audio track with the same length as the video track
	/// </summary>
	void SeekToFrameRelative(int frameOffset, float overrideFrameRate = 0f);

	/// <summary>
	/// Returns the current video time in seconds
	/// </summary>
	double GetCurrentTime();

	/// <summary>
	/// Returns the current video time in frames, range is [0, GetMaxFrameNumber()]
	/// NOTE: For best results the video should be encoded as keyframes only
	/// and have no audio track, or an audio track with the same length as the video track
	/// </summary>
	int GetCurrentTimeFrames(float overrideFrameRate = 0f);

	/// <summary>
	/// Returns the current video date and time usually from the
	/// EXT-X-PROGRAM-DATE-TIME tag on HLS streams
	/// Only supported on macOS, iOS, tvOS and Android (using ExoPlayer API)
	/// And Windows 10 using WinRT API
	/// </summary>
	System.DateTime GetProgramDateTime();

	// Playback rate (1.0 is normal speed)
	float GetPlaybackRate();
	void SetPlaybackRate(float rate);

	// Audio controls
	void MuteAudio(bool bMute);
	bool IsMuted();
	void SetVolume(float volume);
	void SetBalance(float balance);
	float GetVolume();
	float GetBalance();

	/*int GetCurrentVideoTrack();
	void SetVideoTrack(int index);

	int GetCurrentAudioTrack();
	void SetAudioTrack(int index);*/

	/// <summary>
	/// Returns a range of time values that can be seeked in seconds
	/// </summary>
	TimeRanges GetSeekableTimes();

	/// <summary>
	/// Returns a range of time values that contain fully downloaded segments,
	/// which can be seeked to immediately without requiring additional downloading
	/// </summary>
	TimeRanges GetBufferedTimes();

	// Error reporting for the last failed operation
	ErrorCode GetLastError();
	long GetLastExtendedErrorCode();

	// Output texture sampling configuration
	void SetTextureProperties(FilterMode filterMode = FilterMode.Bilinear, TextureWrapMode wrapMode = TextureWrapMode.Clamp, int anisoLevel = 1);
	void GetTextureProperties(out FilterMode filterMode, out TextureWrapMode wrapMode, out int anisoLevel);

	// Audio Grabbing

	/// <summary>
	/// Copies the specified amount of audio into the buffer
	/// If the specified amount is not yet available then no samples are copied
	/// The number of audio samples grabbed are returned
	/// </summary>
	int GrabAudio(float[] buffer, int sampleCount, int channelCount);
	int GetAudioBufferedSampleCount();
	int GetAudioChannelCount();
	AudioChannelMaskFlags GetAudioChannelMask();

	// Called when the system audio configuration changes (e.g. device switch)
	void AudioConfigurationChanged(bool deviceChanged);

	// Audio 360 (Facebook Audio360 spatial audio)

	void SetAudioChannelMode(Audio360ChannelMode channelMode);
	void SetAudioHeadRotation(Quaternion q);
	void ResetAudioHeadRotation();
	void SetAudioFocusEnabled(bool enabled);
	void SetAudioFocusProperties(float offFocusLevel, float widthDegrees);
	void SetAudioFocusRotation(Quaternion q);
	void ResetAudioFocus();

	// Blocks until a new frame is available or a timeout condition is hit;
	// behaviour is implementation specific
	bool WaitForNextFrame(Camera dummyCamera, int previousFrameCount);

	[Obsolete("SetPlayWithoutBuffering has been deprecated, see platform specific options for how to enable playback without buffering (if supported).")]
	void SetPlayWithoutBuffering(bool playWithoutBuffering);

	// Encrypted stream support
	//void SetKeyServerURL(string url);
	void SetKeyServerAuthToken(string token);
	void SetOverrideDecryptionKey(byte[] key);

	// External playback support.

	/// <summary>
	/// Check to see if external playback is currently active on the player.
	/// </summary>
	bool IsExternalPlaybackActive();

	/// <summary>
	/// Set whether the player is allowed to switch to external playback, e.g. AirPlay.
	/// </summary>
	void SetAllowsExternalPlayback(bool enable);

	/// <summary>
	/// Sets the video gravity of the player for external playback only.
	/// </summary>
	void SetExternalPlaybackVideoGravity(ExternalPlaybackVideoGravity gravity);
}
|
||||
|
||||
/// <summary>
/// Read-only information about the currently loaded media and the player.
/// </summary>
public interface IMediaInfo
{
	/// <summary>
	/// Returns media duration in seconds
	/// </summary>
	double GetDuration();

	/// <summary>
	/// Returns media duration in frames
	/// NOTE: For best results the video should be encoded as keyframes only
	/// and have no audio track, or an audio track with the same length as the video track
	/// </summary>
	int GetDurationFrames(float overrideFrameRate = 0f);

	/// <summary>
	/// Returns highest frame number that can be seeked to
	/// NOTE: For best results the video should be encoded as keyframes only
	/// and have no audio track, or an audio track with the same length as the video track
	/// </summary>
	int GetMaxFrameNumber(float overrideFrameRate = 0f);

	/// <summary>
	/// Returns video width in pixels
	/// </summary>
	int GetVideoWidth();

	/// <summary>
	/// Returns video height in pixels
	/// </summary>
	int GetVideoHeight();

	/// <summary>
	/// Returns the frame rate of the media.
	/// </summary>
	float GetVideoFrameRate();

	/// <summary>
	/// Returns the current achieved display rate in frames per second
	/// </summary>
	float GetVideoDisplayRate();

	/// <summary>
	/// Returns true if the media has a visual track
	/// </summary>
	bool HasVideo();

	/// <summary>
	/// Returns true if the media has an audio track
	/// </summary>
	bool HasAudio();

	/// <summary>
	/// Returns a description of which playback path is used internally.
	/// This can for example expose whether CPU or GPU decoding is being performed
	/// For Windows the available player descriptions are:
	/// "DirectShow" - legacy Microsoft API but still very useful especially with modern filters such as LAV
	/// "MF-MediaEngine-Software" - uses the Windows 8.1 features of the Microsoft Media Foundation API, but software decoding
	/// "MF-MediaEngine-Hardware" - uses the Windows 8.1 features of the Microsoft Media Foundation API, but GPU decoding
	/// Android has "MediaPlayer" and "ExoPlayer"
	/// macOS / tvOS / iOS just has "AVFoundation"
	/// </summary>
	string GetPlayerDescription();

	/// <summary>
	/// Whether this MediaPlayer instance supports linear color space
	/// If it doesn't then a correction may have to be made in the shader
	/// </summary>
	bool PlayerSupportsLinearColorSpace();

	/// <summary>
	/// Checks if the playback is in a stalled state
	/// </summary>
	bool IsPlaybackStalled();

	/// <summary>
	/// The affine transform of the texture as an array of six floats: a, b, c, d, tx, ty.
	/// </summary>
	float[] GetTextureTransform();

	/// <summary>
	/// Gets the estimated bandwidth used by all video players (in bits per second)
	/// Currently only supported on Android when using ExoPlayer API
	/// </summary>
	long GetEstimatedTotalBandwidthUsed();

	/*
	string GetMediaDescription();
	string GetVideoDescription();
	string GetAudioDescription();*/

	/// <summary>
	/// Checks if the media is compatible with external playback, for instance via AirPlay.
	/// </summary>
	bool IsExternalPlaybackSupported();

	// Internal method
	bool GetDecoderPerformance(ref int activeDecodeThreadCount, ref int decodedFrameCount, ref int droppedFrameCount);

	// Internal method
	PlaybackQualityStats GetPlaybackQualityStats();
}
|
||||
|
||||
#region MediaCaching
|
||||
|
||||
/// <summary>Options for configuring media caching.</summary>
public class MediaCachingOptions
{
	/// <summary>The minimum bitrate of the media to cache in bits per second.</summary>
	public double minimumRequiredBitRate;

	/// <summary>The minimum resolution of the media to cache.</summary>
	/// <remark>Only supported on Android and iOS 14 and later.</remark>
	public Vector2 minimumRequiredResolution;

	/// <summary>The maximum bitrate of the media to cache in bits per second.</summary>
	/// <remark>Only supported on Android.</remark>
	public double maximumRequiredBitRate;

	/// <summary>The maximum resolution of the media to cache.</summary>
	/// <remark>Only supported on Android.</remark>
	public Vector2 maximumRequiredResolution;

	/// <summary>Human readable title for the cached media.</summary>
	/// <remark>iOS: This value will be displayed in the usage pane of the settings app.</remark>
	public string title;

	/// <summary>Optional artwork for the cached media in PNG format.</summary>
	/// <remark>iOS: This value will be displayed in the usage pane of the settings app.</remark>
	public byte[] artwork;
}
|
||||
|
||||
/// <summary>Status of the media item in the cache.</summary>
public enum CachedMediaStatus: int
{
	/// <summary>The media has not been cached.</summary>
	NotCached,
	/// <summary>The media is being cached.</summary>
	Caching,
	/// <summary>The media is cached.</summary>
	Cached,
	/// <summary>The media is not cached, something went wrong - check the log.</summary>
	Failed,
	/// <summary>The media caching is paused.</summary>
	Paused
}
|
||||
|
||||
/// <summary>Interface for the media cache.</summary>
public interface IMediaCache
{
	/// <summary>Test to see if the player can cache media.</summary>
	/// <returns>True if media caching is supported.</returns>
	bool IsMediaCachingSupported();

	/// <summary>Cache the media specified by url.</summary>
	/// <param name="url">The url of the media.</param>
	/// <param name="headers">Optional HTTP headers to send when downloading.</param>
	/// <param name="options">Optional caching configuration; null uses defaults.</param>
	void AddMediaToCache(string url, string headers = null, MediaCachingOptions options = null);

	/// <summary>Cancels the download of the media specified by url.</summary>
	/// <param name="url">The url of the media.</param>
	void CancelDownloadOfMediaToCache(string url);

	/// <summary>Pause the download of the media specified by url.</summary>
	/// <param name="url">The url of the media.</param>
	void PauseDownloadOfMediaToCache(string url);

	/// <summary>Resume the download of the media specified by url.</summary>
	/// <param name="url">The url of the media.</param>
	void ResumeDownloadOfMediaToCache(string url);

	/// <summary>Remove the cached media specified by url.</summary>
	/// <param name="url">The url of the media.</param>
	void RemoveMediaFromCache(string url);

	/// <summary>Get the cached status for the media specified.</summary>
	/// <param name="url">The url of the media.</param>
	/// <param name="progress">The amount of the media that has been cached in the range [0...1].</param>
	/// <returns>The status of the media.</returns>
	CachedMediaStatus GetCachedMediaStatus(string url, ref float progress);

	// /// <summary>Test if the currently open media is cached.</summary>
	// /// <returns>True if the media is cached, false otherwise.</returns>
	// bool IsMediaCached();
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
/// Access to the texture(s) produced by the media player each frame.
/// </summary>
public interface ITextureProducer
{
	/// <summary>
	/// Gets the number of textures produced by the media player.
	/// </summary>
	int GetTextureCount();

	/// <summary>
	/// Returns the Unity texture containing the current frame image.
	/// The texture pointer will return null while the video is loading
	/// This texture usually remains the same for the duration of the video.
	/// There are cases when this texture can change, for instance: if the graphics device is recreated,
	/// a new video is loaded, or if an adaptive stream (eg HLS) is used and it switches video streams.
	/// </summary>
	Texture GetTexture(int index = 0);

	/// <summary>
	/// Returns a count of how many times the texture has been updated
	/// </summary>
	int GetTextureFrameCount();

	/// <summary>
	/// Returns whether this platform supports counting the number of times the texture has been updated
	/// </summary>
	bool SupportsTextureFrameCount();

	/// <summary>
	/// Returns the presentation time stamp of the current texture
	/// </summary>
	long GetTextureTimeStamp();

	/// <summary>
	/// Returns the DAR/SAR ratio
	/// </summary>
	float GetTexturePixelAspectRatio();

	/// <summary>
	/// Returns true if the image on the texture is upside-down
	/// </summary>
	bool RequiresVerticalFlip();

	/// <summary>
	/// Returns the type of packing used for stereo content
	/// </summary>
	StereoPacking GetTextureStereoPacking();

	/// <summary>
	/// Returns whether the texture has transparency
	/// </summary>
	TransparencyMode GetTextureTransparency();

	/// <summary>
	/// Returns the type of packing used for alpha content
	/// </summary>
	AlphaPacking GetTextureAlphaPacking();

	/// <summary>
	/// Returns the current transformation required to convert from YpCbCr to RGB colorspaces.
	/// </summary>
	Matrix4x4 GetYpCbCrTransform();

	/// <summary>
	/// The affine transform of the texture as an array of six floats: [a, b, c, d, tx, ty].
	/// </summary>
	float[] GetAffineTransform();

	/// <summary>
	/// The full 4x4 transform of the texture
	/// </summary>
	Matrix4x4 GetTextureMatrix();

	/// <summary>
	/// Get a render texture format that is compatible with the textures internal format
	/// </summary>
	/// <param name="options">Any options that may change the choice of render texture format, defaults to None</param>
	/// <param name="plane">Index of the plane to get compatible render texture format for, defaults to the first plane</param>
	/// <returns>A compatible render texture format</returns>
	RenderTextureFormat GetCompatibleRenderTextureFormat(GetCompatibleRenderTextureFormatOptions options = GetCompatibleRenderTextureFormatOptions.Default, int plane = 0);
}
|
||||
|
||||
/// <summary>
/// Options for passing into GetCompatibleRenderTextureFormat
/// </summary>
[Flags]
public enum GetCompatibleRenderTextureFormatOptions
{
	/// <summary>No options, default behaviour based on the texture's format</summary>
	Default = 0,
	/// <summary>The format is for a final resolve, i.e. converting from YCbCr to RGBA</summary>
	ForResolve = 1 << 0,
	/// <summary>The format requires an alpha channel</summary>
	RequiresAlpha = 1 << 1,
}
|
||||
|
||||
/// <summary>
/// Platforms that AVPro Video distinguishes between.
/// Values 0..8 are the real platforms; Count must always equal the number
/// of platform entries so that Count-based iteration covers them all.
/// </summary>
public enum Platform
{
	Windows,
	macOS,
	iOS,
	tvOS,
	visionOS,
	Android,
	WindowsUWP,
	WebGL,
	OpenHarmony,
	// FIX: Count was previously 8, which collided with OpenHarmony (= 8)
	// and caused any loop iterating [0, Count) to skip the last platform.
	Count = 9,
	Unknown = 100,
}
|
||||
|
||||
/// <summary>How a MediaPlayer selects its media: via a MediaReference asset or a direct path.</summary>
public enum MediaSource
{
	Reference,
	Path
}

/// <summary>How a stored path string is interpreted when resolved to a full path.</summary>
public enum MediaPathType
{
	AbsolutePathOrURL,
	RelativeToProjectFolder,
	RelativeToStreamingAssetsFolder,
	RelativeToDataFolder,
	RelativeToPersistentDataFolder,
}
|
||||
|
||||
/// <summary>
/// A serialisable media location: a path string plus the rule for how it
/// should be interpreted (absolute/URL or relative to a Unity folder).
/// Value-equality is based on both the path string and the path type.
/// </summary>
[System.Serializable]
public class MediaPath
{
	[SerializeField] MediaPathType _pathType = MediaPathType.RelativeToStreamingAssetsFolder;
	public MediaPathType PathType { get { return _pathType; } internal set { _pathType = value; } }

	[SerializeField] string _path = string.Empty;
	public string Path { get { return _path; } internal set { _path = value; } }

	/// <summary>Creates an empty path relative to the StreamingAssets folder.</summary>
	public MediaPath()
	{
		_pathType = MediaPathType.RelativeToStreamingAssetsFolder;
		_path = string.Empty;
	}

	/// <summary>Copy constructor.</summary>
	public MediaPath(MediaPath copy)
	{
		_pathType = copy.PathType;
		_path = copy.Path;
	}

	/// <summary>Creates a path from an explicit string and interpretation rule.</summary>
	public MediaPath(string path, MediaPathType pathType)
	{
		_pathType = pathType;
		_path = path;
	}

	/// <summary>
	/// Resolves the stored (possibly relative) path to a full path via Helper.GetFilePath.
	/// </summary>
	public string GetResolvedFullPath()
	{
		string fullPath = Helper.GetFilePath(_path, _pathType);

#if (UNITY_EDITOR_WIN || (!UNITY_EDITOR && UNITY_STANDALONE_WIN))
		// Very long local paths can exceed Windows path-length limits, so
		// convert them to short DOS 8.3 form. URLs (containing "://") are
		// left untouched.
		bool looksLikeUrl = fullPath.Contains("://");
		if (!looksLikeUrl && fullPath.Length > 200)
		{
			fullPath = Helper.ConvertLongPathToShortDOS83Path(fullPath);
		}
#endif

		return fullPath;
	}

	// Allows a plain string wherever a MediaPath is expected; the string is
	// treated as an absolute path or URL.
	public static implicit operator MediaPath(string s)
	{
		return new MediaPath(s, MediaPathType.AbsolutePathOrURL);
	}

	public static bool operator == (MediaPath a, MediaPath b)
	{
		// ReferenceEquals avoids recursing back into this operator
		if (object.ReferenceEquals(a, null))
		{
			return object.ReferenceEquals(b, null);
		}
		return a.Equals(b);
	}

	public static bool operator != (MediaPath a, MediaPath b)
	{
		return !(a == b);
	}

	public override bool Equals(object obj)
	{
		// Require the exact same runtime type (no subclass equality)
		if (object.ReferenceEquals(obj, null) || GetType() != obj.GetType())
		{
			return false;
		}

		MediaPath other = (MediaPath)obj;
		return (_pathType == other._pathType) && (_path == other._path);
	}

	public override int GetHashCode()
	{
		// Combine both fields so equal values hash equally
		return _path.GetHashCode() ^ _pathType.GetHashCode();
	}
}
|
||||
|
||||
/// <summary>Whether a setting uses internal logic or a manual override.</summary>
public enum OverrideMode
{
	None, // No override, just use internal logic
	Override, // Manually override
}

/// <summary>Colour space interpretation of the video texture.</summary>
public enum TextureGamma
{
	SRGB,
	Linear,

	// Future HDR support
	// PQ,
	// HLG,
}
|
||||
|
||||
/// <summary>How stereoscopic (left/right eye) content is packed into the video frame.</summary>
public enum StereoPacking : int
{
	Unknown = -1,
	Monoscopic, // Monoscopic (no stereo)
	TopBottom, // Top is the left eye, bottom is the right eye
	LeftRight, // Left is the left eye, right is the right eye
	CustomUV, // Use the mesh UV to unpack, uv0=left eye, uv1=right eye
	RightLeft, // Left side is the right eye, right side is the left eye
	MultiviewLeftPrimary, // First texture left eye, second texture is right eye
	MultiviewRightPrimary, // First texture right eye, second texture is left eye

	[Obsolete]
	None = Monoscopic,

	[Obsolete]
	TwoTextures = MultiviewLeftPrimary
}
|
||||
|
||||
/// <summary>
/// Hints describing how the media's frames should be interpreted
/// (transparency, alpha packing and stereo packing).
/// </summary>
[System.Serializable]
public struct MediaHints
{
	public TransparencyMode transparency;
	public AlphaPacking alphaPacking;
	public StereoPacking stereoPacking;

	// Zero-initialised struct: Opaque / AlphaPacking.None / StereoPacking.Monoscopic
	private static MediaHints defaultHints = new MediaHints();
	public static MediaHints Default { get { return defaultHints; } }
}
|
||||
|
||||
/// <summary>
/// Options controlling how video frames are resolved (colour adjustment,
/// tint, mipmaps and aspect-ratio scaling).
/// </summary>
[System.Serializable]
public struct VideoResolveOptions
{
	public enum AspectRatio
	{
		NoScaling,
		FitVertically,
		FitHorizontally,
		FitInside,
		FitOutside,
		Stretch
	}

	[SerializeField] public bool applyHSBC;
	[SerializeField, Range(0f, 1f)] public float hue;
	[SerializeField, Range(0f, 1f)] public float saturation;
	[SerializeField, Range(0f, 1f)] public float brightness;
	[SerializeField, Range(0f, 1f)] public float contrast;
	[SerializeField, Range(0.0001f, 10f)] public float gamma;
	[SerializeField] public Color tint;
	[SerializeField] public bool generateMipmaps;
	[SerializeField] public AspectRatio aspectRatio;

	/// <summary>
	/// True when HSBC adjustment is enabled AND at least one value differs
	/// from its neutral default (see ResetColourAdjust).
	/// </summary>
	public bool IsColourAdjust()
	{
		if (!applyHSBC)
		{
			return false;
		}

		bool allNeutral = (hue == 0.0f)
						&& (saturation == 0.5f)
						&& (brightness == 0.5f)
						&& (contrast == 0.5f)
						&& (gamma == 1.0f);
		return !allNeutral;
	}

	/// <summary>Restores the neutral (no-op) colour adjustment values.</summary>
	internal void ResetColourAdjust()
	{
		gamma = 1.0f;
		contrast = 0.5f;
		brightness = 0.5f;
		saturation = 0.5f;
		hue = 0.0f;
	}

	/// <summary>Creates an instance with sensible defaults (white tint, stretch, neutral colour).</summary>
	public static VideoResolveOptions Create()
	{
		VideoResolveOptions options = new VideoResolveOptions();
		options.tint = Color.white;
		options.aspectRatio = AspectRatio.Stretch;
		options.ResetColourAdjust();
		return options;
	}
}
|
||||
|
||||
/// <summary>Whether the media should be treated as opaque or containing transparency.</summary>
public enum TransparencyMode
{
	Opaque,
	Transparent,
}

/// <summary>Which eye(s) a display surface targets for stereo content.</summary>
public enum StereoEye
{
	Both,
	Left,
	Right,
}

/// <summary>How an alpha channel is packed into the video frame, if at all.</summary>
public enum AlphaPacking
{
	None,
	TopBottom,
	LeftRight,
}

/// <summary>Coarse error categories reported by the player.</summary>
public enum ErrorCode
{
	None = 0,
	LoadFailed = 100,
	DecodeFailed = 200,
}
|
||||
|
||||
/// <summary>Rotation/mirroring of the video frame relative to its natural orientation.</summary>
public enum Orientation
{
	Landscape, // Landscape Right (0 degrees)
	LandscapeFlipped, // Landscape Left (180 degrees)
	Portrait, // Portrait Up (90 degrees)
	PortraitFlipped, // Portrait Down (-90 degrees)
	PortraitHorizontalMirror, // Portrait that is mirrored horizontally
}

/// <summary>Geometric mapping of the video content (flat, 360, 180, cubemap).</summary>
public enum VideoMapping
{
	Unknown,
	Normal,
	EquiRectangular360,
	EquiRectangular180,
	CubeMap3x2,
}

/// <summary>Streaming container/manifest format, used to force a specific parser.</summary>
public enum FileFormat
{
	Unknown,
	HLS,
	DASH,
	SmoothStreaming,
}
|
||||
|
||||
/// <summary>Windows desktop platform-specific options.</summary>
public static class Windows
{
	/// <summary>Which native video API the Windows player uses.</summary>
	public enum VideoApi
	{
		MediaFoundation, // Windows 8.1 and above
		DirectShow, // Legacy API
		WinRT, // Windows 10 and above
	};

	/// <summary>Where decoded audio is routed on Windows.</summary>
	public enum AudioOutput
	{
		System, // Default
		Unity, // Media Foundation API only
		FacebookAudio360, // Media Foundation API only
		None, // Media Foundation API only
	}

	// WIP: Experimental feature to allow overriding audio device for VR headsets
	public const string AudioDeviceOutputName_Vive = "HTC VIVE USB Audio";
	public const string AudioDeviceOutputName_Rift = "Headphones (Rift Audio)";
}
|
||||
|
||||
/// <summary>Windows UWP platform-specific options.</summary>
public static class WindowsUWP
{
	/// <summary>Which native video API the UWP player uses.</summary>
	public enum VideoApi
	{
		MediaFoundation, // UWP 8.1 and above
		WinRT, // UWP 10 and above
	};

	/// <summary>Where decoded audio is routed on UWP.</summary>
	public enum AudioOutput
	{
		System, // Default
		Unity, // Media Foundation API only
		FacebookAudio360, // Media Foundation API only
		None, // Media Foundation API only
	}
}
|
||||
|
||||
/// <summary>Android platform-specific options.</summary>
public static class Android
{
	/// <summary>Which native video API the Android player uses.</summary>
	public enum VideoApi
	{
		MediaPlayer = 1,
		ExoPlayer,
	}

	/// <summary>How decoded video frames are presented.</summary>
	public enum VideoOutputMode
	{
		Texture,
#if AVPRO_VIDEO_XR_COMPOSITION_LAYERS
		XRCompositionLayer
#endif
	}

	/// <summary>Where decoded audio is routed on Android.</summary>
	public enum AudioOutput
	{
		System, // Default
		Unity, // ExoPlayer API only
		FacebookAudio360, // ExoPlayer API only
	}

	/// <summary>Texture sampling filter applied to the video texture.</summary>
	public enum TextureFiltering
	{
		Point,
		Bilinear,
		Trilinear,
	}

	// Default ExoPlayer buffering parameters (milliseconds)
	public const int Default_MinBufferTimeMs = 10000; // [MOZ] lowered as seeing OOM issues on 4k videos
	public const int Default_MaxBufferTimeMs = 50000; // [MOZ] taken from DefaultLoadControl.DEFAULT_MAX_BUFFER_MS
	public const int Default_BufferForPlaybackMs = 1000; // [MOZ] taken from DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_MS
	public const int Default_BufferForPlaybackAfterRebufferMs = 2000; // [MOZ] taken from DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS
}
|
||||
|
||||
/// <summary>WebGL platform-specific options.</summary>
public static class WebGL
{
	/// <summary>Which external JavaScript playback library to use, if any.</summary>
	public enum ExternalLibrary
	{
		None,
		DashJs,
		HlsJs,
		Custom,
	}
}
|
||||
|
||||
// Facebook Audio 360 channel mapping
public enum Audio360ChannelMode
{
	TBE_8_2 = 0, /// 8 channels of hybrid TBE ambisonics and 2 channels of head-locked stereo audio
	TBE_8, /// 8 channels of hybrid TBE ambisonics. NO head-locked stereo audio
	TBE_6_2, /// 6 channels of hybrid TBE ambisonics and 2 channels of head-locked stereo audio
	TBE_6, /// 6 channels of hybrid TBE ambisonics. NO head-locked stereo audio
	TBE_4_2, /// 4 channels of hybrid TBE ambisonics and 2 channels of head-locked stereo audio
	TBE_4, /// 4 channels of hybrid TBE ambisonics. NO head-locked stereo audio
	TBE_8_PAIR0, /// Channels 1 and 2 of TBE hybrid ambisonics
	TBE_8_PAIR1, /// Channels 3 and 4 of TBE hybrid ambisonics
	TBE_8_PAIR2, /// Channels 5 and 6 of TBE hybrid ambisonics
	TBE_8_PAIR3, /// Channels 7 and 8 of TBE hybrid ambisonics
	TBE_CHANNEL0, /// Channel 1 of TBE hybrid ambisonics
	TBE_CHANNEL1, /// Channel 2 of TBE hybrid ambisonics
	TBE_CHANNEL2, /// Channel 3 of TBE hybrid ambisonics
	TBE_CHANNEL3, /// Channel 4 of TBE hybrid ambisonics
	TBE_CHANNEL4, /// Channel 5 of TBE hybrid ambisonics
	TBE_CHANNEL5, /// Channel 6 of TBE hybrid ambisonics
	TBE_CHANNEL6, /// Channel 7 of TBE hybrid ambisonics
	TBE_CHANNEL7, /// Channel 8 of TBE hybrid ambisonics
	HEADLOCKED_STEREO, /// Head-locked stereo audio
	HEADLOCKED_CHANNEL0, /// Channel 1 or left of head-locked stereo audio
	HEADLOCKED_CHANNEL1, /// Channel 2 or right of head-locked stereo audio
	AMBIX_4, /// 4 channels of first order ambiX
	AMBIX_4_2, /// 4 channels of first order ambiX with 2 channels of head-locked audio
	AMBIX_9, /// 9 channels of second order ambiX
	AMBIX_9_2, /// 9 channels of second order ambiX with 2 channels of head-locked audio
	AMBIX_16, /// 16 channels of third order ambiX
	AMBIX_16_2, /// 16 channels of third order ambiX with 2 channels of head-locked audio
	MONO, /// Mono audio
	STEREO, /// Stereo audio
	UNKNOWN, /// Unknown channel map
	INVALID, /// Invalid/unknown map. This must always be last.
}
|
||||
|
||||
/// <summary>
/// Flags identifying individual speaker/channel positions in the audio stream.
/// NOTE(review): the bit values appear to mirror the common WAVE channel-mask
/// layout — confirm against the native plugin before relying on this.
/// </summary>
[System.Flags]
public enum AudioChannelMaskFlags : int
{
	Unspecified = 0x0,
	FrontLeft = 0x1,
	FrontRight = 0x2,
	FrontCenter = 0x4,
	LowFrequency = 0x8,
	BackLeft = 0x10,
	BackRight = 0x20,
	FrontLeftOfCenter = 0x40,
	FrontRightOfCenter = 0x80,
	BackCenter = 0x100,
	SideLeft = 0x200,
	SideRight = 0x400,
	TopCenter = 0x800,
	TopFrontLeft = 0x1000,
	TopFrontCenter = 0x2000,
	TopFrontRight = 0x4000,
	TopBackLeft = 0x8000,
	TopBackCenter = 0x10000,
	TopBackRight = 0x20000,
}
|
||||
|
||||
/// <summary>Per-frame texture attribute flags.</summary>
public enum TextureFlags : int
{
	Unknown = 0,
	TopDown = 1 << 0, // First row of the image is the top of the frame
	SamplingIsLinear = 1 << 1, // Texture sampling returns linear (not sRGB) values
}
|
||||
|
||||
/// <summary>
/// A single video frame's texture data as exchanged with native code.
/// Sequential layout with Pack=1 — presumably must match the native plugin's
/// struct layout exactly; do not reorder or resize fields (TODO confirm).
/// </summary>
[System.Runtime.InteropServices.StructLayout(System.Runtime.InteropServices.LayoutKind.Sequential, Pack = 1)]
public struct TextureFrame
{
	internal System.IntPtr texturePointer; // Native texture handle
	internal System.IntPtr auxTexturePointer; // Secondary plane/texture handle, if any
	internal System.Int64 timeStamp; // Presentation time stamp of the frame
	internal System.UInt32 frameCounter;
	internal System.UInt32 writtenFrameCount;
	internal TextureFlags flags;
	internal System.IntPtr internalNativePointer; // Opaque pointer owned by the native side
}
|
||||
|
||||
/// <summary>
/// A span of media time expressed as a start time and duration, in seconds.
/// Sequential layout with Pack=1 — keep the public field order unchanged as
/// it is presumably marshalled to/from native code (TODO confirm).
/// </summary>
[System.Runtime.InteropServices.StructLayout(System.Runtime.InteropServices.LayoutKind.Sequential, Pack = 1)]
public struct TimeRange
{
	public TimeRange(double startTime, double duration)
	{
		this.startTime = startTime;
		this.duration = duration;
	}

	public double startTime, duration;

	/// <summary>Start of the range in seconds.</summary>
	public double StartTime { get { return startTime; } }

	/// <summary>End of the range in seconds (start + duration).</summary>
	public double EndTime { get { return duration + startTime; } }

	/// <summary>Length of the range in seconds.</summary>
	public double Duration { get { return duration; } }
}
|
||||
|
||||
/// <summary>
/// A collection of TimeRange values (e.g. seekable or buffered spans)
/// together with the min/max bounds across all ranges.
/// </summary>
public class TimeRanges : IEnumerable
{
	internal TimeRanges() {}

	internal TimeRanges(TimeRange[] ranges)
	{
		// FIX: guard against a null array so that Count, the indexer and
		// enumeration never throw NullReferenceException; an empty set of
		// ranges behaves the same as the default constructor.
		_ranges = (ranges != null) ? ranges : new TimeRange[0];
		CalculateRange();
	}

	public IEnumerator GetEnumerator()
	{
		return _ranges.GetEnumerator();
	}

	/// <summary>Returns the range at the given index, [0, Count).</summary>
	public TimeRange this[int index]
	{
		get
		{
			return _ranges[index];
		}
	}

	public override string ToString()
	{
		return $"TimeRanges: {{ MinTime: {MinTime}, MaxTime: {MaxTime}, Duration: {Duration}, Count: {Count} }}";
	}

	/// <summary>
	/// Recomputes the cached min/max bounds across all ranges.
	/// With no ranges both bounds are 0.
	/// </summary>
	internal void CalculateRange()
	{
		_minTime = _maxTime = 0.0;
		if (_ranges != null && _ranges.Length > 0)
		{
			double maxTime = 0.0;
			double minTime = double.MaxValue;
			for (int i = 0; i < _ranges.Length; i++)
			{
				minTime = System.Math.Min(minTime, _ranges[i].startTime);
				maxTime = System.Math.Max(maxTime, _ranges[i].startTime + _ranges[i].duration);
			}
			_minTime = minTime;
			_maxTime = maxTime;
		}
	}

	/// <summary>Number of ranges in the collection.</summary>
	public int Count { get { return _ranges.Length; } }
	/// <summary>Earliest start time across all ranges (0 if empty).</summary>
	public double MinTime { get { return _minTime; } }
	/// <summary>Latest end time across all ranges (0 if empty).</summary>
	public double MaxTime { get { return _maxTime; } }
	/// <summary>Total span from MinTime to MaxTime.</summary>
	public double Duration { get { return (_maxTime - _minTime); } }

	internal TimeRange[] _ranges = new TimeRange[0];
	internal double _minTime = 0.0;
	internal double _maxTime = 0.0;
}
|
||||
|
||||
/// <summary>
/// Video gravity to use with external playback.
/// </summary>
public enum ExternalPlaybackVideoGravity
{
	/// <summary>Resizes the video to fit the display, may cause stretching.</summary>
	Resize,
	/// <summary>Resizes the video whilst preserving the video's aspect ratio to fit the display bounds.</summary>
	ResizeAspect,
	/// <summary>Resizes the video whilst preserving aspect to fill the display bounds.</summary>
	ResizeAspectFill,
};
|
||||
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 00407cbf3ca503142903894431082ac6
|
||||
timeCreated: 1438695622
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,227 @@
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Attempts to give insight into video playback presentation smoothness quality
|
||||
/// Keeps track of skipped and duplicated frames and warns about suboptimal setup
|
||||
/// such as no vsync enabled or video frame rate not being a multiple of the display frame rate
|
||||
/// </summary>
|
||||
public class PlaybackQualityStats
|
||||
{
|
||||
public int SkippedFrames { get; private set; }
|
||||
public int DuplicateFrames { get; private set; }
|
||||
public int UnityDroppedFrames { get; private set; }
|
||||
public float PerfectFramesT { get; private set; }
|
||||
public string VSyncStatus { get; private set; }
|
||||
private int PerfectFrames { get; set; }
|
||||
private int TotalFrames { get; set; }
|
||||
|
||||
public bool LogIssues { get; set; }
|
||||
|
||||
private int _sameFrameCount;
|
||||
private long _lastTimeStamp;
|
||||
private BaseMediaPlayer _player;
|
||||
|
||||
public void Reset()
|
||||
{
|
||||
_sameFrameCount = 0;
|
||||
if (_player != null)
|
||||
{
|
||||
_lastTimeStamp = _player.GetTextureTimeStamp();
|
||||
}
|
||||
|
||||
SkippedFrames = 0;
|
||||
DuplicateFrames = 0;
|
||||
UnityDroppedFrames = 0;
|
||||
TotalFrames = 0;
|
||||
PerfectFrames = 0;
|
||||
PerfectFramesT = 0f;
|
||||
}
|
||||
|
||||
internal void Start(BaseMediaPlayer player)
|
||||
{
|
||||
_player = player;
|
||||
Reset();
|
||||
|
||||
bool vsyncEnabled = true;
|
||||
if (QualitySettings.vSyncCount == 0)
|
||||
{
|
||||
vsyncEnabled = false;
|
||||
if (LogIssues)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo][Quality] VSync is currently disabled in Quality Settings");
|
||||
}
|
||||
}
|
||||
if (!IsGameViewVSyncEnabled())
|
||||
{
|
||||
vsyncEnabled = false;
|
||||
if (LogIssues)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo][Quality] VSync is currently disabled in the Game View");
|
||||
}
|
||||
}
|
||||
|
||||
float frameRate = _player.GetVideoFrameRate();
|
||||
float frameMs = (1000f / frameRate);
|
||||
if (LogIssues)
|
||||
{
|
||||
Debug.Log(string.Format("[AVProVideo][Quality] Video: {0}fps {1}ms", frameRate, frameMs));
|
||||
}
|
||||
|
||||
if (vsyncEnabled)
|
||||
{
|
||||
#if UNITY_2022_2_OR_NEWER
|
||||
float refreshRate = (float)( Screen.currentResolution.refreshRateRatio.value );
|
||||
#else
|
||||
float refreshRate = (float)( Screen.currentResolution.refreshRate );
|
||||
#endif
|
||||
|
||||
float vsyncRate = refreshRate / QualitySettings.vSyncCount;
|
||||
float vsyncMs = (1000f / vsyncRate);
|
||||
|
||||
if (LogIssues)
|
||||
{
|
||||
Debug.Log(string.Format("[AVProVideo][Quality] VSync: {0}fps {1}ms", vsyncRate, vsyncMs));
|
||||
}
|
||||
|
||||
float framesPerVSync = frameMs / vsyncMs;
|
||||
float fractionalframesPerVsync = framesPerVSync - Mathf.FloorToInt(framesPerVSync);
|
||||
if (fractionalframesPerVsync > 0.0001f && LogIssues)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo][Quality] Video is not a multiple of VSync so playback cannot be perfect");
|
||||
}
|
||||
VSyncStatus = "VSync " + framesPerVSync;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (LogIssues)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo][Quality] Running without VSync enabled");
|
||||
}
|
||||
VSyncStatus = "No VSync";
|
||||
}
|
||||
}
|
||||
|
||||
internal void Update()
|
||||
{
|
||||
if (_player == null) return;
|
||||
|
||||
// Don't analyse stats unless real playback is happening
|
||||
if (_player.IsPaused() || _player.IsSeeking() || _player.IsFinished()) return;
|
||||
|
||||
long timeStamp = _player.GetTextureTimeStamp();
|
||||
long frameDuration = (long)(Helper.SecondsToHNS / _player.GetVideoFrameRate());
|
||||
|
||||
bool isPerfectFrame = true;
|
||||
|
||||
// Check for skipped frames
|
||||
long d = (timeStamp - _lastTimeStamp);
|
||||
if (d > 0)
|
||||
{
|
||||
const long threshold = 10000;
|
||||
d -= frameDuration;
|
||||
if (d > threshold)
|
||||
{
|
||||
int skippedFrames = Mathf.FloorToInt((float)d / (float)frameDuration);
|
||||
if (LogIssues)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo][Quality] Possible frame skip, at " + timeStamp + " delta " + d + " = " + skippedFrames + " frames");
|
||||
}
|
||||
SkippedFrames += skippedFrames;
|
||||
isPerfectFrame = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (QualitySettings.vSyncCount != 0)
|
||||
{
|
||||
#if UNITY_2022_2_OR_NEWER
|
||||
float refreshRate = (float)( Screen.currentResolution.refreshRateRatio.value );
|
||||
#else
|
||||
float refreshRate = (float)( Screen.currentResolution.refreshRate );
|
||||
#endif
|
||||
|
||||
long vsyncDuration = (long)((QualitySettings.vSyncCount * Helper.SecondsToHNS) / refreshRate);
|
||||
if (timeStamp != _lastTimeStamp)
|
||||
{
|
||||
float framesPerVSync = (float)frameDuration / (float)vsyncDuration;
|
||||
//Debug.Log((float)frameDuration + " " + (float)vsyncDuration);
|
||||
float fractionalFramesPerVSync = framesPerVSync - Mathf.FloorToInt(framesPerVSync);
|
||||
|
||||
//Debug.Log(framesPerVSync + " " + fractionalFramesPerVSync);
|
||||
// VSync rate is a multiple of the video rate so we should be able to get perfectly smooth playback
|
||||
if (fractionalFramesPerVSync <= 0.0001f)
|
||||
{
|
||||
// Check for duplicate frames
|
||||
if (!Mathf.Approximately(_sameFrameCount, (int)framesPerVSync))
|
||||
{
|
||||
if (LogIssues)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo][Quality] Frame " + timeStamp + " was shown for " + _sameFrameCount + " frames instead of expected " + framesPerVSync);
|
||||
}
|
||||
DuplicateFrames++;
|
||||
isPerfectFrame = false;
|
||||
}
|
||||
}
|
||||
|
||||
_sameFrameCount = 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Count the number of Unity-frames the video-frame is displayed for
|
||||
_sameFrameCount++;
|
||||
}
|
||||
|
||||
// Check for Unity dropping frames
|
||||
{
|
||||
long frameTime = (long)(Time.deltaTime * Helper.SecondsToHNS);
|
||||
if (frameTime > (vsyncDuration + (vsyncDuration / 3)))
|
||||
{
|
||||
if (LogIssues)
|
||||
{
|
||||
Debug.LogWarning("[AVProVideo][Quality] Possible Unity dropped frame, delta time: " + (Time.deltaTime * 1000f) + "ms");
|
||||
}
|
||||
UnityDroppedFrames++;
|
||||
isPerfectFrame = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_lastTimeStamp != timeStamp)
|
||||
{
|
||||
if (isPerfectFrame)
|
||||
{
|
||||
PerfectFrames++;
|
||||
}
|
||||
TotalFrames++;
|
||||
PerfectFramesT = (float)PerfectFrames / (float)TotalFrames;
|
||||
}
|
||||
|
||||
_lastTimeStamp = timeStamp;
|
||||
}
|
||||
|
||||
private static bool IsGameViewVSyncEnabled()
|
||||
{
|
||||
bool result = true;
|
||||
#if UNITY_EDITOR && UNITY_2019_1_OR_NEWER
|
||||
if (Application.isBatchMode)
|
||||
return false;
|
||||
System.Reflection.Assembly assembly = typeof(UnityEditor.EditorWindow).Assembly;
|
||||
System.Type type = assembly.GetType("UnityEditor.GameView");
|
||||
UnityEditor.EditorWindow window = UnityEditor.EditorWindow.GetWindow(type);
|
||||
System.Reflection.PropertyInfo prop = type.GetProperty("vSyncEnabled");
|
||||
if (prop != null)
|
||||
{
|
||||
result = (bool)prop.GetValue(window);
|
||||
}
|
||||
#endif
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 4f344d823db9c4148af4dec2235f690d
|
||||
timeCreated: 1634119397
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
7
Assets/AVProVideo/Runtime/Scripts/Internal/Players.meta
Normal file
7
Assets/AVProVideo/Runtime/Scripts/Internal/Players.meta
Normal file
@@ -0,0 +1,7 @@
|
||||
fileFormatVersion: 2
|
||||
guid: e2f8dd08c4c77654282b755fd4a069c1
|
||||
folderAsset: yes
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,347 @@
|
||||
using System;
|
||||
using System.Text;
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// This media player fakes video playback for platforms that aren't supported
|
||||
/// </summary>
|
||||
public sealed partial class NullMediaPlayer : BaseMediaPlayer
|
||||
{
|
||||
private bool _isPlaying = false;
|
||||
private bool _isPaused = false;
|
||||
private double _currentTime = 0.0;
|
||||
// private bool _audioMuted = false;
|
||||
private float _volume = 0.0f;
|
||||
private float _playbackRate = 1.0f;
|
||||
private bool _bLoop;
|
||||
|
||||
private int _Width = 256;
|
||||
private int _height = 256;
|
||||
private Texture2D _texture;
|
||||
private Texture2D _texture_AVPro;
|
||||
private Texture2D _texture_AVPro1;
|
||||
private float _fakeFlipTime;
|
||||
private int _frameCount;
|
||||
|
||||
private const float FrameRate = 10f;
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override string GetVersion()
|
||||
{
|
||||
return "0.0.0";
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override string GetExpectedVersion()
|
||||
{
|
||||
return GetVersion();
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool OpenMedia(string path, long offset, string httpHeader, MediaHints mediaHints, int forceFileFormat = 0, bool startWithHighestBitrate = false)
|
||||
{
|
||||
_texture_AVPro = (Texture2D)Resources.Load("Textures/AVProVideo-NullPlayer-Frame0");
|
||||
_texture_AVPro1 = (Texture2D)Resources.Load("Textures/AVProVideo-NullPlayer-Frame1");
|
||||
|
||||
if( _texture_AVPro )
|
||||
{
|
||||
_Width = _texture_AVPro.width;
|
||||
_height = _texture_AVPro.height;
|
||||
}
|
||||
|
||||
_texture = _texture_AVPro;
|
||||
|
||||
_fakeFlipTime = 0.0f;
|
||||
_frameCount = 0;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void CloseMedia()
|
||||
{
|
||||
_frameCount = 0;
|
||||
Resources.UnloadAsset(_texture_AVPro);
|
||||
Resources.UnloadAsset(_texture_AVPro1);
|
||||
|
||||
base.CloseMedia();
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void SetLooping( bool bLooping )
|
||||
{
|
||||
_bLoop = bLooping;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool IsLooping()
|
||||
{
|
||||
return _bLoop;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool HasMetaData()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool CanPlay()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool HasAudio()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool HasVideo()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void Play()
|
||||
{
|
||||
_isPlaying = true;
|
||||
_isPaused = false;
|
||||
_fakeFlipTime = 0.0f;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void Pause()
|
||||
{
|
||||
_isPlaying = false;
|
||||
_isPaused = true;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void Stop()
|
||||
{
|
||||
_isPlaying = false;
|
||||
_isPaused = false;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool IsSeeking()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
/// <inheritdoc/>
|
||||
public override bool IsPlaying()
|
||||
{
|
||||
return _isPlaying;
|
||||
}
|
||||
/// <inheritdoc/>
|
||||
public override bool IsPaused()
|
||||
{
|
||||
return _isPaused;
|
||||
}
|
||||
/// <inheritdoc/>
|
||||
public override bool IsFinished()
|
||||
{
|
||||
return _isPlaying && (_currentTime >= GetDuration());
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool IsBuffering()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override double GetDuration()
|
||||
{
|
||||
return 10.0;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override int GetVideoWidth()
|
||||
{
|
||||
return _Width;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override int GetVideoHeight()
|
||||
{
|
||||
return _height;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override float GetVideoDisplayRate()
|
||||
{
|
||||
return FrameRate;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override Texture GetTexture( int index )
|
||||
{
|
||||
// return _texture ? _texture : Texture2D.whiteTexture;
|
||||
return _texture;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override int GetTextureFrameCount()
|
||||
{
|
||||
return _frameCount;
|
||||
}
|
||||
|
||||
internal override StereoPacking InternalGetTextureStereoPacking()
|
||||
{
|
||||
return StereoPacking.Unknown;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool RequiresVerticalFlip()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void Seek(double time)
|
||||
{
|
||||
_currentTime = time;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void SeekFast(double time)
|
||||
{
|
||||
_currentTime = time;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override double GetCurrentTime()
|
||||
{
|
||||
return _currentTime;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void SetPlaybackRate(float rate)
|
||||
{
|
||||
_playbackRate = rate;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override float GetPlaybackRate()
|
||||
{
|
||||
return _playbackRate;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void MuteAudio(bool bMuted)
|
||||
{
|
||||
// _audioMuted = bMuted;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override bool IsMuted()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void SetVolume(float volume)
|
||||
{
|
||||
_volume = volume;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override float GetVolume()
|
||||
{
|
||||
return _volume;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override float GetVideoFrameRate()
|
||||
{
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void Update()
|
||||
{
|
||||
UpdateSubtitles();
|
||||
|
||||
if (_isPlaying)
|
||||
{
|
||||
_currentTime += Time.deltaTime;
|
||||
if (_currentTime >= GetDuration())
|
||||
{
|
||||
_currentTime = GetDuration();
|
||||
if( _bLoop )
|
||||
{
|
||||
Rewind();
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
|
||||
_fakeFlipTime += Time.deltaTime;
|
||||
if( _fakeFlipTime >= (1.0 / FrameRate))
|
||||
{
|
||||
_fakeFlipTime = 0.0f;
|
||||
_texture = ( _texture == _texture_AVPro ) ? _texture_AVPro1 : _texture_AVPro;
|
||||
_frameCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void Render()
|
||||
{
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override void Dispose()
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
public sealed partial class NullMediaPlayer : BaseMediaPlayer
|
||||
{
|
||||
internal override bool InternalSetActiveTrack(TrackType trackType, int trackUid)
|
||||
{
|
||||
// Set the active text track using the unique identifier
|
||||
// Or disable all text tracks if < 0
|
||||
return false;
|
||||
}
|
||||
|
||||
internal override bool InternalIsChangedTracks(TrackType trackType)
|
||||
{
|
||||
// Has the tracks changed since the last frame 'tick'
|
||||
return false;
|
||||
}
|
||||
|
||||
internal override int InternalGetTrackCount(TrackType trackType)
|
||||
{
|
||||
// Return number of text tracks
|
||||
return 0;
|
||||
}
|
||||
|
||||
internal override TrackBase InternalGetTrackInfo(TrackType trackType, int index, ref bool isActiveTrack)
|
||||
{
|
||||
// Get information about the specific track at index, range is [0...InternalGetTextTrackCount)
|
||||
return null;
|
||||
}
|
||||
|
||||
internal override bool InternalIsChangedTextCue()
|
||||
{
|
||||
// Has the text cue changed since the last frame 'tick'
|
||||
return false;
|
||||
}
|
||||
|
||||
internal override string InternalGetCurrentTextCue()
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 478671181ab1c9b42be924da77d7fcbe
|
||||
timeCreated: 1438703159
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,775 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2025 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MACOS
|
||||
#endif
|
||||
|
||||
#if !UNITY_EDITOR && (UNITY_IOS || UNITY_TVOS || UNITY_VISIONOS || UNITY_ANDROID || UNITY_OPENHARMONY)
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MOBILE
|
||||
#endif
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MOBILE && UNITY_IOS
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_IOS
|
||||
#endif
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MOBILE && UNITY_TVOS
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_TVOS
|
||||
#endif
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MOBILE && UNITY_VISIONOS
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_VISIONOS
|
||||
#endif
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MOBILE && UNITY_ANDROID
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_ANDROID
|
||||
#endif
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MOBILE && UNITY_OPENHARMONY
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_OPENHARMONY
|
||||
#endif
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_IOS || AVPRO_VIDEO_PLATFORMMEDIAPLAYER_TVOS || AVPRO_VIDEO_PLATFORMMEDIAPLAYER_VISIONOS
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_IPHONE
|
||||
#endif
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MACOS || AVPRO_VIDEO_PLATFORMMEDIAPLAYER_IPHONE
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_APPLE
|
||||
#endif
|
||||
|
||||
#if UNITY_2017_2_OR_NEWER && (AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MACOS || AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MOBILE)
|
||||
#define AVPRO_VIDEO_PLATFORMMEDIAPLAYER_SUPPORTED
|
||||
#endif
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_SUPPORTED
|
||||
|
||||
using System;
|
||||
using System.Runtime.InteropServices;
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
public sealed partial class PlatformMediaPlayer
|
||||
{
|
||||
internal partial struct Native
|
||||
{
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_MACOS
|
||||
private const string PluginName = "AVProVideo";
|
||||
#elif AVPRO_VIDEO_PLATFORMMEDIAPLAYER_IPHONE
|
||||
private const string PluginName = "__Internal";
|
||||
#elif AVPRO_VIDEO_PLATFORMMEDIAPLAYER_ANDROID
|
||||
private const string PluginName = "AVProVideo2Native";
|
||||
#elif AVPRO_VIDEO_PLATFORMMEDIAPLAYER_OPENHARMONY
|
||||
private const string PluginName = "avprovideolib";
|
||||
#endif
|
||||
internal const int kAVPPlayerRenderEventId = 0x5d5ac000;
|
||||
internal const int kAVPPlayerRenderEventMask = 0x7ffff000;
|
||||
internal const int kAVPPlayerRenderEventTypeMask = 0x00000f00;
|
||||
internal const int kAVPPlayerRenderEventTypeShift = 8;
|
||||
internal const int kAVPPlayerRenderEventDataPlayerIDMask = 0xffff;
|
||||
internal const int kAVPPlayerRenderEventDataPlayerIDShift = 0;
|
||||
internal const int kAVPPlayerRenderEventDataOptionsMask = 0xff;
|
||||
internal const int kAVPPlayerRenderEventDataOptionsShift = 16;
|
||||
|
||||
internal enum AVPPluginRenderEvent: int
|
||||
{
|
||||
None,
|
||||
PlayerSetup,
|
||||
PlayerRender,
|
||||
PlayerFreeResources,
|
||||
}
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerRenderEventPlayerSetupFlags: int
|
||||
{
|
||||
AndroidUseOESFastPath = 1 << 0,
|
||||
LinearColourSpace = 1 << 1,
|
||||
GenerateMipmaps = 1 << 2,
|
||||
#if AVPRO_VIDEO_XR_COMPOSITION_LAYERS
|
||||
XRCompositionLayer = 1 << 3,
|
||||
#endif
|
||||
}
|
||||
|
||||
// Video settings
|
||||
|
||||
internal enum AVPPlayerVideoAPI: int
|
||||
{
|
||||
// Apple - just included for completeness
|
||||
AVFoundation,
|
||||
|
||||
// Android - Matches Android.VideoApi
|
||||
MediaPlayer = Android.VideoApi.MediaPlayer,
|
||||
ExoPlayer = Android.VideoApi.ExoPlayer,
|
||||
}
|
||||
|
||||
internal enum AVPPlayerVideoOutputMode: int
|
||||
{
|
||||
// Default output mode, to a texture
|
||||
Texture,
|
||||
|
||||
// Android - XR composition layer
|
||||
#if AVPRO_VIDEO_XR_COMPOSITION_LAYERS
|
||||
XRCompositionLayer = Android.VideoOutputMode.XRCompositionLayer
|
||||
#endif
|
||||
}
|
||||
|
||||
internal enum AVPPlayerVideoPixelFormat: int
|
||||
{
|
||||
Invalid,
|
||||
Bgra,
|
||||
YCbCr420
|
||||
}
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerFeatureFlags: int
|
||||
{
|
||||
Caching = 1 << 0,
|
||||
}
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerVideoOutputSettingsFlags: int
|
||||
{
|
||||
None = 0,
|
||||
LinearColorSpace = 1 << 0,
|
||||
GenerateMipmaps = 1 << 1,
|
||||
PreferSoftwareDecoder = 1 << 2,
|
||||
ForceEnableMediaCodecAsynchronousQueueing = 1 << 3,
|
||||
AllowUnsupportedVideoTrackVariants = 1 << 4,
|
||||
}
|
||||
|
||||
// Audio settings
|
||||
|
||||
internal enum AVPPlayerAudioOutputMode : int
|
||||
{
|
||||
SystemDirect,
|
||||
Unity,
|
||||
SystemDirectWithCapture,
|
||||
FacebookAudio360,
|
||||
}
|
||||
|
||||
// Network settings
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerNetworkSettingsFlags : int
|
||||
{
|
||||
None = 0,
|
||||
PlayWithoutBuffering = 1 << 0,
|
||||
UseSinglePlayerItem = 1 << 1,
|
||||
ForceStartHighestBitrate = 1 << 2,
|
||||
ForceRtpTCP = 1 << 3,
|
||||
PrioritizeTimeOverSize = 1 << 4,
|
||||
}
|
||||
|
||||
// NOTE: The layout of this structure is important - if adding anything put it at the end, make sure alignment is 4 bytes and DO NOT USE bool
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
internal struct AVPPlayerSettings
|
||||
{
|
||||
// Video
|
||||
internal AVPPlayerVideoAPI videoApi;
|
||||
internal AVPPlayerVideoPixelFormat pixelFormat;
|
||||
internal AVPPlayerVideoOutputSettingsFlags videoFlags;
|
||||
internal float preferredMaximumResolution_width;
|
||||
internal float preferredMaximumResolution_height;
|
||||
internal float maximumPlaybackRate;
|
||||
|
||||
// Audio
|
||||
internal AVPPlayerAudioOutputMode audioOutputMode;
|
||||
internal int sampleRate;
|
||||
internal int bufferLength;
|
||||
internal int audioFlags;
|
||||
internal Audio360ChannelMode audio360Channels;
|
||||
internal int audio360LatencyMS;
|
||||
|
||||
// Network
|
||||
internal double preferredPeakBitRate;
|
||||
internal double preferredForwardBufferDuration;
|
||||
internal AVPPlayerNetworkSettingsFlags networkFlags;
|
||||
internal int minBufferMs;
|
||||
internal int maxBufferMs;
|
||||
internal int bufferForPlaybackMs;
|
||||
internal int bufferForPlaybackAfterRebufferMs;
|
||||
}
|
||||
|
||||
internal enum AVPPlayerOpenOptionsForceFileFormat: int
|
||||
{
|
||||
Unknown,
|
||||
HLS,
|
||||
DASH,
|
||||
SmoothStreaming
|
||||
};
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerOpenOptionsFlags: int
|
||||
{
|
||||
None = 0,
|
||||
};
|
||||
|
||||
// NOTE: The layout of this structure is important - if adding anything put it at the end, make sure alignment is 4 bytes and DO NOT USE bool
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
internal struct AVPPlayerOpenOptions
|
||||
{
|
||||
internal long fileOffset;
|
||||
internal AVPPlayerOpenOptionsForceFileFormat forceFileFormat;
|
||||
internal AVPPlayerOpenOptionsFlags flags;
|
||||
};
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerStatus : int
|
||||
{
|
||||
Unknown = 0,
|
||||
ReadyToPlay = 1 << 0,
|
||||
Playing = 1 << 1,
|
||||
Paused = 1 << 2,
|
||||
Finished = 1 << 3,
|
||||
Seeking = 1 << 4,
|
||||
Buffering = 1 << 5,
|
||||
Stalled = 1 << 6,
|
||||
ExternalPlaybackActive = 1 << 7,
|
||||
Cached = 1 << 8,
|
||||
FinishedSeeking = 1 << 9,
|
||||
|
||||
UpdatedAssetInfo = 1 << 16,
|
||||
UpdatedTexture = 1 << 17,
|
||||
UpdatedBufferedTimeRanges = 1 << 18,
|
||||
UpdatedSeekableTimeRanges = 1 << 19,
|
||||
UpdatedText = 1 << 20,
|
||||
UpdatedTextureTransform = 1 << 21,
|
||||
UpdatedTimedMetadata = 1 << 22,
|
||||
|
||||
HasVideo = 1 << 24,
|
||||
HasAudio = 1 << 25,
|
||||
HasText = 1 << 26,
|
||||
HasMetadata = 1 << 27,
|
||||
HasVariants = 1 << 28,
|
||||
|
||||
Failed = 1 << 31
|
||||
}
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerFlags : int
|
||||
{
|
||||
None = 0,
|
||||
Looping = 1 << 0,
|
||||
Muted = 1 << 1,
|
||||
AllowExternalPlayback = 1 << 2,
|
||||
ResumePlayback = 1 << 16, // iOS only, resumes playback after audio session route change
|
||||
Dirty = 1 << 31
|
||||
}
|
||||
|
||||
internal enum AVPPlayerExternalPlaybackVideoGravity : int
|
||||
{
|
||||
Resize,
|
||||
ResizeAspect,
|
||||
ResizeAspectFill
|
||||
};
|
||||
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
internal struct AVPPlayerSize
|
||||
{
|
||||
internal float width;
|
||||
internal float height;
|
||||
public static readonly AVPPlayerSize Zero = new ()
|
||||
{
|
||||
width = 0.0f,
|
||||
height = 0.0f
|
||||
};
|
||||
}
|
||||
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
internal struct AVPAffineTransform
|
||||
{
|
||||
internal float a;
|
||||
internal float b;
|
||||
internal float c;
|
||||
internal float d;
|
||||
internal float tx;
|
||||
internal float ty;
|
||||
|
||||
public static readonly AVPAffineTransform Identity = new()
|
||||
{
|
||||
a = 1.0f,
|
||||
b = 0.0f,
|
||||
c = 0.0f,
|
||||
d = 1.0f,
|
||||
tx = 0.0f,
|
||||
ty = 0.0f
|
||||
};
|
||||
|
||||
public override string ToString()
|
||||
{
|
||||
return $"{{ {a}, {b}, {c}, {d}, {tx}, {ty} }}";
|
||||
}
|
||||
}
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerAssetFlags : int
|
||||
{
|
||||
None = 0,
|
||||
CompatibleWithAirPlay = 1 << 0,
|
||||
};
|
||||
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
internal struct AVPPlayerAssetInfo
|
||||
{
|
||||
internal double duration;
|
||||
internal AVPPlayerSize dimensions;
|
||||
internal float frameRate;
|
||||
internal int videoTrackCount;
|
||||
internal int audioTrackCount;
|
||||
internal int textTrackCount;
|
||||
internal int variantCount;
|
||||
internal AVPPlayerAssetFlags flags;
|
||||
}
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerTrackFlags: int
|
||||
{
|
||||
Default = 1 << 0,
|
||||
}
|
||||
|
||||
internal enum AVPPlayerVideoTrackStereoMode: int
|
||||
{
|
||||
Unknown = -1,
|
||||
Monoscopic,
|
||||
StereoscopicTopBottom,
|
||||
StereoscopicLeftRight,
|
||||
StereoscopicCustom,
|
||||
StereoscopicRightLeft,
|
||||
StereoscopicMultiviewLeftPrimary,
|
||||
StereoscopicMultiviewRightPrimary,
|
||||
}
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerVideoTrackFlags: int
|
||||
{
|
||||
HasAlpha = 1 << 0,
|
||||
}
|
||||
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
internal struct AVPPlayerVideoTrackInfo
|
||||
{
|
||||
[MarshalAs(UnmanagedType.LPWStr)] internal string name;
|
||||
[MarshalAs(UnmanagedType.LPWStr)] internal string language;
|
||||
internal int trackId;
|
||||
internal float estimatedDataRate;
|
||||
internal uint codecSubtype;
|
||||
internal AVPPlayerTrackFlags flags;
|
||||
|
||||
internal AVPPlayerSize dimensions;
|
||||
internal float frameRate;
|
||||
internal AVPAffineTransform transform;
|
||||
internal AVPPlayerVideoTrackStereoMode stereoMode;
|
||||
internal int bitsPerComponent;
|
||||
internal AVPPlayerVideoTrackFlags videoTrackFlags;
|
||||
|
||||
internal Matrix4x4 yCbCrTransform;
|
||||
|
||||
public static readonly AVPPlayerVideoTrackInfo Default = new()
|
||||
{
|
||||
name = null,
|
||||
language = null,
|
||||
trackId = -1,
|
||||
estimatedDataRate = 0,
|
||||
codecSubtype = 0,
|
||||
flags = 0,
|
||||
dimensions = AVPPlayerSize.Zero,
|
||||
frameRate = 0.0f,
|
||||
transform = AVPAffineTransform.Identity,
|
||||
stereoMode = AVPPlayerVideoTrackStereoMode.Unknown,
|
||||
bitsPerComponent = 0,
|
||||
videoTrackFlags = 0,
|
||||
yCbCrTransform = Matrix4x4.identity
|
||||
};
|
||||
}
|
||||
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
internal struct AVPPlayerAudioTrackInfo
|
||||
{
|
||||
[MarshalAs(UnmanagedType.LPWStr)] internal string name;
|
||||
[MarshalAs(UnmanagedType.LPWStr)] internal string language;
|
||||
internal int trackId;
|
||||
internal float estimatedDataRate;
|
||||
internal uint codecSubtype;
|
||||
internal AVPPlayerTrackFlags flags;
|
||||
|
||||
internal double sampleRate;
|
||||
internal uint channelCount;
|
||||
internal uint channelLayoutTag;
|
||||
internal AudioChannelMaskFlags channelBitmap;
|
||||
|
||||
public static readonly AVPPlayerAudioTrackInfo Default = new()
|
||||
{
|
||||
name = null,
|
||||
language = null,
|
||||
trackId = -1,
|
||||
estimatedDataRate = 0,
|
||||
codecSubtype = 0,
|
||||
flags = 0,
|
||||
sampleRate = 0.0,
|
||||
channelCount = 0,
|
||||
channelLayoutTag = 0,
|
||||
channelBitmap = AudioChannelMaskFlags.Unspecified
|
||||
};
|
||||
}
|
||||
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
internal struct AVPPlayerTextTrackInfo
|
||||
{
|
||||
[MarshalAs(UnmanagedType.LPWStr)] internal string name;
|
||||
[MarshalAs(UnmanagedType.LPWStr)] internal string language;
|
||||
internal int trackId;
|
||||
internal float estimatedDataRate;
|
||||
internal uint codecSubtype;
|
||||
internal AVPPlayerTrackFlags flags;
|
||||
|
||||
public static readonly AVPPlayerTextTrackInfo Default = new()
|
||||
{
|
||||
name = null,
|
||||
language = null,
|
||||
trackId = -1,
|
||||
estimatedDataRate = 0,
|
||||
codecSubtype = 0,
|
||||
flags = 0
|
||||
};
|
||||
}
|
||||
|
||||
internal enum AVPPlayerVideoRange : int
|
||||
{
|
||||
SDR,
|
||||
HLG,
|
||||
PQ
|
||||
}
|
||||
|
||||
[Flags]
|
||||
internal enum AVPPlayerVariantFlags: int
|
||||
{
|
||||
Default = 1 << 0,
|
||||
Unsupported = 1 << 1,
|
||||
}
|
||||
|
||||
// Mirror of the native per-variant (stream quality level) description, filled in by
// AVPPlayerGetVariantInfo. Sequential layout — do not reorder fields.
[StructLayout(LayoutKind.Sequential)]
internal struct AVPPlayerVariantInfo
{
	// Video
	internal int averageDataRate;				// Average bitrate — units assumed bits/sec, TODO confirm
	internal int peakDataRate;
	internal CodecType videoCodecType;
	internal float frameRate;
	internal AVPPlayerSize dimensions;
	internal AVPPlayerVideoRange videoRange;

	// Audio
	internal CodecType audioCodecType;

	// Flags
	internal AVPPlayerVariantFlags flags;
}
|
||||
|
||||
// A contiguous time range (start + duration), used for buffered/seekable ranges.
// Units assumed to be seconds — TODO confirm against the native side.
[StructLayout(LayoutKind.Sequential)]
internal struct AVPPlayerTimeRange
{
	internal double start;
	internal double duration;
};
|
||||
|
||||
// Snapshot of the native player's state, filled in by AVPPlayerGetState.
// Sequential layout — field order must match the native struct.
[StructLayout(LayoutKind.Sequential)]
internal struct AVPPlayerState
{
	internal AVPPlayerStatus status;				// Bit flags describing playback/update state
	internal double currentTime;					// Playback position — seconds assumed, TODO confirm
	internal double currentDate;					// NOTE(review): semantics unclear (live-stream wall clock?) — confirm
	internal int selectedVideoTrack;
	internal int selectedAudioTrack;
	internal int selectedTextTrack;
	internal int bufferedTimeRangesCount;			// Entry count for AVPPlayerGetBufferedTimeRanges
	internal int seekableTimeRangesCount;			// Entry count for AVPPlayerGetSeekableTimeRanges
	internal int audioCaptureBufferedSamplesCount;
	internal int selectedVariant;
}
|
||||
|
||||
// Pixel formats a native texture plane can be delivered in. Values are exchanged
// with the native plugin, so existing entries must keep their order; append only.
internal enum AVPPlayerTextureFormat : int
{
	Unknown,
	BGRA8,
	R8,
	RG8,
	BC1,
	BC3,
	BC4,
	BC5,
	BC7,
	BGR10A2,
	R16,
	RG16,
	BGR10XR,
	RGBA16Float,
	AndroidOES,		// Android GL_OES_EGL_image_external texture — TODO confirm
}
|
||||
|
||||
// One plane of a native video texture (e.g. a luma or chroma plane when YCbCr).
[StructLayout(LayoutKind.Sequential)]
internal struct AVPPlayerTexturePlane
{
	internal IntPtr plane;		// Native texture handle/pointer for this plane
	internal int width;
	internal int height;
	internal AVPPlayerTextureFormat textureFormat;
}
|
||||
|
||||
// Properties of the texture returned by AVPPlayerGetTexture.
[Flags]
internal enum AVPPlayerTextureFlags : int
{
	None      = 0,
	Flipped   = 1 << 0,	// Image is vertically flipped
	Linear    = 1 << 1,	// Linear (non-sRGB) colour data
	Mipmapped = 1 << 2,
	YCbCr     = 1 << 3,	// Planes hold YCbCr data; convert using AVPPlayerTexture.YCbCrMatrix
}
|
||||
|
||||
// Matrix to use when converting YCbCr texture planes to RGB.
internal enum AVPPlayerTextureYCbCrMatrix : int
{
	Identity,
	ITU_R_601,	// ITU-R BT.601
	ITU_R_709,	// ITU-R BT.709
}
|
||||
|
||||
// Mirror of the native texture description, filled in by AVPPlayerGetTexture.
// Sequential layout — do not reorder fields.
[StructLayout(LayoutKind.Sequential)]
internal struct AVPPlayerTexture
{
	// Fixed-size (4) plane array; only the first planeCount entries are valid.
	[MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
	internal AVPPlayerTexturePlane[] planes;
	internal long itemTime;					// Timestamp of this frame — native time units, TODO confirm
	internal int frameCounter;				// Presumably advances per new frame — confirm against native side
	internal int planeCount;
	internal AVPPlayerTextureFlags flags;
	internal AVPPlayerTextureYCbCrMatrix YCbCrMatrix;
}
|
||||
|
||||
// A timed-text payload from the native player, fetched via AVPPlayerGetText.
[StructLayout(LayoutKind.Sequential)]
internal struct AVPPlayerText
{
	internal IntPtr buffer;		// Pointer to length bytes of text data — encoding not shown here, TODO confirm
	internal long itemTime;		// Presentation time — native time units, TODO confirm
	internal int length;
	internal int sequence;		// NOTE(review): presumably changes when the text changes — confirm
}
|
||||
|
||||
// A timed-metadata payload from the native player, fetched via AVPPlayerGetTimedMetadata.
[StructLayout(LayoutKind.Sequential)]
internal struct AVPPlayerTimedMetadata
{
	internal IntPtr buffer;		// Pointer to length bytes of metadata — format not shown here, TODO confirm
	internal long itemTime;		// Presentation time — native time units, TODO confirm
	internal int length;
}
|
||||
|
||||
// Track category used with AVPPlayerSetTrack.
internal enum AVPPlayerTrackType : int
{
	Video,
	Audio,
	Text
}
|
||||
|
||||
// Returns the native plugin's version string, marshalled from the ANSI C string
// whose pointer the plugin exposes.
internal static string GetPluginVersion()
{
	IntPtr pVersion = AVPPluginGetVersionStringPointer();
	return Marshal.PtrToStringAnsi(pVersion);
}
|
||||
|
||||
#if AVPRO_VIDEO_PLATFORMMEDIAPLAYER_IPHONE
	#if UNITY_2022_1_OR_NEWER
	// Unity 2022+: plugin registration is done manually by handing Unity's
	// register-plugin function pointer to the native side.
	[DllImport(PluginName)]
	internal static extern void AVPUnityRegisterPlugin(IntPtr fn);

	delegate void UnityRegisterPluginDelegate(IntPtr loadFn, IntPtr unloadFn);

	// Unity renamed the registration export in Unity 6.
	#if UNITY_6000_0_OR_NEWER
	private const string UnityRegisterPluginEntryPoint = "UnityRegisterPlugin";
	#else
	private const string UnityRegisterPluginEntryPoint = "UnityRegisterRenderingPluginV5";
	#endif

	// NOTE(review): MonoPInvokeCallback is normally applied to the managed callback
	// passed TO native code, not to a DllImport declaration — confirm this is intentional.
	[DllImport(PluginName, EntryPoint = UnityRegisterPluginEntryPoint)]
	[AOT.MonoPInvokeCallback(typeof(UnityRegisterPluginDelegate))]
	internal static extern void UnityRegisterPlugin(IntPtr loadFn, IntPtr unloadFn);

	// Bootstraps the native plugin by passing it Unity's plugin-registration entry point.
	internal static void AVPPluginBootstrap()
	{
		// Keep a delegate so the function pointer remains valid for the call.
		UnityRegisterPluginDelegate unityRegisterPluginDelegate = UnityRegisterPlugin;
		IntPtr pFn = Marshal.GetFunctionPointerForDelegate(unityRegisterPluginDelegate);
		AVPUnityRegisterPlugin(pFn);
	}
	#else
	// Older Unity: the native side bootstraps itself.
	[DllImport(PluginName)]
	internal static extern void AVPPluginBootstrap();
	#endif
#elif AVPRO_VIDEO_PLATFORMMEDIAPLAYER_ANDROID
	// Android: pass the current Unity activity context to the plugin's Java Manager.
	internal static void AVPPluginBootstrap()
	{
		AndroidJavaClass activityClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
		if (activityClass != null)
		{
			AndroidJavaObject activityContext = activityClass.GetStatic<AndroidJavaObject>("currentActivity");
			if (activityContext != null)
			{
				AndroidJavaObject avProVideoManager = new AndroidJavaObject("com.renderheads.AVPro.Video.Manager");
				if (avProVideoManager != null)
				{
					avProVideoManager.CallStatic("SetContext", activityContext);
				}
			}
		}
		// TODO: Handle failure?
	}
#elif AVPRO_VIDEO_PLATFORMMEDIAPLAYER_OPENHARMONY
	// OpenHarmony: bootstrap via the JS-side Manager class.
	internal static void AVPPluginBootstrap()
	{
		Debug.Log("UNITY_OPENHARMONY: Calling Bootstrap");
		OpenHarmonyJSClass openHarmonyJSClass = new OpenHarmonyJSClass("Manager");
		openHarmonyJSClass.CallStatic( "Bootstrap" );
	}
#endif
|
||||
|
||||
// ---- Plugin / player lifetime ----------------------------------------------

// Pointer to the plugin's ANSI version string; use GetPluginVersion() instead of calling directly.
[DllImport(PluginName)]
private static extern IntPtr AVPPluginGetVersionStringPointer();

[DllImport(PluginName)]
internal static extern IntPtr AVPPluginGetRenderEventFunction();

[DllImport(PluginName)]
internal static extern IntPtr AVPPluginMakePlayer(AVPPlayerSettings settings);

[DllImport(PluginName)]
internal static extern void AVPPlayerRelease(IntPtr player);

[DllImport(PluginName)]
internal static extern AVPPlayerFeatureFlags AVPPlayerGetSupportedFeatures(IntPtr player);

[DllImport(PluginName)]
internal static extern void AVPPlayerUpdate(IntPtr _player);

// ---- State / info queries --------------------------------------------------

[DllImport(PluginName)]
internal static extern void AVPPlayerGetState(IntPtr player, ref AVPPlayerState state);

[DllImport(PluginName)]
internal static extern void AVPPlayerSetFlags(IntPtr player, int flags);

[DllImport(PluginName)]
internal static extern void AVPPlayerGetAssetInfo(IntPtr player, ref AVPPlayerAssetInfo info);

[DllImport(PluginName)]
internal static extern void AVPPlayerGetVideoTrackInfo(IntPtr player, int index, ref AVPPlayerVideoTrackInfo info);

[DllImport(PluginName)]
internal static extern void AVPPlayerGetAudioTrackInfo(IntPtr player, int index, ref AVPPlayerAudioTrackInfo info);

[DllImport(PluginName)]
internal static extern void AVPPlayerGetTextTrackInfo(IntPtr player, int index, ref AVPPlayerTextTrackInfo info);

[DllImport(PluginName)]
internal static extern void AVPPlayerGetVariantInfo(IntPtr player, int index, ref AVPPlayerVariantInfo info);

[DllImport(PluginName)]
internal static extern void AVPPlayerSelectVariant(IntPtr player, int index);

// Caller allocates 'ranges' with the count reported in AVPPlayerState.
[DllImport(PluginName)]
internal static extern void AVPPlayerGetBufferedTimeRanges(IntPtr player, AVPPlayerTimeRange[] ranges, int count);

[DllImport(PluginName)]
internal static extern void AVPPlayerGetSeekableTimeRanges(IntPtr player, AVPPlayerTimeRange[] ranges, int count);

// ---- Frame / text / metadata fetch -----------------------------------------

[DllImport(PluginName)]
internal static extern void AVPPlayerGetTexture(IntPtr player, ref AVPPlayerTexture texture);

[DllImport(PluginName)]
internal static extern void AVPPlayerGetText(IntPtr player, ref AVPPlayerText text);

[DllImport(PluginName)]
internal static extern void AVPPlayerGetTimedMetadata(IntPtr player, ref AVPPlayerTimedMetadata timedMetadata);

// ---- Open / close ----------------------------------------------------------

[DllImport(PluginName)]
internal static extern void AVPPlayerSetPlayerSettings(IntPtr player, AVPPlayerSettings settings);

[DllImport(PluginName)]
[return: MarshalAs(UnmanagedType.U1)]
internal static extern bool AVPPlayerOpenURL(IntPtr player, string url, string headers, AVPPlayerOpenOptions options);

[DllImport(PluginName)]
internal static extern void AVPPlayerClose(IntPtr player);

// ---- Audio capture / spatial audio -----------------------------------------

// Returns the number of samples copied into 'buffer'.
[DllImport(PluginName)]
internal static extern int AVPPlayerGetAudio(IntPtr player, float[] buffer, int length);

// NOTE(review): rotation arrays' length/layout (quaternion?) not visible here — confirm.
[DllImport(PluginName)]
internal static extern void AVPPlayerSetAudioHeadRotation(IntPtr _player, float[] rotation);

// NOTE(review): bare 'bool' parameters marshal as 4-byte BOOL by default — confirm the native side agrees.
[DllImport(PluginName)]
internal static extern void AVPPlayerSetPositionTrackingEnabled(IntPtr _player, bool enabled);

[DllImport(PluginName)]
internal static extern void AVPPlayerSetAudioFocusEnabled(IntPtr _player, bool enabled);

[DllImport(PluginName)]
internal static extern void AVPPlayerSetAudioFocusProperties(IntPtr _player, float offFocusLevel, float widthDegrees);

[DllImport(PluginName)]
internal static extern void AVPPlayerSetAudioFocusRotation(IntPtr _player, float[] rotation);

[DllImport(PluginName)]
internal static extern void AVPPlayerResetAudioFocus(IntPtr _player);

// ---- Playback control ------------------------------------------------------

[DllImport(PluginName)]
internal static extern void AVPPlayerSetRate(IntPtr player, float rate);

[DllImport(PluginName)]
internal static extern void AVPPlayerSetVolume(IntPtr player, float volume);

[DllImport(PluginName)]
internal static extern void AVPPlayerSetExternalPlaybackVideoGravity(IntPtr player, AVPPlayerExternalPlaybackVideoGravity gravity);

[DllImport(PluginName)]
internal static extern void AVPPlayerSeek(IntPtr player, double toTime, double toleranceBefore, double toleranceAfter);

// ---- DRM -------------------------------------------------------------------

[DllImport(PluginName)]
internal static extern void AVPPlayerSetKeyServerAuthToken(IntPtr player, string token);

[DllImport(PluginName)]
internal static extern void AVPPlayerSetKeyServerURL(IntPtr player, string url);

[DllImport(PluginName)]
internal static extern void AVPPlayerSetDecryptionKey(IntPtr player, byte[] key, int length);

// ---- Track selection -------------------------------------------------------

// NOTE(review): returns marshalled as I1 here but U1 on AVPPlayerOpenURL — possibly
// intentional (different native bool types), worth confirming.
[DllImport(PluginName)]
[return: MarshalAs(UnmanagedType.I1)]
internal static extern bool AVPPlayerSetTrack(IntPtr player, AVPPlayerTrackType type, int index);
|
||||
|
||||
// Options passed to AVPPlayerCacheMediaForURL when caching media for offline playback.
// Marshalled to native code — do not reorder fields.
public struct MediaCachingOptions
{
	public double minimumRequiredBitRate;			// NOTE(review): presumably 0 = no minimum — confirm
	public float minimumRequiredResolution_width;
	public float minimumRequiredResolution_height;
	public string title;							// Display title for the download
	public IntPtr artwork;							// Pointer to artworkLength bytes of artwork data, or IntPtr.Zero
	public int artworkLength;
}
|
||||
|
||||
// ---- Offline media caching -------------------------------------------------

[DllImport(PluginName)]
public static extern void AVPPlayerCacheMediaForURL(IntPtr player, string url, string headers, MediaCachingOptions options);

[DllImport(PluginName)]
public static extern void AVPPlayerCancelDownloadOfMediaForURL(IntPtr player, string url);

[DllImport(PluginName)]
public static extern void AVPPlayerPauseDownloadOfMediaForURL(IntPtr player, string url);

[DllImport(PluginName)]
public static extern void AVPPlayerResumeDownloadOfMediaForURL(IntPtr player, string url);

[DllImport(PluginName)]
public static extern void AVPPlayerRemoveCachedMediaForURL(IntPtr player, string url);

// Returns the native status code and writes download progress to 'progress' —
// NOTE(review): status/progress value ranges not visible here, confirm at call site.
[DllImport(PluginName)]
public static extern int AVPPlayerGetCachedMediaStatusForURL(IntPtr player, string url, ref float progress);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 0bf374b5848b649e6b3840fe1dc03cd2
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 3f68628a1ef6349648e502d1c66b5114
|
||||
timeCreated: 1547113004
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,241 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#if UNITY_2017_2_OR_NEWER && (UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || (!UNITY_EDITOR && (UNITY_IOS || UNITY_TVOS || UNITY_VISIONOS || UNITY_ANDROID || UNITY_OPENHARMONY)))
|
||||
|
||||
using System;
|
||||
using System.Runtime.InteropServices;
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// Convenience extensions over the bit-flag enums in PlatformMediaPlayer.Native.
/// Each Is*/Has* helper tests a single flag; each Set* helper returns the updated
/// flags value, additionally raising the Dirty bit only when the value actually
/// changed (SetDirty itself manipulates the Dirty bit directly).
/// </summary>
internal static class PlatformMediaPlayerExtensions
{
	// MARK: AVPPlayerStatus

	internal static bool IsReadyToPlay(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.ReadyToPlay) == PlatformMediaPlayer.Native.AVPPlayerStatus.ReadyToPlay;

	internal static bool IsPlaying(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.Playing) == PlatformMediaPlayer.Native.AVPPlayerStatus.Playing;

	internal static bool IsPaused(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.Paused) == PlatformMediaPlayer.Native.AVPPlayerStatus.Paused;

	internal static bool IsFinished(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.Finished) == PlatformMediaPlayer.Native.AVPPlayerStatus.Finished;

	internal static bool IsSeeking(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.Seeking) == PlatformMediaPlayer.Native.AVPPlayerStatus.Seeking;

	internal static bool IsBuffering(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.Buffering) == PlatformMediaPlayer.Native.AVPPlayerStatus.Buffering;

	internal static bool IsStalled(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.Stalled) == PlatformMediaPlayer.Native.AVPPlayerStatus.Stalled;

	internal static bool IsExternalPlaybackActive(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.ExternalPlaybackActive) == PlatformMediaPlayer.Native.AVPPlayerStatus.ExternalPlaybackActive;

	internal static bool IsCached(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.Cached) == PlatformMediaPlayer.Native.AVPPlayerStatus.Cached;

	internal static bool HasFinishedSeeking(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.FinishedSeeking) == PlatformMediaPlayer.Native.AVPPlayerStatus.FinishedSeeking;

	internal static bool HasUpdatedAssetInfo(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedAssetInfo) == PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedAssetInfo;

	internal static bool HasUpdatedTexture(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedTexture) == PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedTexture;

	internal static bool HasUpdatedTextureTransform(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedTextureTransform) == PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedTextureTransform;

	internal static bool HasUpdatedBufferedTimeRanges(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedBufferedTimeRanges) == PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedBufferedTimeRanges;

	internal static bool HasUpdatedSeekableTimeRanges(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedSeekableTimeRanges) == PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedSeekableTimeRanges;

	internal static bool HasUpdatedText(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedText) == PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedText;

	internal static bool HasUpdatedTimedMetaData(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedTimedMetadata) == PlatformMediaPlayer.Native.AVPPlayerStatus.UpdatedTimedMetadata;

	internal static bool HasVideo(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.HasVideo) == PlatformMediaPlayer.Native.AVPPlayerStatus.HasVideo;

	internal static bool HasAudio(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.HasAudio) == PlatformMediaPlayer.Native.AVPPlayerStatus.HasAudio;

	internal static bool HasText(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.HasText) == PlatformMediaPlayer.Native.AVPPlayerStatus.HasText;

	internal static bool HasMetadata(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.HasMetadata) == PlatformMediaPlayer.Native.AVPPlayerStatus.HasMetadata;

	internal static bool HasFailed(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.Failed) == PlatformMediaPlayer.Native.AVPPlayerStatus.Failed;

	internal static bool HasVariants(this PlatformMediaPlayer.Native.AVPPlayerStatus status)
		=> (status & PlatformMediaPlayer.Native.AVPPlayerStatus.HasVariants) == PlatformMediaPlayer.Native.AVPPlayerStatus.HasVariants;

	// MARK: AVPPlayerFlags

	internal static bool IsLooping(this PlatformMediaPlayer.Native.AVPPlayerFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerFlags.Looping) == PlatformMediaPlayer.Native.AVPPlayerFlags.Looping;

	internal static PlatformMediaPlayer.Native.AVPPlayerFlags SetLooping(this PlatformMediaPlayer.Native.AVPPlayerFlags flags, bool b)
	{
		// No change requested — leave the flags (and the Dirty bit) untouched.
		if (flags.IsLooping() == b)
			return flags;
		PlatformMediaPlayer.Native.AVPPlayerFlags updated = b
			? flags | PlatformMediaPlayer.Native.AVPPlayerFlags.Looping
			: flags & ~PlatformMediaPlayer.Native.AVPPlayerFlags.Looping;
		return updated | PlatformMediaPlayer.Native.AVPPlayerFlags.Dirty;
	}

	internal static bool IsMuted(this PlatformMediaPlayer.Native.AVPPlayerFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerFlags.Muted) == PlatformMediaPlayer.Native.AVPPlayerFlags.Muted;

	internal static PlatformMediaPlayer.Native.AVPPlayerFlags SetMuted(this PlatformMediaPlayer.Native.AVPPlayerFlags flags, bool b)
	{
		if (flags.IsMuted() == b)
			return flags;
		PlatformMediaPlayer.Native.AVPPlayerFlags updated = b
			? flags | PlatformMediaPlayer.Native.AVPPlayerFlags.Muted
			: flags & ~PlatformMediaPlayer.Native.AVPPlayerFlags.Muted;
		return updated | PlatformMediaPlayer.Native.AVPPlayerFlags.Dirty;
	}

	internal static bool IsExternalPlaybackAllowed(this PlatformMediaPlayer.Native.AVPPlayerFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerFlags.AllowExternalPlayback) == PlatformMediaPlayer.Native.AVPPlayerFlags.AllowExternalPlayback;

	internal static PlatformMediaPlayer.Native.AVPPlayerFlags SetAllowExternalPlayback(this PlatformMediaPlayer.Native.AVPPlayerFlags flags, bool b)
	{
		if (flags.IsExternalPlaybackAllowed() == b)
			return flags;
		PlatformMediaPlayer.Native.AVPPlayerFlags updated = b
			? flags | PlatformMediaPlayer.Native.AVPPlayerFlags.AllowExternalPlayback
			: flags & ~PlatformMediaPlayer.Native.AVPPlayerFlags.AllowExternalPlayback;
		return updated | PlatformMediaPlayer.Native.AVPPlayerFlags.Dirty;
	}

	internal static bool ResumePlayback(this PlatformMediaPlayer.Native.AVPPlayerFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerFlags.ResumePlayback) == PlatformMediaPlayer.Native.AVPPlayerFlags.ResumePlayback;

	internal static PlatformMediaPlayer.Native.AVPPlayerFlags SetResumePlayback(this PlatformMediaPlayer.Native.AVPPlayerFlags flags, bool b)
	{
		if (flags.ResumePlayback() == b)
			return flags;
		PlatformMediaPlayer.Native.AVPPlayerFlags updated = b
			? flags | PlatformMediaPlayer.Native.AVPPlayerFlags.ResumePlayback
			: flags & ~PlatformMediaPlayer.Native.AVPPlayerFlags.ResumePlayback;
		return updated | PlatformMediaPlayer.Native.AVPPlayerFlags.Dirty;
	}

	internal static bool IsDirty(this PlatformMediaPlayer.Native.AVPPlayerFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerFlags.Dirty) == PlatformMediaPlayer.Native.AVPPlayerFlags.Dirty;

	// Setting/clearing a bit that already has the requested value is a no-op, so
	// the original "only when changed" guard is unnecessary here.
	internal static PlatformMediaPlayer.Native.AVPPlayerFlags SetDirty(this PlatformMediaPlayer.Native.AVPPlayerFlags flags, bool b)
		=> b ? flags | PlatformMediaPlayer.Native.AVPPlayerFlags.Dirty
		     : flags & ~PlatformMediaPlayer.Native.AVPPlayerFlags.Dirty;

	// MARK: AVPPlayerAssetFlags

	internal static bool IsCompatibleWithAirPlay(this PlatformMediaPlayer.Native.AVPPlayerAssetFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerAssetFlags.CompatibleWithAirPlay) == PlatformMediaPlayer.Native.AVPPlayerAssetFlags.CompatibleWithAirPlay;

	// MARK: AVPPlayerTrackFlags

	internal static bool IsDefault(this PlatformMediaPlayer.Native.AVPPlayerTrackFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerTrackFlags.Default) == PlatformMediaPlayer.Native.AVPPlayerTrackFlags.Default;

	// MARK: AVPPlayerTextureFlags

	internal static bool IsFlipped(this PlatformMediaPlayer.Native.AVPPlayerTextureFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerTextureFlags.Flipped) == PlatformMediaPlayer.Native.AVPPlayerTextureFlags.Flipped;

	internal static bool IsLinear(this PlatformMediaPlayer.Native.AVPPlayerTextureFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerTextureFlags.Linear) == PlatformMediaPlayer.Native.AVPPlayerTextureFlags.Linear;

	internal static bool IsMipmapped(this PlatformMediaPlayer.Native.AVPPlayerTextureFlags flags)
		=> (flags & PlatformMediaPlayer.Native.AVPPlayerTextureFlags.Mipmapped) == PlatformMediaPlayer.Native.AVPPlayerTextureFlags.Mipmapped;
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: e27ea5523e11f44c09e8d368eb1f2983
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,930 @@
|
||||
//#define AVPRO_WEBGL_USE_RENDERTEXTURE
|
||||
// NOTE: We only allow this script to compile in editor so we can easily check for compilation issues
|
||||
#if (UNITY_EDITOR || UNITY_WEBGL)
|
||||
using UnityEngine;
|
||||
using System;
|
||||
using System.Text;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// WebGL implementation of BaseMediaPlayer
|
||||
/// </summary>
|
||||
public sealed class WebGLMediaPlayer : BaseMediaPlayer
|
||||
{
|
||||
//private enum AVPPlayerStatus
|
||||
//{
|
||||
// Unknown,
|
||||
// ReadyToPlay,
|
||||
// Playing,
|
||||
// Finished,
|
||||
// Seeking,
|
||||
// Failed
|
||||
//}
|
||||
|
||||
// ---- JS-side player bridge (__Internal = functions defined in the .jslib) ----

// Creates the <video> element; idValues[0] receives the player index, idValues[1] the player ID.
[DllImport("__Internal")]
private static extern bool AVPPlayerInsertVideoElement(string path, int[] idValues, int externalLibrary);

[DllImport("__Internal")]
private static extern int AVPPlayerWidth(int player);

[DllImport("__Internal")]
private static extern int AVPPlayerHeight(int player);

[DllImport("__Internal")]
private static extern int AVPPlayerGetLastError(int player);

// ---- Tracks ----------------------------------------------------------------

[DllImport("__Internal")]
private static extern int AVPPlayerGetVideoTrackCount(int player);

[DllImport("__Internal")]
private static extern int AVPPlayerGetAudioTrackCount(int player);

[DllImport("__Internal")]
private static extern int AVPPlayerGetTextTrackCount(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerSetActiveVideoTrack(int player, int trackId);

[DllImport("__Internal")]
private static extern bool AVPPlayerSetActiveAudioTrack(int player, int trackId);

[DllImport("__Internal")]
private static extern bool AVPPlayerSetActiveTextTrack(int player, int trackId);

// ---- Lifetime / status -----------------------------------------------------

[DllImport("__Internal")]
private static extern void AVPPlayerClose(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerReady(int player);

[DllImport("__Internal")]
private static extern void AVPPlayerSetLooping(int player, bool loop);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsLooping(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsSeeking(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsPlaying(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsPaused(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsFinished(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsBuffering(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsPlaybackStalled(int player);

// ---- Playback control ------------------------------------------------------

[DllImport("__Internal")]
private static extern bool AVPPlayerPlay(int player);

[DllImport("__Internal")]
private static extern void AVPPlayerPause(int player);

[DllImport("__Internal")]
private static extern void AVPPlayerSeekToTime(int player, double time, bool fast);

[DllImport("__Internal")]
private static extern double AVPPlayerGetCurrentTime(int player);

[DllImport("__Internal")]
private static extern float AVPPlayerGetDuration(int player);

[DllImport("__Internal")]
private static extern float AVPPlayerGetPlaybackRate(int player);

[DllImport("__Internal")]
private static extern void AVPPlayerSetPlaybackRate(int player, float rate);

// ---- Audio -----------------------------------------------------------------

[DllImport("__Internal")]
private static extern void AVPPlayerSetMuted(int player, bool muted);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsMuted(int player);

[DllImport("__Internal")]
private static extern float AVPPlayerGetVolume(int player);

[DllImport("__Internal")]
private static extern void AVPPlayerSetVolume(int player, float volume);

[DllImport("__Internal")]
private static extern bool AVPPlayerHasVideo(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerHasAudio(int player);

// ---- Texture ---------------------------------------------------------------

[DllImport("__Internal")]
private static extern void AVPPlayerCreateVideoTexture(int textureId);

[DllImport("__Internal")]
private static extern void AVPPlayerDestroyVideoTexture(int textureId);

[DllImport("__Internal")]
private static extern void AVPPlayerFetchVideoTexture(int player, IntPtr texture, bool init);

[DllImport("__Internal")]
private static extern int AVPPlayerGetDecodedFrameCount(int player);

// True when the browser supports decoded-frame counting.
[DllImport("__Internal")]
private static extern bool AVPPlayerSupportedDecodedFrameCount(int player);

[DllImport("__Internal")]
private static extern bool AVPPlayerHasMetadata(int player);

[DllImport("__Internal")]
private static extern int AVPPlayerUpdatePlayerIndex(int id);

// ---- Buffered time ranges --------------------------------------------------

[DllImport("__Internal")]
private static extern int AVPPlayerGetNumBufferedTimeRanges(int id);

[DllImport("__Internal")]
private static extern double AVPPlayerGetTimeRangeStart(int id, int timeRangeIndex);
[DllImport("__Internal")]
private static extern double AVPPlayerGetTimeRangeEnd(int id, int timeRangeIndex);

// ---- Track names / languages / active state --------------------------------

[DllImport("__Internal")]
private static extern string AVPPlayerGetVideoTrackName(int player, int trackIndex);

[DllImport("__Internal")]
private static extern string AVPPlayerGetAudioTrackName(int player, int trackIndex);

[DllImport("__Internal")]
private static extern string AVPPlayerGetTextTrackName(int player, int trackIndex);

[DllImport("__Internal")]
private static extern string AVPPlayerGetVideoTrackLanguage(int player, int trackIndex);

[DllImport("__Internal")]
private static extern string AVPPlayerGetAudioTrackLanguage(int player, int trackIndex);

[DllImport("__Internal")]
private static extern string AVPPlayerGetTextTrackLanguage(int player, int trackIndex);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsVideoTrackActive(int player, int trackIndex);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsAudioTrackActive(int player, int trackIndex);

[DllImport("__Internal")]
private static extern bool AVPPlayerIsTextTrackActive(int player, int trackIndex);
|
||||
|
||||
// Options copied from MediaPlayer.OptionsWebGL via SetOptions().
private WebGL.ExternalLibrary _externalLibrary = WebGL.ExternalLibrary.None;
// Index and ID of the JS-side player; -1 while no media is open.
private int _playerIndex = -1;
private int _playerID = -1;

#if AVPRO_WEBGL_USE_RENDERTEXTURE
private RenderTexture _texture = null;
#else
private Texture2D _texture = null;
#endif

// Last known video dimensions.
private int _width = 0;
private int _height = 0;
// Cached track counts, used with the dirty flags below to detect track-list changes.
private int _cachedVideoTrackCount = 0;
private int _cachedAudioTrackCount = 0;
private int _cachedTextTrackCount = 0;
private bool _isDirtyVideoTracks = false;
private bool _isDirtyAudioTracks = false;
private bool _isDirtyTextTracks = false;
private bool _useTextureMips = false;
// Cached native texture pointer so we can detect when Unity recreates the texture.
private System.IntPtr _cachedTextureNativePtr = System.IntPtr.Zero;

// True when running on WebGL 1 (GLES2); set once in InitialisePlatform().
private static bool _isWebGL1 = false;
|
||||
|
||||
// One-time platform setup: records whether we are running on WebGL 1.0,
// which affects mip-map generation support (see IsMipMapGenerationSupported).
public static bool InitialisePlatform()
{
#if UNITY_2023_1_OR_NEWER
	// Unity 2023.1+ no longer supports the OpenGLES2/WebGL 1.0 device
	_isWebGL1 = false;
#else
	// WebGL 1.0 is reported as the OpenGLES2 graphics device
	_isWebGL1 = (SystemInfo.graphicsDeviceType == UnityEngine.Rendering.GraphicsDeviceType.OpenGLES2);
#endif
	return true;
}
|
||||
|
||||
// Creates the player and applies the WebGL-specific options
public WebGLMediaPlayer(MediaPlayer.OptionsWebGL options)
{
	SetOptions(options);
}
|
||||
|
||||
// Applies the WebGL-specific playback options.
// NOTE: only affects players opened after this call; _externalLibrary is read in OpenMedia()
public void SetOptions(MediaPlayer.OptionsWebGL options)
{
	_externalLibrary = options.externalLibrary;
	_useTextureMips = options.useTextureMips;
}
|
||||
|
||||
// NOTE: the WebGL backend reports a hard-coded version string
public override string GetVersion() => "3.1.0";
|
||||
|
||||
// The WebGL backend has no separate native plugin, so actual and expected versions always match
public override string GetExpectedVersion() => GetVersion();
|
||||
|
||||
// Opens a media URL by inserting a video element on the JS side.
// Returns true when a JS-side player slot was obtained; 'offset' is not used by this backend.
public override bool OpenMedia(string path, long offset, string httpHeader, MediaHints mediaHints, int forceFileFormat = 0, bool startWithHighestBitrate = false)
{
	bool result = false;

	// Only URL schemes that a browser video element can load are accepted.
	// Ordinal comparison avoids culture-sensitive prefix matching (CA1310).
	bool isSupportedProtocol =
		path.StartsWith("http://", System.StringComparison.Ordinal) ||
		path.StartsWith("https://", System.StringComparison.Ordinal) ||
		path.StartsWith("file://", System.StringComparison.Ordinal) ||
		path.StartsWith("blob:", System.StringComparison.Ordinal) ||
		path.StartsWith("chrome-extension://", System.StringComparison.Ordinal);

	if (isSupportedProtocol)
	{
		// idValues[0] receives the transient player index, idValues[1] the persistent player ID
		int[] idValues = new int[2];
		idValues[0] = -1;
		AVPPlayerInsertVideoElement(path, idValues, (int)_externalLibrary);

		int playerIndex = idValues[0];
		_playerID = idValues[1];

		if (playerIndex > -1)
		{
			_playerIndex = playerIndex;
			_mediaHints = mediaHints;
			result = true;
		}
	}
	else
	{
		Debug.LogError("[AVProVideo] Unknown URL protocol");
	}

	return result;
}
|
||||
|
||||
// Closes the current media and resets all cached per-media state.
public override void CloseMedia()
{
	if (_playerIndex != -1)
	{
		Pause();

		_width = 0;
		_height = 0;
		_cachedVideoTrackCount = 0;
		_cachedAudioTrackCount = 0;
		_cachedTextTrackCount = 0;
		_isDirtyVideoTracks = false;
		_isDirtyAudioTracks = false;
		_isDirtyTextTracks = false;

		AVPPlayerClose(_playerIndex);

		if (_texture != null)
		{
			DestroyTexture();
		}

		_playerIndex = -1;
		_playerID = -1;
	}

	// Always reset base-class state, even when no native player was open.
	// The original only called this inside the guard, unlike the other platform
	// backends (e.g. WindowsRtMediaPlayer) which call it unconditionally.
	base.CloseMedia();
}
|
||||
|
||||
// Reports the JS-side looping flag; false when no player is open.
public override bool IsLooping()
{
	return (_playerIndex != -1) && AVPPlayerIsLooping(_playerIndex);
}
|
||||
|
||||
// Forwards the looping flag to the JS side.
// NOTE(review): unlike the getters, this does not guard _playerIndex != -1 —
// confirm the JS side tolerates being called with index -1
public override void SetLooping(bool looping)
{
	AVPPlayerSetLooping(_playerIndex, looping);
}
|
||||
|
||||
// True when the open media contains an audio track; false when no player is open.
public override bool HasAudio()
{
	return (_playerIndex != -1) && AVPPlayerHasAudio(_playerIndex);
}
|
||||
|
||||
// True when the open media contains a video track; false when no player is open.
public override bool HasVideo()
{
	return (_playerIndex != -1) && AVPPlayerHasVideo(_playerIndex);
}
|
||||
|
||||
public override bool HasMetaData()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player HasMetaData");
|
||||
bool result = false;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerHasMetadata(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// True once the JS-side player reports it is ready for playback; false when no player is open.
public override bool CanPlay()
{
	return (_playerIndex != -1) && AVPPlayerReady(_playerIndex);
}
|
||||
|
||||
// Starts playback. Browsers may refuse until a user gesture has occurred,
// in which case the JS side returns false and a warning is logged.
public override void Play()
{
	bool started = AVPPlayerPlay(_playerIndex);
	if (!started)
	{
		Debug.LogWarning("[AVProVideo] Browser permission prevented video playback");
	}
}
|
||||
|
||||
public override void Pause()
|
||||
{
|
||||
// Debug.Assert(_playerIndex != -1, "no player Pause");
|
||||
|
||||
AVPPlayerPause(_playerIndex);
|
||||
}
|
||||
|
||||
// HTML media elements have no true 'stop' operation, so pausing is the closest equivalent
public override void Stop()
{
	AVPPlayerPause(_playerIndex);
}
|
||||
|
||||
public override void Seek(double time)
|
||||
{
|
||||
// Debug.Assert(_playerIndex != -1, "no player Seek");
|
||||
AVPPlayerSeekToTime(_playerIndex, time, false);
|
||||
}
|
||||
|
||||
public override void SeekFast(double time)
|
||||
{
|
||||
// Debug.Assert(_playerIndex != -1, "no player SeekFast");
|
||||
AVPPlayerSeekToTime(_playerIndex, time, true);
|
||||
}
|
||||
|
||||
public override double GetCurrentTime()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player GetCurrentTime");
|
||||
double result = 0.0;
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerGetCurrentTime(_playerIndex);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// Applies a playback rate, clamped to the range browsers reliably support.
// No HTML implementations allow negative rate yet.
public override void SetPlaybackRate(float rate)
{
	float clampedRate = Mathf.Clamp(rate, 0.25f, 8f);
	AVPPlayerSetPlaybackRate(_playerIndex, clampedRate);
}
|
||||
|
||||
public override float GetPlaybackRate()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player GetPlaybackRate");
|
||||
float result = 0.0f;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerGetPlaybackRate(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override double GetDuration()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player GetDuration");
|
||||
double result = 0.0;
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerGetDuration(_playerIndex);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// Returns the video width in pixels (0 while unknown), lazily cached.
public override int GetVideoWidth()
{
	// Guard against querying the native side with no open player; every sibling
	// getter in this class guards on _playerIndex but the original here did not.
	if (_width == 0 && _playerIndex != -1)
	{
		_width = AVPPlayerWidth(_playerIndex);
	}
	return _width;
}
|
||||
|
||||
// Returns the video height in pixels (0 while unknown), lazily cached.
public override int GetVideoHeight()
{
	// Guard against querying the native side with no open player (see GetVideoWidth)
	if (_height == 0 && _playerIndex != -1)
	{
		_height = AVPPlayerHeight(_playerIndex);
	}
	return _height;
}
|
||||
|
||||
public override float GetVideoFrameRate()
|
||||
{
|
||||
// There is no way in HTML5 yet to get the frame rate of the video
|
||||
return 0f;
|
||||
}
|
||||
|
||||
public override bool IsSeeking()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player IsSeeking");
|
||||
bool result = false;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerIsSeeking(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override bool IsPlaying()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player IsPlaying");
|
||||
bool result = false;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerIsPlaying(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override bool IsPaused()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player IsPaused");
|
||||
bool result = false;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerIsPaused(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override bool IsFinished()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player IsFinished");
|
||||
bool result = false;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerIsFinished(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override bool IsBuffering()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player IsBuffering");
|
||||
bool result = false;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerIsBuffering(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override Texture GetTexture( int index )
|
||||
{
|
||||
return _texture;
|
||||
}
|
||||
|
||||
public override int GetTextureFrameCount()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player GetTextureFrameCount");
|
||||
int result = 0;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerGetDecodedFrameCount(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
internal override StereoPacking InternalGetTextureStereoPacking()
|
||||
{
|
||||
return StereoPacking.Unknown;
|
||||
}
|
||||
|
||||
public override bool SupportsTextureFrameCount()
|
||||
{
|
||||
bool result = false;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerSupportedDecodedFrameCount(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override bool RequiresVerticalFlip()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
public override bool IsMuted()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player IsMuted");
|
||||
bool result = false;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerIsMuted(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override void MuteAudio(bool bMute)
|
||||
{
|
||||
// Debug.Assert(_playerIndex != -1, "no player MuteAudio");
|
||||
|
||||
AVPPlayerSetMuted(_playerIndex, bMute);
|
||||
}
|
||||
|
||||
public override void SetVolume(float volume)
|
||||
{
|
||||
// Debug.Assert(_playerIndex != -1, "no player SetVolume");
|
||||
|
||||
AVPPlayerSetVolume(_playerIndex, volume);
|
||||
}
|
||||
|
||||
public override float GetVolume()
|
||||
{
|
||||
//Debug.Assert(_player != -1, "no player GetVolume");
|
||||
float result = 0.0f;
|
||||
|
||||
if (_playerIndex != -1)
|
||||
{
|
||||
result = AVPPlayerGetVolume(_playerIndex);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override void Render()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Polls the JS-side error code and maps it onto AVPro error codes.
// Codes appear to follow HTMLMediaElement.error.code (1=ABORTED, 2=NETWORK,
// 3=DECODE, 4=SRC_NOT_SUPPORTED) — TODO confirm against the JS plugin.
private void UpdateLastErrorCode()
{
	int code = AVPPlayerGetLastError(_playerIndex);

	switch (code)
	{
		case 0:
			_lastError = ErrorCode.None;
			break;
		case 1:
		case 2:
		case 4:
			// Original had three identical branches; collapsed into combined labels
			_lastError = ErrorCode.LoadFailed;
			break;
		case 3:
			_lastError = ErrorCode.DecodeFailed;
			break;
		default:
			// Unknown code: leave the previous error value untouched
			break;
	}
}
|
||||
|
||||
// Mip generation is always supported on WebGL 2.0; WebGL 1.0 additionally
// requires both texture dimensions to be powers of two.
private bool IsMipMapGenerationSupported(int videoWidth, int videoHeight)
{
	if (_isWebGL1)
	{
		return Mathf.IsPowerOfTwo(videoWidth) && Mathf.IsPowerOfTwo(videoHeight);
	}
	return true;
}
|
||||
|
||||
// Creates the texture that receives video frames from the browser and performs
// the initial frame fetch. Two paths exist: a RenderTexture (supports mip
// generation) or an external Texture2D wrapping a GL texture created JS-side.
private void CreateTexture()
{
#if AVPRO_WEBGL_USE_RENDERTEXTURE
	_texture = new RenderTexture(_width, _height, 0, RenderTextureFormat.Default);
	_texture.autoGenerateMips = false;		// mips are generated explicitly in Update()
	_texture.useMipMap = (_useTextureMips && IsMipMapGenerationSupported(_width, _height));
	_texture.Create();
	_cachedTextureNativePtr = _texture.GetNativeTexturePtr();
#else
	// Fabricate a GL texture name on the JS side; 80000 + player index keeps the
	// ids unique per player (chosen range presumably avoids collisions — TODO confirm)
	int textureId = 80000 + _playerIndex;
	_cachedTextureNativePtr = new System.IntPtr(textureId);
	AVPPlayerCreateVideoTexture(textureId);
	// TODO: add support for mip generation
	_texture = Texture2D.CreateExternalTexture(_width, _height, TextureFormat.RGBA32, false, false, _cachedTextureNativePtr);
	if (_useTextureMips)
	{
		Debug.LogWarning("[AVProVideo] Texture Mips not yet implemented in this WebGL rendering path");
	}
#endif

	ApplyTextureProperties(_texture);

	bool initTexture = true;
#if AVPRO_WEBGL_USE_RENDERTEXTURE
	// Textures in WebGL 2.0 don't require texImage2D as they are already created with texStorage2D
	initTexture = _isWebGL1;
#endif
	// Fetch the first frame (and optionally initialise the texture storage)
	AVPPlayerFetchVideoTexture(_playerIndex, _cachedTextureNativePtr, initTexture);
}
|
||||
|
||||
// Destroys the video texture and, on the external-texture path, releases the
// matching GL texture created on the JS side.
private void DestroyTexture()
{
	// Have to update with zero to release Metal textures!
	//_texture.UpdateExternalTexture(0);
	if (_texture != null)
	{
#if AVPRO_WEBGL_USE_RENDERTEXTURE
		RenderTexture.Destroy(_texture);
#else
		Texture2D.Destroy(_texture);
		// Free the JS-side GL texture (id was fabricated in CreateTexture)
		AVPPlayerDestroyVideoTexture(_cachedTextureNativePtr.ToInt32());
#endif
		_texture = null;
	}
	_cachedTextureNativePtr = System.IntPtr.Zero;
}
|
||||
|
||||
// Per-frame update: re-resolves the player index, polls tracks/subtitles/errors,
// keeps the video texture sized correctly and fetches the latest frame into it.
public override void Update()
{
	if (_playerID >= 0)		// Re-resolve the transient player index from the persistent ID
	{
		_playerIndex = AVPPlayerUpdatePlayerIndex(_playerID);
	}

	if (_playerIndex >= 0)
	{
		CheckTracksDirty();
		UpdateTracks();
		UpdateTextCue();
		UpdateSubtitles();
		UpdateLastErrorCode();

		if (AVPPlayerReady(_playerIndex))
		{
			UpdateTimeRanges();
			if (AVPPlayerHasVideo(_playerIndex))
			{
				_width = AVPPlayerWidth(_playerIndex);
				_height = AVPPlayerHeight(_playerIndex);

				// Recreate the texture when the video dimensions change
				if (_texture != null && (_texture.width != _width || _texture.height != _height))
				{
					DestroyTexture();
				}

				if (_texture == null && _width > 0 && _height > 0)
				{
					CreateTexture();
				}

				// Update the texture with the latest decoded frame
				if (_cachedTextureNativePtr != System.IntPtr.Zero)
				{
					// TODO: only update the texture when the frame count changes
					// (actually this will break the update for certain browsers such as edge and possibly safari - Sunrise)
					AVPPlayerFetchVideoTexture(_playerIndex, _cachedTextureNativePtr, false);

#if AVPRO_WEBGL_USE_RENDERTEXTURE
					// Mips must be regenerated after every frame upload
					if (_texture.useMipMap)
					{
						_texture.GenerateMips();
					}
#endif
				}

				UpdateDisplayFrameRate();
			}
		}
	}
}
|
||||
|
||||
// Polls the JS-side track counts and flags each track type as dirty when its
// count differs from the cached value. Flags are valid for this frame only.
private void CheckTracksDirty()
{
	// TODO: replace this crude polling check with events, or only do it once metadataReady
	// Need to add event support as tracks can be added via HTML (especially text)
	int videoTrackCount = AVPPlayerGetVideoTrackCount(_playerIndex);
	int audioTrackCount = AVPPlayerGetAudioTrackCount(_playerIndex);
	int textTrackCount = AVPPlayerGetTextTrackCount(_playerIndex);

	// (The original pre-initialised these flags to false before unconditionally
	// overwriting them — the redundant assignments have been removed.)
	_isDirtyVideoTracks = (_cachedVideoTrackCount != videoTrackCount);
	_isDirtyAudioTracks = (_cachedAudioTrackCount != audioTrackCount);
	_isDirtyTextTracks = (_cachedTextTrackCount != textTrackCount);

	_cachedVideoTrackCount = videoTrackCount;
	_cachedAudioTrackCount = audioTrackCount;
	_cachedTextTrackCount = textTrackCount;
}
|
||||
|
||||
// Mirrors the browser's buffered TimeRanges into _bufferedTimes, and publishes a
// single seekable range covering the whole duration once it is known.
private void UpdateTimeRanges()
{
	// Buffered ranges
	{
		int rangeCount = AVPPlayerGetNumBufferedTimeRanges(_playerIndex);
		// Only reallocate when the range count changed
		if (rangeCount != _bufferedTimes.Count)
		{
			_bufferedTimes._ranges = new TimeRange[rangeCount];
		}
		for (int i = 0; i < rangeCount; i++)
		{
			double startTime = AVPPlayerGetTimeRangeStart(_playerIndex, i);
			double endTime = AVPPlayerGetTimeRangeEnd(_playerIndex, i);
			// TimeRange takes (start, duration)
			_bufferedTimes._ranges[i] = new TimeRange(startTime, endTime - startTime);
		}
		_bufferedTimes.CalculateRange();
	}

	// Seekable range: assume [0, duration] is fully seekable once the duration is known
	{
		double duration = GetDuration();
		if (duration > 0.0)
		{
			_seekableTimes._ranges = new TimeRange[1];
			_seekableTimes._ranges[0] = new TimeRange(0.0, duration);
		}
		else
		{
			_seekableTimes._ranges = new TimeRange[0];
		}
		_seekableTimes.CalculateRange();
	}
}
|
||||
|
||||
// Disposing simply closes the media; CloseMedia releases the texture and player slot
public override void Dispose() => CloseMedia();
|
||||
|
||||
// Reports stalling only while playback is supposed to be running.
public override bool IsPlaybackStalled()
{
	if (_playerIndex < 0)
	{
		return false;
	}
	return AVPPlayerIsPlaybackStalled(_playerIndex) && IsPlaying();
}
|
||||
|
||||
// Tracks
|
||||
// Tracks
// Returns the JS-side track count for the requested track type (0 for unknown types).
internal override int InternalGetTrackCount(TrackType trackType)
{
	switch (trackType)
	{
		case TrackType.Video: return AVPPlayerGetVideoTrackCount(_playerIndex);
		case TrackType.Audio: return AVPPlayerGetAudioTrackCount(_playerIndex);
		case TrackType.Text: return AVPPlayerGetTextTrackCount(_playerIndex);
		default: return 0;
	}
}
|
||||
|
||||
// Returns the per-type dirty flag computed by CheckTracksDirty() this frame.
internal override bool InternalIsChangedTracks(TrackType trackType)
{
	switch (trackType)
	{
		case TrackType.Video: return _isDirtyVideoTracks;
		case TrackType.Audio: return _isDirtyAudioTracks;
		case TrackType.Text: return _isDirtyTextTracks;
		default: return false;
	}
}
|
||||
|
||||
// Asks the JS side to activate the given track; returns whether the switch succeeded.
internal override bool InternalSetActiveTrack(TrackType trackType, int trackId)
{
	switch (trackType)
	{
		case TrackType.Video: return AVPPlayerSetActiveVideoTrack(_playerIndex, trackId);
		case TrackType.Audio: return AVPPlayerSetActiveAudioTrack(_playerIndex, trackId);
		case TrackType.Text: return AVPPlayerSetActiveTextTrack(_playerIndex, trackId);
		default: return false;
	}
}
|
||||
|
||||
// Builds a track-info object for the track at trackIndex by querying the JS side
// for its name, language and active state. Returns null for unknown track types.
// NOTE(review): the isActiveTrack ref parameter is never written here — confirm callers expect that.
internal override TrackBase InternalGetTrackInfo(TrackType trackType, int trackIndex, ref bool isActiveTrack)
{
	switch (trackType)
	{
		case TrackType.Video:
			return new VideoTrack(trackIndex,
				AVPPlayerGetVideoTrackName(_playerIndex, trackIndex),
				AVPPlayerGetVideoTrackLanguage(_playerIndex, trackIndex),
				AVPPlayerIsVideoTrackActive(_playerIndex, trackIndex));
		case TrackType.Audio:
			return new AudioTrack(trackIndex,
				AVPPlayerGetAudioTrackName(_playerIndex, trackIndex),
				AVPPlayerGetAudioTrackLanguage(_playerIndex, trackIndex),
				AVPPlayerIsAudioTrackActive(_playerIndex, trackIndex));
		case TrackType.Text:
			return new TextTrack(trackIndex,
				AVPPlayerGetTextTrackName(_playerIndex, trackIndex),
				AVPPlayerGetTextTrackLanguage(_playerIndex, trackIndex),
				AVPPlayerIsTextTrackActive(_playerIndex, trackIndex));
		default:
			return null;
	}
}
|
||||
|
||||
// Text Cue stub methods — this backend does not surface text cue changes
// through these overrides, so they return fixed values
internal override bool InternalIsChangedTextCue() { return false; }
internal override string InternalGetCurrentTextCue() { return string.Empty; }
|
||||
}
|
||||
}
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: c044ff13d5570e64a8156bc718b3cfec
|
||||
timeCreated: 1468230219
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 6f3c954eb61392a4193295a8376bd8db
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
@@ -0,0 +1,5 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2025 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
// This file is intentionally blank
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 2b36cc2d6962ce34e86c5a83a0de6d4a
|
||||
timeCreated: 1630292296
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,973 @@
|
||||
//#define AVPROVIDEO_WINDOWS_UNIFIED_DLLS // DEV FEATURE: are we using new unified (DS + MF + WRT) Windows DLLs?
|
||||
//
|
||||
// NOTE: We only allow this script to compile in editor so we can easily check for compilation issues
|
||||
#if ((UNITY_EDITOR || (UNITY_STANDALONE_WIN || UNITY_WSA_10_0)) && !AVPROVIDEO_WINDOWS_UNIFIED_DLLS)
|
||||
|
||||
#if UNITY_WSA_10 || ENABLE_IL2CPP
|
||||
#define AVPROVIDEO_MARSHAL_RETURN_BOOL
|
||||
#endif
|
||||
|
||||
using UnityEngine;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Collections.Generic;
|
||||
using System;
|
||||
using System.Text;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2018-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
// Native playback state. The low 3 bits (StateMask) hold the primary state;
// Seeking is OR'd in as an independent flag (see IsPaused/IsPlaying/IsSeeking).
public enum PlaybackState
{
	None = 0,
	Opening = 1,
	Buffering = 2, // Replace with Stalled and add Buffering as State 64??
	Playing = 3,
	Paused = 4,
	StateMask = 7,

	Seeking = 32,
}
|
||||
|
||||
public partial class WindowsRtMediaPlayer : BaseMediaPlayer
|
||||
{
|
||||
private bool _isMediaLoaded = false;
|
||||
private bool _isLooping = false;
|
||||
private float _volume = 1.0f;
|
||||
private bool _use10BitTextures = false;
|
||||
private bool _useLowLiveLatency = false;
|
||||
|
||||
// Creates the player with Windows desktop options.
public WindowsRtMediaPlayer(MediaPlayer.OptionsWindows options) : base()
{
	_playerDescription = "WinRT";

	SetOptions(options);

	// Allocate one texture wrapper per eye (index 1 is only populated for two-texture stereo content)
	for (int i = 0; i < _eyeTextures.Length; i++)
	{
		_eyeTextures[i] = new EyeTexture();
	}
}
|
||||
|
||||
// Creates the player with UWP options.
// Options are applied inline here because SetOptions() only accepts OptionsWindows.
public WindowsRtMediaPlayer(MediaPlayer.OptionsWindowsUWP options) : base()
{
	_playerDescription = "WinRT";
	_use10BitTextures = options.use10BitTextures;
	_useLowLiveLatency = options.useLowLiveLatency;
	for (int i = 0; i < _eyeTextures.Length; i++)
	{
		_eyeTextures[i] = new EyeTexture();
	}
}
|
||||
|
||||
// Applies the Windows desktop options.
// NOTE: _use10BitTextures is read in OpenMedia(), _useLowLiveLatency after a successful open
public void SetOptions(MediaPlayer.OptionsWindows options)
{
	_use10BitTextures = options.use10BitTextures;
	_useLowLiveLatency = options.useLowLiveLatency;
}
|
||||
|
||||
// Playback is possible as soon as metadata (a non-zero duration) is available
public override bool CanPlay() => HasMetaData();
|
||||
|
||||
// Tears down the native player: closes media, destroys the native instance,
// releases native textures on the render thread, then disposes the eye textures.
public override void Dispose()
{
	CloseMedia();
	if (_playerInstance != System.IntPtr.Zero)
	{
		Native.DestroyPlayer(_playerInstance); _playerInstance = System.IntPtr.Zero;
		// Texture release must happen via a render-thread event
		Native.IssueRenderThreadEvent_FreeAllTextures();
	}
	for (int i = 0; i < _eyeTextures.Length; i++)
	{
		_eyeTextures[i].Dispose();
	}
}
|
||||
|
||||
public override bool PlayerSupportsLinearColorSpace()
|
||||
{
|
||||
// The current player doesn't support rendering to SRGB textures
|
||||
return false;
|
||||
}
|
||||
|
||||
public override double GetCurrentTime()
|
||||
{
|
||||
return Native.GetCurrentPosition(_playerInstance);
|
||||
}
|
||||
|
||||
public override double GetDuration()
|
||||
{
|
||||
return Native.GetDuration(_playerInstance);
|
||||
}
|
||||
|
||||
public override float GetPlaybackRate()
|
||||
{
|
||||
return Native.GetPlaybackRate(_playerInstance);
|
||||
}
|
||||
|
||||
public override Texture GetTexture(int index = 0)
|
||||
{
|
||||
Texture result = null;
|
||||
if (_frameTimeStamp > 0 && index < _eyeTextures.Length)
|
||||
{
|
||||
result = _eyeTextures[index].texture;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public override int GetTextureCount()
|
||||
{
|
||||
if (_eyeTextures[1].texture != null)
|
||||
{
|
||||
return 2;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
public override int GetTextureFrameCount()
|
||||
{
|
||||
return (int)_frameTimeStamp;
|
||||
}
|
||||
|
||||
// Maps the native plugin's stereo-packing value onto the shared StereoPacking enum.
/// [MOZ] Windows plugin internal stereo packing enum is now out of date and doesn't take into account
/// the changes required for supporting MV-HEVC on visionOS and Android.
internal override StereoPacking InternalGetTextureStereoPacking()
{
	int packing = Native.GetStereoPacking(_playerInstance);
	switch (packing)
	{
		case 0: return StereoPacking.Monoscopic;			// None
		case 1: return StereoPacking.TopBottom;				// TopBottom
		case 2: return StereoPacking.LeftRight;				// LeftRight
		case 3: return StereoPacking.CustomUV;				// CustomUV
		case 4: return StereoPacking.MultiviewLeftPrimary;	// TwoTextures
		default: return StereoPacking.Unknown;
	}
}
|
||||
|
||||
public override string GetVersion()
|
||||
{
|
||||
return _version;
|
||||
}
|
||||
|
||||
public override string GetExpectedVersion()
|
||||
{
|
||||
return Helper.ExpectedPluginVersion.WinRT;
|
||||
}
|
||||
|
||||
public override float GetVideoFrameRate()
|
||||
{
|
||||
float result = 0f;
|
||||
Native.VideoTrack videoTrack;
|
||||
if (Native.GetActiveVideoTrackInfo(_playerInstance, out videoTrack))
|
||||
{
|
||||
result = videoTrack.frameRate;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public override int GetVideoWidth()
|
||||
{
|
||||
int result = 0;
|
||||
if (_eyeTextures[0].texture)
|
||||
{
|
||||
result = _eyeTextures[0].texture.width;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public override int GetVideoHeight()
|
||||
{
|
||||
int result = 0;
|
||||
if (_eyeTextures[0].texture)
|
||||
{
|
||||
result = _eyeTextures[0].texture.height;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public override float GetVolume()
|
||||
{
|
||||
return _volume;//Native.GetAudioVolume(_playerInstance);
|
||||
}
|
||||
|
||||
public override void SetBalance(float balance)
|
||||
{
|
||||
Native.SetAudioBalance(_playerInstance, balance);
|
||||
}
|
||||
|
||||
public override float GetBalance()
|
||||
{
|
||||
return Native.GetAudioBalance(_playerInstance);
|
||||
}
|
||||
|
||||
public override bool HasAudio()
|
||||
{
|
||||
return _audioTracks.Count > 0;
|
||||
}
|
||||
|
||||
public override bool HasMetaData()
|
||||
{
|
||||
return Native.GetDuration(_playerInstance) > 0f;
|
||||
}
|
||||
|
||||
public override bool HasVideo()
|
||||
{
|
||||
return _videoTracks.Count > 0;
|
||||
}
|
||||
|
||||
public override bool IsBuffering()
|
||||
{
|
||||
return ((Native.GetPlaybackState(_playerInstance) & PlaybackState.StateMask) == PlaybackState.Buffering);
|
||||
}
|
||||
|
||||
// Finished = paused at (or past) the end of the media and not mid-seek.
public override bool IsFinished()
{
	return IsPaused() && !IsSeeking() && GetCurrentTime() >= GetDuration();
}
|
||||
|
||||
public override bool IsLooping()
|
||||
{
|
||||
return _isLooping;//Native.IsLooping(_playerInstance);
|
||||
}
|
||||
|
||||
public override bool IsMuted()
|
||||
{
|
||||
return Native.IsAudioMuted(_playerInstance);
|
||||
}
|
||||
|
||||
public override bool IsPaused()
|
||||
{
|
||||
return ((Native.GetPlaybackState(_playerInstance) & PlaybackState.StateMask) == PlaybackState.Paused);
|
||||
}
|
||||
|
||||
public override bool IsPlaying()
|
||||
{
|
||||
return ((Native.GetPlaybackState(_playerInstance) & PlaybackState.StateMask) == PlaybackState.Playing);
|
||||
}
|
||||
|
||||
public override bool IsSeeking()
|
||||
{
|
||||
return ((Native.GetPlaybackState(_playerInstance) & PlaybackState.Seeking) != 0);
|
||||
}
|
||||
|
||||
public override void MuteAudio(bool bMuted)
|
||||
{
|
||||
Native.SetAudioMuted(_playerInstance, bMuted);
|
||||
}
|
||||
|
||||
// TODO: replace all these options with a structure
|
||||
// Opens media via the native Windows plugin, lazily creating the native player
// on first use. NOTE: 'offset' is not used by this backend.
public override bool OpenMedia(string path, long offset, string httpHeader, MediaHints mediaHints, int forceFileFormat = 0, bool startWithHighestBitrate = false)
{
	bool result = false;

	// RJT NOTE: Commented out as already called by 'InternalOpenMedia()' which calls this function
	// CloseMedia();

	if (_playerInstance == System.IntPtr.Zero)
	{
		_playerInstance = Native.CreatePlayer();

		// Force setting any auth data as it wouldn't have been set without a _playerInstance
		AuthenticationData = _nextAuthData;
	}
	if (_playerInstance != System.IntPtr.Zero)
	{
		result = Native.OpenMedia(_playerInstance, path, httpHeader, (FileFormat)forceFileFormat, startWithHighestBitrate, _use10BitTextures);
		if (result)
		{
			// Zero live offset minimises latency for live streams
			if (_useLowLiveLatency)
			{
				Native.SetLiveOffset(_playerInstance, 0.0);
			}

			// RJT NOTE: Other platforms create their native instances earlier than 'OpenMedia()' and set looping at that
			// point which Windows misses, so make sure once we have an instance we pass the looping flag down retrospectively
			// - https://github.com/RenderHeads/UnityPlugin-AVProVideo/issues/1913
			// - Same now with volume: https://github.com/RenderHeads/UnityPlugin-AVProVideo/issues/1916
			Native.SetLooping(_playerInstance, _isLooping);
			Native.SetAudioVolume(_playerInstance, _volume);
		}
		// NOTE(review): _mediaHints is stored even when the native open fails — confirm intentional
		_mediaHints = mediaHints;
	}

	return result;
}
|
||||
|
||||
// Unloads the current media and resets per-media state.
public override void CloseMedia()
{
	// NOTE: This unloads the current video, but the texture should remain
	_isMediaLoaded = false;
	_isLooping = false;
	_volume = 1.0f;		// reset to default so the next open starts at full volume
	Native.CloseMedia(_playerInstance);

	base.CloseMedia();
}
|
||||
|
||||
public override void Pause()
|
||||
{
|
||||
Native.Pause(_playerInstance);
|
||||
}
|
||||
|
||||
public override void Play()
|
||||
{
|
||||
Native.Play(_playerInstance);
|
||||
}
|
||||
|
||||
public override void Render()
|
||||
{
|
||||
Native.IssueRenderThreadEvent_UpdateAllTextures();
|
||||
}
|
||||
|
||||
/// <summary>
/// Polls the native plugin for the latest decoded frame and keeps the Unity-side
/// external textures (one per eye for stereo content) in sync with the native
/// texture pointers. Recreates a texture when its size changes or its pointer
/// becomes invalid; otherwise just repoints the existing external texture.
/// </summary>
private void Update_Textures()
{
	// See if there is a new frame ready
	{
		System.IntPtr texturePointerLeft = System.IntPtr.Zero;
		System.IntPtr texturePointerRight = System.IntPtr.Zero;
		ulong frameTimeStamp = 0;
		int width, height;
		if (Native.GetLatestFrame(_playerInstance, out texturePointerLeft, out texturePointerRight, out frameTimeStamp, out width, out height))
		{
			bool isFrameUpdated = false;
			// A timestamp of 0 on both sides is treated as "new" so the very first frame is not skipped
			bool isNewFrameTime = (frameTimeStamp > _frameTimeStamp) || (_frameTimeStamp == 0 && frameTimeStamp == 0);
			// Index 0 = left eye (or mono), index 1 = right eye
			for (int i = 0; i < _eyeTextures.Length; i++)
			{
				EyeTexture eyeTexture = _eyeTextures[i];
				System.IntPtr texturePointer = texturePointerLeft;
				if (i == 1)
				{
					texturePointer = texturePointerRight;
				}

				// The existing texture is stale if the native pointer went away or the frame size changed
				bool isNewFrameSpecs = (eyeTexture.texture != null && (texturePointer == IntPtr.Zero || eyeTexture.texture.width != width || eyeTexture.texture.height != height));
				//Debug.Log("tex? " + i + " " + width + " " + height + " " + (eyeTexture.texture != null) + " " + texturePointer.ToString() + " " + frameTimeStamp);

				// Check whether the latest frame is newer than the one we got last time
				if (isNewFrameTime || isNewFrameSpecs)
				{
					if (isNewFrameSpecs)
					{
						eyeTexture.Dispose();
						// TODO: blit from the old texture to the new texture before destroying?
					}

					/// Switch to the latest texture pointer
					if (eyeTexture.texture != null)
					{
						// TODO: check whether UpdateExternalTexture resets the sampling filter to POINT - it seems to in Unity 5.6.6
						if (eyeTexture.nativePointer != texturePointer)
						{
							eyeTexture.texture.UpdateExternalTexture(texturePointer);
							eyeTexture.nativePointer = texturePointer;
						}
					}
					else
					{
						// No Unity texture yet - create one wrapping the native texture pointer
						if (texturePointer != IntPtr.Zero)
						{
							// RJT NOTE: See notes in 'WindowsMediaPlayer::UpdateTexture()' re: 'isLinear'
							bool isLinear = (/*!_supportsLinearColorSpace*/true && (QualitySettings.activeColorSpace == ColorSpace.Linear));
							eyeTexture.texture = Texture2D.CreateExternalTexture(width, height, TextureFormat.BGRA32, false, isLinear, texturePointer);
							if (eyeTexture.texture != null)
							{
								eyeTexture.texture.name = "AVProVideo";
								eyeTexture.nativePointer = texturePointer;
								ApplyTextureProperties(eyeTexture.texture);
							}
							else
							{
								Debug.LogError("[AVProVideo] Failed to create texture");
							}
						}
					}

					isFrameUpdated = true;
				}
			}
			// Only advance the stored timestamp once a texture was actually touched
			if (isFrameUpdated)
			{
				_frameTimeStamp = frameTimeStamp;
			}
		}
	}
}
|
||||
|
||||
// Authentication data (key server URL/token/key bytes) applied to the next media load
private AuthData _nextAuthData = new AuthData();

/// <summary>
/// Gets or sets the authentication data. Setting it immediately pushes the
/// values down to the native player via Native.SetNextAuthData.
/// </summary>
public AuthData AuthenticationData
{
	get
	{
		return _nextAuthData;
	}
	set
	{
		_nextAuthData = value;
		Native.SetNextAuthData(_playerInstance, _nextAuthData);
	}
}
|
||||
|
||||
/// <summary>Native WinRT textures are top-down, so rendering must flip vertically.</summary>
public override bool RequiresVerticalFlip()
{
	return true;
}
|
||||
|
||||
/// <summary>
/// Performs an accurate (frame-exact) seek to the given time in seconds.
/// </summary>
public override void Seek(double time)
{
	Native.SeekParams seekParams = new Native.SeekParams
	{
		timeSeconds = time,
		mode = Native.SeekMode.Accurate
	};
	Native.Seek(_playerInstance, ref seekParams);
}
|
||||
|
||||
/// <summary>Fast (keyframe) seek; falls back to an accurate seek on this platform.</summary>
public override void SeekFast(double time)
{
	// Keyframe seeking is not supported on this platform
	Seek(time);
}
|
||||
|
||||
/// <summary>Sets looping, caching the value so it can be re-applied after OpenMedia.</summary>
public override void SetLooping(bool bLooping)
{
	_isLooping = bLooping;
	Native.SetLooping(_playerInstance, _isLooping);
}
|
||||
|
||||
/// <summary>Sets the playback rate, clamped to be non-negative.</summary>
public override void SetPlaybackRate(float rate)
{
	// Clamp rate as WinRT doesn't seem to be able to handle negative rate
	rate = Mathf.Max(0f, rate);
	Native.SetPlaybackRate(_playerInstance, rate);
}
|
||||
|
||||
/// <summary>Sets the audio volume, caching it so it can be re-applied after OpenMedia.</summary>
public override void SetVolume(float volume)
{
	_volume = volume;
	Native.SetAudioVolume(_playerInstance, _volume);
}
|
||||
|
||||
/// <summary>Stops playback. Implemented as a pause on this platform (position is not reset).</summary>
public override void Stop()
{
	Pause();
}
|
||||
|
||||
/// <summary>
/// Refreshes the seekable and buffered time ranges from the native player and
/// recalculates their aggregate min/max ranges.
/// </summary>
private void UpdateTimeRanges()
{
	UpdateTimeRange(ref _seekableTimes._ranges, Native.TimeRangeTypes.Seekable);
	UpdateTimeRange(ref _bufferedTimes._ranges, Native.TimeRangeTypes.Buffered);
	_seekableTimes.CalculateRange();
	_bufferedTimes.CalculateRange();
}
|
||||
|
||||
/// <summary>
/// Fills 'range' with the requested time ranges from the native player.
/// The native call returns the actual range count; when it differs from the
/// current array size the array is reallocated and the query re-issued.
/// </summary>
private void UpdateTimeRange(ref TimeRange[] range, Native.TimeRangeTypes timeRangeType)
{
	int newCount = Native.GetTimeRanges(_playerInstance, range, range.Length, timeRangeType);
	if (newCount != range.Length)
	{
		range = new TimeRange[newCount];
		// NOTE(review): assumes the count is stable between the two native calls - TODO confirm
		Native.GetTimeRanges(_playerInstance, range, range.Length, timeRangeType);
	}
}
|
||||
|
||||
/// <summary>
/// Returns the stream's current program date-time, converted from the native
/// player's seconds-since-1970 representation.
/// </summary>
public override System.DateTime GetProgramDateTime()
{
	return Helper.ConvertSecondsSince1970ToDateTime(Native.GetCurrentDateTimeSecondsSince1970(_playerInstance));
}
|
||||
|
||||
/// <summary>
/// Per-frame update: ticks the native player, refreshes tracks/text cues/time
/// ranges/subtitles/textures, polls the last error, and logs the playback path
/// once the first video frame (or audio-only stream) is available.
/// </summary>
public override void Update()
{
	Native.Update(_playerInstance);
	UpdateTracks();
	UpdateTextCue();

	_lastError = (ErrorCode)Native.GetLastErrorCode(_playerInstance);

	UpdateTimeRanges();
	UpdateSubtitles();
	Update_Textures();
	UpdateDisplayFrameRate();

	// "Loaded" is only declared once we can describe the media: a created video
	// texture with valid track info, or an audio-only stream
	if (!_isMediaLoaded)
	{
		if (HasVideo() && _eyeTextures[0].texture != null)
		{
			Native.VideoTrack videoTrack;
			if (Native.GetActiveVideoTrackInfo(_playerInstance, out videoTrack))
			{
				Helper.LogInfo("Using playback path: " + _playerDescription + " (" + videoTrack.frameWidth + "x" + videoTrack.frameHeight + "@" + videoTrack.frameRate.ToString("F2") + ")");
				_isMediaLoaded = true;
			}
		}
		else if (HasAudio() && !HasVideo())
		{
			Helper.LogInfo("Using playback path: " + _playerDescription);
			_isMediaLoaded = true;
		}
	}
}
|
||||
|
||||
/*public override void SetKeyServerURL(string url)
|
||||
{
|
||||
_nextAuthData.URL = url;
|
||||
AuthenticationData = _nextAuthData;
|
||||
}*/
|
||||
|
||||
/// <summary>Sets the key server auth token and pushes the updated auth data to the native player.</summary>
public override void SetKeyServerAuthToken(string token)
{
	_nextAuthData.Token = token;
	// Re-assigning through the property forwards the data to the native plugin
	AuthenticationData = _nextAuthData;
}
|
||||
|
||||
/// <summary>Sets an explicit decryption key and pushes the updated auth data to the native player.</summary>
public override void SetOverrideDecryptionKey(byte[] key)
{
	_nextAuthData.KeyBytes = key;
	// Re-assigning through the property forwards the data to the native plugin
	AuthenticationData = _nextAuthData;
}
|
||||
}
|
||||
|
||||
// Tracks
|
||||
// Track (video/audio/text) enumeration and selection, backed by the native plugin.
public sealed partial class WindowsRtMediaPlayer
{
	/// <summary>Requests the native player switch the active track of the given type.</summary>
	internal override bool InternalSetActiveTrack(TrackType trackType, int trackUid)
	{
		return Native.SetActiveTrack(_playerInstance, trackType, trackUid);
	}

	// Has it changed since the last frame 'tick'
	internal override bool InternalIsChangedTracks(TrackType trackType)
	{
		return Native.IsChangedTracks(_playerInstance, trackType);
	}

	/// <summary>Returns how many tracks of the given type the media exposes.</summary>
	internal override int InternalGetTrackCount(TrackType trackType)
	{
		return Native.GetTrackCount(_playerInstance, trackType);
	}

	/// <summary>
	/// Queries the native player for one track's uid/name/language and wraps it
	/// in the matching managed track type. Returns null if the query fails.
	/// 'isActiveTrack' is filled in by the native call.
	/// </summary>
	internal override TrackBase InternalGetTrackInfo(TrackType trackType, int trackIndex, ref bool isActiveTrack)
	{
		TrackBase result = null;
		StringBuilder name = new StringBuilder(128);
		StringBuilder language = new StringBuilder(16);
		int uid = -1;
		if (Native.GetTrackInfo(_playerInstance, trackType, trackIndex, ref uid, ref isActiveTrack, name, name.Capacity, language, language.Capacity))
		{
			if (trackType == TrackType.Video)
			{
				result = new VideoTrack(uid, name.ToString(), language.ToString(), false);
			}
			else if (trackType == TrackType.Audio)
			{
				result = new AudioTrack(uid, name.ToString(), language.ToString(), false);
			}
			else if (trackType == TrackType.Text)
			{
				result = new TextTrack(uid, name.ToString(), language.ToString(), false);
			}
		}
		return result;
	}

	// Native track-related P/Invoke bindings
	private partial struct Native
	{
		[DllImport("AVProVideoWinRT")]
		[return: MarshalAs(UnmanagedType.I1)]
		public static extern bool IsChangedTracks(System.IntPtr instance, TrackType trackType);

		[DllImport("AVProVideoWinRT")]
		public static extern int GetTrackCount(System.IntPtr instance, TrackType trackType);

		[DllImport("AVProVideoWinRT")]
		[return: MarshalAs(UnmanagedType.I1)]
		public static extern bool GetTrackInfo(System.IntPtr instance, TrackType trackType, int index, ref int uid,
												ref bool isActive,
												[MarshalAs(UnmanagedType.LPWStr)] StringBuilder name, int maxNameLength,
												[MarshalAs(UnmanagedType.LPWStr)] StringBuilder language, int maxLanguageLength);

		[DllImport("AVProVideoWinRT")]
		[return: MarshalAs(UnmanagedType.I1)]
		public static extern bool SetActiveTrack(System.IntPtr instance, TrackType trackType, int trackUid);
	}
}
|
||||
|
||||
// Text Cue
|
||||
// Text cue (subtitle/closed-caption) support, backed by the native plugin.
public sealed partial class WindowsRtMediaPlayer
{
	// Has it changed since the last frame 'tick'
	internal override bool InternalIsChangedTextCue()
	{
		return Native.IsChangedTextCue(_playerInstance);
	}

	/// <summary>
	/// Returns the current text cue as a managed string, or null when there is
	/// no active cue. The native pointer is UTF-16 (marshalled via PtrToStringUni).
	/// </summary>
	internal override string InternalGetCurrentTextCue()
	{
		string result = null;
		System.IntPtr ptr = Native.GetCurrentTextCue(_playerInstance);
		if (ptr != System.IntPtr.Zero)
		{
			result = System.Runtime.InteropServices.Marshal.PtrToStringUni(ptr);
		}
		return result;
	}

	// Native text-cue P/Invoke bindings
	private partial struct Native
	{
		[DllImport("AVProVideoWinRT")]
		[return: MarshalAs(UnmanagedType.I1)]
		public static extern bool IsChangedTextCue(System.IntPtr instance);

		[DllImport("AVProVideoWinRT")]
		public static extern System.IntPtr GetCurrentTextCue(System.IntPtr instance);
	}
}
|
||||
|
||||
// Core P/Invoke bindings into the AVProVideoWinRT native plugin.
public sealed partial class WindowsRtMediaPlayer
{
	private partial struct Native
	{
		[DllImport("AVProVideoWinRT", EntryPoint = "GetPluginVersion")]
		private static extern System.IntPtr GetPluginVersionStringPointer();

		/// <summary>Returns the native plugin version as a managed (ANSI) string.</summary>
		public static string GetPluginVersion()
		{
			return System.Runtime.InteropServices.Marshal.PtrToStringAnsi(GetPluginVersionStringPointer());
		}

		[DllImport("AVProVideoWinRT")]
		public static extern System.IntPtr CreatePlayer();

		[DllImport("AVProVideoWinRT")]
		public static extern void DestroyPlayer(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
#if AVPROVIDEO_MARSHAL_RETURN_BOOL
		[return: MarshalAs(UnmanagedType.I1)]
#endif
		public static extern bool OpenMedia(System.IntPtr playerInstance, [MarshalAs(UnmanagedType.LPWStr)] string filePath,
											[MarshalAs(UnmanagedType.LPWStr)] string httpHeader, FileFormat overrideFileFormat,
											bool startWithHighestBitrate, bool use10BitTextures);

		[DllImport("AVProVideoWinRT")]
		public static extern void CloseMedia(System.IntPtr playerInstance);


		[DllImport("AVProVideoWinRT")]
		public static extern void Pause(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern void Play(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern void SetAudioVolume(System.IntPtr playerInstance, float volume);

		[DllImport("AVProVideoWinRT")]
		public static extern void SetAudioBalance(System.IntPtr playerInstance, float balance);

		[DllImport("AVProVideoWinRT")]
		public static extern void SetPlaybackRate(System.IntPtr playerInstance, float rate);

		[DllImport("AVProVideoWinRT")]
		public static extern void SetAudioMuted(System.IntPtr playerInstance, bool muted);

		[DllImport("AVProVideoWinRT")]
		public static extern float GetAudioVolume(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
#if AVPROVIDEO_MARSHAL_RETURN_BOOL
		[return: MarshalAs(UnmanagedType.I1)]
#endif
		public static extern bool IsAudioMuted(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern float GetAudioBalance(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern float GetPlaybackRate(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern void SetLooping(System.IntPtr playerInstance, bool looping);

		[DllImport("AVProVideoWinRT")]
#if AVPROVIDEO_MARSHAL_RETURN_BOOL
		[return: MarshalAs(UnmanagedType.I1)]
#endif
		public static extern bool IsLooping(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern int GetLastErrorCode(System.IntPtr playerInstance);


		[DllImport("AVProVideoWinRT")]
		public static extern void Update(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern double GetDuration(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern int GetStereoPacking(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern double GetCurrentPosition(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
#if AVPROVIDEO_MARSHAL_RETURN_BOOL
		[return: MarshalAs(UnmanagedType.I1)]
#endif
		public static extern bool GetLatestFrame(System.IntPtr playerInstance, out System.IntPtr leftEyeTexturePointer, out System.IntPtr rightEyeTexturePointer, out ulong frameTimeStamp, out int width, out int height);

		[DllImport("AVProVideoWinRT")]
		public static extern PlaybackState GetPlaybackState(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
#if AVPROVIDEO_MARSHAL_RETURN_BOOL
		[return: MarshalAs(UnmanagedType.I1)]
#endif
		public static extern bool GetActiveVideoTrackInfo(System.IntPtr playerInstance, out VideoTrack videoTrack);

		[DllImport("AVProVideoWinRT")]
#if AVPROVIDEO_MARSHAL_RETURN_BOOL
		[return: MarshalAs(UnmanagedType.I1)]
#endif
		public static extern bool GetActiveAudioTrackInfo(System.IntPtr playerInstance, out AudioTrack audioTrack);

		[DllImport("AVProVideoWinRT")]
		public static extern double GetCurrentDateTimeSecondsSince1970(System.IntPtr playerInstance);

		[DllImport("AVProVideoWinRT")]
		public static extern void SetLiveOffset(System.IntPtr playerInstance, double seconds);

		[DllImport("AVProVideoWinRT")]
		public static extern void DebugValues(System.IntPtr playerInstance, out int isD3D, out int isUnityD3D, out int isTexture, out int isSharedTexture, out int isSurface);

		public enum SeekMode
		{
			Fast = 0,
			Accurate = 1,
			// TODO: Add Fast_Before and Fast_After
		}

		[StructLayout(LayoutKind.Sequential, Pack = 1)]
		public struct VideoTrack
		{
			public int trackIndex;
			public int frameWidth;
			public int frameHeight;
			public float frameRate;
			public uint averageBitRate;
			//public string trackName;
			// TODO: add index, language, name, bitrate, codec etc
		}

		[StructLayout(LayoutKind.Sequential, Pack = 1)]
		public struct AudioTrack
		{
			public int trackIndex;
			public uint channelCount;
			public uint sampleRate;
			public uint bitsPerSample;
			public uint averageBitRate;
			//public string trackName;
			// TODO: add index, language, name, bitrate, codec etc
		}

		[StructLayout(LayoutKind.Sequential, Pack = 1)]
		public struct SeekParams
		{
			public double timeSeconds;
			public SeekMode mode;
			// TODO: add min-max thresholds
		}

		[DllImport("AVProVideoWinRT")]
		public static extern void Seek(System.IntPtr playerInstance, ref SeekParams seekParams);

		/// <summary>
		/// Marshals the managed AuthData into the native representation and pushes it
		/// to the player. Any key bytes are copied into a temporary unmanaged buffer
		/// which is always freed, even if the copy or the native call throws.
		/// </summary>
		public static void SetNextAuthData(System.IntPtr playerInstance, RenderHeads.Media.AVProVideo.AuthData srcAuthData)
		{
			Native.AuthData ad = new Native.AuthData();
			ad.url = string.IsNullOrEmpty(srcAuthData.URL) ? null : srcAuthData.URL;
			ad.token = string.IsNullOrEmpty(srcAuthData.Token) ? null : srcAuthData.Token;
			ad.keyBytes = System.IntPtr.Zero;
			ad.keyBytesLength = 0;
			try
			{
				if (srcAuthData.KeyBytes != null && srcAuthData.KeyBytes.Length > 0)
				{
					ad.keyBytes = Marshal.AllocHGlobal(srcAuthData.KeyBytes.Length);
					Marshal.Copy(srcAuthData.KeyBytes, 0, ad.keyBytes, srcAuthData.KeyBytes.Length);
					ad.keyBytesLength = srcAuthData.KeyBytes.Length;
				}

				SetNextAuthData(playerInstance, ref ad);
			}
			finally
			{
				// Fix: free the unmanaged buffer on all paths (previously leaked if
				// Marshal.Copy or the native call threw)
				if (ad.keyBytes != System.IntPtr.Zero)
				{
					Marshal.FreeHGlobal(ad.keyBytes);
				}
			}
		}

		// Native layout mirrored by the plugin; strings are marshalled as UTF-16
		[StructLayout(LayoutKind.Sequential, Pack = 1)]
		public struct AuthData
		{
			[MarshalAs(UnmanagedType.LPWStr)]
			public string url;

			[MarshalAs(UnmanagedType.LPWStr)]
			public string token;

			public System.IntPtr keyBytes;
			public int keyBytesLength;
		};

		[DllImport("AVProVideoWinRT")]
		private static extern void SetNextAuthData(System.IntPtr playerInstance, ref AuthData authData);

		internal enum TimeRangeTypes
		{
			Seekable = 0,
			Buffered = 1,
		}

		[DllImport("AVProVideoWinRT")]
		public static extern int GetTimeRanges(System.IntPtr playerInstance, [Out, MarshalAs(UnmanagedType.LPArray, SizeParamIndex=2)] TimeRange[] ranges, int rangeCount, TimeRangeTypes timeRangeType);

		// RJT TODO: Clean this up to better match non-WinRT

		[DllImport("AVProVideoWinRT")]
		public static extern System.IntPtr GetRenderEventFunc();

		// Cached native render-event callback pointer, resolved lazily
		private static System.IntPtr _nativeFunction_UnityRenderEvent;

		/// <summary>Queues the render-thread event that updates all native textures.</summary>
		public static void IssueRenderThreadEvent_UpdateAllTextures()
		{
			if (_nativeFunction_UnityRenderEvent == System.IntPtr.Zero)
			{
				_nativeFunction_UnityRenderEvent = Native.GetRenderEventFunc();
			}
			if (_nativeFunction_UnityRenderEvent != System.IntPtr.Zero)
			{
				GL.IssuePluginEvent(_nativeFunction_UnityRenderEvent, /*(int)Native.RenderThreadEvent.UpdateAllTextures*/1);
			}
		}

		/// <summary>Queues the render-thread event that frees all native textures.</summary>
		public static void IssueRenderThreadEvent_FreeAllTextures()
		{
			if (_nativeFunction_UnityRenderEvent == System.IntPtr.Zero)
			{
				_nativeFunction_UnityRenderEvent = Native.GetRenderEventFunc();
			}
			if (_nativeFunction_UnityRenderEvent != System.IntPtr.Zero)
			{
				GL.IssuePluginEvent(_nativeFunction_UnityRenderEvent, /*(int)Native.RenderThreadEvent.FreeTextures*/2);
			}
		}
	}
}
|
||||
|
||||
// Platform initialisation and per-instance state for the WinRT media player.
public sealed partial class WindowsRtMediaPlayer
{
	// One-time platform init state shared by all instances
	private static bool _isInitialised = false;
	private static string _version = "Plug-in not yet initialised";

	// Timestamp of the last frame applied to the textures
	private ulong _frameTimeStamp;
	// Opaque handle to the native player instance
	private System.IntPtr _playerInstance;

	// Pairs a Unity external texture with the native texture pointer it wraps
	class EyeTexture
	{
		public Texture2D texture = null;
		public System.IntPtr nativePointer = System.IntPtr.Zero;

		// Destroys the Unity texture (immediate in edit mode) and clears the pointer
		public void Dispose()
		{
			if (texture)
			{
				if (Application.isPlaying) { Texture2D.Destroy(texture); }
				else { Texture2D.DestroyImmediate(texture); }
				texture = null;
			}
			nativePointer = System.IntPtr.Zero;
		}
	}

	// Index 0 = left eye (or mono), index 1 = right eye
	private EyeTexture[] _eyeTextures = new EyeTexture[2];

	/// <summary>
	/// One-time platform initialisation. Verifies the graphics API is supported
	/// (D3D11/D3D12/Null) and resolves the native plugin version. Safe to call
	/// repeatedly; returns whether the platform is initialised.
	/// </summary>
	public static bool InitialisePlatform()
	{
		if (!_isInitialised)
		{
			try
			{
#if !UNITY_2019_3_OR_NEWER
				if (SystemInfo.graphicsDeviceType == UnityEngine.Rendering.GraphicsDeviceType.Direct3D12)
				{
					Debug.LogError("[AVProVideo] Direct3D 12 is not supported until Unity 2019.3");
					return false;
				}
#endif
				if (SystemInfo.graphicsDeviceType == UnityEngine.Rendering.GraphicsDeviceType.Null ||
					SystemInfo.graphicsDeviceType == UnityEngine.Rendering.GraphicsDeviceType.Direct3D11 ||
					SystemInfo.graphicsDeviceType == UnityEngine.Rendering.GraphicsDeviceType.Direct3D12)
				{
					/*if (!Native.Init(QualitySettings.activeColorSpace == ColorSpace.Linear))
					{
						Debug.LogError("[AVProVideo] Failing to initialise platform")	;
					}
					else*/
					{
						_isInitialised = true;
						_version = Native.GetPluginVersion();
					}
				}
				else
				{
					Debug.LogError("[AVProVideo] Only Direct3D 11 and 12 are supported, graphicsDeviceType not supported: " + SystemInfo.graphicsDeviceType);
				}
			}
			catch (System.DllNotFoundException e)
			{
				// Native plugin DLL missing or failed to load
				Debug.LogError("[AVProVideo] Failed to load DLL. " + e.Message);
			}
		}

		return _isInitialised;
	}

	/// <summary>Marks the platform as uninitialised so InitialisePlatform runs again.</summary>
	public static void DeinitPlatform()
	{
		//Native.Deinit();
		_isInitialised = false;
	}
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 7b04c4ad4a3b8c44a98a08ea2ae71a6d
|
||||
timeCreated: 1541807235
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
48
Assets/AVProVideo/Runtime/Scripts/Internal/TextTrack.cs
Normal file
48
Assets/AVProVideo/Runtime/Scripts/Internal/TextTrack.cs
Normal file
@@ -0,0 +1,48 @@
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Text;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// Immutable wrapper around a single text (subtitle/caption) cue string.
/// Instances are created internally by the media players.
/// </summary>
public class TextCue
{
	/// <summary>The cue text.</summary>
	public string Text { get; private set; }

	internal TextCue(string text)
	{
		this.Text = text;
	}

	// Hidden: cues are only constructed with text
	private TextCue() { }
}
|
||||
|
||||
public partial class BaseMediaPlayer : ITextTracks
{
	// The currently displayed text cue, or null when none is active
	protected TextCue _currentTextCue = null;
	public TextCue GetCurrentTextCue() { return _currentTextCue; }	// Returns null when there is no active text

	/// <summary>
	/// Refreshes _currentTextCue from the platform implementation when it reports
	/// a change (or when 'force' is set). Returns true if the cue was refreshed.
	/// An empty/null platform string clears the cue to null.
	/// </summary>
	protected bool UpdateTextCue(bool force = false)
	{
		bool result = false;
		// Has it changed since the last 'tick'
		if (force || InternalIsChangedTextCue())
		{
			_currentTextCue = null;
			string text = InternalGetCurrentTextCue();
			if (!string.IsNullOrEmpty(text))
			{
				_currentTextCue = new TextCue(text);
			}
			result = true;
		}
		return result;
	}

	// Implemented per-platform
	internal abstract bool InternalIsChangedTextCue();
	internal abstract string InternalGetCurrentTextCue();
}
|
||||
}
|
||||
12
Assets/AVProVideo/Runtime/Scripts/Internal/TextTrack.cs.meta
Normal file
12
Assets/AVProVideo/Runtime/Scripts/Internal/TextTrack.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 70b7a3055e537f74cb49d2fc4e6989e6
|
||||
timeCreated: 1438695622
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
83
Assets/AVProVideo/Runtime/Scripts/Internal/TimedMetadata.cs
Normal file
83
Assets/AVProVideo/Runtime/Scripts/Internal/TimedMetadata.cs
Normal file
@@ -0,0 +1,83 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2025 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>Access to timed metadata (e.g. in-stream ID3/event data) from a media player.</summary>
public interface ITimedMetadata
{
	// True if a new item arrived since GetTimedMetadataItem was last called
	bool HasNewTimedMetadataItem();

	// Returns the latest item (and clears the "new" flag)
	TimedMetadataItem GetTimedMetadataItem();
}
|
||||
|
||||
/// <summary>
/// A single timed-metadata item: a text payload with the presentation time
/// (in seconds) at which it applies. Immutable; created internally.
/// </summary>
public class TimedMetadataItem
{
	/// <summary>Presentation time of the item in seconds.</summary>
	public double PresentationTime { get; private set; }

	/// <summary>The metadata text payload.</summary>
	public string Text { get; private set; }

	internal TimedMetadataItem(double presentationTime, string text)
	{
		PresentationTime = presentationTime;
		Text = text;
	}

	// Hidden: items are only constructed with their data
	private TimedMetadataItem() { }
}
|
||||
|
||||
// Timed-metadata plumbing shared by all platform players.
public partial class BaseMediaPlayer : ITimedMetadata
{
	// Latest item received from the platform implementation (may be null)
	private TimedMetadataItem _timedMetadataItem = null;
	// Set when a new item arrives; cleared when the item is read
	private bool _hasNewTimedMetadataItem = false;

	/// <summary>True if a new item arrived since the last GetTimedMetadataItem call.</summary>
	public bool HasNewTimedMetadataItem()
	{
		return _hasNewTimedMetadataItem;
	}

	/// <summary>Returns the latest item and clears the "new" flag.</summary>
	public TimedMetadataItem GetTimedMetadataItem()
	{
		_hasNewTimedMetadataItem = false;
		return _timedMetadataItem;
	}

	// Called per update tick; pulls a new item from the platform when one is available
	protected void UpdateTimedMetadata()
	{
		if (InternalHasUpdatedTimedMetadata())
		{
			_timedMetadataItem = InternalGetTimedMetadataItem();
			_hasNewTimedMetadataItem = true;
		}
	}

	// Platform overrides; the defaults report no timed metadata support
	protected virtual bool InternalHasUpdatedTimedMetadata()
	{
		return false;
	}

	protected virtual TimedMetadataItem InternalGetTimedMetadataItem()
	{
		return null;
	}
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 5ebadc28ebe254b37ba87c2f1c8f3299
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
326
Assets/AVProVideo/Runtime/Scripts/Internal/Tracks.cs
Normal file
326
Assets/AVProVideo/Runtime/Scripts/Internal/Tracks.cs
Normal file
@@ -0,0 +1,326 @@
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>Category of a media track.</summary>
public enum TrackType
{
	Video,
	Audio,
	Text,
}
|
||||
|
||||
/// <summary>
/// Common data for a media track: type, per-media unique id, optional
/// name/language, and a derived display name.
/// </summary>
public class TrackBase
{
	protected TrackBase() { }

	internal TrackBase(TrackType trackType, int uid, string name, string language, bool isDefault)
	{
		this.TrackType = trackType;
		this.Uid = uid;
		this.Name = name;
		this.Language = language;
		this.IsDefault = isDefault;
		// Must run after Name/Uid/Language are assigned
		this.DisplayName = CreateDisplayName();
	}

	// The UID is unique to the media
	public int Uid { get; private set; }

	public TrackType TrackType { get; private set; }

	public string DisplayName { get; private set; }

	// Optional
	public string Name { get; private set; }

	// Optional
	public string Language { get; private set; }

	// Optional
	public bool IsDefault { get; private set; }

	// Builds "Name (Language)", falling back to "Track <uid>" when unnamed
	protected string CreateDisplayName()
	{
		string result = string.IsNullOrEmpty(Name) ? ("Track " + Uid.ToString()) : Name;
		if (!string.IsNullOrEmpty(Language))
		{
			result = string.Format("{0} ({1})", result, Language);
		}
		return result;
	}
}
|
||||
|
||||
/// <summary>
/// Non-generic base for a collection of tracks of one type, with an optional
/// "active" track. Mutation members are internal (driven by the players).
/// </summary>
public abstract class TrackCollection : IEnumerable
{
	public virtual TrackType TrackType { get; private set; }
	public abstract int Count { get; }
	public abstract IEnumerator GetEnumerator();

	// Returns the array index of the track with the given uid, or -1 if not found
	public abstract int GetTrackArrayIndexFromUid( int Uid );

	internal abstract void Clear();
	internal abstract void Add(TrackBase track);
	internal abstract bool HasActiveTrack();
	internal abstract bool IsActiveTrack(TrackBase track);
	internal abstract void SetActiveTrack(TrackBase track);
	internal abstract void SetFirstTrackActive();
	public abstract int GetActiveTrackIndex();
}
|
||||
|
||||
/// <summary>
/// Typed track collection backed by a list, tracking at most one active track.
/// </summary>
public class TrackCollection<T> : TrackCollection where T : TrackBase
{
	internal TrackCollection() {}

	public override IEnumerator GetEnumerator()
	{
		return _tracks.GetEnumerator();
	}

	public T this[int index]
	{
		get { return _tracks[index]; }
	}

	// The currently selected track, or null when none is active
	internal T ActiveTrack { get; set; }

	internal override bool HasActiveTrack()
	{
		return ActiveTrack != null;
	}

	internal override bool IsActiveTrack(TrackBase track)
	{
		return ReferenceEquals(ActiveTrack, track);
	}

	// Linear scan for the list position of the track with the given uid; -1 when absent
	public override int GetTrackArrayIndexFromUid( int Uid )
	{
		for (int index = 0; index < _tracks.Count; index++)
		{
			if (_tracks[index].Uid == Uid)
			{
				return index;
			}
		}
		return -1;
	}

	internal override void Clear()
	{
		_tracks.Clear();
		ActiveTrack = null;
	}

	internal override void Add(TrackBase track)
	{
		_tracks.Add(track as T);
	}

	internal override void SetActiveTrack(TrackBase track)
	{
		ActiveTrack = track as T;
	}

	internal override void SetFirstTrackActive()
	{
		if (_tracks.Count > 0)
		{
			ActiveTrack = _tracks[0];
		}
	}

	// NOTE: despite the name this returns the active track's Uid (not its array index), or -1
	public override int GetActiveTrackIndex()
	{
		return (ActiveTrack != null) ? ActiveTrack.Uid : -1;
	}

	public override int Count
	{
		get { return _tracks.Count; }
	}

	internal List<T> _tracks = new List<T>(4);
}
|
||||
|
||||
/// <summary>Collection of video tracks.</summary>
public class VideoTracks : TrackCollection<VideoTrack>
{
	public override TrackType TrackType { get { return TrackType.Video; } }
}
|
||||
|
||||
/// <summary>Collection of audio tracks.</summary>
public class AudioTracks : TrackCollection<AudioTrack>
{
	public override TrackType TrackType { get { return TrackType.Audio; } }
}
|
||||
|
||||
/// <summary>Collection of text (subtitle/caption) tracks.</summary>
public class TextTracks : TrackCollection<TextTrack>
{
	public override TrackType TrackType { get { return TrackType.Text; } }
}
|
||||
|
||||
/// <summary>A video track.</summary>
public class VideoTrack : TrackBase
{
	private VideoTrack() { }

	internal VideoTrack(int uid, string name, string language, bool isDefault)
		: base(TrackType.Video, uid, name, language, isDefault) { }

	// Optional
	public int Bitrate { get; set; }
}
|
||||
|
||||
/// <summary>An audio track.</summary>
public class AudioTrack : TrackBase
{
	private AudioTrack() { }

	internal AudioTrack(int uid, string name, string language, bool isDefault)
		: base(TrackType.Audio, uid, name, language, isDefault) { }

	// Optional
	public int Bitrate { get; private set; }

	// Optional
	public int ChannelCount { get; private set; }
}
|
||||
|
||||
/// <summary>A single text (subtitle/caption) track within the media.</summary>
public class TextTrack : TrackBase
{
	// Tracks are only created internally by the platform players.
	private TextTrack() { }

	internal TextTrack(int uid, string name, string language, bool isDefault)
		: base(TrackType.Text, uid, name, language, isDefault)
	{
	}
}
|
||||
|
||||
/// <summary>Access to enumeration and selection of video tracks.</summary>
public interface IVideoTracks
{
	/// <summary>All video tracks enumerated from the media.</summary>
	VideoTracks GetVideoTracks();
	/// <summary>The currently selected video track, or null if none.</summary>
	VideoTrack GetActiveVideoTrack();
	/// <summary>Switch playback to the given video track.</summary>
	void SetActiveVideoTrack(VideoTrack track);
}
|
||||
|
||||
/// <summary>Access to enumeration and selection of audio tracks.</summary>
public interface IAudioTracks
{
	/// <summary>All audio tracks enumerated from the media.</summary>
	AudioTracks GetAudioTracks();
	/// <summary>The currently selected audio track, or null if none.</summary>
	AudioTrack GetActiveAudioTrack();
	/// <summary>Switch playback to the given audio track.</summary>
	void SetActiveAudioTrack(AudioTrack track);
}
|
||||
|
||||
/// <summary>Access to enumeration and selection of text (subtitle/caption) tracks.</summary>
public interface ITextTracks
{
	/// <summary>All text tracks enumerated from the media.</summary>
	TextTracks GetTextTracks();
	/// <summary>The currently selected text track, or null if none.</summary>
	TextTrack GetActiveTextTrack();
	/// <summary>Switch to the given text track; null disables text output.</summary>
	void SetActiveTextTrack(TextTrack track);
	/// <summary>The text cue at the current playback position, if any.</summary>
	TextCue GetCurrentTextCue();

	/// <summary>Maps a track Uid back to its index in the enumerated track array.</summary>
	int GetTextTrackArrayIndexFromUid( int Uid );
}
|
||||
|
||||
/// <summary>
/// Track-handling portion of BaseMediaPlayer: enumerates video/audio/text
/// tracks from the platform player and routes track selection through it.
/// </summary>
public partial class BaseMediaPlayer : IVideoTracks, IAudioTracks, ITextTracks
{
	protected VideoTracks _videoTracks = new VideoTracks();
	protected AudioTracks _audioTracks = new AudioTracks();
	protected TextTracks _textTracks = new TextTracks();

	// Populated by InitTracks(); used for uniform iteration in UpdateTracks().
	protected TrackCollection[] _trackCollections;

	public VideoTracks GetVideoTracks() { return _videoTracks; }
	public AudioTracks GetAudioTracks() { return _audioTracks; }
	public TextTracks GetTextTracks() { return _textTracks; }
	public VideoTrack GetActiveVideoTrack() { return _videoTracks.ActiveTrack; }
	public AudioTrack GetActiveAudioTrack() { return _audioTracks.ActiveTrack; }
	public TextTrack GetActiveTextTrack() { return _textTracks.ActiveTrack; }

	public void SetActiveVideoTrack(VideoTrack track) { if (track != null) SetActiveTrack(_videoTracks, track); }
	public void SetActiveAudioTrack(AudioTrack track) { if (track != null) SetActiveTrack(_audioTracks, track); }
	// NOTE: unlike video/audio, null is permitted here - it clears (disables) the text track.
	public void SetActiveTextTrack(TextTrack track) { SetActiveTrack(_textTracks, track); }

	// FIX: previously indexed _trackCollections[2] - a magic index that also threw
	// NullReferenceException if called before InitTracks(). _textTracks is the same
	// collection instance, so query it directly.
	public int GetTextTrackArrayIndexFromUid(int Uid) { return _textTracks.GetTrackArrayIndexFromUid(Uid); }

	// Platform-player hooks implemented by each backend.
	internal abstract bool InternalIsChangedTracks(TrackType trackType);
	internal abstract int InternalGetTrackCount(TrackType trackType);
	internal abstract bool InternalSetActiveTrack(TrackType trackType, int trackUid);
	internal abstract TrackBase InternalGetTrackInfo(TrackType trackType, int trackIndex, ref bool isActiveTrack);

	// Builds the collection array used for generic iteration (order: video, audio, text).
	private void InitTracks()
	{
		_trackCollections = new TrackCollection[3] { _videoTracks, _audioTracks, _textTracks };
	}

	// Re-enumerates any collection the platform player reports as changed.
	protected void UpdateTracks()
	{
		foreach (TrackCollection trackCollection in _trackCollections)
		{
			if (InternalIsChangedTracks(trackCollection.TrackType))
			{
				PopulateTrackCollection(trackCollection);
			}
		}
	}

	// Clears and refills a collection from the platform player, preserving
	// which track the player reports as currently active.
	private void PopulateTrackCollection(TrackCollection collection)
	{
		collection.Clear();
		int trackCount = InternalGetTrackCount(collection.TrackType);
		for (int i = 0; i < trackCount; i++)
		{
			bool isActiveTrack = false;
			TrackBase track = InternalGetTrackInfo(collection.TrackType, i, ref isActiveTrack);
			if (track != null)
			{
				collection.Add(track);
				if (isActiveTrack)
				{
					collection.SetActiveTrack(track);
				}
			}
			else
			{
				UnityEngine.Debug.LogWarning(string.Format("[AVProVideo] Failed to enumerate {0} track {1} ", collection.TrackType, i));
			}
		}
	}

	// Routes a selection change through the platform player; only updates the
	// cached active track if the player accepts the change.
	private void SetActiveTrack(TrackCollection collection, TrackBase track)
	{
		// Check if this is already the active track
		if (collection.IsActiveTrack(track)) return;

		// Convert from track to uid (-1 means "no active track")
		int trackUid = -1;
		if (track != null)
		{
			trackUid = track.Uid;
		}

		// NOTE: TrackType is pulled from the collection because track may be null
		if (InternalSetActiveTrack(collection.TrackType, trackUid))
		{
			collection.SetActiveTrack(track);
			switch (collection.TrackType)
			{
				case TrackType.Text:
					// Text track changed - refresh the displayed cue immediately.
					UpdateTextCue(force: true);
					break;
			}
		}
	}
}
|
||||
}
|
||||
12
Assets/AVProVideo/Runtime/Scripts/Internal/Tracks.cs.meta
Normal file
12
Assets/AVProVideo/Runtime/Scripts/Internal/Tracks.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 087e0a6fc1bd92e4bbd96796ec593162
|
||||
timeCreated: 1596803411
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
9
Assets/AVProVideo/Runtime/Scripts/Internal/Utils.meta
Normal file
9
Assets/AVProVideo/Runtime/Scripts/Internal/Utils.meta
Normal file
@@ -0,0 +1,9 @@
|
||||
fileFormatVersion: 2
|
||||
guid: c6f0eb1069a1ccc4b94ebe97c97b9cd1
|
||||
folderAsset: yes
|
||||
timeCreated: 1551721729
|
||||
licenseType: Store
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
268
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/Easing.cs
Normal file
268
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/Easing.cs
Normal file
@@ -0,0 +1,268 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// Normalised easing curves (input and output both in [0..1]) plus a lookup
/// from a Preset enum to the matching curve function.
/// </summary>
public static class Easing
{
	public enum Preset
	{
		Step,
		Linear,
		InQuad,
		OutQuad,
		InOutQuad,
		InCubic,
		OutCubic,
		InOutCubic,
		InQuint,
		OutQuint,
		InOutQuint,
		InQuart,
		OutQuart,
		InOutQuart,
		InExpo,
		OutExpo,
		InOutExpo,
		Random,
		RandomNotStep,
	}

	/// <summary>
	/// Resolves a preset to its curve delegate. Random picks any preset before
	/// Random; RandomNotStep picks any preset after Step and before Random.
	/// Returns null for an unrecognised preset.
	/// </summary>
	public static System.Func<float, float> GetFunction(Preset preset)
	{
		switch (preset)
		{
			case Preset.Step:			return Step;
			case Preset.Linear:			return Linear;
			case Preset.InQuad:			return InQuad;
			case Preset.OutQuad:		return OutQuad;
			case Preset.InOutQuad:		return InOutQuad;
			case Preset.InCubic:		return InCubic;
			case Preset.OutCubic:		return OutCubic;
			case Preset.InOutCubic:		return InOutCubic;
			case Preset.InQuint:		return InQuint;
			case Preset.OutQuint:		return OutQuint;
			case Preset.InOutQuint:		return InOutQuint;
			case Preset.InQuart:		return InQuart;
			case Preset.OutQuart:		return OutQuart;
			case Preset.InOutQuart:		return InOutQuart;
			case Preset.InExpo:			return InExpo;
			case Preset.OutExpo:		return OutExpo;
			case Preset.InOutExpo:		return InOutExpo;
			case Preset.Random:			return GetFunction((Preset)Random.Range(0, (int)Preset.Random));
			case Preset.RandomNotStep:	return GetFunction((Preset)Random.Range((int)Preset.Step + 1, (int)Preset.Random));
			default:					return null;
		}
	}

	// Generic power curves that the named presets are built from.
	public static float PowerEaseIn(float t, float power) => Mathf.Pow(t, power);

	public static float PowerEaseOut(float t, float power) => 1f - Mathf.Abs(Mathf.Pow(t - 1f, power));

	public static float PowerEaseInOut(float t, float power)
	{
		// Ease-in over the first half, ease-out over the second.
		return (t < 0.5f)
			? PowerEaseIn(t * 2f, power) / 2f
			: PowerEaseOut(t * 2f - 1f, power) / 2f + 0.5f;
	}

	// Hard cut at the midpoint.
	public static float Step(float t) => (t >= 0.5f) ? 1f : 0f;

	public static float Linear(float t) => t;

	public static float InQuad(float t) => PowerEaseIn(t, 2f);

	public static float OutQuad(float t) => PowerEaseOut(t, 2f);

	public static float InOutQuad(float t) => PowerEaseInOut(t, 2f);

	public static float InCubic(float t) => PowerEaseIn(t, 3f);

	public static float OutCubic(float t) => PowerEaseOut(t, 3f);

	public static float InOutCubic(float t) => PowerEaseInOut(t, 3f);

	public static float InQuart(float t) => PowerEaseIn(t, 4f);

	public static float OutQuart(float t) => PowerEaseOut(t, 4f);

	public static float InOutQuart(float t) => PowerEaseInOut(t, 4f);

	public static float InQuint(float t) => PowerEaseIn(t, 5f);

	public static float OutQuint(float t) => PowerEaseOut(t, 5f);

	public static float InOutQuint(float t) => PowerEaseInOut(t, 5f);

	// Exponential curves, with the conventional exact endpoints at t==0 / t==1.
	public static float InExpo(float t) => (t == 0f) ? 0f : Mathf.Pow(2f, 10f * (t - 1f));

	public static float OutExpo(float t) => (t == 1f) ? 1f : -Mathf.Pow(2f, -10f * t) + 1f;

	public static float InOutExpo(float t)
	{
		if (t <= 0f) { return 0f; }
		if (t >= 1f) { return 1f; }
		t *= 2f;
		if (t < 1f)
		{
			return 0.5f * Mathf.Pow(2f, 10f * (t - 1f));
		}
		t -= 1f;
		return 0.5f * (-Mathf.Pow(2f, -10f * t) + 2f);
	}
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: c8563989f140841bea81208295a89781
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
127
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/HttpHeader.cs
Normal file
127
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/HttpHeader.cs
Normal file
@@ -0,0 +1,127 @@
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2020-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// A single custom HTTP header field (name/value pair) with basic validation:
/// ASCII-only, no embedded CR/LF (guards against header injection).
/// </summary>
[System.Serializable]
public struct HttpHeader
{
	public string name;
	public string value;

	public HttpHeader(string name, string value) { this.name = name; this.value = value; }

	/// <summary>True when both the name and the value are non-empty.</summary>
	public bool IsComplete()
	{
		return !string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(value);
	}

	/// <summary>
	/// Formats the header as "name:value\r\n", or returns null when the
	/// header is incomplete or fails validation.
	/// </summary>
	public string ToValidatedString()
	{
		if (!IsComplete() || !IsValid())
		{
			return null;
		}
		return string.Format("{0}:{1}\r\n", name, value);
	}

	/// <summary>
	/// A header component is valid when empty, or when it is pure ASCII and
	/// contains no carriage-return / line-feed characters.
	/// </summary>
	public static bool IsValid(string text)
	{
		if (string.IsNullOrEmpty(text))
		{
			return true;
		}
		return IsAscii(text) && !text.Contains("\r") && !text.Contains("\n");
	}

	private static bool IsAscii(string text)
	{
		for (int i = 0; i < text.Length; i++)
		{
			if (text[i] >= 128)
			{
				return false;
			}
		}
		return true;
	}

	private bool IsValid()
	{
		// TODO: check via regular expression
		return IsValid(name) && IsValid(value);
	}
}
|
||||
|
||||
/// <summary>
/// An ordered, serializable list of custom HTTP header fields, with
/// concatenation of all valid fields into a single wire-format string.
/// </summary>
[System.Serializable]
public class HttpHeaderData : IEnumerable
{
	[SerializeField]
	private List<HttpHeader> httpHeaders = new List<HttpHeader>();

	public IEnumerator GetEnumerator() { return httpHeaders.GetEnumerator(); }

	/// <summary>Read-only indexed access to the stored headers.</summary>
	public HttpHeader this[int index] { get { return httpHeaders[index]; } }

	public void Clear() { httpHeaders.Clear(); }

	public void Add(string name, string value) { httpHeaders.Add(new HttpHeader(name, value)); }

	/// <summary>True when at least one header has been added.</summary>
	public bool IsModified() { return (httpHeaders != null && httpHeaders.Count > 0); }

	/// <summary>
	/// Concatenates every complete, valid header as "name:value\r\n" lines.
	/// Complete-but-invalid headers are skipped with a warning; incomplete
	/// headers are skipped silently.
	/// </summary>
	public string ToValidatedString()
	{
		string result = string.Empty;
		foreach (HttpHeader header in httpHeaders)
		{
			if (!header.IsComplete())
			{
				continue;
			}
			string line = header.ToValidatedString();
			if (string.IsNullOrEmpty(line))
			{
				Debug.LogWarning("[AVProVideo] Custom HTTP header field ignored due to invalid format");
			}
			else
			{
				result += line;
			}
		}
		return result;
	}
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 2cfc6f8c038acdf4a9b384e8cb5e9cb2
|
||||
timeCreated: 1588604301
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,72 @@
|
||||
using System;
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2020-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// Data for handling authentication of encrypted AES-128 HLS streams.
/// The decryption key is serialized as base64 text and mirrored into a
/// byte[] on deserialization.
/// </summary>
[Serializable]
public class KeyAuthData : ISerializationCallbackReceiver
{
	[SerializeField]
	public string keyServerToken;

	// Base64 form of the override key - this is the serialized representation.
	[SerializeField, Multiline]
	public string overrideDecryptionKeyBase64;

	/// <summary>True when either the server token or an override key is set.</summary>
	public bool IsModified()
	{
		return !(String.IsNullOrEmpty(keyServerToken) && String.IsNullOrEmpty(overrideDecryptionKeyBase64));
	}

	private byte[] _overrideDecryptionKey;

	/// <summary>
	/// Raw override key bytes. Setting this keeps the base64 field in sync
	/// (null clears it to an empty string).
	/// </summary>
	public byte[] overrideDecryptionKey
	{
		get { return _overrideDecryptionKey; }
		set
		{
			_overrideDecryptionKey = value;
			overrideDecryptionKeyBase64 = (value != null) ? Convert.ToBase64String(value) : "";
		}
	}

	// ISerializationCallbackReceiver

	public void OnBeforeSerialize()
	{
		// Nothing to do - the base64 field is kept up to date by the setter.
	}

	public void OnAfterDeserialize()
	{
		if (string.IsNullOrEmpty(overrideDecryptionKeyBase64))
		{
			_overrideDecryptionKey = null;
			return;
		}
		try
		{
			// Regenerate the byte[] from the serialized base64 text.
			_overrideDecryptionKey = Convert.FromBase64String(overrideDecryptionKeyBase64);
		}
		catch (Exception e)
		{
			Debug.LogWarning($"Failed to decode overrideDecryptionKeyBase64, error: {e}");
		}
	}
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 0e784fab214313d44aaa5906743860fa
|
||||
timeCreated: 1588604301
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
594
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/Resampler.cs
Normal file
594
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/Resampler.cs
Normal file
@@ -0,0 +1,594 @@
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
|
||||
/// Utility class to resample MediaPlayer video frames to allow for smoother playback
|
||||
/// Keeps a buffer of frames with timestamps and presents them using its own clock
|
||||
/// </summary>
|
||||
public class Resampler
|
||||
{
|
||||
// One buffered video frame: a copy of the player's texture plus the
// timestamp it was presented at. 'used' marks whether the slot holds data.
private class TimestampedRenderTexture
{
	public RenderTexture texture = null;
	public long timestamp = 0;
	public bool used = false;
}

// How the output frame is derived from the buffer:
// POINT = nearest buffered frame; LINEAR = blend between the two neighbours.
public enum ResampleMode
{
	POINT, LINEAR
}
|
||||
|
||||
// Ring buffer of recent frames, one array per player texture "eye"
// (outer index = texture index, inner = ring slot).
private List<TimestampedRenderTexture[]> _buffer = new List<TimestampedRenderTexture[]>();
private MediaPlayer _mediaPlayer;
// Resampled output, one texture per player texture.
private RenderTexture[] _outputTexture = null;

// Ring-buffer cursors: _start = oldest slot, _end = next write slot.
private int _start = 0;
private int _end = 0;
// Number of ring slots (clamped to >= 2 in the constructor).
private int _bufferSize = 0;

// Clock origin: timestamps are measured relative to _baseTimestamp.
// Timestamps appear to be in 100ns ticks (divided by 1e7 to get seconds) - TODO confirm.
private long _baseTimestamp = 0;
private float _elapsedTimeSinceBase = 0f;

// Material for the BlendFrames shader used by LINEAR resampling.
private Material _blendMat;

private ResampleMode _resampleMode;
private string _name = "";

// Last timestamp seen from the player; used to estimate dropped frames.
private long _lastTimeStamp = -1;

private int _droppedFrames = 0;

private long _lastDisplayedTimestamp = 0;
private int _frameDisplayedTimer = 0;
private long _currentDisplayedTimestamp = 0;

// Estimated count of frames the player skipped (heuristic, see Update()).
public int DroppedFrames
{
	get { return _droppedFrames; }
}

public int FrameDisplayedTimer
{
	get { return _frameDisplayedTimer; }
}

// Clock origin for the resampler's own presentation clock.
public long BaseTimestamp
{
	get { return _baseTimestamp; }
	set { _baseTimestamp = value; }
}

public float ElapsedTimeSinceBase
{
	get { return _elapsedTimeSinceBase; }
	set { _elapsedTimeSinceBase = value; }
}

// Last interpolation factor used by LinearUpdate (-1 = before first frame,
// 2 = past the newest frame, otherwise the blend factor in [0..1]).
public float LastT
{
	get; private set;
}

// Timestamp of the frame (or blended pair) currently in OutputTexture.
public long TextureTimeStamp
{
	get; private set;
}

// Shader property names/ids for the BlendFrames material.
private const string ShaderPropT = "_t";
private const string ShaderPropAftertex = "_AfterTex";
private int _propAfterTex;
private int _propT;
// Frame rate reported by the media, or guessed from timestamps when unknown.
private float _videoFrameRate;
|
||||
|
||||
// MediaPlayer event hook: picks up the frame rate once metadata arrives and
// resets the buffer when the media is closing.
public void OnVideoEvent(MediaPlayer mp, MediaPlayerEvent.EventType et, ErrorCode errorCode)
{
	if (et == MediaPlayerEvent.EventType.MetaDataReady)
	{
		_videoFrameRate = mp.Info.GetVideoFrameRate();
		// Seed the presentation clock one full buffer behind real time so
		// there is always material to interpolate; 0 when the rate is unknown.
		_elapsedTimeSinceBase = (_videoFrameRate > 0f) ? (_bufferSize / _videoFrameRate) : 0f;
	}
	else if (et == MediaPlayerEvent.EventType.Closing)
	{
		Reset();
	}
}
|
||||
|
||||
/// <summary>
/// Creates a resampler bound to a MediaPlayer. Subscribes to the player's
/// events and loads the frame-blending shader used for LINEAR mode.
/// </summary>
public Resampler(MediaPlayer player, string name, int bufferSize = 2, ResampleMode resampleMode = ResampleMode.LINEAR)
{
	// Need at least two frames buffered to be able to interpolate.
	_bufferSize = Mathf.Max(2, bufferSize);

	// NOTE(review): this listener is never removed here - confirm Release()
	// callers also unsubscribe, otherwise the player keeps this object alive.
	player.Events.AddListener(OnVideoEvent);

	_mediaPlayer = player;

	Shader blendShader = Shader.Find("AVProVideo/Internal/BlendFrames");
	if (blendShader != null)
	{
		_blendMat = new Material(blendShader);
		_propT = Shader.PropertyToID(ShaderPropT);
		_propAfterTex = Shader.PropertyToID(ShaderPropAftertex);
	}
	else
	{
		// Without the shader LINEAR blending cannot work.
		Debug.LogError("[AVProVideo] Failed to find BlendFrames shader");
	}

	_resampleMode = resampleMode;
	_name = name;

	Debug.Log("[AVProVideo] Resampler " + _name + " started");
}
|
||||
|
||||
// The resampled frame(s) to display - one texture per player texture.
// Null until the first successful ConstructRenderTextures().
public Texture[] OutputTexture
{
	get { return _outputTexture; }
}
|
||||
|
||||
// Drops all buffered frames and resets the presentation clock, e.g. when
// the media closes or restarts.
public void Reset()
{
	_baseTimestamp = 0;
	_lastTimeStamp = -1;
	InvalidateBuffer();
}
|
||||
|
||||
// Frees all GPU resources owned by the resampler. Call when done with it.
public void Release()
{
	ReleaseRenderTextures();

	if (_blendMat == null)
	{
		return;
	}

	// DestroyImmediate is required in edit mode; Destroy at runtime.
	if (Application.isPlaying)
	{
		Material.Destroy(_blendMat);
	}
	else
	{
		Material.DestroyImmediate(_blendMat);
	}
}
|
||||
|
||||
// Returns every buffered frame texture and every output texture to the
// RenderTexture temporary pool.
private void ReleaseRenderTextures()
{
	for (int i = 0; i < _buffer.Count; ++i)
	{
		for (int j = 0; j < _buffer[i].Length; ++j)
		{
			if (_buffer[i][j].texture != null)
			{
				RenderTexture.ReleaseTemporary(_buffer[i][j].texture);
				_buffer[i][j].texture = null;
			}
		}

		// _outputTexture has one entry per buffer "eye", so share the loop index.
		if (_outputTexture != null && _outputTexture[i] != null)
		{
			RenderTexture.ReleaseTemporary(_outputTexture[i]);
		}
	}

	_outputTexture = null;
}
|
||||
|
||||
// (Re)allocates the ring buffer and output textures to match the player's
// current texture count and dimensions. Releases any previous allocation.
private void ConstructRenderTextures()
{
	ReleaseRenderTextures();
	_buffer.Clear();

	_outputTexture = new RenderTexture[_mediaPlayer.TextureProducer.GetTextureCount()];

	for (int i = 0; i < _mediaPlayer.TextureProducer.GetTextureCount(); ++i)
	{
		Texture tex = _mediaPlayer.TextureProducer.GetTexture(i);
		_buffer.Add(new TimestampedRenderTexture[_bufferSize]);
		for (int j = 0; j < _bufferSize; ++j)
		{
			_buffer[i][j] = new TimestampedRenderTexture();
		}

		// Allocate frame copies matching the source texture size; all slots start unused.
		for (int j = 0; j < _buffer[i].Length; ++j)
		{
			_buffer[i][j].texture = RenderTexture.GetTemporary(tex.width, tex.height, 0);
			_buffer[i][j].timestamp = 0;
			_buffer[i][j].used = false;
		}

		// Output mirrors the source texture's sampling settings.
		_outputTexture[i] = RenderTexture.GetTemporary(tex.width, tex.height, 0);
		_outputTexture[i].filterMode = tex.filterMode;
		_outputTexture[i].wrapMode = tex.wrapMode;
		_outputTexture[i].anisoLevel = tex.anisoLevel;
		// TODO: set up the mips level too?
	}
}
|
||||
|
||||
// Returns true when the buffered and output textures exist and still match
// the player's current texture count/dimensions; false triggers a rebuild.
private bool CheckRenderTexturesValid()
{
	for (int i = 0; i < _mediaPlayer.TextureProducer.GetTextureCount(); ++i)
	{
		// FIX: guard against the player reporting more textures than we have
		// buffered (previously this would throw before the rebuild could happen).
		if (i >= _buffer.Count)
		{
			return false;
		}

		Texture tex = _mediaPlayer.TextureProducer.GetTexture(i);

		// FIX: iterate the ring slots of this texture's buffer (_buffer[i].Length),
		// not the number of buffers (_buffer.Count) - the previous bound could
		// under-check or over-index when the two counts differ.
		for (int j = 0; j < _buffer[i].Length; ++j)
		{
			if (_buffer[i][j].texture == null || _buffer[i][j].texture.width != tex.width || _buffer[i][j].texture.height != tex.height)
			{
				return false;
			}
		}

		if (_outputTexture == null || _outputTexture[i] == null || _outputTexture[i].width != tex.width || _outputTexture[i].height != tex.height)
		{
			return false;
		}
	}

	return true;
}
|
||||
|
||||
// Finds, within buffer 'frameIdx', the used slot whose timestamp is closest
// to (but not after) the current presentation time. Falls back to the
// earliest buffered frame when no frame precedes the clock, or -1 when the
// buffer is empty / frameIdx is out of range.
private int FindBeforeFrameIndex(int frameIdx)
{
	if (frameIdx >= _buffer.Count)
	{
		return -1;
	}

	int foundFrame = -1;
	float smallestDif = float.MaxValue;
	int closest = -1;
	float smallestElapsed = float.MaxValue;

	for (int i = 0; i < _buffer[frameIdx].Length; ++i)
	{
		if (_buffer[frameIdx][i].used)
		{
			// Slot time in seconds relative to the clock origin (ticks / 1e7).
			float elapsed = (_buffer[frameIdx][i].timestamp - _baseTimestamp) / 10000000f;

			// Keep track of the earliest frame, just in case no "before" frame is found.
			if (elapsed < smallestElapsed)
			{
				closest = i;
				smallestElapsed = elapsed;
			}

			// dif >= 0 means this frame is at or before the presentation clock.
			float dif = _elapsedTimeSinceBase - elapsed;

			if (dif >= 0 && dif < smallestDif)
			{
				smallestDif = dif;
				foundFrame = i;
			}
		}
	}

	if (foundFrame < 0)
	{
		if (closest < 0)
		{
			// No used slots at all.
			return -1;
		}

		return closest;
	}

	return foundFrame;
}
|
||||
|
||||
// Finds, within buffer 'frameIdx', the used slot whose timestamp is nearest
// to the current presentation time in either direction; -1 if none/out of range.
private int FindClosestFrame(int frameIdx)
{
	if (frameIdx >= _buffer.Count)
	{
		return -1;
	}

	int foundPos = -1;
	float smallestDif = float.MaxValue;

	for (int i = 0; i < _buffer[frameIdx].Length; ++i)
	{
		if (_buffer[frameIdx][i].used)
		{
			// Slot time in seconds relative to the clock origin (ticks / 1e7).
			float elapsed = (_buffer[frameIdx][i].timestamp - _baseTimestamp) / 10000000f;
			float dif = Mathf.Abs(_elapsedTimeSinceBase - elapsed);
			if (dif < smallestDif)
			{
				foundPos = i;
				smallestDif = dif;
			}
		}
	}

	return foundPos;
}
|
||||
|
||||
// POINT mode: selects the buffered frame closest to the presentation clock
// and presents it unblended.
private void PointUpdate()
{
	for (int i = 0; i < _buffer.Count; ++i)
	{
		int frameIndex = FindClosestFrame(i);
		if (frameIndex < 0)
		{
			continue;
		}

		// FIX(consistency): this previously duplicated SampleFrame's blit and
		// timestamp bookkeeping inline; call the shared path instead.
		SampleFrame(frameIndex, i);
	}
}
|
||||
|
||||
// Presents a single buffered frame: copies buffer slot 'frameIdx' of texture
// 'bufferIdx' into the output and records its timestamp as current.
private void SampleFrame(int frameIdx, int bufferIdx)
{
	_outputTexture[bufferIdx].DiscardContents();
	Graphics.Blit(_buffer[bufferIdx][frameIdx].texture, _outputTexture[bufferIdx]);
	TextureTimeStamp = _currentDisplayedTimestamp = _buffer[bufferIdx][frameIdx].timestamp;
}
|
||||
|
||||
// Same as SampleFrame, but blends the two given frames with factor t
// (0 = frameIdx1, 1 = frameIdx2) using the BlendFrames material.
// TextureTimeStamp is interpolated; _currentDisplayedTimestamp stays on frame 1.
private void SampleFrames(int bufferIdx, int frameIdx1, int frameIdx2, float t)
{
	_blendMat.SetFloat(_propT, t);
	_blendMat.SetTexture(_propAfterTex, _buffer[bufferIdx][frameIdx2].texture);
	_outputTexture[bufferIdx].DiscardContents();
	Graphics.Blit(_buffer[bufferIdx][frameIdx1].texture, _outputTexture[bufferIdx], _blendMat);
	TextureTimeStamp = (long)Mathf.Lerp(_buffer[bufferIdx][frameIdx1].timestamp, _buffer[bufferIdx][frameIdx2].timestamp, t);
	_currentDisplayedTimestamp = _buffer[bufferIdx][frameIdx1].timestamp;
}
|
||||
|
||||
// LINEAR mode: finds the buffered frame just before the presentation clock
// and, when a following frame exists, blends between the two; otherwise
// presents the single frame and flags the clock skew via LastT.
private void LinearUpdate()
{
	for (int i = 0; i < _buffer.Count; ++i)
	{
		// Find closest frame at or before the presentation clock.
		int frameIndex = FindBeforeFrameIndex(i);

		// No valid frame; this should never happen once the buffer has filled.
		if (frameIndex < 0)
		{
			continue;
		}

		// Resample, or just use the last frame and pin the output to it.
		float frameElapsed = (_buffer[i][frameIndex].timestamp - _baseTimestamp) / 10000000f;
		if (frameElapsed > _elapsedTimeSinceBase)
		{
			// Clock is before the earliest frame; LastT = -1 marks this case.
			SampleFrame(frameIndex, i);
			LastT = -1f;
		}
		else
		{
			int next = (frameIndex + 1) % _buffer[i].Length;
			float nextElapsed = (_buffer[i][next].timestamp - _baseTimestamp) / 10000000f;

			// No later frame available - we cannot predict the future; LastT = 2 marks this.
			if (nextElapsed < frameElapsed)
			{
				SampleFrame(frameIndex, i);
				LastT = 2f;
			}
			// Have a before and after frame - interpolate between them.
			else
			{
				float range = nextElapsed - frameElapsed;
				float t = (_elapsedTimeSinceBase - frameElapsed) / range;
				SampleFrames(i, frameIndex, next, t);
				LastT = t;
			}
		}
	}
}
|
||||
|
||||
// Marks every buffered frame as stale and re-centres the presentation clock
// half a buffer behind real time.
// NOTE(review): when _videoFrameRate is 0 this divides by zero, leaving
// _elapsedTimeSinceBase as Infinity until a frame rate is known - confirm
// this is recovered by the frame-rate guess in Update().
private void InvalidateBuffer()
{
	_elapsedTimeSinceBase = (_bufferSize / 2) / _videoFrameRate;

	for (int i = 0; i < _buffer.Count; ++i)
	{
		for (int j = 0; j < _buffer[i].Length; ++j)
		{
			_buffer[i][j].used = false;
		}
	}

	// Empty ring: read and write cursors coincide.
	_start = _end = 0;
}
|
||||
|
||||
// Estimates the frame rate from the buffered timestamps of the first texture:
// for each used slot, takes the smallest timestamp gap to any later used slot,
// averages those gaps, and converts ticks-per-frame to frames-per-second.
// NOTE(review): if fewer than two usable pairs exist, fps stays 0 (or a single
// raw gap) and the result can be Infinity/odd - confirm callers only invoke
// this once the buffer is full (as Update() appears to).
private float GuessFrameRate()
{
	int fpsCount = 0;
	long fps = 0;

	for (int k = 0; k < _buffer[0].Length; k++)
	{
		if (_buffer[0][k].used)
		{
			// Find the pair with the smallest difference
			long smallestDiff = long.MaxValue;
			for (int j = k + 1; j < _buffer[0].Length; j++)
			{
				if (_buffer[0][j].used)
				{
					long diff = System.Math.Abs(_buffer[0][k].timestamp - _buffer[0][j].timestamp);
					if (diff < smallestDiff)
					{
						smallestDiff = diff;
					}
				}
			}

			if (smallestDiff != long.MaxValue)
			{
				fps += smallestDiff;
				fpsCount++;
			}
		}
	}
	if (fpsCount > 1)
	{
		// Average the per-slot minimum gaps.
		fps /= fpsCount;
	}
	// Convert average ticks-per-frame (100ns units) to frames-per-second.
	return 10000000f / (float)fps;
}
|
||||
|
||||
/// <summary>
/// Per-frame resampler tick: copies new frames from the media player's texture producer into a
/// ring buffer, estimates dropped frames and (once, when the buffer first fills) the video frame
/// rate, then drives the POINT/LINEAR resample output while playback is active.
/// Safe no-op when no texture producer or texture is available yet.
/// </summary>
public void Update()
{
	if (_mediaPlayer.TextureProducer == null)
	{
		return;
	}

	// Recreate textures if invalid (no decoded frame available yet)
	if (_mediaPlayer.TextureProducer == null || _mediaPlayer.TextureProducer.GetTexture() == null)
	{
		return;
	}

	if (!CheckRenderTexturesValid())
	{
		ConstructRenderTextures();
	}

	long currentTimestamp = _mediaPlayer.TextureProducer.GetTextureTimeStamp();

	// If the frame has been updated, estimate dropped frames from the timestamp gap.
	// Timestamps are in 100ns ticks (10,000,000 per second).
	if (currentTimestamp != _lastTimeStamp)
	{
		float dif = Mathf.Abs(currentTimestamp - _lastTimeStamp);
		float frameLength = (10000000f / _videoFrameRate);
		// Only count gaps between ~1.1 and ~3.1 frame lengths as drops; larger gaps are
		// likely seeks/discontinuities rather than decoder drops.
		if (dif > frameLength * 1.1f && dif < frameLength * 3.1f)
		{
			_droppedFrames += (int)((dif - frameLength) / frameLength + 0.5);
		}
		_lastTimeStamp = currentTimestamp;
	}

	// --- Adding texture to buffer logic ---
	long timestamp = _mediaPlayer.TextureProducer.GetTextureTimeStamp();
	bool insertNewFrame = !_mediaPlayer.Control.IsSeeking();
	// If the buffer is not empty, reject the frame when it repeats the most recent timestamp
	if (_start != _end || _buffer[0][_end].used)
	{
		int lastFrame = (_end + _buffer[0].Length - 1) % _buffer[0].Length;
		// Frame is not new and thus we do not need to store it
		if (timestamp == _buffer[0][lastFrame].timestamp)
		{
			insertNewFrame = false;
		}
	}

	// Remember fullness BEFORE insertion so we can detect the "just became full" transition below
	bool bufferWasNotFull = (_start != _end) || (!_buffer[0][_end].used);

	if (insertNewFrame)
	{
		// Buffer empty: reset base timestamp to the incoming frame's timestamp
		if (_start == _end && !_buffer[0][_end].used)
		{
			_baseTimestamp = timestamp;
		}

		// If the buffer is full, drop the earliest frame by advancing the start index
		if (_end == _start && _buffer[0][_end].used)
		{
			_start = (_start + 1) % _buffer[0].Length;
		}

		// Copy every texture plane (e.g. luma+chroma) for this frame into the ring buffer slot
		for (int i = 0; i < _mediaPlayer.TextureProducer.GetTextureCount(); ++i)
		{
			Texture currentTexture = _mediaPlayer.TextureProducer.GetTexture(i);

			// Store frame info
			_buffer[i][_end].texture.DiscardContents();
			Graphics.Blit(currentTexture, _buffer[i][_end].texture);
			_buffer[i][_end].timestamp = timestamp;
			_buffer[i][_end].used = true;
		}

		_end = (_end + 1) % _buffer[0].Length;
	}

	bool bufferNotFull = (_start != _end) || (!_buffer[0][_end].used);

	if (bufferNotFull)
	{
		// Buffer still filling: show the oldest buffered frame so the user sees something
		for (int i = 0; i < _buffer.Count; ++i)
		{
			_outputTexture[i].DiscardContents();
			Graphics.Blit(_buffer[i][_start].texture, _outputTexture[i]);
			_currentDisplayedTimestamp = _buffer[i][_start].timestamp;
		}
	}
	else
	{
		// If we don't have a valid frame rate and the buffer is now full, guess the frame rate
		// by looking at the buffered timestamps
		if (bufferWasNotFull && _videoFrameRate <= 0f)
		{
			_videoFrameRate = GuessFrameRate();
			_elapsedTimeSinceBase = (_bufferSize / 2) / _videoFrameRate;
		}
	}

	if (_mediaPlayer.Control.IsPaused())
	{
		InvalidateBuffer();
	}

	// We always wait until the buffer is full before resampling; until then the first buffered
	// frame was already assigned to the output above.
	if (bufferNotFull)
	{
		return;
	}

	if (_mediaPlayer.Control.IsPlaying() && !_mediaPlayer.Control.IsFinished())
	{
		// Correct elapsed time if it has drifted more than half a buffer's worth of frames
		// from the timestamp at the middle of the buffer
		long ts = _buffer[0][(_start + _bufferSize / 2) % _bufferSize].timestamp - _baseTimestamp;
		double dif = Mathf.Abs(((float)((double)_elapsedTimeSinceBase * 10000000) - ts));
		double threshold = (_buffer[0].Length / 2) / _videoFrameRate * 10000000;

		if (dif > threshold)
		{
			_elapsedTimeSinceBase = ts / 10000000f;
		}

		if (_resampleMode == ResampleMode.POINT)
		{
			PointUpdate();
		}
		else if (_resampleMode == ResampleMode.LINEAR)
		{
			LinearUpdate();
		}

		// Unscaled so playback speed is unaffected by Time.timeScale
		_elapsedTimeSinceBase += Time.unscaledDeltaTime;
	}
}
|
||||
|
||||
/// <summary>
/// Tracks how long the currently displayed frame has been on screen: resets the counter when
/// a new frame's timestamp is displayed, then increments it every call.
/// </summary>
public void UpdateTimestamp()
{
	bool frameChanged = (_currentDisplayedTimestamp != _lastDisplayedTimestamp);
	if (frameChanged)
	{
		// A new frame is now being displayed; restart its on-screen counter
		_frameDisplayedTimer = 0;
		_lastDisplayedTimestamp = _currentDisplayedTimestamp;
	}
	_frameDisplayedTimer++;
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 8ac8dc09faa6b1d48bf6f490c9888550
|
||||
timeCreated: 1497356591
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
144
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/Subtitles.cs
Normal file
144
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/Subtitles.cs
Normal file
@@ -0,0 +1,144 @@
|
||||
using UnityEngine;
|
||||
using System.Collections.Generic;
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2021 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// A single timed subtitle cue: an index, display text (which may contain rich-text markup
/// such as &lt;font color=""&gt; and &lt;u&gt;), and a start/end time in seconds.
/// </summary>
public class Subtitle
{
	public int index;
	// Rich string can contain <font color=""> <u> etc
	public string text;
	public double timeStart, timeEnd;

	/// <summary>Returns true when <paramref name="time"/> lies after both the start and end of this cue.</summary>
	public bool IsBefore(double time)
	{
		return time > timeEnd && time > timeStart;
	}

	/// <summary>Returns true when <paramref name="time"/> falls within [timeStart, timeEnd).</summary>
	public bool IsTime(double time)
	{
		return timeStart <= time && time < timeEnd;
	}
}
|
||||
|
||||
/// <summary>
/// Placeholder for a future subtitle playback controller. The original notes below sketch its
/// intended responsibilities; no behaviour is implemented yet.
/// </summary>
public class SubtitlePlayer
{
	// Planned features (not yet implemented):
	// min time, max time
	// set time
	// event for change(subs added, subs removed)
	// list of subs on
}
|
||||
|
||||
/// <summary>Helpers for parsing subtitle data (currently the SRT format).</summary>
public class SubtitleUtils
{
	/// <summary>
	/// Parse a timecode in the SRT format "00:00:48,924" and convert it to seconds.
	/// Also accepts '.' as the millisecond separator, which appears in many SRT files in the
	/// wild (and in WebVTT). Returns 0.0 if the text does not split into exactly 4 fields.
	/// </summary>
	private static double ParseTimeToSeconds(string text)
	{
		double result = 0.0;

		string[] digits = text.Split(new char[] { ':', ',', '.' });

		if (digits.Length == 4)
		{
			// Timecodes are machine-generated; parse with the invariant culture so a
			// locale's digit/format settings can never change the result (CA1305)
			int hours = int.Parse(digits[0], System.Globalization.CultureInfo.InvariantCulture);
			int minutes = int.Parse(digits[1], System.Globalization.CultureInfo.InvariantCulture);
			int seconds = int.Parse(digits[2], System.Globalization.CultureInfo.InvariantCulture);
			int milliseconds = int.Parse(digits[3], System.Globalization.CultureInfo.InvariantCulture);

			// hours*3600 + minutes*60 + seconds, plus fractional milliseconds
			result = (milliseconds / 1000.0) + (seconds + (minutes + (hours * 60)) * 60);
		}

		return result;
	}

	/// <summary>
	/// Parse subtitles in the SRT format and convert to a list of ordered Subtitle objects.
	/// Returns null when the data is empty or has fewer than 3 lines.
	/// Throws System.FormatException when a timing line does not contain "start --> end".
	/// </summary>
	public static List<Subtitle> ParseSubtitlesSRT(string data)
	{
		List<Subtitle> result = null;

		if (!string.IsNullOrEmpty(data))
		{
			data = data.Trim();
			// Split on any newline convention (CRLF, LFCR, LF, CR)
			var rx = new System.Text.RegularExpressions.Regex("\n\r|\r\n|\n|\r");
			string[] lines = rx.Split(data);

			if (lines.Length >= 3)
			{
				result = new List<Subtitle>(256);

				// Per-cue line index: 0 = numeric counter, 1 = timing line, >=2 = text lines.
				// A blank line terminates the cue.
				int count = 0;
				int index = 0;
				Subtitle subtitle = null;
				for (int i = 0; i < lines.Length; i++)
				{
					if (index == 0)
					{
						subtitle = new Subtitle();
						// Use our own running counter rather than trusting the file's numbering
						subtitle.index = count;// int.Parse(lines[i]);
					}
					else if (index == 1)
					{
						string[] times = lines[i].Split(new string[] { " --> " }, System.StringSplitOptions.RemoveEmptyEntries);
						if (times.Length == 2)
						{
							subtitle.timeStart = ParseTimeToSeconds(times[0]);
							subtitle.timeEnd = ParseTimeToSeconds(times[1]);
						}
						else
						{
							throw new System.FormatException("SRT format doesn't appear to be valid");
						}
					}
					else
					{
						if (!string.IsNullOrEmpty(lines[i]))
						{
							if (index == 2)
							{
								subtitle.text = lines[i];
							}
							else
							{
								// Additional text lines are joined with newlines
								subtitle.text += "\n" + lines[i];
							}
						}
					}

					if (string.IsNullOrEmpty(lines[i]) && index > 1)
					{
						// Blank line after the timing line ends the current cue
						result.Add(subtitle);
						index = 0;
						count++;
						subtitle = null;
					}
					else
					{
						index++;
					}
				}

				// Handle the last cue (no trailing blank line)
				if (subtitle != null)
				{
					result.Add(subtitle);
					subtitle = null;
				}
			}
			else
			{
				Debug.LogWarning("[AVProVideo] SRT format doesn't appear to be valid");
			}
		}

		return result;
	}
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: c21f230642ee9284eb9726613241c7bd
|
||||
timeCreated: 1548861442
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
702
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/VideoRender.cs
Normal file
702
Assets/AVProVideo/Runtime/Scripts/Internal/Utils/VideoRender.cs
Normal file
@@ -0,0 +1,702 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2025 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_TVOS || UNITY_VISIONOS
|
||||
#define UNITY_PLATFORM_SUPPORTS_YPCBCR
|
||||
#endif
|
||||
|
||||
#define UNITY_PLATFORM_SUPPORTS_LINEAR
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.Rendering;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
#if AVPRO_FEATURE_VIDEORESOLVE
|
||||
/// <summary>
/// Experimental resolve wrapper (behind the AVPRO_FEATURE_VIDEORESOLVE flag) that presents a
/// resolved video texture through the ITextureProducer interface.
/// NOTE(review): visibly incomplete — SetSource is a stub with its body commented out, and
/// _textureSource/_texture/_commandBuffer are never assigned in the code shown here, so the
/// delegating getters would NRE until a setup path exists; confirm before enabling the flag.
/// </summary>
[System.Serializable]
public class VideoResolve : ITextureProducer
{
	[SerializeField] VideoResolveOptions _options = VideoResolveOptions.Create();
	[SerializeField] RenderTexture _targetRenderTexture = null;
	[SerializeField] ScaleMode _targetRenderTextureScale = ScaleMode.ScaleToFit;

	// Stub: intended to hook the source producer and kick off the resolve command buffer
	void SetSource(ITextureProducer textureSource)
	{
		//_commandBuffer.IssuePluginEvent(blahCallback, 0);
		//Graphics.ExecuteCommandBuffer(_commandBuffer);
	}

	// ITextureProducer implementation

	/// <inheritdoc/>
	public int GetTextureCount() { return 1; }

	/// <inheritdoc/>
	public Texture GetTexture(int index = 0) { return _texture; }

	/// <inheritdoc/>
	public int GetTextureFrameCount() { return _textureSource.GetTextureFrameCount(); }

	/// <inheritdoc/>
	public bool SupportsTextureFrameCount() { return _textureSource.SupportsTextureFrameCount(); }

	/// <inheritdoc/>
	public long GetTextureTimeStamp() { return _textureSource.GetTextureTimeStamp(); }

	/// <inheritdoc/>
	// The resolved texture is already the right way up, so no flip is required
	public bool RequiresVerticalFlip() { return false; }

	/// <inheritdoc/>
	public StereoPacking GetTextureStereoPacking() { return StereoPacking.None; }

	/// <inheritdoc/>
	public TransparencyMode GetTextureTransparency() { return TransparencyMode.Transparent; }

	/// <inheritdoc/>
	public AlphaPacking GetTextureAlphaPacking() { return AlphaPacking.None; }

	/// <inheritdoc/>
	public Matrix4x4 GetYpCbCrTransform() { return Matrix4x4.identity; }

	private ITextureProducer _textureSource;
	private Texture _texture;
	private CommandBuffer _commandBuffer;
}
|
||||
#endif
|
||||
|
||||
/// <summary>
/// Caches the integer id of a shader property, resolving it lazily via Shader.PropertyToID on
/// first access rather than at construction time.
/// NOTE(review): this is a mutable struct — the Id getter writes _id, so when an instance is
/// accessed through a `static readonly` field (as this file does) the write lands on a defensive
/// copy and the id is re-resolved on every access. Behaviour is still correct, just uncached;
/// confirm whether that cost matters at the call sites.
/// </summary>
public struct LazyShaderProperty
{
	public LazyShaderProperty(string name)
	{
		_name = name;
		// 0 is the "not yet resolved" sentinel; the real id is fetched on first access to Id
		_id = 0;
	}

	public string Name { get { return _name;} }
	public int Id { get { if (_id == 0) { _id = Shader.PropertyToID(_name); } return _id; } }

	private string _name;
	private int _id;
}
|
||||
|
||||
/// <summary>Helper class for everything related to setting up materials for rendering/resolving videos</summary>
|
||||
public class VideoRender
{
	// Internal shader paths used for IMGUI preview and frame resolving
	public const string Shader_IMGUI = "AVProVideo/Internal/IMGUI/Texture Transparent";
	public const string Shader_Resolve = "AVProVideo/Internal/Resolve";
	public const string Shader_ResolveOES = "AVProVideo/Internal/ResolveOES";
	public const string Shader_Preview = "AVProVideo/Internal/Preview";

#if UNITY_PLATFORM_SUPPORTS_YPCBCR
	public const string Keyword_UseYpCbCr = "USE_YPCBCR";
#endif
	// Shader keywords selecting the alpha-packing, stereo-packing, layout, forced-eye and
	// gamma-correction code paths (one keyword per group is active at a time)
	public const string Keyword_AlphaPackTopBottom = "ALPHAPACK_TOP_BOTTOM";
	public const string Keyword_AlphaPackLeftRight = "ALPHAPACK_LEFT_RIGHT";
	public const string Keyword_AlphaPackNone = "ALPHAPACK_NONE";
	public const string Keyword_StereoTopBottom = "STEREO_TOP_BOTTOM";
	public const string Keyword_StereoLeftRight = "STEREO_LEFT_RIGHT";
	public const string Keyword_StereoCustomUV = "STEREO_CUSTOM_UV";
	public const string Keyword_StereoTwoTextures = "STEREO_TWO_TEXTURES";
	public const string Keyword_StereoNone = "MONOSCOPIC";
	public const string Keyword_StereoDebug = "STEREO_DEBUG";
	public const string Keyword_LayoutEquirect180 = "LAYOUT_EQUIRECT180";
	public const string Keyword_LayoutNone = "LAYOUT_NONE";
	public const string Keyword_ForceEyeNone = "FORCEEYE_NONE";
	public const string Keyword_ForceEyeLeft = "FORCEEYE_LEFT";
	public const string Keyword_ForceEyeRight = "FORCEEYE_RIGHT";
	public const string Keyword_ApplyGamma = "APPLY_GAMMA";

	public static readonly LazyShaderProperty PropChromaTex = new LazyShaderProperty("_ChromaTex");

	// Default right-eye texture shader properties
	public static readonly LazyShaderProperty PropMainTex_R = new LazyShaderProperty("_MainTex_R");
	public static readonly LazyShaderProperty PropChromaTex_R = new LazyShaderProperty("_ChromaTex_R");

#if UNITY_PLATFORM_SUPPORTS_YPCBCR
	public static readonly LazyShaderProperty PropYpCbCrTransform = new LazyShaderProperty("_YpCbCrTransform");
	public static readonly LazyShaderProperty PropUseYpCbCr = new LazyShaderProperty("_UseYpCbCr");
#endif

	public static readonly LazyShaderProperty PropVertScale = new LazyShaderProperty("_VertScale");
	public static readonly LazyShaderProperty PropApplyGamma = new LazyShaderProperty("_ApplyGamma");
	public static readonly LazyShaderProperty PropStereo = new LazyShaderProperty("Stereo");
	public static readonly LazyShaderProperty PropAlphaPack = new LazyShaderProperty("AlphaPack");
	public static readonly LazyShaderProperty PropLayout = new LazyShaderProperty("Layout");
	public static readonly LazyShaderProperty PropViewMatrix = new LazyShaderProperty("_ViewMatrix");
	public static readonly LazyShaderProperty PropTextureMatrix = new LazyShaderProperty("_MainTex_Xfrm");

	// HSBC (hue/saturation/brightness/contrast + gamma) colour-adjust keyword and properties
	public static string Keyword_UseHSBC = "USE_HSBC";
	public static readonly LazyShaderProperty PropHue = new LazyShaderProperty("_Hue");
	public static readonly LazyShaderProperty PropSaturation = new LazyShaderProperty("_Saturation");
	public static readonly LazyShaderProperty PropContrast = new LazyShaderProperty("_Contrast");
	public static readonly LazyShaderProperty PropBrightness = new LazyShaderProperty("_Brightness");
	public static readonly LazyShaderProperty PropInvGamma = new LazyShaderProperty("_InvGamma");
|
||||
|
||||
/// <summary>
/// Creates the material used to resolve video frames, selecting the OES shader variant when
/// the source is an Android OES texture.
/// </summary>
public static Material CreateResolveMaterial(bool usingAndroidOES)
{
	string shaderName = usingAndroidOES ? VideoRender.Shader_ResolveOES : VideoRender.Shader_Resolve;
	Shader shader = Shader.Find(shaderName);
	return new Material(shader);
}
|
||||
|
||||
/// <summary>Creates the material used for IMGUI/editor preview drawing.</summary>
public static Material CreateIMGUIMaterial()
{
	Shader previewShader = Shader.Find(VideoRender.Shader_Preview);
	return new Material(previewShader);
}
|
||||
|
||||
/// <summary>
/// Toggles the layout shader keywords for the given video mapping. Only EquiRectangular180
/// has a dedicated shader path; every other mapping uses the LAYOUT_NONE path.
/// </summary>
public static void SetupLayoutMaterial(Material material, VideoMapping mapping)
{
	if (mapping == VideoMapping.EquiRectangular180)
	{
		material.DisableKeyword(Keyword_LayoutNone);
		material.EnableKeyword(Keyword_LayoutEquirect180);
	}
	else
	{
		material.DisableKeyword(Keyword_LayoutEquirect180);
		material.EnableKeyword(Keyword_LayoutNone);
	}
}
|
||||
|
||||
/// <summary>
/// Enables exactly one FORCEEYE_* keyword for the requested eye and disables the other two.
/// Leaves the material untouched for any unrecognised mode.
/// </summary>
public static void SetupStereoEyeModeMaterial(Material material, StereoEye mode)
{
	if (mode == StereoEye.Both)
	{
		material.DisableKeyword(Keyword_ForceEyeLeft);
		material.DisableKeyword(Keyword_ForceEyeRight);
		material.EnableKeyword(Keyword_ForceEyeNone);
	}
	else if (mode == StereoEye.Left)
	{
		material.DisableKeyword(Keyword_ForceEyeNone);
		material.DisableKeyword(Keyword_ForceEyeRight);
		material.EnableKeyword(Keyword_ForceEyeLeft);
	}
	else if (mode == StereoEye.Right)
	{
		material.DisableKeyword(Keyword_ForceEyeNone);
		material.DisableKeyword(Keyword_ForceEyeLeft);
		material.EnableKeyword(Keyword_ForceEyeRight);
	}
}
|
||||
|
||||
/// <summary>
/// Enables the single stereo-packing keyword matching <paramref name="packing"/> and disables
/// the other four. Unrecognised packings leave the material's keywords untouched, matching the
/// behaviour of the original switch (which had no default case).
/// </summary>
public static void SetupStereoMaterial(Material material, StereoPacking packing)
{
	string selected = null;
	switch (packing)
	{
		case StereoPacking.Monoscopic:
			selected = Keyword_StereoNone;
			break;
		case StereoPacking.TopBottom:
			selected = Keyword_StereoTopBottom;
			break;
		case StereoPacking.LeftRight:
			selected = Keyword_StereoLeftRight;
			break;
		case StereoPacking.CustomUV:
			selected = Keyword_StereoCustomUV;
			break;
		case StereoPacking.MultiviewLeftPrimary:
		case StereoPacking.MultiviewRightPrimary:
			// Both multiview variants share the two-texture shader path
			selected = Keyword_StereoTwoTextures;
			break;
	}

	if (selected == null)
	{
		return;
	}

	string[] allKeywords =
	{
		Keyword_StereoNone,
		Keyword_StereoTopBottom,
		Keyword_StereoLeftRight,
		Keyword_StereoCustomUV,
		Keyword_StereoTwoTextures,
	};
	foreach (string keyword in allKeywords)
	{
		if (keyword != selected)
		{
			material.DisableKeyword(keyword);
		}
	}
	material.EnableKeyword(selected);
}
|
||||
|
||||
/// <summary>
/// Globally toggles the STEREO_DEBUG shader keyword (affects every material using these
/// shaders), used to visually tint the left/right eyes for debugging.
/// </summary>
public static void SetupGlobalDebugStereoTinting(bool enabled)
{
	if (!enabled)
	{
		Shader.DisableKeyword(Keyword_StereoDebug);
		return;
	}
	Shader.EnableKeyword(Keyword_StereoDebug);
}
|
||||
|
||||
/// <summary>
/// Enables exactly one ALPHAPACK_* keyword matching the packing mode and disables the other
/// two. Unrecognised packings leave the keywords untouched.
/// </summary>
public static void SetupAlphaPackedMaterial(Material material, AlphaPacking packing)
{
	if (packing == AlphaPacking.None)
	{
		material.DisableKeyword(Keyword_AlphaPackTopBottom);
		material.DisableKeyword(Keyword_AlphaPackLeftRight);
		material.EnableKeyword(Keyword_AlphaPackNone);
	}
	else if (packing == AlphaPacking.TopBottom)
	{
		material.DisableKeyword(Keyword_AlphaPackNone);
		material.DisableKeyword(Keyword_AlphaPackLeftRight);
		material.EnableKeyword(Keyword_AlphaPackTopBottom);
	}
	else if (packing == AlphaPacking.LeftRight)
	{
		material.DisableKeyword(Keyword_AlphaPackNone);
		material.DisableKeyword(Keyword_AlphaPackTopBottom);
		material.EnableKeyword(Keyword_AlphaPackLeftRight);
	}
}
|
||||
|
||||
/// <summary>
/// Enables the APPLY_GAMMA keyword when the project renders in linear colour space but the
/// underlying video player delivers gamma-space frames, so the shader performs the conversion.
/// Compiles to a no-op when UNITY_PLATFORM_SUPPORTS_LINEAR is not defined.
/// </summary>
public static void SetupGammaMaterial(Material material, bool playerSupportsLinear)
{
#if UNITY_PLATFORM_SUPPORTS_LINEAR
	if (QualitySettings.activeColorSpace == ColorSpace.Linear && !playerSupportsLinear)
	{
		material.EnableKeyword(Keyword_ApplyGamma);
	}
	else
	{
		material.DisableKeyword(Keyword_ApplyGamma);
	}
#endif
}
|
||||
|
||||
/// <summary>
/// Uploads a texture transform given as 6 floats (2x2 rotation/scale m00,m01,m10,m11 followed
/// by translation tx,ty) to the material's _MainTex_Xfrm matrix. A null or wrongly-sized array
/// falls back to the identity transform. Ignores null materials.
/// </summary>
public static void SetupTextureMatrix(Material material, float[] transform)
{
	if (material == null)
	{
		return;
	}

	bool isValid = (transform != null) && (transform.Length == 6);
	if (!isValid)
	{
		// Identity: no rotation/scale, no translation
		transform = new float[6] { 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f };
	}

	// Expand the 6-element affine transform into the 4x4 matrix the shader expects
	Matrix4x4 matrix = new Matrix4x4(
		new Vector4(transform[0], transform[1], 0f, 0f),
		new Vector4(transform[2], transform[3], 0f, 0f),
		new Vector4(0f, 0f, 1f, 0f),
		new Vector4(transform[4], transform[5], 0f, 1f));

	material.SetMatrix(PropTextureMatrix.Id, matrix);
}
|
||||
|
||||
/// <summary>Uploads a texture transform matrix to the material's _MainTex_Xfrm property; ignores null materials.</summary>
public static void SetupTextureMatrix(Material material, Matrix4x4 transform)
{
	if (material != null)
	{
		material.SetMatrix(PropTextureMatrix.Id, transform);
	}
}
|
||||
|
||||
#if UNITY_PLATFORM_SUPPORTS_YPCBCR
/// <summary>
/// Configures a material for biplanar YpCbCr rendering: enables the USE_YPCBCR keyword and
/// binds the colour-conversion matrix and chroma texture when <paramref name="enable"/> is true,
/// otherwise disables the keyword. Does nothing if the material lacks the _UseYpCbCr property.
/// </summary>
public static void SetupYpCbCrMaterial(Material material, bool enable, Matrix4x4 transform, Texture texture)
{
	if (material.HasProperty(VideoRender.PropUseYpCbCr.Id))
	{
		if (enable)
		{
			material.EnableKeyword(VideoRender.Keyword_UseYpCbCr);
			material.SetMatrix(VideoRender.PropYpCbCrTransform.Id, transform);
			material.SetTexture(VideoRender.PropChromaTex.Id, texture);
		}
		else
		{
			material.DisableKeyword(VideoRender.Keyword_UseYpCbCr);
		}
	}
}
#endif
|
||||
|
||||
/// <summary>Sets the vertical scale uniform: -1 flips the video vertically, +1 leaves it unflipped.</summary>
public static void SetupVerticalFlipMaterial(Material material, bool flip)
{
	float verticalScale = flip ? -1f : 1f;
	material.SetFloat(VideoRender.PropVertScale.Id, verticalScale);
}
|
||||
|
||||
/// <summary>
/// Returns the texture plane at <paramref name="textureIndex"/> for the given player,
/// preferring the frame resampler's output when resampling is enabled. Returns null when the
/// player is null, the index is out of range, or no texture source is available.
/// </summary>
public static Texture GetTexture(MediaPlayer mediaPlayer, int textureIndex)
{
	if (mediaPlayer == null)
	{
		return null;
	}

	if (mediaPlayer.UseResampler && mediaPlayer.FrameResampler != null && mediaPlayer.FrameResampler.OutputTexture != null)
	{
		// Resampler path: serve from its output array when the index is in range
		var output = mediaPlayer.FrameResampler.OutputTexture;
		return (output.Length > textureIndex) ? output[textureIndex] : null;
	}

	ITextureProducer producer = mediaPlayer.TextureProducer;
	if (producer != null && producer.GetTextureCount() > textureIndex)
	{
		return producer.GetTexture(textureIndex);
	}

	return null;
}
|
||||
|
||||
/// <summary>
/// Configures a material for rendering the given player's current media: binds the main texture
/// (or <paramref name="fallbackTexture"/> when no frame is available or forced), then applies
/// flip, gamma, YpCbCr, layout, stereo and alpha-packing state via SetupMaterial. For two-texture
/// (multiview) stereo it additionally binds the right-eye texture(s). A null player resets the
/// material to defaults with the fallback texture.
/// Pass texturePropId == -1 to skip binding the main texture slot.
/// </summary>
public static void SetupMaterialForMedia(Material material, MediaPlayer mediaPlayer, int texturePropId = -1, Texture fallbackTexture = null, bool forceFallbackTexture = false)
{
	Debug.Assert(material != null);
	if (mediaPlayer != null)
	{
		Texture mainTexture = GetTexture(mediaPlayer, 0);
		Matrix4x4 textureTransform = Matrix4x4.identity;

		bool isUsingYCbCr = mediaPlayer.IsUsingYCbCr();

		// On YpCbCr platforms the chroma plane is texture index 1
		Texture yCbCrTexture = isUsingYCbCr ? GetTexture(mediaPlayer, 1) : null;
		Matrix4x4 yCbCrTransform = Matrix4x4.identity;

		StereoPacking stereoPacking = StereoPacking.Monoscopic;
		AlphaPacking alphaPacking = AlphaPacking.None;
		bool flipY = false;
		bool isLinear = false;

		if (texturePropId != -1)
		{
			if (mainTexture == null || forceFallbackTexture)
			{
				mainTexture = fallbackTexture;
			}
			material.SetTexture(texturePropId, mainTexture);
		}

		ITextureProducer textureProducer = mediaPlayer.TextureProducer;
		if (textureProducer != null)
		{
			flipY = textureProducer.RequiresVerticalFlip();
			if (isUsingYCbCr)
			{
				yCbCrTransform = textureProducer.GetYpCbCrTransform();
			}
			stereoPacking = textureProducer.GetTextureStereoPacking();
			alphaPacking = textureProducer.GetTextureAlphaPacking();
			textureTransform = textureProducer.GetTextureMatrix();
		}

		if (mediaPlayer.Info != null)
		{
			isLinear = mediaPlayer.Info.PlayerSupportsLinearColorSpace();
		}

		SetupMaterial(material, flipY, isLinear, yCbCrTransform, yCbCrTexture, textureTransform, mediaPlayer.VideoLayoutMapping, stereoPacking, alphaPacking);

		// Multiview stereo delivers each eye as a separate texture; bind the right eye too.
		// With YpCbCr the planes are laid out [lumaL, chromaL, lumaR, chromaR].
		if (stereoPacking == StereoPacking.MultiviewLeftPrimary || stereoPacking == StereoPacking.MultiviewRightPrimary)
		{
#if UNITY_PLATFORM_SUPPORTS_YPCBCR
			if (isUsingYCbCr)
			{
				material.SetTexture(PropMainTex_R.Id, GetTexture(mediaPlayer, 2));
				material.SetTexture(PropChromaTex_R.Id, GetTexture(mediaPlayer, 3));
			}
			else
#endif
			{
				material.SetTexture(PropMainTex_R.Id, GetTexture(mediaPlayer, 1));
			}
		}
	}
	else
	{
		// No player: bind fallback and reset the material to neutral defaults
		if (texturePropId != -1)
		{
			material.SetTexture(texturePropId, fallbackTexture);
		}
		SetupMaterial(material, false, true, Matrix4x4.identity, null, Matrix4x4.identity);
	}
}
|
||||
|
||||
/// <summary>
/// Applies the full set of per-frame material state: vertical flip, layout, stereo packing,
/// alpha packing, gamma correction, texture (crop/orientation) matrix and YpCbCr setup.
/// Layout/stereo/alpha/gamma are each applied only when the material exposes the matching
/// shader property, so non-AVPro materials are left untouched where possible.
/// </summary>
internal static void SetupMaterial(
	Material material,
	bool flipVertically,
	bool playerSupportsLinear,
	Matrix4x4 ycbcrTransform,
	Texture ycbcrTexture,
	Matrix4x4 textureTransform,
	VideoMapping mapping = VideoMapping.Normal,
	StereoPacking stereoPacking = StereoPacking.Monoscopic,
	AlphaPacking alphaPacking = AlphaPacking.None)
{
	SetupVerticalFlipMaterial(material, flipVertically);

	// Apply changes for layout
	if (material.HasProperty(VideoRender.PropLayout.Id))
	{
		VideoRender.SetupLayoutMaterial(material, mapping);
	}

	// Apply changes for stereo videos
	if (material.HasProperty(VideoRender.PropStereo.Id))
	{
		VideoRender.SetupStereoMaterial(material, stereoPacking);
	}

	// Apply changes for alpha videos
	if (material.HasProperty(VideoRender.PropAlphaPack.Id))
	{
		VideoRender.SetupAlphaPackedMaterial(material, alphaPacking);
	}

	// Apply gamma correction
#if UNITY_PLATFORM_SUPPORTS_LINEAR
	if (material.HasProperty(VideoRender.PropApplyGamma.Id))
	{
		VideoRender.SetupGammaMaterial(material, playerSupportsLinear);
	}
#endif

	// Adjust for cropping/orientation (when the decoder decodes in blocks that overrun the video frame size, it pads), OES only as we apply this lower down for none-OES
	VideoRender.SetupTextureMatrix(material, textureTransform);

#if UNITY_PLATFORM_SUPPORTS_YPCBCR
	// A non-null chroma texture is what signals the YpCbCr path is in use
	VideoRender.SetupYpCbCrMaterial(material, ycbcrTexture != null, ycbcrTransform, ycbcrTexture);
#endif
}
|
||||
|
||||
/// <summary>Bit flags controlling how a video frame is resolved into a RenderTexture.</summary>
[System.Flags]
public enum ResolveFlags : int
{
	Mipmaps = 1 << 0,			// Generate mipmaps on the target texture
	PackedAlpha = 1 << 1,		// Source frame has packed alpha (top-bottom or left-right)
	StereoLeft = 1 << 2,		// Resolve the left eye (with StereoRight set/cleared together = both eyes)
	StereoRight = 1 << 3,		// Resolve the right eye
	ColorspaceSRGB = 1 << 4,	// Create the target with sRGB read/write instead of linear
}
|
||||
|
||||
/// <summary>
/// Applies the HSBC (hue/saturation/brightness/contrast/gamma) colour-adjust options and tint
/// from <paramref name="options"/> to the resolve material, toggling the USE_HSBC keyword.
/// </summary>
public static void SetupResolveMaterial(Material material, VideoResolveOptions options)
{
	bool colourAdjust = options.IsColourAdjust();
	if (!colourAdjust)
	{
		material.DisableKeyword(VideoRender.Keyword_UseHSBC);
	}
	else
	{
		material.EnableKeyword(VideoRender.Keyword_UseHSBC);
		material.SetFloat(VideoRender.PropHue.Id, options.hue);
		material.SetFloat(VideoRender.PropSaturation.Id, options.saturation);
		material.SetFloat(VideoRender.PropBrightness.Id, options.brightness);
		material.SetFloat(VideoRender.PropContrast.Id, options.contrast);
		// The shader takes the reciprocal so gamma is a single pow()
		material.SetFloat(VideoRender.PropInvGamma.Id, 1f / options.gamma);
	}

	material.color = options.tint;
}
|
||||
|
||||
/// <summary>
/// Resolves the producer's current frame into a (temporary) RenderTexture, flattening alpha
/// packing / stereo packing / flips according to <paramref name="flags"/>. Reuses
/// <paramref name="targetTexture"/> when its size still matches, otherwise releases it and
/// allocates a new temporary; callers must keep the returned reference (it may differ from the
/// one passed in) and eventually release it via RenderTexture.ReleaseTemporary.
/// </summary>
public static RenderTexture ResolveVideoToRenderTexture(Material resolveMaterial, RenderTexture targetTexture, ITextureProducer texture, ResolveFlags flags, ScaleMode scaleMode = ScaleMode.StretchToFill)
{
	int targetWidth = texture.GetTexture(0).width;
	int targetHeight = texture.GetTexture(0).height;

	// A single eye is requested only when exactly one of the stereo flags is set
	StereoEye eyeMode = StereoEye.Both;
	if (((flags & ResolveFlags.StereoLeft) == ResolveFlags.StereoLeft) &&
		((flags & ResolveFlags.StereoRight) != ResolveFlags.StereoRight))
	{
		eyeMode = StereoEye.Left;
	}
	else if (((flags & ResolveFlags.StereoLeft) != ResolveFlags.StereoLeft) &&
		((flags & ResolveFlags.StereoRight) == ResolveFlags.StereoRight))
	{
		eyeMode = StereoEye.Right;
	}

	// RJT NOTE: No longer passing in PAR as combined with larger videos (e.g. 8K+) it can lead to textures >16K which most platforms don't support
	// - Instead, the PAR is accounted for during drawing (which is more efficient too)
	// - https://github.com/RenderHeads/UnityPlugin-AVProVideo/issues/1297
	float pixelAspectRatio = 1.0f; // texture.GetTexturePixelAspectRatio();
	GetResolveTextureSize(
		texture.GetTextureAlphaPacking(),
		texture.GetTextureStereoPacking(),
		eyeMode,
		pixelAspectRatio,
		texture.GetTextureMatrix(),
		ref targetWidth,
		ref targetHeight);

	// Drop the previous target if the resolve size has changed
	if (targetTexture)
	{
		bool sizeChanged = (targetTexture.width != targetWidth) || (targetTexture.height != targetHeight);
		if (sizeChanged)
		{
			RenderTexture.ReleaseTemporary(targetTexture);
			targetTexture = null;
		}
	}

	// Allocate a new temporary target with a compatible format and the requested colour space
	if (!targetTexture)
	{
		GetCompatibleRenderTextureFormatOptions options = GetCompatibleRenderTextureFormatOptions.ForResolve;
		if (texture.GetTextureAlphaPacking() != AlphaPacking.None)
		{
			options |= GetCompatibleRenderTextureFormatOptions.RequiresAlpha;
		}
		RenderTextureFormat format = texture.GetCompatibleRenderTextureFormat(options);
		RenderTextureReadWrite readWrite = ((flags & ResolveFlags.ColorspaceSRGB) == ResolveFlags.ColorspaceSRGB) ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear;
		targetTexture = RenderTexture.GetTemporary(targetWidth, targetHeight, 0, format, readWrite);
	}

	// Set target mipmap generation support (useMipMap can only change before Create())
	{
		bool requiresMipmap = (flags & ResolveFlags.Mipmaps) == ResolveFlags.Mipmaps;
		bool requiresRecreate = (targetTexture.IsCreated() && targetTexture.useMipMap != requiresMipmap);
		if (requiresRecreate)
		{
			targetTexture.Release();
		}
		if (!targetTexture.IsCreated())
		{
			targetTexture.useMipMap = targetTexture.autoGenerateMips = requiresMipmap;
			targetTexture.Create();
		}
	}

	// Render resolve blit
	// TODO: combine these two paths into a single material blit
	{
		// Preserve and restore GL sRGB-write and the active render target around the blit
		bool prevSRGB = GL.sRGBWrite;
		GL.sRGBWrite = targetTexture.sRGB;
		RenderTexture prev = RenderTexture.active;
		if (scaleMode == ScaleMode.StretchToFill)
		{
			Graphics.Blit(texture.GetTexture(0), targetTexture, resolveMaterial);
		}
		else
		{
			RenderTexture.active = targetTexture;
			// ScaleToFit leaves letterbox/pillarbox areas, so clear them to black first
			bool partialAreaRender = (scaleMode == ScaleMode.ScaleToFit);
			if (partialAreaRender)
			{
				GL.Clear(false, true, Color.black);
			}
			VideoRender.DrawTexture(new Rect(0f, 0f, targetTexture.width, targetTexture.height), texture.GetTexture(0), scaleMode, texture.GetTextureAlphaPacking(), texture.GetTexturePixelAspectRatio(), resolveMaterial);
		}
		RenderTexture.active = prev;
		GL.sRGBWrite = prevSRGB;
	}

	return targetTexture;
}
|
||||
|
||||
/// <summary>
/// Computes the pixel size of the resolved (unpacked) frame from the raw texture size:
/// applies the texture transform, halves the packed axis for alpha packing, halves the packed
/// axis for stereo packing when only one eye is requested, and finally enlarges one axis to
/// account for a non-square pixel aspect ratio. Results are returned via the ref parameters,
/// which must hold the source texture size on entry.
/// </summary>
public static void GetResolveTextureSize(AlphaPacking alphaPacking, StereoPacking stereoPacking, StereoEye eyeMode, float pixelAspectRatio, Matrix4x4 textureXfrm, ref int width, ref int height)
{
	// Run the raw size through the texture transform (handles crop/rotation); Abs because a
	// rotation/flip can produce negative components
	Vector4 size = new Vector4(width, height, 0, 0);
	size = textureXfrm * size;
	width = (int)Mathf.Abs(size.x);
	height = (int)Mathf.Abs(size.y);

	// Packed alpha stores the alpha plane beside/below the colour plane: halve that axis
	switch (alphaPacking)
	{
		case AlphaPacking.LeftRight:
			width /= 2;
			break;
		case AlphaPacking.TopBottom:
			height /= 2;
			break;
	}

	// When resolving a single eye from a packed-stereo frame, that axis halves too
	if (eyeMode != StereoEye.Both)
	{
		switch (stereoPacking)
		{
			case StereoPacking.LeftRight:
				width /= 2;
				break;
			case StereoPacking.TopBottom:
				height /= 2;
				break;
		}
	}

	// Non-square pixels: always grow (never shrink) one axis to preserve detail
	if (pixelAspectRatio > 0f)
	{
		if (pixelAspectRatio > 1f)
		{
			width = Mathf.RoundToInt(width * pixelAspectRatio);
		}
		else if (pixelAspectRatio < 1f)
		{
			height = Mathf.RoundToInt(height / pixelAspectRatio);
		}
	}
}
|
||||
|
||||
/// <summary>
/// Returns true when the texture cannot be displayed directly and must first be
/// blitted/resolved into an intermediate target: packed alpha, a needed vertical
/// flip, packed stereo, or more than one texture plane.
/// </summary>
public static bool RequiresResolve(ITextureProducer texture)
{
	// Checks are performed in the same order as the original short-circuit expression
	if (texture.GetTextureAlphaPacking() != AlphaPacking.None)
	{
		return true;
	}
	if (texture.RequiresVerticalFlip())
	{
		return true;
	}
	if (texture.GetTextureStereoPacking() != StereoPacking.Monoscopic)
	{
		return true;
	}
	return texture.GetTextureCount() > 1;
}
|
||||
|
||||
/// <summary>
/// Immediate-mode draw of a (possibly alpha-packed) video texture into destRect
/// using a pixel-space projection, honouring the requested ScaleMode.
/// Only draws during a repaint (or when called outside of IMGUI event handling).
/// </summary>
public static void DrawTexture(Rect destRect, Texture texture, ScaleMode scaleMode, AlphaPacking alphaPacking, float pixelAspectRatio, Material material)
{
	// Skip layout/input events - only render during an actual repaint
	if (Event.current != null && Event.current.type != EventType.Repaint)
	{
		return;
	}

	// Work out the effective (unpacked, aspect-corrected) source dimensions
	int unpackedWidth = texture.width;
	int unpackedHeight = texture.height;
	GetResolveTextureSize(alphaPacking, StereoPacking.Unknown, StereoEye.Both, pixelAspectRatio, Matrix4x4.identity, ref unpackedWidth, ref unpackedHeight);

	float sourceAspect = (float)unpackedWidth / (float)unpackedHeight;
	Rect uvRect = new Rect(0f, 0f, 1f, 1f);

	if (scaleMode == ScaleMode.ScaleAndCrop)
	{
		// Fill destRect completely, cropping the source UVs in the overflowing axis
		float destAspect = destRect.width / destRect.height;
		if (destAspect > sourceAspect)
		{
			float visible = sourceAspect / destAspect;
			uvRect = new Rect(0f, (1f - visible) * 0.5f, 1f, visible);
		}
		else
		{
			float visible = destAspect / sourceAspect;
			uvRect = new Rect(0.5f - visible * 0.5f, 0f, visible, 1f);
		}
	}
	else if (scaleMode == ScaleMode.ScaleToFit)
	{
		// Letterbox/pillarbox: shrink and centre destRect to preserve the source aspect
		float destAspect = destRect.width / destRect.height;
		if (destAspect > sourceAspect)
		{
			float fit = sourceAspect / destAspect;
			destRect = new Rect(destRect.xMin + destRect.width * (1f - fit) * 0.5f, destRect.yMin, fit * destRect.width, destRect.height);
		}
		else
		{
			float fit = destAspect / sourceAspect;
			destRect = new Rect(destRect.xMin, destRect.yMin + destRect.height * (1f - fit) * 0.5f, destRect.width, fit * destRect.height);
		}
	}
	// ScaleMode.StretchToFill: use the full source UVs and destRect unchanged

	GL.PushMatrix();
	// Set up a pixel-space projection matching the current render target (or the screen)
	RenderTexture activeTarget = RenderTexture.active;
	if (activeTarget == null)
	{
		GL.LoadPixelMatrix(0f, Screen.width, Screen.height, 0f);
	}
	else
	{
		GL.LoadPixelMatrix(0f, activeTarget.width, activeTarget.height, 0f);
	}
	Graphics.DrawTexture(destRect, texture, uvRect, 0, 0, 0, 0, GUI.color, material);
	GL.PopMatrix();
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: a928f61fef33d1d4986b2190310027bc
|
||||
timeCreated: 1547737745
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
282
Assets/AVProVideo/Runtime/Scripts/Internal/Variants.cs
Normal file
282
Assets/AVProVideo/Runtime/Scripts/Internal/Variants.cs
Normal file
@@ -0,0 +1,282 @@
|
||||
//-----------------------------------------------------------------------------
|
||||
// Copyright 2015-2024 RenderHeads Ltd. All rights reserved.
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using UnityEngine;
|
||||
|
||||
namespace RenderHeads.Media.AVProVideo
|
||||
{
|
||||
/// <summary>
/// Dynamic range / transfer function of the video signal.
/// </summary>
public enum VideoRange
{
	SDR,	// Standard dynamic range
	HLG,	// Hybrid Log-Gamma HDR
	PQ		// Perceptual Quantizer (SMPTE ST 2084) HDR
}
|
||||
|
||||
/// <summary>
/// FourCC identifiers for audio/video/subtitle codecs. Each value is the
/// big-endian ASCII encoding of the four character code (e.g. 'avc1' == 0x61766331).
/// </summary>
public enum CodecType: uint
{
	ac_3 = 0x61632d33,		// 'ac-3' AC-3 audio (see AudioCodecName)
	alac = 0x616c6163,		// 'alac' Apple Lossless audio
	avc1 = 0x61766331,		// 'avc1' H.264 video
	avc3 = 0x61766333,		// 'avc3' H.264 video (in-band parameter sets)
	dvh1 = 0x64766831,		// 'dvh1' Dolby Vision video
	dvhe = 0x64766865,		// 'dvhe' Dolby Vision video
	ec_3 = 0x65632d33,		// 'ec-3' EC-3 audio
	fLaC = 0x664c6143,		// 'fLaC' FLAC audio
	hev1 = 0x68657631,		// 'hev1' HEVC video
	hvc1 = 0x68766331,		// 'hvc1' HEVC video
	mjpg = 0x6d6a7067,		// 'mjpg' Motion JPEG video
	mp4a = 0x6d703461,		// 'mp4a' MPEG-4 audio (most commonly AAC)
	stpp = 0x73747070,		// 'stpp' subtitle track (presumably TTML - confirm)
	wvtt = 0x77767474,		// 'wvtt' WebVTT subtitle track
	unknown = 0				// Codec could not be determined
}
|
||||
|
||||
/// <summary>
/// Bit flags describing properties of a stream variant.
/// </summary>
public enum VariantFlags : int
{
	Default = ( 1 << 0 ),		// Variant is marked as the default selection
	Unsupported = ( 1 << 1 )	// Variant is marked as unsupported (see Variant.IsUnsupported)
}
|
||||
|
||||
/// <summary>
/// Describes a single media stream variant: a resolution / data-rate / codec
/// combination, e.g. one entry of an adaptive streaming manifest.
/// Instances are immutable after construction.
/// </summary>
public class Variant
{
	private readonly int _id;
	private readonly int _width;
	private readonly int _height;
	private readonly int _peakDataRate;
	private readonly int _averageDataRate;
	private readonly CodecType _videoCodec;
	private readonly float _frameRate;
	private readonly VideoRange _videoRange;
	private readonly CodecType _audioCodec;
	private readonly VariantFlags _flags;

	/// <summary>
	/// Creates a variant description. Only the id, dimensions and peak data
	/// rate are required; everything else defaults to unknown/zero.
	/// </summary>
	public Variant(
		int iId,
		int iWidth,
		int iHeight,
		int iPeakDataRate,
		int iAverageDataRate = 0,
		CodecType videoCodecType = CodecType.unknown,
		float fFrameRate = 0,
		VideoRange eVideoRange = VideoRange.SDR,
		CodecType audioCodecType = CodecType.unknown,
		VariantFlags flags = 0
	)
	{
		_id = iId;
		_width = iWidth;
		_height = iHeight;
		_peakDataRate = iPeakDataRate;
		_averageDataRate = iAverageDataRate;
		_videoCodec = videoCodecType;
		_frameRate = fFrameRate;
		_videoRange = eVideoRange;
		_audioCodec = audioCodecType;
		_flags = flags;
	}

	public int Id => _id;

	public int Width => _width;

	public int Height => _height;

	public int PeakDataRate => _peakDataRate;

	public int AverageDataRate => _averageDataRate;

	public float FrameRate => _frameRate;

	public VideoRange VideoRange => _videoRange;

	public CodecType VideoCodecType => _videoCodec;

	/// <summary>True when the Unsupported flag is set on this variant.</summary>
	public bool IsUnsupported => ( ( _flags & VariantFlags.Unsupported ) == VariantFlags.Unsupported );

	/// <summary>Human readable name of the video codec, or empty string when unknown.</summary>
	public string VideoCodecName
	{
		get
		{
			switch (_videoCodec)
			{
				case CodecType.avc1:
				case CodecType.avc3:
					return "H264";

				case CodecType.dvh1:
				case CodecType.dvhe:
					return "Dolby Vision";

				case CodecType.hev1:
				case CodecType.hvc1:
					return "HEVC";

				case CodecType.mjpg:
					return "MJPEG";

				default:
					return "";
			}
		}
	}

	public CodecType AudioCodecType => _audioCodec;

	/// <summary>Human readable name of the audio codec, or empty string when unknown.</summary>
	public string AudioCodecName
	{
		get
		{
			switch (_audioCodec)
			{
				case CodecType.ac_3:
					return "AC-3";

				case CodecType.alac:
					return "Apple Lossless";

				case CodecType.ec_3:
					return "EC-3";

				case CodecType.fLaC:
					return "FLAC";

				case CodecType.mp4a:
					// Could be something else but this is most likely, requires passing audio subtype
					return "AAC";

				default:
					return "";
			}
		}
	}

	// Sentinel instance (id -1, zero size) requesting automatic variant selection
	private static readonly Variant s_auto = new Variant(-1, 0, 0, 0);

	/// <summary>Special variant instructing the player to select a variant automatically.</summary>
	public static Variant Auto => s_auto;
}
|
||||
|
||||
/// <summary>
/// Enumerable access to the stream variants exposed by a media player,
/// plus query/selection of the active variant.
/// </summary>
public interface IVariants: IEnumerable
{
	/// <summary>Number of known variants.</summary>
	int Count { get; }
	/// <summary>The currently selected variant (see GetSelectedVariant).</summary>
	Variant Current { get; }
	/// <summary>Random access to the variant at the given index.</summary>
	Variant this[int index] { get; }
	/// <summary>Returns the variant currently in use.</summary>
	Variant GetSelectedVariant();
	/// <summary>Requests a switch to the given variant.</summary>
	void SelectVariant(Variant variant);
}
|
||||
|
||||
/// <summary>
/// Default IVariants implementation for BaseMediaPlayer. The base class reports
/// no variants and automatic selection; platform specific players override the
/// Internal* hooks (and optionally the selection methods) to expose real data.
/// </summary>
public partial class BaseMediaPlayer : IVariants
{
	// Populated by UpdateVariants(); shared with derived platform players
	protected List<Variant> _variants = new List<Variant>();

	/// <summary>Number of known variants.</summary>
	public int Count => _variants.Count;

	/// <summary>Currently selected variant (same result as GetSelectedVariant()).</summary>
	public Variant Current => GetSelectedVariant();

	/// <summary>Variant at the given index.</summary>
	public Variant this[int index] => _variants[index];

	public virtual IEnumerator GetEnumerator()
	{
		return _variants.GetEnumerator();
	}

	/// <summary>Base implementation always reports automatic selection.</summary>
	public virtual Variant GetSelectedVariant()
	{
		return Variant.Auto;
	}

	/// <summary>Base implementation does nothing; overridden by platform players.</summary>
	public virtual void SelectVariant(Variant variant)
	{
	}

	/// <summary>
	/// Rebuilds the variant list from the platform implementation, skipping null
	/// entries, then sorts by video codec and then by peak data rate.
	/// </summary>
	protected virtual void UpdateVariants()
	{
		_variants.Clear();

		int total = InternalGetVariantCount();
		for (int index = 0; index < total; ++index)
		{
			Variant entry = InternalGetVariantAtIndex(index);
			if (entry != null)
			{
				_variants.Add(entry);
			}
		}

		// Sort the list by codec, then peak bitrate
		_variants.Sort((a, b) =>
		{
			int order = a.VideoCodecType.CompareTo(b.VideoCodecType);
			return (order != 0) ? order : a.PeakDataRate.CompareTo(b.PeakDataRate);
		});
	}

	// Platform hook: how many variants the underlying player exposes (base: none)
	internal virtual int InternalGetVariantCount()
	{
		return 0;
	}

	// Platform hook: variant at the given index, or null if unavailable
	internal virtual Variant InternalGetVariantAtIndex(int index)
	{
		return null;
	}
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user