重新导入obi

This commit is contained in:
2026-04-06 11:35:18 +08:00
parent 05fa2d6e5e
commit ae3002a0e2
1643 changed files with 232496 additions and 13 deletions

View File

@@ -0,0 +1,452 @@
#if (OBI_BURST && OBI_MATHEMATICS && OBI_COLLECTIONS)
using System;
using Unity.Jobs;
using Unity.Burst;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Mathematics;
namespace Obi
{
/// <summary>
/// Burst implementation of fluid density constraints (position-based fluids).
/// Owns the list of fluid particles and the per-particle scratch arrays shared
/// by all constraint batches, and chains the per-batch jobs into a single
/// JobHandle dependency sequence.
/// </summary>
public class BurstDensityConstraints : BurstConstraintsImpl<BurstDensityConstraintsBatch>
{
    // Solver-space indices of every particle currently simulated as fluid
    // (used by the jobs below to index the solver's per-particle arrays).
    // Persistent allocation; released in Dispose().
    public NativeList<int> fluidParticles;

    // Per-particle vorticity gradient ("eta") accumulator. Allocated with
    // Allocator.TempJob in ApplyVelocityCorrections and deallocated by the
    // consuming job ([DeallocateOnJobCompletion] in
    // ApplyVorticityConfinementAndAtmosphere).
    public NativeArray<float4> eta;

    // Kernel-weighted accumulation of neighbor positions; the w component holds
    // the accumulated kernel weight. TempJob allocation made in
    // CalculateAnisotropyLaplacianSmoothing, deallocated by AverageAnisotropyJob.
    public NativeArray<float4> smoothPositions;

    // Per-particle covariance accumulator used to derive anisotropy axes.
    // TempJob allocation made in CalculateAnisotropyLaplacianSmoothing,
    // deallocated by AverageAnisotropyJob.
    public NativeArray<float3x3> anisotropies;

    // Registers this implementation for the Density constraint type and
    // allocates the persistent fluid particle list.
    public BurstDensityConstraints(BurstSolverImpl solver) : base(solver, Oni.ConstraintType.Density)
    {
        fluidParticles = new NativeList<int>(Allocator.Persistent);
    }

    // Creates a new density batch and appends it to the batch list.
    public override IConstraintsBatchImpl CreateConstraintsBatch()
    {
        var dataBatch = new BurstDensityConstraintsBatch(this);
        batches.Add(dataBatch);
        return dataBatch;
    }

    // Releases the persistent fluid particle list. The TempJob scratch arrays
    // (eta, smoothPositions, anisotropies) are not disposed here: they are
    // freed by the jobs that consume them via [DeallocateOnJobCompletion].
    public override void Dispose()
    {
        fluidParticles.Dispose();
    }

    // Removes a batch from the list and destroys its implementation.
    public override void RemoveBatch(IConstraintsBatchImpl batch)
    {
        batches.Remove(batch as BurstDensityConstraintsBatch);
        batch.Destroy();
    }

    // Sequential evaluation simply defers to the parallel path: the parallel
    // version already chains every batch as a dependency of the previous one.
    protected override JobHandle EvaluateSequential(JobHandle inputDeps, float stepTime, float substepTime, int substeps)
    {
        return EvaluateParallel(inputDeps, stepTime, substepTime, substeps);
    }

    // Full density solve for one substep: refresh pair interactions, accumulate
    // densities/gradients per batch, compute per-particle lambdas, then apply
    // position deltas per batch. Everything is chained on inputDeps.
    protected override JobHandle EvaluateParallel(JobHandle inputDeps, float stepTime, float substepTime, int substeps)
    {
        inputDeps = UpdateInteractions(inputDeps);
        // evaluate all batches as a chain of dependencies:
        for (int i = 0; i < batches.Count; ++i)
        {
            if (batches[i].enabled)
            {
                inputDeps = batches[i].Evaluate(inputDeps, stepTime, substepTime, substeps);
                m_Solver.ScheduleBatchedJobsIfNeeded();
            }
        }
        // calculate per-particle lambdas:
        inputDeps = CalculateLambdas(inputDeps, substepTime);
        // then apply them:
        for (int i = 0; i < batches.Count; ++i)
        {
            if (batches[i].enabled)
            {
                inputDeps = batches[i].Apply(inputDeps, substepTime);
                m_Solver.ScheduleBatchedJobsIfNeeded();
            }
        }
        return inputDeps;
    }

    // Post-projection velocity pass: XSPH viscosity + normals per batch,
    // then vorticity eta per batch, then a single job applying vorticity
    // confinement, atmospheric drag/pressure to all fluid particles.
    public JobHandle ApplyVelocityCorrections(JobHandle inputDeps, float deltaTime)
    {
        // eta is consumed (and deallocated) by ApplyVorticityAndAtmosphere below.
        eta = new NativeArray<float4>(((BurstSolverImpl)solver).particleCount, Allocator.TempJob);
        for (int i = 0; i < batches.Count; ++i)
        {
            if (batches[i].enabled)
            {
                inputDeps = batches[i].CalculateViscosityAndNormals(inputDeps, deltaTime);
                m_Solver.ScheduleBatchedJobsIfNeeded();
            }
        }
        for (int i = 0; i < batches.Count; ++i)
        {
            if (batches[i].enabled)
            {
                inputDeps = batches[i].CalculateVorticity(inputDeps);
                m_Solver.ScheduleBatchedJobsIfNeeded();
            }
        }
        inputDeps = ApplyVorticityAndAtmosphere(inputDeps, deltaTime);
        m_Solver.ScheduleBatchedJobsIfNeeded();
        return inputDeps;
    }

    // Computes per-particle anisotropy for rendering: accumulate smoothed
    // positions per batch, average them, accumulate covariances per batch,
    // then eigen-decompose into principal axes. Skipped (identity axes) when
    // maxAnisotropy <= 1.
    public JobHandle CalculateAnisotropyLaplacianSmoothing(JobHandle inputDeps)
    {
        // if the constraints are deactivated or we need no anisotropy:
        if (((BurstSolverImpl)solver).abstraction.parameters.maxAnisotropy <= 1)
            return IdentityAnisotropy(inputDeps);
        // both arrays are deallocated by AverageAnisotropyJob on completion:
        smoothPositions = new NativeArray<float4>(((BurstSolverImpl)solver).particleCount, Allocator.TempJob);
        anisotropies = new NativeArray<float3x3>(((BurstSolverImpl)solver).particleCount, Allocator.TempJob);
        for (int i = 0; i < batches.Count; ++i)
        {
            if (batches[i].enabled)
            {
                inputDeps = batches[i].AccumulateSmoothPositions(inputDeps);
                m_Solver.ScheduleBatchedJobsIfNeeded();
            }
        }
        inputDeps = AverageSmoothPositions(inputDeps);
        for (int i = 0; i < batches.Count; ++i)
        {
            if (batches[i].enabled)
            {
                inputDeps = batches[i].AccumulateAnisotropy(inputDeps);
                m_Solver.ScheduleBatchedJobsIfNeeded();
            }
        }
        return AverageAnisotropy(inputDeps);
    }

    // Zeroes per-particle fluid data, then recomputes gradient/kernel values
    // for every interaction pair from current positions.
    private JobHandle UpdateInteractions(JobHandle inputDeps)
    {
        // clear existing fluid data:
        var clearData = new ClearFluidDataJob()
        {
            fluidParticles = fluidParticles.AsDeferredJobArray(),
            fluidData = ((BurstSolverImpl)solver).abstraction.fluidData.AsNativeArray<float4>(),
        };
        inputDeps = clearData.Schedule(fluidParticles.Length, 64, inputDeps);
        // update fluid interactions:
        // NOTE(review): UpdateInteractionsJob.batchData is never assigned here
        // (it keeps its default value) and is unused by the job's Execute.
        var updateInteractions = new UpdateInteractionsJob()
        {
            pairs = m_Solver.fluidInteractions,
            positions = m_Solver.positions,
            radii = m_Solver.smoothingRadii,
            densityKernel = new Poly6Kernel(((BurstSolverImpl)solver).abstraction.parameters.mode == Oni.SolverParameters.Mode.Mode2D),
            gradientKernel = new SpikyKernel(((BurstSolverImpl)solver).abstraction.parameters.mode == Oni.SolverParameters.Mode.Mode2D),
        };
        return updateInteractions.Schedule(((BurstSolverImpl)solver).fluidInteractions.Length, 64, inputDeps);
    }

    // Schedules the per-particle lambda (lagrange multiplier) job over all
    // fluid particles, adding each particle's self-contribution.
    private JobHandle CalculateLambdas(JobHandle inputDeps, float deltaTime)
    {
        // calculate lagrange multipliers:
        var calculateLambdas = new CalculateLambdasJob()
        {
            fluidParticles = fluidParticles.AsDeferredJobArray(),
            invMasses = m_Solver.invMasses,
            radii = m_Solver.smoothingRadii,
            restDensities = m_Solver.restDensities,
            surfaceTension = m_Solver.surfaceTension,
            densityKernel = new Poly6Kernel(m_Solver.abstraction.parameters.mode == Oni.SolverParameters.Mode.Mode2D),
            gradientKernel = new SpikyKernel(m_Solver.abstraction.parameters.mode == Oni.SolverParameters.Mode.Mode2D),
            normals = m_Solver.normals,
            vorticity = m_Solver.vorticities,
            fluidData = m_Solver.fluidData
        };
        return calculateLambdas.Schedule(fluidParticles.Length,64,inputDeps);
    }

    // Schedules the final velocity-correction job; the eta array is handed
    // over to the job and deallocated when it completes.
    private JobHandle ApplyVorticityAndAtmosphere(JobHandle inputDeps, float deltaTime)
    {
        // calculate lagrange multipliers:
        var conf = new ApplyVorticityConfinementAndAtmosphere()
        {
            fluidParticles = fluidParticles.AsDeferredJobArray(),
            wind = m_Solver.wind,
            vorticities = m_Solver.vorticities,
            eta = eta,
            atmosphericDrag = m_Solver.athmosphericDrag,
            atmosphericPressure = m_Solver.athmosphericPressure,
            vorticityConfinement = m_Solver.vortConfinement,
            restDensities = m_Solver.restDensities,
            normals = m_Solver.normals,
            fluidData = m_Solver.fluidData,
            velocities = m_Solver.velocities,
            dt = deltaTime
        };
        return conf.Schedule(fluidParticles.Length, 64, inputDeps);
    }

    // Writes axis-aligned (isotropic) principal axes for every fluid particle.
    private JobHandle IdentityAnisotropy(JobHandle inputDeps)
    {
        var idAnisotropy = new IdentityAnisotropyJob()
        {
            fluidParticles = fluidParticles.AsDeferredJobArray(),
            principalAxes = m_Solver.anisotropies,
            radii = m_Solver.principalRadii
        };
        return idAnisotropy.Schedule(fluidParticles.Length, 64, inputDeps);
    }

    // Normalizes accumulated smooth positions by their accumulated weight.
    private JobHandle AverageSmoothPositions(JobHandle inputDeps)
    {
        var average = new AverageSmoothPositionsJob()
        {
            fluidParticles = fluidParticles.AsDeferredJobArray(),
            renderablePositions = m_Solver.renderablePositions,
            smoothPositions = smoothPositions
        };
        return average.Schedule(fluidParticles.Length, 64, inputDeps);
    }

    // Eigen-decomposes the accumulated covariances into principal axes/radii
    // and replaces renderable positions with the smoothed ones.
    private JobHandle AverageAnisotropy(JobHandle inputDeps)
    {
        var average = new AverageAnisotropyJob()
        {
            fluidParticles = fluidParticles.AsDeferredJobArray(),
            renderablePositions = m_Solver.renderablePositions,
            smoothPositions = smoothPositions,
            principalRadii = m_Solver.principalRadii,
            anisotropies = anisotropies,
            maxAnisotropy = m_Solver.abstraction.parameters.maxAnisotropy,
            principalAxes = m_Solver.anisotropies
        };
        return average.Schedule(fluidParticles.Length, 64, inputDeps);
    }

    // Zeroes the fluidData entry of each fluid particle so densities and
    // gradients can be re-accumulated this step.
    [BurstCompile]
    public struct ClearFluidDataJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<int> fluidParticles;
        // Random-access write: each work item writes a scattered index.
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> fluidData;
        public void Execute(int i)
        {
            fluidData[fluidParticles[i]] = float4.zero;
        }
    }

    // Recomputes, for every interaction pair, the normalized gradient direction
    // and the pair-averaged density/gradient kernel values.
    [BurstCompile]
    public struct UpdateInteractionsJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<float4> positions;
        [ReadOnly] public NativeArray<float> radii;
        [ReadOnly] public Poly6Kernel densityKernel;
        [ReadOnly] public SpikyKernel gradientKernel;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<FluidInteraction> pairs;
        // NOTE(review): batchData is unused in this job and is never assigned
        // at the schedule site (UpdateInteractions) — likely leftover.
        [ReadOnly] public BatchData batchData;
        public void Execute(int i)
        {
            var pair = pairs[i];
            // calculate normalized gradient vector:
            pair.gradient = (positions[pair.particleA] - positions[pair.particleB]);
            float distance = math.length(pair.gradient);
            // FLT_MIN_NORMAL guards against division by zero for coincident particles.
            pair.gradient /= distance + math.FLT_MIN_NORMAL;
            // calculate and store average density and gradient kernels:
            pair.avgKernel = (densityKernel.W(distance, radii[pair.particleA]) +
                              densityKernel.W(distance, radii[pair.particleB])) * 0.5f;
            pair.avgGradient = (gradientKernel.W(distance, radii[pair.particleA]) +
                                gradientKernel.W(distance, radii[pair.particleB])) * 0.5f;
            pairs[i] = pair;
        }
    }

    // Adds each particle's self-contribution to its fluid data, then evaluates
    // the density constraint and stores the resulting lambda.
    // fluidData layout per particle (as used here): x = density, y = lambda,
    // z = accumulated gradient term, w = accumulated squared gradient term
    // (neighbor contributions to x/z/w are accumulated by the batch jobs).
    [BurstCompile]
    public struct CalculateLambdasJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<int> fluidParticles;
        [ReadOnly] public NativeArray<float> invMasses;
        [ReadOnly] public NativeArray<float> radii;
        [ReadOnly] public NativeArray<float> restDensities;
        [ReadOnly] public NativeArray<float> surfaceTension;
        [ReadOnly] public Poly6Kernel densityKernel;
        [ReadOnly] public SpikyKernel gradientKernel;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> normals;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> vorticity;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> fluidData;
        public void Execute(int p)
        {
            int i = fluidParticles[p];
            // reset normals/vorticity; they are re-accumulated in the velocity pass.
            normals[i] = float4.zero;
            vorticity[i] = float4.zero;
            float4 data = fluidData[i];
            float grad = gradientKernel.W(0, radii[i]) / invMasses[i] / restDensities[i];
            // self particle contribution to density and gradient:
            data += new float4(densityKernel.W(0, radii[i]), 0, grad, grad * grad + data[2] * data[2]);
            // weight by mass:
            data[0] /= invMasses[i];
            // evaluate density constraint (clamp pressure):
            // negative pressure is clamped to -0.5 * surfaceTension.
            float constraint = math.max(-0.5f * surfaceTension[i], data[0] / restDensities[i] - 1);
            // calculate lambda:
            data[1] = -constraint / (invMasses[i] * data[3] + math.FLT_MIN_NORMAL);
            fluidData[i] = data;
        }
    }

    // Applies atmospheric drag/pressure and vorticity confinement to fluid
    // particle velocities. Consumes (and deallocates) the eta array.
    [BurstCompile]
    public struct ApplyVorticityConfinementAndAtmosphere : IJobParallelFor
    {
        [ReadOnly] public NativeArray<int> fluidParticles;
        [ReadOnly] public NativeArray<float4> wind;
        [ReadOnly] public NativeArray<float4> vorticities;
        [ReadOnly] public NativeArray<float> atmosphericDrag;
        [ReadOnly] public NativeArray<float> atmosphericPressure;
        [ReadOnly] public NativeArray<float> vorticityConfinement;
        [ReadOnly] public NativeArray<float> restDensities;
        [ReadOnly] public NativeArray<float4> normals;
        [ReadOnly] public NativeArray<float4> fluidData;
        // eta is TempJob-allocated by the owner and freed when this job completes.
        [DeallocateOnJobCompletion] [ReadOnly] public NativeArray<float4> eta;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> velocities;
        [ReadOnly] public float dt;
        public void Execute(int p)
        {
            int i = fluidParticles[p];
            //atmospheric drag:
            float4 velocityDiff = velocities[i] - wind[i];
            // particles near the surface should experience drag:
            // (drag fades out as density approaches the rest density)
            velocities[i] -= atmosphericDrag[i] * velocityDiff * math.max(0, 1 - fluidData[i][0] / restDensities[i]) * dt;
            // ambient pressure:
            velocities[i] += atmosphericPressure[i] * normals[i] * dt;
            // apply vorticity confinement:
            velocities[i] += new float4(math.cross(math.normalizesafe(eta[i]).xyz,vorticities[i].xyz), 0) * vorticityConfinement[i] * dt;
        }
    }

    // Writes axis-aligned principal axes for each fluid particle.
    // principalAxes stores 3 float4 entries per particle: xyz = axis direction,
    // w = radius along that axis (here radii[i].x for all three → isotropic).
    [BurstCompile]
    public struct IdentityAnisotropyJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<int> fluidParticles;
        [ReadOnly] public NativeArray<float4> radii;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> principalAxes;
        public void Execute(int p)
        {
            int i = fluidParticles[p];
            // align the principal axes of the particle with the solver axes:
            principalAxes[i * 3] = new float4(1,0,0,radii[i].x);
            principalAxes[i * 3 + 1] = new float4(0,1,0,radii[i].x);
            principalAxes[i * 3 + 2] = new float4(0,0,1,radii[i].x);
        }
    }

    // Divides each accumulated smooth position by its accumulated kernel weight
    // (stored in w); falls back to the renderable position when no neighbor
    // contributed any weight.
    [BurstCompile]
    public struct AverageSmoothPositionsJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<int> fluidParticles;
        [ReadOnly] public NativeArray<float4> renderablePositions;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> smoothPositions;
        public void Execute(int p)
        {
            int i = fluidParticles[p];
            if (smoothPositions[i].w > 0)
                smoothPositions[i] /= smoothPositions[i].w;
            else
                smoothPositions[i] = renderablePositions[i];
        }
    }

    // Eigen-decomposes each particle's averaged covariance into principal axes,
    // clamps axis lengths by maxAnisotropy, and replaces the renderable
    // position with the smoothed one. Deallocates both scratch arrays.
    [BurstCompile]
    public struct AverageAnisotropyJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<int> fluidParticles;
        [ReadOnly] public NativeArray<float4> principalRadii;
        [ReadOnly] public float maxAnisotropy;
        [ReadOnly]
        [DeallocateOnJobCompletion]
        public NativeArray<float4> smoothPositions;
        [ReadOnly]
        [DeallocateOnJobCompletion]
        public NativeArray<float3x3> anisotropies;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> renderablePositions;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> principalAxes;
        public void Execute(int p)
        {
            int i = fluidParticles[p];
            // only decompose when there was neighbor data and the covariance
            // trace is above a small threshold (degenerate otherwise):
            if (smoothPositions[i].w > 0 && (anisotropies[i].c0[0] + anisotropies[i].c1[1] + anisotropies[i].c2[2]) > 0.01f)
            {
                float3 singularValues;
                float3x3 u;
                BurstMath.EigenSolve(anisotropies[i] / smoothPositions[i].w, out singularValues, out u);
                float max = singularValues[0];
                // clamp each singular value to max/maxAnisotropy, then scale so
                // the largest axis equals the particle's principal radius:
                float3 s = math.max(singularValues,new float3(max / maxAnisotropy)) / max * principalRadii[i].x;
                principalAxes[i * 3] = new float4(u.c0, s.x);
                principalAxes[i * 3 + 1] = new float4(u.c1, s.y);
                principalAxes[i * 3 + 2] = new float4(u.c2, s.z);
            }
            else
            {
                // isotropic fallback at the minimum allowed radius:
                float radius = principalRadii[i].x / maxAnisotropy;
                principalAxes[i * 3] = new float4(1, 0, 0, radius);
                principalAxes[i * 3 + 1] = new float4(0, 1, 0, radius);
                principalAxes[i * 3 + 2] = new float4(0, 0, 1, radius);
            }
            renderablePositions[i] = smoothPositions[i];
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: df96ac4db14c846ea99a6ebc5771098f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,371 @@
#if (OBI_BURST && OBI_MATHEMATICS && OBI_COLLECTIONS)
using UnityEngine;
using Unity.Jobs;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Mathematics;
using Unity.Burst;
using System.Collections;
namespace Obi
{
/// <summary>
/// One batch of fluid density constraints. Each method schedules a Burst job
/// over this batch's range of interaction pairs (described by batchData);
/// per-particle passes live in BurstDensityConstraints.
/// </summary>
public class BurstDensityConstraintsBatch : BurstConstraintsBatchImpl, IDensityConstraintsBatchImpl
{
    // Work-item layout of this batch's interaction pairs; each work item maps
    // to a contiguous constraint range via GetConstraintRange.
    public BatchData batchData;

    public BurstDensityConstraintsBatch(BurstDensityConstraints constraints)
    {
        m_Constraints = constraints;
        m_ConstraintType = Oni.ConstraintType.Density;
    }

    // Density batches need no per-substep initialization.
    public override JobHandle Initialize(JobHandle inputDeps, float substepTime)
    {
        return inputDeps;
    }

    // Accumulates neighbor contributions to densities/gradients and diffuses
    // user data across this batch's pairs.
    public override JobHandle Evaluate(JobHandle inputDeps, float stepTime, float substepTime, int substeps)
    {
        // update densities and gradients:
        var updateDensities = new UpdateDensitiesJob()
        {
            pairs = ((BurstSolverImpl)constraints.solver).fluidInteractions,
            positions = solverImplementation.positions,
            invMasses = solverImplementation.invMasses,
            restDensities = solverImplementation.restDensities,
            diffusion = solverImplementation.diffusion,
            userData = solverImplementation.userData,
            fluidData = solverImplementation.fluidData,
            batchData = batchData,
            dt = substepTime
        };
        // inner-loop chunk size: 1 work item per chunk for regular batches;
        // the last batch is scheduled as a single chunk (count == workItemCount).
        int batchCount = batchData.isLast ? batchData.workItemCount : 1;
        return updateDensities.Schedule(batchData.workItemCount, batchCount, inputDeps);
    }

    // Applies density-constraint position deltas (with tensile-instability
    // correction), scaled by the solver's SOR factor.
    public override JobHandle Apply(JobHandle inputDeps, float substepTime)
    {
        var parameters = solverAbstraction.GetConstraintParameters(m_ConstraintType);
        // update densities and gradients:
        var apply = new ApplyDensityConstraintsJob()
        {
            invMasses = solverImplementation.invMasses,
            radii = solverImplementation.smoothingRadii,
            restDensities = solverImplementation.restDensities,
            surfaceTension = solverImplementation.surfaceTension,
            pairs = ((BurstSolverImpl)constraints.solver).fluidInteractions,
            densityKernel = new Poly6Kernel(solverAbstraction.parameters.mode == Oni.SolverParameters.Mode.Mode2D),
            positions = solverImplementation.positions,
            fluidData = solverImplementation.fluidData,
            batchData = batchData,
            sorFactor = parameters.SORFactor
        };
        int batchCount = batchData.isLast ? batchData.workItemCount : 1;
        return apply.Schedule(batchData.workItemCount, batchCount, inputDeps);
    }

    // Accumulates XSPH viscosity, vorticity and color-field normals over this
    // batch's pairs.
    public JobHandle CalculateViscosityAndNormals(JobHandle inputDeps, float deltaTime)
    {
        var viscosity = new NormalsViscosityAndVorticityJob()
        {
            positions = solverImplementation.positions,
            invMasses = solverImplementation.invMasses,
            radii = solverImplementation.smoothingRadii,
            restDensities = solverImplementation.restDensities,
            viscosities = solverImplementation.viscosities,
            fluidData = solverImplementation.fluidData,
            pairs = ((BurstSolverImpl)constraints.solver).fluidInteractions,
            velocities = solverImplementation.velocities,
            vorticities = solverImplementation.vorticities,
            normals = solverImplementation.normals,
            batchData = batchData
        };
        int batchCount = batchData.isLast ? batchData.workItemCount : 1;
        return viscosity.Schedule(batchData.workItemCount, batchCount, inputDeps);
    }

    // Accumulates the vorticity gradient ("eta") used later for vorticity
    // confinement; writes into the shared eta array owned by the constraints.
    public JobHandle CalculateVorticity(JobHandle inputDeps)
    {
        var eta = new CalculateVorticityEta()
        {
            invMasses = solverImplementation.invMasses,
            restDensities = solverImplementation.restDensities,
            pairs = ((BurstSolverImpl)constraints.solver).fluidInteractions,
            vorticities = solverImplementation.vorticities,
            eta = ((BurstDensityConstraints)this.constraints).eta,
            batchData = batchData
        };
        int batchCount = batchData.isLast ? batchData.workItemCount : 1;
        return eta.Schedule(batchData.workItemCount, batchCount, inputDeps);
    }

    // Accumulates kernel-weighted neighbor positions into the shared
    // smoothPositions array (used for anisotropy/laplacian smoothing).
    public JobHandle AccumulateSmoothPositions(JobHandle inputDeps)
    {
        var accumulateSmooth = new AccumulateSmoothPositionsJob()
        {
            renderablePositions = solverImplementation.renderablePositions,
            smoothPositions = ((BurstDensityConstraints)this.constraints).smoothPositions,
            radii = solverImplementation.smoothingRadii,
            densityKernel = new Poly6Kernel(solverAbstraction.parameters.mode == Oni.SolverParameters.Mode.Mode2D),
            pairs = ((BurstSolverImpl)constraints.solver).fluidInteractions,
            batchData = batchData
        };
        int batchCount = batchData.isLast ? batchData.workItemCount : 1;
        return accumulateSmooth.Schedule(batchData.workItemCount, batchCount, inputDeps);
    }

    // Accumulates neighbor covariance matrices into the shared anisotropies
    // array (consumed by AverageAnisotropyJob).
    public JobHandle AccumulateAnisotropy(JobHandle inputDeps)
    {
        var accumulateAnisotropy = new AccumulateAnisotropyJob()
        {
            renderablePositions = solverImplementation.renderablePositions,
            smoothPositions = ((BurstDensityConstraints)this.constraints).smoothPositions,
            anisotropies = ((BurstDensityConstraints)this.constraints).anisotropies,
            pairs = ((BurstSolverImpl)constraints.solver).fluidInteractions,
            batchData = batchData
        };
        int batchCount = batchData.isLast ? batchData.workItemCount : 1;
        return accumulateAnisotropy.Schedule(batchData.workItemCount, batchCount, inputDeps);
    }

    // For each pair, adds volume-weighted kernel/gradient contributions to both
    // particles' fluidData, and diffuses userData between them.
    // Pairs inside one batch touch disjoint particles, so scattered writes to
    // fluidData/userData are race-free within the batch — presumably guaranteed
    // by the batching scheme (TODO confirm against the batch builder).
    [BurstCompile]
    public struct UpdateDensitiesJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<float4> positions;
        [ReadOnly] public NativeArray<float> invMasses;
        [ReadOnly] public NativeArray<float> restDensities;
        [ReadOnly] public NativeArray<float> diffusion;
        [ReadOnly] public NativeArray<FluidInteraction> pairs;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> userData;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> fluidData;
        [ReadOnly] public BatchData batchData;
        [ReadOnly] public float dt;
        public void Execute(int workItemIndex)
        {
            int start, end;
            batchData.GetConstraintRange(workItemIndex, out start, out end);
            for (int i = start; i < end; ++i)
            {
                var pair = pairs[i];
                // rest volume = mass / rest density:
                float restVolumeA = 1.0f / invMasses[pair.particleA] / restDensities[pair.particleA];
                float restVolumeB = 1.0f / invMasses[pair.particleB] / restDensities[pair.particleB];
                float gradA = restVolumeB * pair.avgGradient;
                float gradB = restVolumeA * pair.avgGradient;
                // volume ratios weight each particle's contribution to the other:
                float vA = restVolumeB / restVolumeA;
                float vB = restVolumeA / restVolumeB;
                // accumulate pbf data (density, gradients):
                fluidData[pair.particleA] += new float4(vA * pair.avgKernel, 0, gradA, gradA * gradA);
                fluidData[pair.particleB] += new float4(vB * pair.avgKernel, 0, gradB, gradB * gradB);
                // property diffusion:
                float diffusionSpeed = (diffusion[pair.particleA] + diffusion[pair.particleB]) * pair.avgKernel * dt;
                float4 userDelta = (userData[pair.particleB] - userData[pair.particleA]) * diffusionSpeed;
                userData[pair.particleA] += vA * userDelta;
                userData[pair.particleB] -= vB * userDelta;
            }
        }
    }

    // Moves each pair's particles along the pair gradient by their combined
    // lambdas (fluidData[...][1]) plus an artificial-pressure correction term,
    // relaxed by the SOR factor.
    [BurstCompile]
    public struct ApplyDensityConstraintsJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<float> invMasses;
        [ReadOnly] public NativeArray<float> radii;
        [ReadOnly] public NativeArray<float> restDensities;
        [ReadOnly] public NativeArray<float> surfaceTension;
        [ReadOnly] public NativeArray<FluidInteraction> pairs;
        [ReadOnly] public Poly6Kernel densityKernel;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> positions;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> fluidData;
        [ReadOnly] public BatchData batchData;
        [ReadOnly] public float sorFactor;
        public void Execute(int workItemIndex)
        {
            int start, end;
            batchData.GetConstraintRange(workItemIndex, out start, out end);
            for (int i = start; i < end; ++i)
            {
                var pair = pairs[i];
                float restVolumeA = 1.0f / invMasses[pair.particleA] / restDensities[pair.particleA];
                float restVolumeB = 1.0f / invMasses[pair.particleB] / restDensities[pair.particleB];
                // calculate tensile instability correction factor:
                // (kernel value relative to the kernel at zero distance)
                float wAvg = pair.avgKernel / ((densityKernel.W(0, radii[pair.particleA]) + densityKernel.W(0, radii[pair.particleB])) * 0.5f);
                float scorrA = -(0.001f + 0.2f * surfaceTension[pair.particleA]) * wAvg / (invMasses[pair.particleA] * fluidData[pair.particleA][3]);
                float scorrB = -(0.001f + 0.2f * surfaceTension[pair.particleB]) * wAvg / (invMasses[pair.particleB] * fluidData[pair.particleB][3]);
                // calculate position delta:
                float4 delta = pair.gradient * pair.avgGradient * ((fluidData[pair.particleA][1] + scorrA) * restVolumeB + (fluidData[pair.particleB][1] + scorrB) * restVolumeA) * sorFactor;
                positions[pair.particleA] += delta * invMasses[pair.particleA];
                positions[pair.particleB] -= delta * invMasses[pair.particleB];
            }
        }
    }

    // Applies XSPH viscosity to velocities, accumulates pairwise vorticity, and
    // accumulates color-field normals for each pair in this batch.
    [BurstCompile]
    public struct NormalsViscosityAndVorticityJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<float4> positions;
        [ReadOnly] public NativeArray<float> invMasses;
        [ReadOnly] public NativeArray<float> radii;
        [ReadOnly] public NativeArray<float> restDensities;
        [ReadOnly] public NativeArray<float> viscosities;
        [ReadOnly] public NativeArray<float4> fluidData;
        [ReadOnly] public NativeArray<FluidInteraction> pairs;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> velocities;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> vorticities;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> normals;
        [ReadOnly] public BatchData batchData;
        public void Execute(int workItemIndex)
        {
            int start, end;
            batchData.GetConstraintRange(workItemIndex, out start, out end);
            for (int i = start; i < end; ++i)
            {
                var pair = pairs[i];
                float restVolumeA = 1.0f / invMasses[pair.particleA] / restDensities[pair.particleA];
                float restVolumeB = 1.0f / invMasses[pair.particleB] / restDensities[pair.particleB];
                // XSPH viscosity:
                // (uses the smaller of the two particles' viscosities)
                float viscosityCoeff = math.min(viscosities[pair.particleA], viscosities[pair.particleB]);
                float4 relVelocity = velocities[pair.particleB] - velocities[pair.particleA];
                float4 viscosity = viscosityCoeff * relVelocity * pair.avgKernel;
                velocities[pair.particleA] += viscosity * restVolumeB;
                velocities[pair.particleB] -= viscosity * restVolumeA;
                // calculate vorticity:
                float4 vgrad = pair.gradient * pair.avgGradient;
                float4 vorticity = new float4(math.cross(relVelocity.xyz,vgrad.xyz),0);
                vorticities[pair.particleA] += vorticity * restVolumeB;
                vorticities[pair.particleB] += vorticity * restVolumeA;
                // calculate color field normal:
                float radius = (radii[pair.particleA] + radii[pair.particleB]) * 0.5f;
                normals[pair.particleA] += vgrad * radius / invMasses[pair.particleB] / fluidData[pair.particleB][0];
                normals[pair.particleB] -= vgrad * radius / invMasses[pair.particleA] / fluidData[pair.particleA][0];
            }
        }
    }

    // Accumulates the gradient of vorticity magnitude ("eta") per particle;
    // consumed later by the vorticity-confinement pass.
    [BurstCompile]
    public struct CalculateVorticityEta : IJobParallelFor
    {
        [ReadOnly] public NativeArray<float4> vorticities;
        [ReadOnly] public NativeArray<float> invMasses;
        [ReadOnly] public NativeArray<float> restDensities;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<FluidInteraction> pairs;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> eta;
        [ReadOnly] public BatchData batchData;
        public void Execute(int workItemIndex)
        {
            int start, end;
            batchData.GetConstraintRange(workItemIndex, out start, out end);
            for (int i = start; i < end; ++i)
            {
                var pair = pairs[i];
                float4 vgrad = pair.gradient * pair.avgGradient;
                eta[pair.particleA] += math.length(vorticities[pair.particleA]) * vgrad / invMasses[pair.particleB] / restDensities[pair.particleB];
                eta[pair.particleB] -= math.length(vorticities[pair.particleB]) * vgrad / invMasses[pair.particleA] / restDensities[pair.particleA];
            }
        }
    }

    // Accumulates kernel-weighted neighbor positions (weight in w) based on
    // renderable positions. Note it recomputes pair.avgKernel from renderable
    // distances and writes it back into pairs — the stored avgKernel is then
    // reused by AccumulateAnisotropyJob.
    [BurstCompile]
    public struct AccumulateSmoothPositionsJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<float4> renderablePositions;
        [ReadOnly] public NativeArray<float> radii;
        [ReadOnly] public Poly6Kernel densityKernel;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float4> smoothPositions;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<FluidInteraction> pairs;
        [ReadOnly] public BatchData batchData;
        public void Execute(int workItemIndex)
        {
            int start, end;
            batchData.GetConstraintRange(workItemIndex, out start, out end);
            for (int i = start; i < end; ++i)
            {
                var pair = pairs[i];
                float4 gradient = (renderablePositions[pair.particleA] - renderablePositions[pair.particleB]);
                float distance = math.length(gradient);
                pair.avgKernel = (densityKernel.W(distance, radii[pair.particleA]) +
                                  densityKernel.W(distance, radii[pair.particleB])) * 0.5f;
                smoothPositions[pair.particleA] += new float4(renderablePositions[pair.particleB].xyz,1) * pair.avgKernel;
                smoothPositions[pair.particleB] += new float4(renderablePositions[pair.particleA].xyz,1) * pair.avgKernel;
                pairs[i] = pair;
            }
        }
    }

    // Accumulates kernel-weighted outer products of (neighbor - smoothed
    // position) offsets into each particle's covariance matrix.
    [BurstCompile]
    public struct AccumulateAnisotropyJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<float4> renderablePositions;
        [ReadOnly] public NativeArray<float4> smoothPositions;
        [ReadOnly] public NativeArray<FluidInteraction> pairs;
        [NativeDisableContainerSafetyRestriction][NativeDisableParallelForRestriction] public NativeArray<float3x3> anisotropies;
        [ReadOnly] public BatchData batchData;
        public void Execute(int workItemIndex)
        {
            int start, end;
            batchData.GetConstraintRange(workItemIndex, out start, out end);
            for (int i = start; i < end; ++i)
            {
                var pair = pairs[i];
                float4 distanceA = renderablePositions[pair.particleB] - smoothPositions[pair.particleA];
                float4 distanceB = renderablePositions[pair.particleA] - smoothPositions[pair.particleB];
                // multrnsp builds the outer product; weighted by the pair kernel:
                anisotropies[pair.particleA] += BurstMath.multrnsp(distanceA,distanceA) * pair.avgKernel;
                anisotropies[pair.particleB] += BurstMath.multrnsp(distanceB,distanceB) * pair.avgKernel;
            }
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8292e6ef6129f47abaee4fb2cb49055e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 5a128a7c745c84794a944362f49011fc
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,36 @@
#if (OBI_BURST && OBI_MATHEMATICS && OBI_COLLECTIONS)
using System;
using Unity.Mathematics;
namespace Obi
{
/// <summary>
/// Poly6 SPH smoothing kernel. Precomputes the dimensional normalization
/// constant at construction; W(r, h) evaluates (h² - r²)³ scaled by that
/// constant, with r clamped to the support radius h so the kernel is zero
/// beyond it.
/// </summary>
public struct Poly6Kernel
{
    // Normalization constant: 4/π in 2D, 315/(64π) in 3D
    // (the remaining powers of h are divided out in W).
    public float norm;
    // True when the 2D normalization should be used.
    public bool norm2D;

    public Poly6Kernel(bool norm2D)
    {
        this.norm2D = norm2D;
        norm = norm2D ? 4.0f / math.PI : 315.0f / (64.0f * math.PI);
    }

    /// <summary>Evaluates the kernel at distance r with smoothing radius h.</summary>
    public float W(float r, float h)
    {
        float hSq = h * h;
        float hPow4 = hSq * hSq;
        float hPow8 = hPow4 * hPow4;
        // clamp distance to the support radius:
        float clamped = math.min(r, h);
        float diff = hSq - clamped * clamped;
        // 2D: norm / h⁸ · (h²-r²)³   3D: norm / h⁹ · (h²-r²)³
        return norm2D
            ? norm / hPow8 * diff * diff * diff
            : norm / (hPow8 * h) * diff * diff * diff;
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7a3990134524143ac852b488554f6d4e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,35 @@
#if (OBI_BURST && OBI_MATHEMATICS && OBI_COLLECTIONS)
using System;
using Unity.Mathematics;
namespace Obi
{
/// <summary>
/// Spiky SPH kernel (gradient magnitude form). Precomputes the dimensional
/// normalization constant at construction; W(r, h) evaluates (h - r)² scaled
/// by that constant, with r clamped to the support radius h.
/// </summary>
public struct SpikyKernel
{
    // Normalization constant: -30/π in 2D, -45/π in 3D
    // (the remaining powers of h are divided out in W).
    public float norm;
    // True when the 2D normalization should be used.
    public bool norm2D;

    public SpikyKernel(bool norm2D)
    {
        this.norm2D = norm2D;
        norm = norm2D ? -30.0f / math.PI : -45.0f / math.PI;
    }

    /// <summary>Evaluates the kernel at distance r with smoothing radius h.</summary>
    public float W(float r, float h)
    {
        float hSq = h * h;
        float hPow4 = hSq * hSq;
        // clamp distance to the support radius:
        float clamped = math.min(r, h);
        float diff = h - clamped;
        // 2D: norm / h⁵ · (h-r)²   3D: norm / h⁶ · (h-r)²
        return norm2D
            ? norm / (hPow4 * h) * diff * diff
            : norm / (hPow4 * hSq) * diff * diff;
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a407989bfa0664e9ab75773d1808f549
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: