Initial project commit

This commit is contained in:
2026-01-08 16:50:20 +00:00
commit f0c5a8b267
29596 changed files with 4861782 additions and 0 deletions

View File

@@ -0,0 +1,189 @@
using System;
using NUnit.Framework;
using UnityEngine.Scripting;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Jobs;
using Unity.Jobs.LowLevel.Unsafe;
using Unity.Burst;
using System.Diagnostics;
namespace Unity.Jobs.Tests.ManagedJobs
{
internal class BurstScheduleTests : JobTestsFixtureBasic
{
    /// <summary>
    /// [BurstDiscard] strips this body from Burst-compiled callers, so the flag is
    /// only set to false when the call site runs as managed (non-Burst) code.
    /// </summary>
    [BurstDiscard]
    public static void TestBurstCompiled(ref bool falseIfNot)
    {
        falseIfNot = false;
    }

    /// <summary>Returns true when this method itself executed as Burst-compiled code.</summary>
    [BurstCompile(CompileSynchronously = true)]
    public static bool IsBurstEnabled()
    {
        bool burstCompiled = true;
        TestBurstCompiled(ref burstCompiled);
        return burstCompiled;
    }

    [BurstCompile(CompileSynchronously = true)]
    struct SimpleIJobParallelForDefer : IJobParallelForDefer
    {
        // executed[0] is set to 1 by any invocation so the caller can tell the job ran.
        public NativeArray<int> executed;

        public void Execute(int index)
        {
            executed[0] = 1;
        }

        /// <summary>
        /// Schedules this job from a Burst-compiled context. Return value encodes two bits:
        /// bit 1 (value 2) set when this schedule site was Burst compiled, bit 0 (value 1)
        /// set when the job actually executed. IJobParallelForDefer has no Run entry
        /// points, so only the Schedule cases are handled here.
        /// </summary>
        [BurstCompile(CompileSynchronously = true)]
        public static int TestBurstScheduleJob(JobRunType runType, ref RewindableAllocator allocator)
        {
            bool burstCompiled = true;
            TestBurstCompiled(ref burstCompiled);
            var dummyList = new NativeList<int>(Allocator.Temp);
            dummyList.Add(5);
            var job = new SimpleIJobParallelForDefer() { executed = new NativeArray<int>(1, allocator.ToAllocator) };
            switch (runType)
            {
                case JobRunType.Schedule: job.Schedule(dummyList, 1).Complete(); break;
                case JobRunType.ScheduleByRef: job.ScheduleByRef(dummyList, 1).Complete(); break;
            }
            dummyList.Dispose();
            int ret = (burstCompiled ? 2 : 0) + job.executed[0];
            job.executed.Dispose();
            return ret;
        }
    }

    [TestCase(JobRunType.Schedule)]
    [TestCase(JobRunType.ScheduleByRef)]
    public unsafe void IJobParallelForDefer_Jobs_FromBurst(JobRunType runType)
    {
        if (!IsBurstEnabled())
            return;
        int ret = SimpleIJobParallelForDefer.TestBurstScheduleJob(runType, ref RwdAllocator);
        Assert.IsTrue((ret & 2) != 0, "Job schedule site not burst compiled");
        Assert.IsTrue((ret & 1) != 0, "Job with burst compiled schedule site didn't execute");
    }

    [BurstCompile(CompileSynchronously = true)]
    struct SimpleIJobParallelForBatch : IJobParallelForBatch
    {
        public NativeArray<int> executed;

        public void Execute(int startIndex, int count)
        {
            executed[0] = 1;
        }

        /// <summary>
        /// Batch-job analogue of the defer test above; same bit-encoded result.
        /// All four entry points exist for IJobParallelForBatch, so the switch covers them all.
        /// </summary>
        [BurstCompile(CompileSynchronously = true)]
        public static int TestBurstScheduleJob(JobRunType runType, ref RewindableAllocator allocator)
        {
            bool burstCompiled = true;
            TestBurstCompiled(ref burstCompiled);
            var job = new SimpleIJobParallelForBatch() { executed = new NativeArray<int>(1, allocator.ToAllocator) };
            switch (runType)
            {
                case JobRunType.Schedule: job.ScheduleBatch(1, 1).Complete(); break;
                case JobRunType.ScheduleByRef: job.ScheduleBatchByRef(1, 1).Complete(); break;
                case JobRunType.Run: job.RunBatch(1); break;
                case JobRunType.RunByRef: job.RunBatchByRef(1); break;
            }
            int ret = (burstCompiled ? 2 : 0) + job.executed[0];
            job.executed.Dispose();
            return ret;
        }
    }

    [TestCase(JobRunType.Schedule)]
    [TestCase(JobRunType.ScheduleByRef)]
    [TestCase(JobRunType.Run)]
    [TestCase(JobRunType.RunByRef)]
    public unsafe void IJobParallelForBatch_Jobs_FromBurst(JobRunType runType)
    {
        if (!IsBurstEnabled())
            return;
        int ret = SimpleIJobParallelForBatch.TestBurstScheduleJob(runType, ref RwdAllocator);
        Assert.IsTrue((ret & 2) != 0, "Job schedule site not burst compiled");
        Assert.IsTrue((ret & 1) != 0, "Job with burst compiled schedule site didn't execute");
    }

    [BurstCompile(CompileSynchronously = true)]
    struct SimpleIJobFilter : IJobFilter
    {
        public NativeArray<int> executed;

        public bool Execute(int index)
        {
            executed[0] = 1;
            return false;
        }

        /// <summary>
        /// Filter-job analogue; exercises both the Filter and Append variants for each
        /// run type. Same bit-encoded result as the other TestBurstScheduleJob methods.
        /// </summary>
        [BurstCompile(CompileSynchronously = true)]
        public static int TestBurstScheduleJob(JobRunType runType, ref RewindableAllocator allocator)
        {
            bool burstCompiled = true;
            TestBurstCompiled(ref burstCompiled);
            var dummyList = new NativeList<int>(Allocator.Temp);
            dummyList.Add(5);
            var job = new SimpleIJobFilter() { executed = new NativeArray<int>(1, allocator.ToAllocator) };
            switch (runType)
            {
                case JobRunType.Schedule:
                    job.ScheduleFilter(dummyList).Complete();
                    job.ScheduleAppend(dummyList, 1).Complete();
                    break;
                case JobRunType.ScheduleByRef:
                    job.ScheduleFilterByRef(dummyList).Complete();
                    job.ScheduleAppendByRef(dummyList, 1).Complete();
                    break;
                case JobRunType.Run:
                    job.RunFilter(dummyList);
                    job.RunAppend(dummyList, 1);
                    break;
                case JobRunType.RunByRef:
                    job.RunFilterByRef(dummyList);
                    job.RunAppendByRef(dummyList, 1);
                    break;
            }
            dummyList.Dispose();
            int ret = (burstCompiled ? 2 : 0) + job.executed[0];
            job.executed.Dispose();
            return ret;
        }
    }

    [TestCase(JobRunType.Schedule)]
    [TestCase(JobRunType.ScheduleByRef)]
    [TestCase(JobRunType.Run)]
    [TestCase(JobRunType.RunByRef)]
    public unsafe void IJobFilter_Jobs_FromBurst(JobRunType runType)
    {
        if (!IsBurstEnabled())
            return;
        int ret = SimpleIJobFilter.TestBurstScheduleJob(runType, ref RwdAllocator);
        Assert.IsTrue((ret & 2) != 0, "Job schedule site not burst compiled");
        Assert.IsTrue((ret & 1) != 0, "Job with burst compiled schedule site didn't execute");
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f93f25e0a74e18d4d850b4f26fb47d7d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,156 @@
using NUnit.Framework;
using System;
using Unity.Jobs;
using Unity.Collections;
using Unity.Jobs.Tests.ManagedJobs;
#pragma warning disable 0219
internal class ParallelFilterJobTests : JobTestsFixtureBasic
{
    // Plain-IJob counterpart of FilterMod7Job: appends every index in [0, produceCount)
    // divisible by 7 to the list.
    struct NativeListAddMod7Job : IJob
    {
        NativeList<int> list;
        int produceCount;

        public NativeListAddMod7Job(NativeList<int> list, int produceCount)
        {
            this.list = list;
            this.produceCount = produceCount;
        }

        public void Execute()
        {
            for (int index = 0; index != produceCount; index++)
            {
                if (index % 7 == 0)
                    list.Add(index);
            }
        }
    }

    // Keeps only indices divisible by 7.
    struct FilterMod7Job : IJobFilter
    {
        public bool Execute(int index)
        {
            return index % 7 == 0;
        }
    }

    // Keeps every index.
    struct FilterAllJob : IJobFilter
    {
        public bool Execute(int index)
        {
            return true;
        }
    }

    // Verifies that appending via a filter job and via a plain job produce the same
    // contents, and that the two pre-existing elements are left untouched at the front.
    [Test]
    public void AddElementForEach([Values] bool userFilterJob)
    {
        var list = new NativeList<int>(0, RwdAllocator.ToAllocator);
        list.Add(-1);
        list.Add(-2);
        if (userFilterJob)
        {
            var job = new FilterMod7Job();
            job.ScheduleAppend(list, 1000).Complete();
        }
        else
        {
            var job = new NativeListAddMod7Job(list, 1000);
            job.Schedule().Complete();
        }
        // counter starts at 2 to skip the two sentinel elements added above.
        int counter = 2;
        for (int i = 0; i != 1000; i++)
        {
            if (i % 7 == 0)
            {
                Assert.AreEqual(i, list[counter]);
                counter++;
            }
        }
        Assert.AreEqual(-1, list[0]);
        Assert.AreEqual(-2, list[1]);
        Assert.AreEqual(counter, list.Length);
    }

    // Two chained append jobs: mod-7 over [0,14) yields {0,7}, then FilterAllJob over
    // [0,2) appends {0,1}.
    [Test]
    public void FilterProduceChained()
    {
        var list = new NativeList<int>(3, RwdAllocator.ToAllocator);
        var jobHandle = new FilterMod7Job().ScheduleAppend(list, 14);
        jobHandle = new FilterAllJob().ScheduleAppend(list, 2, jobHandle);
        jobHandle.Complete();
        Assert.AreEqual(0, list[0]);
        Assert.AreEqual(7, list[1]);
        Assert.AreEqual(0, list[2]);
        Assert.AreEqual(1, list[3]);
        Assert.AreEqual(4, list.Length);
    }

    // NOTE(review): byte-for-byte identical to FilterProduceChained — presumably one of
    // the two was meant to exercise a different scheduling path; confirm intent.
    [Test]
    public void FilterAppendChained()
    {
        var list = new NativeList<int>(3, RwdAllocator.ToAllocator);
        var jobHandle = new FilterMod7Job().ScheduleAppend(list, 14);
        jobHandle = new FilterAllJob().ScheduleAppend(list, 2, jobHandle);
        jobHandle.Complete();
        Assert.AreEqual(0, list[0]);
        Assert.AreEqual(7, list[1]);
        Assert.AreEqual(0, list[2]);
        Assert.AreEqual(1, list[3]);
        Assert.AreEqual(4, list.Length);
    }

    // Append all of [0,14), then filter the list in place down to the mod-7 survivors.
    [Test]
    public void FilterPreviousChained()
    {
        var list = new NativeList<int>(3, RwdAllocator.ToAllocator);
        var jobHandle = new FilterAllJob().ScheduleAppend(list, 14);
        jobHandle = new FilterMod7Job().ScheduleFilter(list, jobHandle);
        jobHandle.Complete();
        Assert.AreEqual(2, list.Length);
        Assert.AreEqual(0, list[0]);
        Assert.AreEqual(7, list[1]);
    }

    // Writes in-range, then asserts that writing outside the job's permitted index range
    // throws — i.e. expects parallel-for-style range restriction on the writable array.
    struct MinMaxRestrictionJob : IJobFilter
    {
        public NativeArray<float> array;

        public MinMaxRestrictionJob(NativeArray<float> array) { this.array = array; }

        public bool Execute(int index)
        {
            array[index] = 5;
            var localArray = array;
            Assert.Throws<IndexOutOfRangeException>(() => { localArray[50] = 5; });
            return true;
        }
    }

    [Test]
    [Ignore("DOTS-1959 Currently thats legal, but only because filter jobs aren't implemented as parallel for right now...")]
    public void AccessingWritable()
    {
        var list = new NativeList<int>(0, Allocator.Persistent);
        var array = new NativeArray<float>(51, Allocator.Persistent);
        var jobHandle = new MinMaxRestrictionJob(array).ScheduleAppend(list, 50);
        new MinMaxRestrictionJob(array).ScheduleFilter(list, jobHandle).Complete();
        Assert.AreEqual(50, list.Length);
        list.Dispose();
        array.Dispose();
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 551e05d9d28513c4fb893599f9237377
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,85 @@
using System;
using NUnit.Framework;
using Unity.Collections;
using Unity.Jobs;
namespace Unity.Jobs.Tests.ManagedJobs
{
internal class JobStressTests : JobTestsFixture
{
    /// <summary>Writes its own index into each element.</summary>
    struct JobSetIndexValue : IJobParallelFor
    {
        public NativeArray<int> value;

        public void Execute(int index)
        {
            value[index] = index;
        }
    }

    [Test]
    public void StressTestParallelFor()
    {
        StressTestParallelForIterations(1, 5000);
    }

    /// <summary>
    /// Repeatedly schedules a pair of parallel-for jobs over randomly sized arrays with
    /// random batch sizes, then verifies every element was written by its own index.
    /// </summary>
    /// <param name="amount">Number of schedule/verify iterations.</param>
    /// <param name="amountOfData">Exclusive upper bound for the random array length.</param>
    public void StressTestParallelForIterations(int amount, int amountOfData)
    {
        for (var k = 0; k != amount; k++)
        {
            var len = UnityEngine.Random.Range(1, amountOfData);
            JobSetIndexValue job1;
            job1.value = CollectionHelper.CreateNativeArray<int>(len, RwdAllocator.ToAllocator);
            JobSetIndexValue job2;
            job2.value = CollectionHelper.CreateNativeArray<int>(len, RwdAllocator.ToAllocator);
            var job1Handle = job1.Schedule(len, UnityEngine.Random.Range(1, 1024));
            var job2Handle = job2.Schedule(len, UnityEngine.Random.Range(1, 1024));
            // Complete in reverse scheduling order to exercise out-of-order completion.
            job2Handle.Complete();
            job1Handle.Complete();
            for (var i = 0; i < len; i++)
            {
                Assert.AreEqual(i, job1.value[i]);
                Assert.AreEqual(i, job2.value[i]);
            }
        }
    }

    /// <summary>Increments element 0 by one.</summary>
    // (The previously declared 'expected' field was never read by Execute and has been removed.)
    struct JobSetValue : IJob
    {
        public NativeArray<int> value;

        public void Execute()
        {
            value[0] = value[0] + 1;
        }
    }

    /// <summary>
    /// Builds a 10000-deep linear dependency chain of increment jobs and verifies the
    /// whole chain ran exactly once per link.
    /// </summary>
    [Test]
    public void DeepDependencyChain()
    {
        var array = new NativeArray<int>(1, Allocator.Persistent);
        var jobHandle = new JobHandle();
        const int depth = 10000;
        for (var i = 0; i < depth; i++)
        {
            var job = new JobSetValue
            {
                value = array
            };
            jobHandle = job.Schedule(jobHandle);
        }
        jobHandle.Complete();
        Assert.AreEqual(depth, array[0]);
        array.Dispose();
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 36d0e1be06be93249a0ffa0c8356e69e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,655 @@
using System;
using NUnit.Framework;
using UnityEngine.Scripting;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Jobs;
using Unity.Jobs.LowLevel.Unsafe;
using Unity.Burst;
using System.Diagnostics;
using Unity.Collections.Tests;
[assembly: RegisterGenericJobType(typeof(Unity.Jobs.Tests.ManagedJobs.MyGenericJobDefer<int>))]
[assembly: RegisterGenericJobType(typeof(Unity.Jobs.Tests.ManagedJobs.MyGenericJobDefer<double>))]
[assembly: RegisterGenericJobType(typeof(Unity.Jobs.Tests.ManagedJobs.MyGenericJobDefer<float>))]
[assembly: RegisterGenericJobType(typeof(Unity.Jobs.Tests.ManagedJobs.GenericContainerJobDefer<NativeList<int>, int>))]
namespace Unity.Jobs.Tests.ManagedJobs
{
// Selects which schedule/run entry point a parameterized job test exercises.
internal enum JobRunType
{
    Schedule,
    ScheduleByRef,
    Run,
    RunByRef,
}
// Minimal custom job interface; its producer (JobTestProducer) is declared in
// IJobTestExtensions below.
[JobProducerType(typeof(IJobTestExtensions.JobTestProducer<>))]
internal interface IJobTest
{
    void Execute();
}
// Marker interface used to verify that a job interface may inherit IJob directly.
internal interface IJobTestInherit : IJob
{
}
internal static class IJobTestExtensions
{
    /// <summary>Producer-side wrapper: user job data plus a producer-owned resource.</summary>
    internal struct JobTestWrapper<T> where T : struct
    {
        internal T JobData;

        // Safety restriction disabled so the producer can carry this array across
        // the schedule boundary for later cleanup.
        [NativeDisableContainerSafetyRestriction]
        internal NativeArray<byte> ProducerResourceToClean;
    }

    internal struct JobTestProducer<T> where T : struct, IJobTest
    {
        internal static readonly SharedStatic<IntPtr> s_JobReflectionData = SharedStatic<IntPtr>.GetOrCreate<JobTestProducer<T>>();

        // [BurstDiscard]: reflection data can only be created from managed code.
        [BurstDiscard]
        internal static void Initialize()
        {
            if (s_JobReflectionData.Data == IntPtr.Zero)
                s_JobReflectionData.Data = JobsUtility.CreateJobReflectionData(typeof(JobTestWrapper<T>), typeof(T), (ExecuteJobFunction)Execute);
        }

        public delegate void ExecuteJobFunction(ref JobTestWrapper<T> jobWrapper, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex);

        public static unsafe void Execute(ref JobTestWrapper<T> jobWrapper, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
        {
            jobWrapper.JobData.Execute();
        }
    }

    /// <summary>Entry point used to eagerly initialize the job's reflection data.</summary>
    public static void EarlyJobInit<T>()
        where T : struct, IJobTest
    {
        JobTestProducer<T>.Initialize();
    }

    static IntPtr GetReflectionData<T>()
        where T : struct, IJobTest
    {
        JobTestProducer<T>.Initialize();
        var reflectionData = JobTestProducer<T>.s_JobReflectionData.Data;
        CollectionHelper.CheckReflectionDataCorrect<T>(reflectionData);
        return reflectionData;
    }

    /// <summary>
    /// Schedules an IJobTest, handing <paramref name="dataForProducer"/> to the producer
    /// wrapper so producer-owned resources flow through scheduling.
    /// </summary>
    public static unsafe JobHandle ScheduleTest<T>(this T jobData, NativeArray<byte> dataForProducer, JobHandle dependsOn = new JobHandle()) where T : struct, IJobTest
    {
        JobTestWrapper<T> jobTestWrapper = new JobTestWrapper<T>
        {
            JobData = jobData,
            ProducerResourceToClean = dataForProducer
        };
        var scheduleParams = new JobsUtility.JobScheduleParameters(
            UnsafeUtility.AddressOf(ref jobTestWrapper),
            GetReflectionData<T>(),
            dependsOn,
            ScheduleMode.Parallel
        );
        return JobsUtility.Schedule(ref scheduleParams);
    }
}
// Job interface that both inherits IJob and declares its own producer, used to verify
// the two scheduling paths coexist (IJob.Execute() vs. this Execute(bool)).
[JobProducerType(typeof(IJobTestInheritProducerExtensions.JobTestProducer<>))]
internal interface IJobTestInheritWithProducer : IJob
{
    void Execute(bool empty);
}
internal static class IJobTestInheritProducerExtensions
{
    /// <summary>Producer-side wrapper: user job data plus the boolean flag (as a byte).</summary>
    internal struct JobTestWrapper<T> where T : struct
    {
        internal T JobData;
        internal byte Empty;
    }

    internal struct JobTestProducer<T> where T : struct, IJobTestInheritWithProducer
    {
        // Renamed to s_JobReflectionData for consistency with IJobTestExtensions.JobTestProducer.
        internal static readonly SharedStatic<IntPtr> s_JobReflectionData = SharedStatic<IntPtr>.GetOrCreate<JobTestProducer<T>>();

        // [BurstDiscard]: reflection data can only be created from managed code.
        [BurstDiscard]
        internal static void Initialize()
        {
            if (s_JobReflectionData.Data == IntPtr.Zero)
                s_JobReflectionData.Data = JobsUtility.CreateJobReflectionData(typeof(JobTestWrapper<T>), typeof(T), (ExecuteJobFunction)Execute);
        }

        public delegate void ExecuteJobFunction(ref JobTestWrapper<T> jobWrapper, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex);

        public static unsafe void Execute(ref JobTestWrapper<T> jobWrapper, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
        {
            jobWrapper.JobData.Execute(jobWrapper.Empty != 0);
        }
    }

    /// <summary>Entry point used to eagerly initialize the job's reflection data.</summary>
    public static void EarlyJobInit<T>()
        where T : struct, IJobTestInheritWithProducer
    {
        JobTestProducer<T>.Initialize();
    }

    static IntPtr GetReflectionData<T>()
        where T : struct, IJobTestInheritWithProducer
    {
        JobTestProducer<T>.Initialize();
        var reflectionData = JobTestProducer<T>.s_JobReflectionData.Data;
        CollectionHelper.CheckReflectionDataCorrect<T>(reflectionData);
        return reflectionData;
    }

    /// <summary>Schedules via this interface's own producer, passing the flag through the wrapper.</summary>
    public static unsafe JobHandle Schedule<T>(this T jobData, bool empty, JobHandle dependsOn = new JobHandle()) where T : struct, IJobTestInheritWithProducer
    {
        JobTestWrapper<T> jobTestWrapper = new JobTestWrapper<T>
        {
            JobData = jobData,
            Empty = (byte)(empty ? 1 : 0)
        };
        var scheduleParams = new JobsUtility.JobScheduleParameters(
            UnsafeUtility.AddressOf(ref jobTestWrapper),
            GetReflectionData<T>(),
            dependsOn,
            ScheduleMode.Parallel
        );
        return JobsUtility.Schedule(ref scheduleParams);
    }
}
// Resizes the generic list to m_ListLength; new elements are left uninitialized.
internal struct MyGenericResizeJob<T> : IJob where T : unmanaged
{
    public int m_ListLength;
    public NativeList<T> m_GenericList;

    public void Execute()
    {
        m_GenericList.Resize(m_ListLength, NativeArrayOptions.UninitializedMemory);
    }
}
// Deferred parallel-for that writes m_Value into every element of the list.
// Registered for int/double/float via assembly-level RegisterGenericJobType attributes.
internal struct MyGenericJobDefer<T> : IJobParallelForDefer where T: unmanaged
{
    public T m_Value;

    // Each index is touched exactly once, so the parallel-for write restriction is lifted.
    [NativeDisableParallelForRestriction]
    public NativeList<T> m_GenericList;

    public void Execute(int index)
    {
        m_GenericList[index] = m_Value;
    }
}
// Sets the length of a generic INativeList container from a job.
internal struct GenericContainerResizeJob<T, U> : IJob
    where T : unmanaged, INativeList<U>
    where U : unmanaged
{
    public int m_ListLength;
    public T m_GenericList;

    public void Execute()
    {
        m_GenericList.Length = m_ListLength;
    }
}
// Deferred parallel-for writing m_Value into every element of a generic container.
// Registered for NativeList<int>/int via an assembly-level RegisterGenericJobType attribute.
internal struct GenericContainerJobDefer<T, U> : IJobParallelForDefer
    where T : unmanaged, INativeList<U>
    where U : unmanaged
{
    public U m_Value;

    // Each index is touched exactly once, so the parallel-for write restriction is lifted.
    [NativeDisableParallelForRestriction]
    public T m_GenericList;

    public void Execute(int index)
    {
        m_GenericList[index] = m_Value;
    }
}
internal class JobTests : JobTestsFixture
{
    // Resizes a generic container on one job, then fills it from a deferred parallel-for
    // scheduled against the container's (deferred) length, and verifies the contents.
    public void ScheduleGenericContainerJob<T, U>(T container, U value)
        where T : unmanaged, INativeList<U>
        where U : unmanaged
    {
        var j0 = new GenericContainerResizeJob<T, U>();
        var length = 5;
        j0.m_ListLength = length;
        j0.m_GenericList = container;
        var handle0 = j0.Schedule();
        var j1 = new GenericContainerJobDefer<T, U>();
        j1.m_Value = value;
        j1.m_GenericList = j0.m_GenericList;
        // Route through the interface so the deferred schedule sees a concrete NativeList<U>.
        INativeList<U> iList = j0.m_GenericList;
        j1.Schedule((NativeList<U>)iList, 1, handle0).Complete();
        Assert.AreEqual(length, j1.m_GenericList.Length);
        for (int i = 0; i != j1.m_GenericList.Length; i++)
            Assert.AreEqual(value, j1.m_GenericList[i]);
    }

    [Test]
    public void ValidateContainerSafetyInGenericJob_ContainerIsGenericParameter()
    {
        var list = new NativeList<int>(1, RwdAllocator.ToAllocator);
        ScheduleGenericContainerJob(list, 5);
    }

    // Same resize-then-fill pattern as above, but with the concrete NativeList<T> jobs.
    public void GenericScheduleJobPair<T>(T value) where T : unmanaged
    {
        var j0 = new MyGenericResizeJob<T>();
        var length = 5;
        j0.m_ListLength = length;
        j0.m_GenericList = new NativeList<T>(1, RwdAllocator.ToAllocator);
        var handle0 = j0.Schedule();
        var j1 = new MyGenericJobDefer<T>();
        j1.m_Value = value;
        j1.m_GenericList = j0.m_GenericList;
        j1.Schedule(j0.m_GenericList, 1, handle0).Complete();
        Assert.AreEqual(length, j1.m_GenericList.Length);
        for (int i = 0; i != j1.m_GenericList.Length; i++)
            Assert.AreEqual(value, j1.m_GenericList[i]);
    }

    [Test]
    public void ScheduleGenericJobPairFloat()
    {
        GenericScheduleJobPair(10f);
    }

    [Test]
    public void ScheduleGenericJobPairDouble()
    {
        GenericScheduleJobPair<double>(10.0);
    }

    [Test]
    public void ScheduleGenericJobPairInt()
    {
        GenericScheduleJobPair(20);
    }

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // Scheduling the deferred job without declaring the dependency on the resize job
    // must be rejected by the safety system.
    [Test]
    public void SchedulingGenericJobUnsafelyThrows()
    {
        var j0 = new MyGenericResizeJob<int>();
        var length = 5;
        j0.m_ListLength = length;
        j0.m_GenericList = new NativeList<int>(1, RwdAllocator.ToAllocator);
        var handle0 = j0.Schedule();
        var j1 = new MyGenericJobDefer<int>();
        j1.m_Value = 6;
        j1.m_GenericList = j0.m_GenericList;
        Assert.Throws<InvalidOperationException>(()=>j1.Schedule(j0.m_GenericList, 1).Complete());
        handle0.Complete();
    }
#endif

    struct DontReferenceThisTypeOutsideOfThisTest { public int v; }

    [Test]
    [TestRequiresCollectionChecks]
    public void SchedulingGenericJobFromGenericContextUnsafelyThrows()
    {
        var list = new NativeList<DontReferenceThisTypeOutsideOfThisTest>(1, RwdAllocator.ToAllocator);
        ScheduleGenericJobUnsafely(list, new DontReferenceThisTypeOutsideOfThisTest { v = 5 });
    }

    // Same missing-dependency scenario as above, but exercised from a generic method.
    void ScheduleGenericJobUnsafely<T, U>(T container, U value)
        where T : unmanaged, INativeList<U>
        where U : unmanaged
    {
        var j0 = new GenericContainerResizeJob<T, U>();
        var length = 5;
        j0.m_ListLength = length;
        j0.m_GenericList = container;
        var handle0 = j0.Schedule();
        var j1 = new GenericContainerJobDefer<T, U>();
        j1.m_Value = value;
        j1.m_GenericList = j0.m_GenericList;
        INativeList<U> iList = j0.m_GenericList;
        Assert.Throws<InvalidOperationException>(()=>j1.Schedule((NativeList<U>)iList, 1).Complete());
        handle0.Complete(); // complete this so we can dispose the nativelist
    }

    /*
     * these two tests used to test that a job that inherited from both IJob and IJobParallelFor would work as expected
     * but that's probably crazy.
     */
    /*[Test]
    public void Scheduling()
    {
        var job = data.Schedule();
        job.Complete();
        ExpectOutputSumOfInput0And1();
    }*/
    /*[Test]
    public void Scheduling_With_Dependencies()
    {
        data.input0 = input0;
        data.input1 = input1;
        data.output = output2;
        var job1 = data.Schedule();
        // Schedule job2 with dependency against the first job
        data.input0 = output2;
        data.input1 = input2;
        data.output = output;
        var job2 = data.Schedule(job1);
        // Wait for completion
        job2.Complete();
        ExpectOutputSumOfInput0And1And2();
    }*/

    // Chains two parallel-for sums: output2 = input0 + input1, then output = output2 + input2.
    [Test]
    public void ForEach_Scheduling_With_Dependencies()
    {
        data.input0 = input0;
        data.input1 = input1;
        data.output = output2;
        var job1 = data.Schedule(output.Length, 1);
        // Schedule job2 with dependency against the first job
        data.input0 = output2;
        data.input1 = input2;
        data.output = output;
        var job2 = data.Schedule(output.Length, 1, job1);
        // Wait for completion
        job2.Complete();
        ExpectOutputSumOfInput0And1And2();
    }

    struct EmptyComputeParallelForJob : IJobParallelFor
    {
        public void Execute(int i)
        {
        }
    }

    // A zero-iteration parallel-for must schedule and complete without error.
    [Test]
    public void ForEach_Scheduling_With_Zero_Size()
    {
        var test = new EmptyComputeParallelForJob();
        var job = test.Schedule(0, 1);
        job.Complete();
    }

    [Test]
    public void Deallocate_Temp_NativeArray_From_Job()
    {
        TestDeallocateNativeArrayFromJob(RwdAllocator.ToAllocator);
    }

    [Test]
    public void Deallocate_Persistent_NativeArray_From_Job()
    {
        TestDeallocateNativeArrayFromJob(Allocator.Persistent);
    }

    // Copies a temp array into 'output' from a parallel-for, then disposes the source.
    private void TestDeallocateNativeArrayFromJob(Allocator label)
    {
        var tempNativeArray = CollectionHelper.CreateNativeArray<int>(expectedInput0, label);
        var copyAndDestroyJob = new CopyAndDestroyNativeArrayParallelForJob
        {
            input = tempNativeArray,
            output = output
        };
        // NativeArray can safely be accessed before scheduling
        Assert.AreEqual(10, tempNativeArray.Length);
        tempNativeArray[0] = tempNativeArray[0];
        var job = copyAndDestroyJob.Schedule(copyAndDestroyJob.input.Length, 1);
        job.Complete();
        // Need to dispose because the allocator may be Allocator.Persistent.
        tempNativeArray.Dispose();
        Assert.AreEqual(expectedInput0, copyAndDestroyJob.output.ToArray());
    }

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    public struct NestedDeallocateStruct
    {
        // Array nested one struct deep in the job, to verify safety handles are patched
        // through nested fields.
        public NativeArray<int> input;
    }

    public struct TestNestedDeallocate : IJob
    {
        public NestedDeallocateStruct nested;
        public NativeArray<int> output;

        public void Execute()
        {
            for (int i = 0; i < nested.input.Length; ++i)
                output[i] = nested.input[i];
        }
    }

    // After Rewind() the safety handles of rewindable allocations — including the one
    // nested inside the job struct — must be invalidated.
    [Test]
    public void TestNestedDeallocateOnJobCompletion()
    {
        var tempNativeArray = CollectionHelper.CreateNativeArray<int>(10, RwdAllocator.ToAllocator);
        var outNativeArray = CollectionHelper.CreateNativeArray<int>(10, RwdAllocator.ToAllocator);
        for (int i = 0; i < 10; i++)
            tempNativeArray[i] = i;
        var job = new TestNestedDeallocate
        {
            nested = new NestedDeallocateStruct() { input = tempNativeArray },
            output = outNativeArray
        };
        var handle = job.Schedule();
        handle.Complete();
        RwdAllocator.Rewind();
// (Nested #if is redundant with the enclosing one; kept as-is.)
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Ensure released safety handle indicating invalid buffer
        Assert.Throws<ObjectDisposedException>(() => { AtomicSafetyHandle.CheckExistsAndThrow(NativeArrayUnsafeUtility.GetAtomicSafetyHandle(tempNativeArray)); });
        Assert.Throws<ObjectDisposedException>(() => { AtomicSafetyHandle.CheckExistsAndThrow(NativeArrayUnsafeUtility.GetAtomicSafetyHandle(job.nested.input)); });
#endif
    }

    public struct TestJobProducerJob : IJobTest
    {
        public NativeArray<int> jobStructData;

        public void Execute()
        {
        }
    }

    // Verifies both the job's own containers and the producer-held resource have their
    // safety handles invalidated after Rewind().
    [Test]
    public void TestJobProducerCleansUp()
    {
        var tempNativeArray = CollectionHelper.CreateNativeArray<int>(10, RwdAllocator.ToAllocator);
        var tempNativeArray2 = CollectionHelper.CreateNativeArray<byte>(16, RwdAllocator.ToAllocator);
        var job = new TestJobProducerJob
        {
            jobStructData = tempNativeArray,
        };
        var handle = job.ScheduleTest(tempNativeArray2);
        handle.Complete();
        RwdAllocator.Rewind();
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Check job data
        Assert.Throws<ObjectDisposedException>(() => { AtomicSafetyHandle.CheckExistsAndThrow(NativeArrayUnsafeUtility.GetAtomicSafetyHandle(tempNativeArray)); });
        Assert.Throws<ObjectDisposedException>(() => { AtomicSafetyHandle.CheckExistsAndThrow(NativeArrayUnsafeUtility.GetAtomicSafetyHandle(job.jobStructData)); });
        // Check job producer
        Assert.Throws<ObjectDisposedException>(() => { AtomicSafetyHandle.CheckExistsAndThrow(NativeArrayUnsafeUtility.GetAtomicSafetyHandle(tempNativeArray2)); });
#endif
    }

    public struct CopyJob : IJob
    {
        public NativeList<int> List1;
        public NativeList<int> List2;

        public void Execute()
        {
            // Overwrites the job-local copy of List1 (including its safety handle);
            // the test below checks both originals can still be disposed afterwards.
            List1 = List2;
        }
    }

    [Test]
    public unsafe void TestContainerCopy_EnsureSafetyHandlesCopyAndDisposeProperly()
    {
        var list1 = new NativeList<int>(10, RwdAllocator.ToAllocator);
        var list2 = new NativeList<int>(10, RwdAllocator.ToAllocator);
        list1.Add(1);
        list2.Add(2);
        var job = new CopyJob
        {
            List1 = list1,
            List2 = list2
        };
        job.Schedule().Complete();
        list1.Dispose();
        list2.Dispose();
    }
#endif

    // Deliberately large job struct (4 x 4KB fixed strings) to exercise ScheduleByRef.
    struct LargeJobParallelForDefer : IJobParallelForDefer
    {
        public FixedString4096Bytes StrA;
        public FixedString4096Bytes StrB;
        public FixedString4096Bytes StrC;
        public FixedString4096Bytes StrD;

        [NativeDisableParallelForRestriction]
        public NativeArray<int> TotalLengths;

        [ReadOnly]
        public NativeList<float> Unused; // Schedule() from NativeList.Length requires that the list be passed into the job

        public void Execute(int index)
        {
            TotalLengths[0] = StrA.Length + StrB.Length + StrC.Length + StrD.Length;
        }
    }

    public enum IterationCountMode
    {
        List, Pointer
    }

    // ScheduleByRef must work for large job structs with the iteration count supplied
    // either by a NativeList or by a raw int pointer.
    [Test]
    public unsafe void IJobParallelForDefer_LargeJobStruct_ScheduleRefWorks(
        [Values(IterationCountMode.List, IterationCountMode.Pointer)] IterationCountMode countMode)
    {
        using(var lengths = CollectionHelper.CreateNativeArray<int>(1, RwdAllocator.ToAllocator))
        {
            var dummyList = new NativeList<float>(RwdAllocator.ToAllocator);
            dummyList.Add(5.0f);
            var job = new LargeJobParallelForDefer
            {
                StrA = "A",
                StrB = "BB",
                StrC = "CCC",
                StrD = "DDDD",
                TotalLengths = lengths,
                Unused = dummyList,
            };
            if (countMode == IterationCountMode.List)
            {
                Assert.DoesNotThrow(() => job.ScheduleByRef(dummyList, 1).Complete());
            }
            else if (countMode == IterationCountMode.Pointer)
            {
                var lengthArray = CollectionHelper.CreateNativeArray<int>(1, RwdAllocator.ToAllocator);
                lengthArray[0] = 1;
                Assert.DoesNotThrow(() => job.ScheduleByRef((int*)lengthArray.GetUnsafePtr(), 1).Complete());
            }
        }
    }

    [BurstCompile(CompileSynchronously = true)]
    public struct InheritJob : IJobTestInherit
    {
        public NativeList<int> List1;
        public NativeList<int> List2;

        public void Execute()
        {
            List1[0] = List2[0];
        }
    }

    // A job typed against an interface that inherits IJob schedules via IJob's producer.
    [Test]
    public void InheritInterfaceJobWorks()
    {
        var l1 = new NativeList<int>(4, RwdAllocator.ToAllocator);
        l1.Add(3);
        var l2 = new NativeList<int>(4, RwdAllocator.ToAllocator);
        l2.Add(17);
        var job = new InheritJob { List1 = l1, List2 = l2 };
        job.Schedule().Complete();
        Assert.IsTrue(l1[0] == 17);
        l2.Dispose();
        l1.Dispose();
    }

    [BurstCompile(CompileSynchronously = true)]
    public struct InheritWithProducerJob : IJobTestInheritWithProducer
    {
        public NativeList<int> List1;
        public NativeList<int> List2;

        // IJob path: copies List1 -> List2.
        public void Execute()
        {
            List2[0] = List1[0];
        }

        // Custom-producer path: copies List2 -> List1.
        public void Execute(bool empty)
        {
            List1[0] = List2[0];
        }
    }

    // Both scheduling paths (the custom producer and plain IJob) must dispatch to the
    // correct Execute overload.
    [Test]
    public void InheritInterfaceWithProducerJobWorks()
    {
        var l1 = new NativeList<int>(4, RwdAllocator.ToAllocator);
        l1.Add(3);
        var l2 = new NativeList<int>(4, RwdAllocator.ToAllocator);
        l2.Add(17);
        var job = new InheritWithProducerJob { List1 = l1, List2 = l2 };
        job.Schedule(false).Complete();
        Assert.IsTrue(l1[0] == 17);
        l1[0] = 3;
        job.Schedule().Complete();
        Assert.IsTrue(l2[0] == 3);
        l2.Dispose();
        l1.Dispose();
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8693443c8d9368d4b8b9f2beea09d699
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,167 @@
using NUnit.Framework;
using Unity.Collections;
using Unity.Jobs;
namespace Unity.Jobs.Tests.ManagedJobs
{
// Base fixture providing a per-fixture RewindableAllocator that is rewound after
// every test, so tests can allocate without individually disposing.
internal class JobTestsFixtureBasic
{
    AllocatorHelper<RewindableAllocator> m_AllocatorHelper;

    // Returned by ref because RewindableAllocator is a mutable struct.
    protected ref RewindableAllocator RwdAllocator => ref m_AllocatorHelper.Allocator;

    [OneTimeSetUp]
    public virtual void OneTimeSetUp()
    {
        m_AllocatorHelper = new AllocatorHelper<RewindableAllocator>(Allocator.Persistent);
        m_AllocatorHelper.Allocator.Initialize(128 * 1024, true);
    }

    [OneTimeTearDown]
    public virtual void OneTimeTearDown()
    {
        m_AllocatorHelper.Allocator.Dispose();
        m_AllocatorHelper.Dispose();
    }

    [TearDown]
    public void Teardown()
    {
        RwdAllocator.Rewind();
        // This is test only behavior for determinism. Rewind twice such that all
        // tests start with an allocator containing only one memory block.
        RwdAllocator.Rewind();
    }
}
// Fixture with five persistent arrays pre-filled for sum-job tests; disposal is
// best-effort in teardown because individual tests may already have disposed them.
internal class JobTestsFixture : JobTestsFixtureBasic
{
    /*
     * this used to test both, and maybe it should again, but we have to make GetExecuteMethod() work with
     * multiple interfaces, hopefully in a non-global way
     */
    public struct SumDataParallelForJob : /*IJob,*/ IJobParallelFor
    {
        [ReadOnly] public NativeArray<int> input0;
        [ReadOnly] public NativeArray<int> input1;
        public NativeArray<int> output;

        /* public void Execute()
        {
            for (var i = 0; i < output.Length; ++i)
                output[i] = input0[i] + input1[i];
        }*/

        public void Execute(int i)
        {
            output[i] = input0[i] + input1[i];
        }
    }

    public struct CopyAndDestroyNativeArrayParallelForJob : IJobParallelFor
    {
        [ReadOnlyAttribute]
        public NativeArray<int> input;
        public NativeArray<int> output;

        public void Execute(int i)
        {
            output[i] = input[i];
        }
    }

    public SumDataParallelForJob data;
    public int[] expectedInput0;
    public NativeArray<int> input0;
    public NativeArray<int> input1;
    public NativeArray<int> input2;
    public NativeArray<int> output;
    public NativeArray<int> output2;

    // Fills input0/input1/input2 with i, 10*i, 100*i and zeroes the outputs; wires
    // 'data' up to compute output = input0 + input1 by default.
    [SetUp]
    public void Init()
    {
        expectedInput0 = new int[10];
        input0 = new NativeArray<int>(10, Allocator.Persistent);
        input1 = new NativeArray<int>(10, Allocator.Persistent);
        input2 = new NativeArray<int>(10, Allocator.Persistent);
        output = new NativeArray<int>(10, Allocator.Persistent);
        output2 = new NativeArray<int>(10, Allocator.Persistent);
        for (var i = 0; i < output.Length; i++)
        {
            expectedInput0[i] = i;
            input0[i] = i;
            input1[i] = 10 * i;
            input2[i] = 100 * i;
            output[i] = 0;
            output2[i] = 0;
        }
        data.input0 = input0;
        data.input1 = input1;
        data.output = output;
    }

    public void ExpectOutputSumOfInput0And1()
    {
        for (var i = 0; i != output.Length; i++)
            Assert.AreEqual(input0[i] + input1[i], output[i]);
    }

    public void ExpectOutputSumOfInput0And1And2()
    {
        for (var i = 0; i != output.Length; i++)
            Assert.AreEqual(input0[i] + input1[i] + input2[i], output[i]);
    }

    // Best-effort dispose: a test may already have disposed (or replaced) an array,
    // in which case Dispose throws and the failure is deliberately ignored.
    static void DisposeIgnoringErrors(NativeArray<int> array)
    {
        try
        {
            array.Dispose();
        }
        catch
        {
        }
    }

    [TearDown]
    public void Cleanup()
    {
        DisposeIgnoringErrors(input0);
        DisposeIgnoringErrors(input1);
        DisposeIgnoringErrors(input2);
        DisposeIgnoringErrors(output);
        DisposeIgnoringErrors(output2);
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 69135939d4cd2cf4fbecfe92b80f882a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,111 @@
using NUnit.Framework;
using Unity.Collections;
using Unity.Jobs;
namespace Unity.Jobs.Tests.ManagedJobs
{
internal class JobTests_CombineDependencies : JobTestsFixtureBasic
{
    /// <summary>Increments element 0 of its array by one.</summary>
    struct ArrayJob1 : IJob
    {
        public NativeArray<int> data;

        public void Execute()
        {
            data[0] = data[0] + 1;
        }
    }

    /// <summary>Adds 100 to element 0 of both arrays.</summary>
    struct ArrayJob2 : IJob
    {
        public NativeArray<int> a;
        public NativeArray<int> b;

        public void Execute()
        {
            a[0] = a[0] + 100;
            b[0] = b[0] + 100;
        }
    }

    /// <summary>
    /// Schedules one increment job per array, combines the two handles, and schedules a
    /// third job depending on both. Element 0 of each array must end up as
    /// 1 (ArrayJob1) + 100 (ArrayJob2) = 101; element 1 is never written.
    /// </summary>
    [Test]
    public void CombineDependenciesWorks()
    {
        var arrayA = new NativeArray<int>(2, Allocator.Persistent);
        var arrayB = new NativeArray<int>(2, Allocator.Persistent);
        var jobA = new ArrayJob1 {data = arrayA};
        var jobAHandle = jobA.Schedule();
        var jobB = new ArrayJob1 {data = arrayB};
        var jobBHandle = jobB.Schedule();
        var combinedHandle = JobHandle.CombineDependencies(jobAHandle, jobBHandle);
        var job2 = new ArrayJob2
        {
            a = arrayA,
            b = arrayB
        };
        job2.Schedule(combinedHandle).Complete();
        Assert.AreEqual(101, arrayA[0]);
        Assert.AreEqual(101, arrayB[0]);
        // Fixed: the loop previously compared element 0 on every iteration instead of
        // element i, so elements past 0 were never checked.
        for (int i = 0; i < arrayA.Length; ++i)
        {
            Assert.AreEqual(arrayA[i], arrayB[i]);
        }
        arrayA.Dispose();
        arrayB.Dispose();
    }

    /// <summary>
    /// Runs <paramref name="depth"/> fan-out/fan-in layers: every array gets an increment
    /// job, all handles are combined via CombineDependencies(NativeArray), then one job
    /// adds 100 to arrays[0] and arrays[1] and becomes the next layer's dependency.
    /// Expected: arrays[0] and arrays[1] reach 101*depth, the rest reach depth.
    /// </summary>
    public void DeepCombineDependencies(int depth, int arraySize)
    {
        var arrays = new NativeArray<int>[arraySize];
        for (var i = 0; i < arrays.Length; i++)
        {
            arrays[i] = new NativeArray<int>(1, Allocator.Persistent);
            arrays[i][0] = 0;
        }
        var handles = new NativeArray<JobHandle>(arrays.Length, Allocator.Persistent);
        var previousJobHandle = new JobHandle();
        for (var i = 0; i < depth; i++)
        {
            for (var a = 0; a != arrays.Length; a++)
            {
                var job = new ArrayJob1 {data = arrays[a]};
                handles[a] = job.Schedule(previousJobHandle);
            }
            var combinedHandle = JobHandle.CombineDependencies(handles);
            var job2 = new ArrayJob2
            {
                a = arrays[0],
                b = arrays[1]
            };
            previousJobHandle = job2.Schedule(combinedHandle);
        }
        previousJobHandle.Complete();
        Assert.AreEqual(100 * depth + depth, arrays[0][0]);
        Assert.AreEqual(100 * depth + depth, arrays[1][0]);
        for (var i = 2; i < arrays.Length; i++)
            Assert.AreEqual(depth, arrays[i][0]);
        for (var a = 0; a != arrays.Length; a++)
            arrays[a].Dispose();
        handles.Dispose();
    }

    [Test]
    public void DeepCombineDependenciesWorks()
    {
        DeepCombineDependencies(5, 21);
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ecbee97f2678c1747802e50945a31f1d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,200 @@
using System;
using NUnit.Framework;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Jobs;
using Unity.Jobs.LowLevel.Unsafe;
using Unity.Jobs.Tests.ManagedJobs;
// Tests for NativeList.AsDeferredJobArray: a NativeArray view whose length is
// resolved at job execution time rather than at schedule time, plus the safety
// checks that guard it. Several tests depend on the Jobs Debugger being
// enabled, which SetUp forces on.
internal class NativeListDeferredArrayTests : JobTestsFixtureBasic
{
    // Saved Jobs Debugger state, restored in TearDown.
    private bool JobsDebuggerWasEnabled;
    // Job holding both a list and a deferred array view of the same list —
    // scheduling it must be rejected as aliasing (see AliasArrayThrows).
    struct AliasJob : IJob
    {
        public NativeArray<int> array;
        public NativeList<int> list;
        public void Execute()
        {
        }
    }
    // Resizes the captured list from inside a job, so the final length is only
    // known once the job has run.
    struct SetListLengthJob : IJob
    {
        public int ResizeLength;
        public NativeList<int> list;
        public void Execute()
        {
            list.Resize(ResizeLength, NativeArrayOptions.UninitializedMemory);
        }
    }
    // Writes the (deferred) array's resolved length into every element.
    struct SetArrayValuesJobParallel : IJobParallelForDefer
    {
        public NativeArray<int> array;
        public void Execute(int index)
        {
            array[index] = array.Length;
        }
    }
    // Read-only counterpart used where a test only needs a job in flight.
    struct GetArrayValuesJobParallel : IJobParallelForDefer
    {
        [ReadOnly]
        public NativeArray<int> array;
        public void Execute(int index)
        {
        }
    }
    // Deliberately does NOT capture the list it is scheduled against
    // (see DeferredListMustExistInJobData).
    struct ParallelForWithoutList : IJobParallelForDefer
    {
        public void Execute(int index)
        {
        }
    }
    [SetUp]
    public void NativeListDeferredArrayTestsSetup()
    {
        // Many ECS tests will only pass if the Jobs Debugger enabled;
        // force it enabled for all tests, and restore the original value at teardown.
        // NOTE(review): the saved value is read unconditionally here but only
        // restored under ENABLE_UNITY_COLLECTIONS_CHECKS in TearDown — confirm
        // JobDebuggerEnabled is meaningful without checks enabled.
        JobsDebuggerWasEnabled = JobsUtility.JobDebuggerEnabled;
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        JobsUtility.JobDebuggerEnabled = true;
#endif
    }
    // Resize inside a job, then run a deferred parallel-for over the result:
    // the deferred array must resolve to the post-resize length, and every
    // element must have been written with that length.
    [Test]
    public void ResizedListToDeferredJobArray([Values(0, 1, 2, 3, 4, 5, 6, 42, 97, 1023)] int length)
    {
        var list = new NativeList<int>(RwdAllocator.ToAllocator);
        var setLengthJob = new SetListLengthJob { list = list, ResizeLength = length };
        var jobHandle = setLengthJob.Schedule();
        var setValuesJob = new SetArrayValuesJobParallel { array = list.AsDeferredJobArray() };
        setValuesJob.Schedule(list, 3, jobHandle).Complete();
        Assert.AreEqual(length, list.Length);
        for (int i = 0; i != list.Length; i++)
            Assert.AreEqual(length, list[i]);
    }
    // The iteration count of a deferred parallel-for can also come from a raw
    // int pointer instead of a NativeList.
    [Test]
    public unsafe void DeferredParallelForFromIntPtr()
    {
        int length = 10;
        var lengthValue = CollectionHelper.CreateNativeArray<int>(1, RwdAllocator.ToAllocator);
        lengthValue[0] = length;
        var array = CollectionHelper.CreateNativeArray<int>(length, RwdAllocator.ToAllocator);
        var setValuesJob = new SetArrayValuesJobParallel { array = array };
        setValuesJob.Schedule((int*)lengthValue.GetUnsafePtr(), 3).Complete();
        for (int i = 0; i != array.Length; i++)
            Assert.AreEqual(length, array[i]);
    }
    // Same as above, but the resize job completes on the main thread BEFORE the
    // deferred job is scheduled — the deferred view must still see the new length.
    [Test]
    public void ResizeListBeforeSchedule([Values(5)] int length)
    {
        var list = new NativeList<int>(RwdAllocator.ToAllocator);
        var setLengthJob = new SetListLengthJob { list = list, ResizeLength = length }.Schedule();
        var setValuesJob = new SetArrayValuesJobParallel { array = list.AsDeferredJobArray() };
        setLengthJob.Complete();
        setValuesJob.Schedule(list, 3).Complete();
        Assert.AreEqual(length, list.Length);
        for (int i = 0; i != list.Length; i++)
            Assert.AreEqual(length, list[i]);
    }
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // NOTE(review): parameterless overload of the parameterized test above —
    // legal C# and distinct to NUnit, but a distinguishing name would be clearer.
    // On the main thread a deferred array reports length 0 and rejects indexing,
    // even though the backing list has elements.
    [Test]
    public void ResizedListToDeferredJobArray()
    {
        var list = new NativeList<int>(RwdAllocator.ToAllocator);
        list.Add(1);
        var array = list.AsDeferredJobArray();
#pragma warning disable 0219 // assigned but its value is never used
        Assert.Throws<IndexOutOfRangeException>(() => { var value = array[0]; });
#pragma warning restore 0219
        Assert.AreEqual(0, array.Length);
    }
    // While a job reads the deferred view, resizing the list on the main
    // thread must be rejected by the safety system.
    [Test]
    public void ResizeListWhileJobIsRunning()
    {
        var list = new NativeList<int>(RwdAllocator.ToAllocator);
        list.Resize(42, NativeArrayOptions.UninitializedMemory);
        var setValuesJob = new GetArrayValuesJobParallel { array = list.AsDeferredJobArray() };
        var jobHandle = setValuesJob.Schedule(list, 3);
        Assert.Throws<InvalidOperationException>(() => list.Resize(1, NativeArrayOptions.UninitializedMemory));
        jobHandle.Complete();
    }
    // A job may not capture both a list and a deferred array aliasing it.
    [Test]
    public void AliasArrayThrows()
    {
        var list = new NativeList<int>(RwdAllocator.ToAllocator);
        var aliasJob = new AliasJob { list = list, array = list.AsDeferredJobArray() };
        Assert.Throws<InvalidOperationException>(() => aliasJob.Schedule());
    }
    // The list driving a deferred schedule must appear in the job's own data.
    [Test]
    public void DeferredListMustExistInJobData()
    {
        var list = new NativeList<int>(RwdAllocator.ToAllocator);
        var job = new ParallelForWithoutList();
        Assert.Throws<InvalidOperationException>(() => job.Schedule(list, 64));
    }
    // Disposing the list while a job still reads its deferred view must throw.
    [Test]
    public void DeferredListCantBeDeletedWhileJobIsRunning()
    {
        var list = new NativeList<int>(RwdAllocator.ToAllocator);
        list.Resize(42, NativeArrayOptions.UninitializedMemory);
        var setValuesJob = new GetArrayValuesJobParallel { array = list.AsDeferredJobArray() };
        var jobHandle = setValuesJob.Schedule(list, 3);
        Assert.Throws<InvalidOperationException>(() => list.Dispose());
        jobHandle.Complete();
    }
    // Main-thread writes through the deferred view are rejected as well.
    [Test]
    public void DeferredArrayCantBeAccessedOnMainthread()
    {
        var list = new NativeList<int>(RwdAllocator.ToAllocator);
        list.Add(1);
        var defer = list.AsDeferredJobArray();
        Assert.AreEqual(0, defer.Length);
        Assert.Throws<IndexOutOfRangeException>(() => defer[0] = 5);
    }
#endif
    [TearDown]
    public void TearDown()
    {
        // Restore the Jobs Debugger state captured in SetUp.
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        JobsUtility.JobDebuggerEnabled = JobsDebuggerWasEnabled;
#endif
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5b263730bb62fa74190582398189b6f9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: