// AcesDisplayMapperFeatureProcessor.cpp
  1. /*
  2. * Copyright (c) Contributors to the Open 3D Engine Project.
  3. * For complete copyright and license terms please see the LICENSE at the root of this distribution.
  4. *
  5. * SPDX-License-Identifier: Apache-2.0 OR MIT
  6. *
  7. */
#include <Atom/Feature/ACES/AcesDisplayMapperFeatureProcessor.h>

#include <ACES/Aces.h>
#include <LookupTable/LookupTableAsset.h>

#include <Atom/RHI/Factory.h>
#include <Atom/RHI/RHISystemInterface.h>
#include <Atom/RPI.Public/Image/ImageSystemInterface.h>
#include <Atom/RPI.Public/Image/StreamingImagePool.h>
#include <Atom/RPI.Reflect/Asset/AssetUtils.h>

#include <AzCore/Debug/Trace.h>

#include <cstring>
  17. namespace
  18. {
  19. static const AZ::RHI::Format LutFormat = AZ::RHI::Format::R16G16B16A16_FLOAT;
  20. uint16_t ConvertFloatToHalf(const float Value)
  21. {
  22. uint32_t result;
  23. uint32_t uiValue = ((uint32_t*)(&Value))[0];
  24. uint32_t sign = (uiValue & 0x80000000U) >> 16U; // Sign shifted two bytes right for combining with return
  25. uiValue = uiValue & 0x7FFFFFFFU; // Hack off the sign
  26. if (uiValue > 0x47FFEFFFU)
  27. {
  28. // The number is too large to be represented as a half. Saturate to infinity.
  29. result = 0x7FFFU;
  30. }
  31. else
  32. {
  33. if (uiValue < 0x38800000U)
  34. {
  35. // The number is too small to be represented as a normalized half.
  36. // Convert it to a denormalized value.
  37. uint32_t shift = 113U - (uiValue >> 23U);
  38. uiValue = (0x800000U | (uiValue & 0x7FFFFFU)) >> shift;
  39. }
  40. else
  41. {
  42. // Rebias the exponent to represent the value as a normalized half.
  43. uiValue += 0xC8000000U;
  44. }
  45. result = ((uiValue + 0x0FFFU + ((uiValue >> 13U) & 1U)) >> 13U) & 0x7FFFU;
  46. }
  47. // Add back sign and return
  48. return static_cast<uint16_t>(result | sign);
  49. }
  50. }
  51. namespace AZ::Render
  52. {
  53. void AcesDisplayMapperFeatureProcessor::Reflect(ReflectContext* context)
  54. {
  55. if (auto* serializeContext = azrtti_cast<SerializeContext*>(context))
  56. {
  57. serializeContext
  58. ->Class<AcesDisplayMapperFeatureProcessor, FeatureProcessor>()
  59. ->Version(0);
  60. }
  61. }
    //! Activation is intentionally empty: the image pool and LUT images are
    //! created lazily on first request (see InitializeLutImage).
    void AcesDisplayMapperFeatureProcessor::Activate()
    {
    }
    //! Releases the internally-created LUT images and their views.
    // NOTE(review): m_assetLuts and m_displayMapperImagePool are not released
    // here — confirm that is intentional (e.g. they are expected to outlive
    // deactivation or be torn down elsewhere).
    void AcesDisplayMapperFeatureProcessor::Deactivate()
    {
        m_ownedLuts.clear();
    }
    //! No per-frame simulation work is required; only profiling instrumentation runs.
    void AcesDisplayMapperFeatureProcessor::Simulate(const FeatureProcessor::SimulatePacket& packet)
    {
        AZ_PROFILE_FUNCTION(AzRender);
        // The packet is intentionally unused.
        AZ_UNUSED(packet);
    }
    //! No per-frame render work is required by this feature processor.
    void AcesDisplayMapperFeatureProcessor::Render([[maybe_unused]] const FeatureProcessor::RenderPacket& packet)
    {
    }
  77. void AcesDisplayMapperFeatureProcessor::ApplyLdrOdtParameters(DisplayMapperParameters* displayMapperParameters)
  78. {
  79. AZ_Assert(displayMapperParameters != nullptr, "The pOutParameters must not to be null pointer.");
  80. if (displayMapperParameters == nullptr)
  81. {
  82. return;
  83. }
  84. // These values in the ODT parameter are taken from the reference ACES transform.
  85. //
  86. // The original ACES references.
  87. // Common:
  88. // https://github.com/ampas/aces-dev/blob/master/transforms/ctl/lib/ACESlib.ODT_Common.ctl
  89. // For sRGB:
  90. // https://github.com/ampas/aces-dev/tree/master/transforms/ctl/odt/sRGB
  91. displayMapperParameters->m_cinemaLimits[0] = 0.02f;
  92. displayMapperParameters->m_cinemaLimits[1] = 48.0f;
  93. displayMapperParameters->m_acesSplineParams = GetAcesODTParameters(OutputDeviceTransformType_48Nits);
  94. displayMapperParameters->m_OutputDisplayTransformFlags = AlterSurround | ApplyDesaturation | ApplyCATD60toD65;
  95. displayMapperParameters->m_OutputDisplayTransformMode = Srgb;
  96. ColorConvertionMatrixType colorMatrixType = XYZ_To_Rec709;
  97. switch (displayMapperParameters->m_OutputDisplayTransformMode)
  98. {
  99. case Srgb:
  100. colorMatrixType = XYZ_To_Rec709;
  101. break;
  102. case PerceptualQuantizer:
  103. case Ldr:
  104. colorMatrixType = XYZ_To_Bt2020;
  105. break;
  106. default:
  107. break;
  108. }
  109. displayMapperParameters->m_XYZtoDisplayPrimaries = GetColorConvertionMatrix(colorMatrixType);
  110. displayMapperParameters->m_surroundGamma = 0.9811f;
  111. displayMapperParameters->m_gamma = 2.2f;
  112. }
  113. void AcesDisplayMapperFeatureProcessor::ApplyHdrOdtParameters(DisplayMapperParameters* displayMapperParameters, const OutputDeviceTransformType& odtType)
  114. {
  115. AZ_Assert(displayMapperParameters != nullptr, "The pOutParameters must not to be null pointer.");
  116. if (displayMapperParameters == nullptr)
  117. {
  118. return;
  119. }
  120. // Dynamic range limit values taken from NVIDIA HDR sample.
  121. // These values represent and low and high end of the dynamic range in terms of stops from middle grey (0.18)
  122. float lowerDynamicRangeInStops = -12.f;
  123. float higherDynamicRangeInStops = 10.f;
  124. const float MIDDLE_GREY = 0.18f;
  125. switch (odtType)
  126. {
  127. case OutputDeviceTransformType_1000Nits:
  128. higherDynamicRangeInStops = 10.f;
  129. break;
  130. case OutputDeviceTransformType_2000Nits:
  131. higherDynamicRangeInStops = 11.f;
  132. break;
  133. case OutputDeviceTransformType_4000Nits:
  134. higherDynamicRangeInStops = 12.f;
  135. break;
  136. default:
  137. AZ_Assert(false, "Invalid output device transform type.");
  138. break;
  139. }
  140. displayMapperParameters->m_cinemaLimits[0] = MIDDLE_GREY * exp2(lowerDynamicRangeInStops);
  141. displayMapperParameters->m_cinemaLimits[1] = MIDDLE_GREY * exp2(higherDynamicRangeInStops);
  142. displayMapperParameters->m_acesSplineParams = GetAcesODTParameters(odtType);
  143. displayMapperParameters->m_OutputDisplayTransformFlags = AlterSurround | ApplyDesaturation | ApplyCATD60toD65;
  144. displayMapperParameters->m_OutputDisplayTransformMode = PerceptualQuantizer;
  145. ColorConvertionMatrixType colorMatrixType = XYZ_To_Bt2020;
  146. displayMapperParameters->m_XYZtoDisplayPrimaries = GetColorConvertionMatrix(colorMatrixType);
  147. // Surround gamma value is from the dim surround gamma from the ACES reference transforms.
  148. // https://github.com/ampas/aces-dev/blob/master/transforms/ctl/lib/ACESlib.ODT_Common.ctl
  149. displayMapperParameters->m_surroundGamma = 0.9811f;
  150. displayMapperParameters->m_gamma = 1.0f; // gamma not used with perceptual quantizer, but just set to 1.0 anyways
  151. }
  152. OutputDeviceTransformType AcesDisplayMapperFeatureProcessor::GetOutputDeviceTransformType(RHI::Format bufferFormat)
  153. {
  154. OutputDeviceTransformType outputDeviceTransformType = OutputDeviceTransformType_48Nits;
  155. if (bufferFormat == RHI::Format::R8G8B8A8_UNORM ||
  156. bufferFormat == RHI::Format::B8G8R8A8_UNORM)
  157. {
  158. outputDeviceTransformType = OutputDeviceTransformType_48Nits;
  159. }
  160. else if (bufferFormat == RHI::Format::R10G10B10A2_UNORM)
  161. {
  162. outputDeviceTransformType = OutputDeviceTransformType_1000Nits;
  163. }
  164. else
  165. {
  166. AZ_Assert(false, "Not yet supported.");
  167. // To work normally on unsupported environment, initialize the display parameters by OutputDeviceTransformType_48Nits.
  168. outputDeviceTransformType = OutputDeviceTransformType_48Nits;
  169. }
  170. return outputDeviceTransformType;
  171. }
  172. void AcesDisplayMapperFeatureProcessor::GetAcesDisplayMapperParameters(DisplayMapperParameters* displayMapperParameters, OutputDeviceTransformType odtType)
  173. {
  174. switch (odtType)
  175. {
  176. case OutputDeviceTransformType_48Nits:
  177. ApplyLdrOdtParameters(displayMapperParameters);
  178. break;
  179. case OutputDeviceTransformType_1000Nits:
  180. case OutputDeviceTransformType_2000Nits:
  181. case OutputDeviceTransformType_4000Nits:
  182. ApplyHdrOdtParameters(displayMapperParameters, odtType);
  183. break;
  184. default:
  185. AZ_Assert(false, "This ODT type[%d] is not supported.", odtType);
  186. break;
  187. }
  188. }
  189. void AcesDisplayMapperFeatureProcessor::GetOwnedLut(DisplayMapperLut& displayMapperLut, const AZ::Name& lutName)
  190. {
  191. auto it = m_ownedLuts.find(lutName);
  192. if (it == m_ownedLuts.end())
  193. {
  194. InitializeLutImage(lutName);
  195. it = m_ownedLuts.find(lutName);
  196. AZ_Assert(it != m_ownedLuts.end(), "AcesDisplayMapperFeatureProcessor unable to create LUT %s", lutName.GetCStr());
  197. }
  198. displayMapperLut = it->second;
  199. }
  200. void AcesDisplayMapperFeatureProcessor::GetDisplayMapperLut(DisplayMapperLut& displayMapperLut)
  201. {
  202. const AZ::Name acesLutName("AcesLutImage");
  203. auto it = m_ownedLuts.find(acesLutName);
  204. if (it == m_ownedLuts.end())
  205. {
  206. InitializeLutImage(acesLutName);
  207. it = m_ownedLuts.find(acesLutName);
  208. AZ_Assert(it != m_ownedLuts.end(), "AcesDisplayMapperFeatureProcessor unable to create ACES LUT image");
  209. }
  210. displayMapperLut = it->second;
  211. }
  212. void AcesDisplayMapperFeatureProcessor::GetLutFromAssetLocation(DisplayMapperAssetLut& displayMapperAssetLut, const AZStd::string& assetPath)
  213. {
  214. Data::AssetId assetId = RPI::AssetUtils::GetAssetIdForProductPath(assetPath.c_str(), RPI::AssetUtils::TraceLevel::Error);
  215. GetLutFromAssetId(displayMapperAssetLut, assetId);
  216. }
  217. void AcesDisplayMapperFeatureProcessor::GetLutFromAssetId(DisplayMapperAssetLut& displayMapperAssetLut, const AZ::Data::AssetId assetId)
  218. {
  219. if (!assetId.IsValid())
  220. {
  221. return;
  222. }
  223. // Check first if this already exists
  224. auto it = m_assetLuts.find(assetId.ToString<AZStd::string>());
  225. if (it != m_assetLuts.end())
  226. {
  227. displayMapperAssetLut = it->second;
  228. return;
  229. }
  230. // Read the lut which is a .3dl file embedded within an azasset file.
  231. Data::Asset<RPI::AnyAsset> asset = RPI::AssetUtils::LoadAssetById<RPI::AnyAsset>(assetId, RPI::AssetUtils::TraceLevel::Error);
  232. const LookupTableAsset* lutAsset = RPI::GetDataFromAnyAsset<LookupTableAsset>(asset);
  233. if (lutAsset == nullptr)
  234. {
  235. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Unable to read LUT from asset.");
  236. asset.Release();
  237. return;
  238. }
  239. // The first row of numbers in a 3dl file is a number of vertices that partition the space from [0,..1023]
  240. // This assumes that the vertices are evenly spaced apart. Non-uniform spacing is supported by the format,
  241. // but haven't been encountered yet.
  242. const size_t lutSize = lutAsset->m_intervals.size();
  243. if (lutSize == 0)
  244. {
  245. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Lut asset has invalid size.");
  246. asset.Release();
  247. return;
  248. }
  249. // Create a buffer of half floats from the LUT and use it to initialize a 3d texture.
  250. const size_t kChannels = 4;
  251. const size_t kChannelBytes = 2;
  252. const size_t bytesPerRow = lutSize * kChannels * kChannelBytes;
  253. const size_t bytesPerSlice = bytesPerRow * lutSize;
  254. AZStd::vector<uint16_t> u16Buffer;
  255. const size_t bufferSize = lutSize * lutSize * lutSize * kChannels;
  256. u16Buffer.resize(bufferSize);
  257. for (size_t slice = 0; slice < lutSize; slice++)
  258. {
  259. for (size_t column = 0; column < lutSize; column++)
  260. {
  261. for (size_t row = 0; row < lutSize; row++)
  262. {
  263. // Index in the LUT texture data
  264. size_t idx = (column * kChannels) +
  265. ((bytesPerRow * row) / kChannelBytes) +
  266. ((bytesPerSlice * slice) / kChannelBytes);
  267. // Vertices the .3dl file are listed first by increasing slice, then row, and finally column coordinate
  268. // This corresponds to blue, green, and red channels, respectively.
  269. size_t assetIdx = slice + lutSize * row + (lutSize * lutSize * column);
  270. AZ::u64 red = lutAsset->m_values[assetIdx * 3 + 0];
  271. AZ::u64 green = lutAsset->m_values[assetIdx * 3 + 1];
  272. AZ::u64 blue = lutAsset->m_values[assetIdx * 3 + 2];
  273. // The vertices in the file are given as a positive integer value in [0,..4095] and need to be normalized
  274. constexpr float NormalizeValue = 4095.0f;
  275. u16Buffer[idx + 0] = ConvertFloatToHalf(static_cast<float>(red) / NormalizeValue);
  276. u16Buffer[idx + 1] = ConvertFloatToHalf(static_cast<float>(green) / NormalizeValue);
  277. u16Buffer[idx + 2] = ConvertFloatToHalf(static_cast<float>(blue) / NormalizeValue);
  278. u16Buffer[idx + 3] = 0x3b00; // 1.0 in half
  279. }
  280. }
  281. }
  282. asset.Release();
  283. Data::Instance<RPI::StreamingImagePool> streamingImagePool = RPI::ImageSystemInterface::Get()->GetSystemStreamingPool();
  284. RHI::Size imageSize;
  285. imageSize.m_width = static_cast<uint32_t>(lutSize);
  286. imageSize.m_height = static_cast<uint32_t>(lutSize);
  287. imageSize.m_depth = static_cast<uint32_t>(lutSize);
  288. size_t imageDataSize = bytesPerSlice * lutSize;
  289. Data::Instance<RPI::StreamingImage> lutStreamingImage = RPI::StreamingImage::CreateFromCpuData(
  290. *streamingImagePool, RHI::ImageDimension::Image3D, imageSize, LutFormat, u16Buffer.data(), imageDataSize);
  291. AZ_Error("AcesDisplayMapperFeatureProcessor", lutStreamingImage, "Failed to initialize the lut assetId %s.", assetId.ToString<AZStd::string>().c_str());
  292. DisplayMapperAssetLut assetLut;
  293. assetLut.m_lutStreamingImage = lutStreamingImage;
  294. // Add to the list of LUT asset resources
  295. m_assetLuts.insert(AZStd::pair<AZStd::string, DisplayMapperAssetLut>(assetId.ToString<AZStd::string>(), assetLut));
  296. displayMapperAssetLut = assetLut;
  297. }
  298. void AcesDisplayMapperFeatureProcessor::InitializeImagePool()
  299. {
  300. m_displayMapperImagePool = aznew RHI::ImagePool;
  301. m_displayMapperImagePool->SetName(Name("DisplayMapperImagePool"));
  302. RHI::ImagePoolDescriptor imagePoolDesc = {};
  303. imagePoolDesc.m_bindFlags = RHI::ImageBindFlags::ShaderReadWrite;
  304. imagePoolDesc.m_budgetInBytes = ImagePoolBudget;
  305. RHI::ResultCode resultCode = m_displayMapperImagePool->Init(imagePoolDesc);
  306. if (resultCode != RHI::ResultCode::Success)
  307. {
  308. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Failed to initialize image pool.");
  309. return;
  310. }
  311. }
  312. void AcesDisplayMapperFeatureProcessor::InitializeLutImage(const AZ::Name& lutName)
  313. {
  314. if (!m_displayMapperImagePool)
  315. {
  316. InitializeImagePool();
  317. }
  318. DisplayMapperLut lutResource;
  319. lutResource.m_lutImage = aznew RHI::Image;
  320. lutResource.m_lutImage->SetName(lutName);
  321. RHI::ImageInitRequest imageRequest;
  322. imageRequest.m_image = lutResource.m_lutImage.get();
  323. static const int LutSize = 32;
  324. imageRequest.m_descriptor = RHI::ImageDescriptor::Create3D(RHI::ImageBindFlags::ShaderReadWrite, LutSize, LutSize, LutSize, LutFormat);
  325. RHI::ResultCode resultCode = m_displayMapperImagePool->InitImage(imageRequest);
  326. if (resultCode != RHI::ResultCode::Success)
  327. {
  328. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Failed to initialize LUT image.");
  329. return;
  330. }
  331. lutResource.m_lutImageViewDescriptor = RHI::ImageViewDescriptor::Create(LutFormat, 0, 0);
  332. lutResource.m_lutImageView = lutResource.m_lutImage->BuildImageView(lutResource.m_lutImageViewDescriptor);
  333. if (!lutResource.m_lutImageView.get())
  334. {
  335. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Failed to initialize LUT image view.");
  336. return;
  337. }
  338. // Add to the list of lut resources
  339. lutResource.m_lutImageView->SetName(lutName);
  340. m_ownedLuts[lutName] = lutResource;
  341. }
  342. ShaperParams AcesDisplayMapperFeatureProcessor::GetShaperParameters(ShaperPresetType shaperPreset, float customMinEv, float customMaxEv)
  343. {
  344. // Default is a linear shaper with bias 0.0 and scale 1.0. That is, fx = x*1.0 + 0.0
  345. ShaperParams shaperParams = { ShaperType::Linear, 0.0, 1.f };
  346. switch (shaperPreset)
  347. {
  348. case ShaperPresetType::None:
  349. break;
  350. case ShaperPresetType::Log2_48Nits:
  351. shaperParams = GetAcesShaperParameters(OutputDeviceTransformType::OutputDeviceTransformType_48Nits);
  352. break;
  353. case ShaperPresetType::Log2_1000Nits:
  354. shaperParams = GetAcesShaperParameters(OutputDeviceTransformType::OutputDeviceTransformType_1000Nits);
  355. break;
  356. case ShaperPresetType::Log2_2000Nits:
  357. shaperParams = GetAcesShaperParameters(OutputDeviceTransformType::OutputDeviceTransformType_2000Nits);
  358. break;
  359. case ShaperPresetType::Log2_4000Nits:
  360. shaperParams = GetAcesShaperParameters(OutputDeviceTransformType::OutputDeviceTransformType_4000Nits);
  361. break;
  362. case ShaperPresetType::LinearCustomRange:
  363. {
  364. // Map the range min exposure - max exposure to 0-1. Convert EV values to linear values here to avoid that work in the shader.
  365. // Shader equation becomes (x - bias) / scale;
  366. constexpr float MediumGray = 0.18f;
  367. const float minValue = MediumGray * powf(2, customMinEv);
  368. const float maxValue = MediumGray * powf(2, customMaxEv);
  369. shaperParams.m_type = ShaperType::Linear;
  370. shaperParams.m_scale = 1.0f / (maxValue - minValue);
  371. shaperParams.m_bias = -minValue * shaperParams.m_scale;
  372. break;
  373. }
  374. case ShaperPresetType::Log2CustomRange:
  375. shaperParams = GetLog2ShaperParameters(customMinEv, customMaxEv);
  376. break;
  377. case ShaperPresetType::PqSmpteSt2084:
  378. shaperParams.m_type = ShaperType::PqSmpteSt2084;
  379. break;
  380. default:
  381. AZ_Error("DisplayMapperPass", false, "Invalid shaper preset type.");
  382. break;
  383. }
  384. return shaperParams;
  385. }
  386. void AcesDisplayMapperFeatureProcessor::GetDefaultDisplayMapperConfiguration(DisplayMapperConfigurationDescriptor& config)
  387. {
  388. // Default configuration is ACES with LDR color grading LUT disabled.
  389. config.m_operationType = DisplayMapperOperationType::Aces;
  390. config.m_ldrGradingLutEnabled = false;
  391. config.m_ldrColorGradingLut.Release();
  392. }
    //! Stores (or overwrites) the active display mapper configuration.
    void AcesDisplayMapperFeatureProcessor::RegisterDisplayMapperConfiguration(const DisplayMapperConfigurationDescriptor& config)
    {
        m_displayMapperConfiguration = config;
    }
    //! Clears the stored configuration; GetDisplayMapperConfiguration() will
    //! return nullptr afterwards.
    void AcesDisplayMapperFeatureProcessor::UnregisterDisplayMapperConfiguration()
    {
        m_displayMapperConfiguration.reset();
    }
  401. const DisplayMapperConfigurationDescriptor* AcesDisplayMapperFeatureProcessor::GetDisplayMapperConfiguration()
  402. {
  403. return m_displayMapperConfiguration.has_value() ? &m_displayMapperConfiguration.value() : nullptr;
  404. }
  405. } // namespace AZ::Render