/external/deqp/external/vulkancts/modules/vulkan/dynamic_state/
vktDynamicStateBaseClass.hpp
    101  vk::Move<vk::VkCommandBuffer> m_cmdBuffer;    (member in class vkt::DynamicState::DynamicStateBaseClass)
vktDynamicStateDSTests.cpp
    95   vk::Move<vk::VkCommandBuffer> m_cmdBuffer;    (member in class vkt::DynamicState::__anon3809::DepthStencilBaseCase)
    293  m_cmdBuffer = vk::allocateCommandBuffer(m_vk, device, &cmdBufferAllocateInfo);
    311  m_vk.beginCommandBuffer(*m_cmdBuffer, &beginInfo);
    313  initialTransitionColor2DImage(m_vk, *m_cmdBuffer, m_colorTargetImage->object(), vk::VK_IMAGE_LAYOUT_GENERAL);
    314  initialTransitionDepthStencil2DImage(m_vk, *m_cmdBuffer, m_depthStencilImage->object(), vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 0, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
    317  m_vk.cmdClearColorImage(*m_cmdBuffer, m_colorTargetImage->object(),
    323  m_vk.cmdClearDepthStencilImage(*m_cmdBuffer, m_depthStencilImage->object(),
    333  m_vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT,
    341  transition2DImage(m_vk, *m_cmdBuffer, m_depthStencilImage->object(), vk::VK_IMAGE_ASPECT_DEPTH_BIT | vk::VK_IMAGE_ASPECT_STENCIL_BIT, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, vk::VK_ACCESS_TRANSFER_WRITE_BIT, vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
    343  m_vk.cmdBeginRenderPass(*m_cmdBuffer,
    [all...]
vktDynamicStateRSTests.cpp
    86   vk::Move<vk::VkCommandBuffer> m_cmdBuffer;    (member in class vkt::DynamicState::__anon3812::DepthBiasBaseCase)
    265  m_cmdBuffer = vk::allocateCommandBuffer(m_vk, device, &cmdBufferAllocateInfo);
    283  m_vk.beginCommandBuffer(*m_cmdBuffer, &beginInfo);
    285  initialTransitionColor2DImage(m_vk, *m_cmdBuffer, m_colorTargetImage->object(), vk::VK_IMAGE_LAYOUT_GENERAL);
    286  initialTransitionDepthStencil2DImage(m_vk, *m_cmdBuffer, m_depthStencilImage->object(), vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 0, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
    289  m_vk.cmdClearColorImage(*m_cmdBuffer, m_colorTargetImage->object(),
    296  m_vk.cmdClearDepthStencilImage(*m_cmdBuffer, m_depthStencilImage->object(),
    308  m_vk.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT,
    316  transition2DImage(m_vk, *m_cmdBuffer, m_depthStencilImage->object(), vk::VK_IMAGE_ASPECT_DEPTH_BIT | vk::VK_IMAGE_ASPECT_STENCIL_BIT, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, vk::VK_ACCESS_TRANSFER_WRITE_BIT, vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
    318  m_vk.cmdBeginRenderPass(*m_cmdBuffer,
    [all...]
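Note: the two dynamic-state cases above record the same preamble into m_cmdBuffer: allocate from a pool, begin recording, clear the colour and depth/stencil targets, then barrier the depth/stencil image into its attachment layout before cmdBeginRenderPass. Below is a minimal sketch of that preamble against the plain Vulkan C API; the CTS itself goes through its DeviceInterface wrapper (m_vk.cmd...) and helpers such as initialTransitionColor2DImage, and the handles passed in here are assumed to be created elsewhere.

    #include <vulkan/vulkan.h>

    // Sketch only: device, pool and images are assumed valid; the initial
    // UNDEFINED -> GENERAL / TRANSFER_DST_OPTIMAL transitions are omitted for brevity.
    void recordClearPreamble (VkDevice device, VkCommandPool pool, VkImage colorImage, VkImage depthStencilImage)
    {
        VkCommandBuffer cmdBuffer = VK_NULL_HANDLE;

        const VkCommandBufferAllocateInfo allocInfo =
        {
            VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr,
            pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1u
        };
        vkAllocateCommandBuffers(device, &allocInfo, &cmdBuffer);

        const VkCommandBufferBeginInfo beginInfo =
        {
            VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
            VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr
        };
        vkBeginCommandBuffer(cmdBuffer, &beginInfo);

        // Clear the colour target while it is in GENERAL layout.
        const VkClearColorValue        clearColor = { { 0.0f, 0.0f, 0.0f, 1.0f } };
        const VkImageSubresourceRange  colorRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u };
        vkCmdClearColorImage(cmdBuffer, colorImage, VK_IMAGE_LAYOUT_GENERAL, &clearColor, 1u, &colorRange);

        // Clear depth/stencil while it is in TRANSFER_DST_OPTIMAL.
        const VkClearDepthStencilValue clearDS = { 1.0f, 0u };
        const VkImageSubresourceRange  dsRange = { VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, 0u, 1u, 0u, 1u };
        vkCmdClearDepthStencilImage(cmdBuffer, depthStencilImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clearDS, 1u, &dsRange);

        // Make the clear visible and move the image into its attachment layout
        // before the render pass starts using it.
        const VkImageMemoryBarrier dsBarrier =
        {
            VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, nullptr,
            VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
            VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
            depthStencilImage, dsRange
        };
        vkCmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                             0u, 0u, nullptr, 0u, nullptr, 1u, &dsBarrier);
    }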
/external/deqp/external/vulkancts/modules/vulkan/draw/
vktDrawBaseClass.hpp
    126  vk::Move<vk::VkCommandBuffer> m_cmdBuffer;    (member in class vkt::Draw::DrawTestsBaseClass)
/external/deqp/external/vulkancts/modules/vulkan/pipeline/
vktPipelineImageSamplingInstance.hpp
    104  vk::Move<vk::VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::ImageSamplingInstance)
vktPipelineTimestampTests.cpp
    653  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3859::TimestampTestInstance)
    729  m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdAllocateParams);
    769  VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    771  vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
    776  vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
    779  VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
    800  &m_cmdBuffer.get(),    // const VkCommandBuffer* pCommandBuffers;
    1297 VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    1299 vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
    1302 vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPoo
    [all...]
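Note: the timestamp test resets its query pool, writes one timestamp per pipeline stage of interest, and submits the single command buffer (the &m_cmdBuffer.get() at line 800 is the pCommandBuffers entry of that submit). A hedged sketch of the same record-and-submit sequence, with all handles assumed to be created elsewhere:

    #include <vulkan/vulkan.h>
    #include <vector>

    // Sketch: cmdBuffer, queryPool and queue are assumed valid; the query pool
    // must have been created with VK_QUERY_TYPE_TIMESTAMP and enough entries.
    void recordAndSubmitTimestamps (VkCommandBuffer cmdBuffer, VkQueryPool queryPool, VkQueue queue,
                                    const std::vector<VkPipelineStageFlagBits>& stages)
    {
        const VkCommandBufferBeginInfo beginInfo =
        {
            VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0u, nullptr
        };
        vkBeginCommandBuffer(cmdBuffer, &beginInfo);

        // All query slots must be reset before they are written.
        vkCmdResetQueryPool(cmdBuffer, queryPool, 0u, static_cast<uint32_t>(stages.size()));

        // One timestamp per requested stage, written into consecutive query slots.
        uint32_t entry = 0u;
        for (VkPipelineStageFlagBits stage : stages)
            vkCmdWriteTimestamp(cmdBuffer, stage, queryPool, entry++);

        vkEndCommandBuffer(cmdBuffer);

        const VkSubmitInfo submitInfo =
        {
            VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr,
            0u, nullptr, nullptr,       // no wait semaphores
            1u, &cmdBuffer,             // the single command buffer
            0u, nullptr                 // no signal semaphores
        };
        vkQueueSubmit(queue, 1u, &submitInfo, VK_NULL_HANDLE);
    }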
vktPipelineBlendTests.cpp
    152  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3834::BlendTestInstance)
    753  m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
    755  VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    757  vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
    760  vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    768  vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines[quadNdx]);
    769  vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
    770  vk.cmdDraw(*m_cmdBuffer, (deUint32)(m_vertices.size() / BlendTest::QUAD_COUNT), 1, 0, 0);
    773  vk.cmdEndRenderPass(*m_cmdBuffer);
    774  VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
    [all...]
vktPipelineDepthTests.cpp
    166  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3839::DepthTestInstance)
    776  m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
    778  VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    780  vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
    783  vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    791  vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines[quadNdx]);
    792  vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
    793  vk.cmdDraw(*m_cmdBuffer, (deUint32)(m_vertices.size() / DepthTest::QUAD_COUNT), 1, 0, 0);
    796  vk.cmdEndRenderPass(*m_cmdBuffer);
    797  VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
    [all...]
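Note: the blend and depth cases (and the stencil case further down) share one recording pattern: a top-of-pipe barrier, one render pass, then a loop that binds a per-quad pipeline, binds the shared vertex buffer and draws that quad's share of the vertices. A sketch of that loop; the per-quad firstVertex arithmetic here is illustrative, not the tests' exact offsets:

    #include <vulkan/vulkan.h>
    #include <vector>

    // Sketch: one pipeline per quad (different blend/depth/stencil state),
    // all quads stored back to back in a single vertex buffer.
    void recordQuadDraws (VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo& renderPassBegin,
                          const std::vector<VkPipeline>& quadPipelines, VkBuffer vertexBuffer,
                          uint32_t verticesPerQuad)
    {
        vkCmdBeginRenderPass(cmdBuffer, &renderPassBegin, VK_SUBPASS_CONTENTS_INLINE);

        const VkDeviceSize vertexBufferOffset = 0u;
        const uint32_t     quadCount          = static_cast<uint32_t>(quadPipelines.size());

        for (uint32_t quadNdx = 0u; quadNdx < quadCount; ++quadNdx)
        {
            // Each quad gets its own pipeline but shares the vertex buffer.
            vkCmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, quadPipelines[quadNdx]);
            vkCmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
            vkCmdDraw(cmdBuffer, verticesPerQuad, 1u, quadNdx * verticesPerQuad, 0u);
        }

        vkCmdEndRenderPass(cmdBuffer);
    }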
vktPipelineInputAssemblyTests.cpp
    179  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3848::InputAssemblyInstance)
    1416 m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
    1418 VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    1420 vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
    1423 vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    1427 vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
    1428 vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
    1429 vk.cmdBindIndexBuffer(*m_cmdBuffer, *m_indexBuffer, 0, m_indexType);
    1430 vk.cmdDrawIndexed(*m_cmdBuffer, (deUint32)m_indices.size(), 1, 0, 0, 0);
    1432 vk.cmdEndRenderPass(*m_cmdBuffer);
    [all...]
vktPipelinePushConstantTests.cpp
    168  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3850::PushConstantGraphicsTestInstance)
    1009 m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
    1011 VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    1013 vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
    1016 vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    1028 vk.cmdPushConstants(*m_cmdBuffer, *m_pipelineLayout, m_pushConstantRange[rangeNdx].range.shaderStage, m_pushConstantRange[rangeNdx].range.offset, m_pushConstantRange[rangeNdx].range.size, value);
    1033 vk.cmdPushConstants(*m_cmdBuffer, *m_pipelineLayout, m_pushConstantRange[rangeNdx].range.shaderStage, m_pushConstantRange[rangeNdx].update.offset, m_pushConstantRange[rangeNdx].update.size, value);
    1045 vk.cmdPushConstants(*m_cmdBuffer, *m_pipelineLayout, m_pushConstantRange[0].range.shaderStage, m_pushConstantRange[0].range.offset, m_pushConstantRange[0].range.size, &triangleNdx);
    1048 vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines);
    1049 vk.cmdBindVertexBuffers(*m_cmdBuffer,
    1216 Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3850::PushConstantComputeTestInstance)
    [all...]
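Note: the push-constant case updates constants with cmdPushConstants before each draw; every offset/size pair has to fall inside a VkPushConstantRange declared in the pipeline layout. A small sketch of that update-then-draw step, with illustrative stage flags and data:

    #include <vulkan/vulkan.h>

    // Sketch: push a small constant block, then bind state and draw.
    // The stage flag, offset and payload here are illustrative.
    void drawWithPushConstants (VkCommandBuffer cmdBuffer, VkPipelineLayout pipelineLayout,
                                VkPipeline pipeline, VkBuffer vertexBuffer, uint32_t vertexCount)
    {
        const float color[4] = { 0.25f, 0.5f, 0.75f, 1.0f };

        // Must match a range declared in the pipeline layout for these shader stages.
        vkCmdPushConstants(cmdBuffer, pipelineLayout, VK_SHADER_STAGE_VERTEX_BIT,
                           0u, sizeof(color), color);

        const VkDeviceSize vertexBufferOffset = 0u;
        vkCmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
        vkCmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
        vkCmdDraw(cmdBuffer, vertexCount, 1u, 0u, 0u);
    }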
vktPipelineStencilTests.cpp
    170  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3856::StencilTestInstance)
    864  m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
    866  VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    868  vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
    871  vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    879  vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines[quadNdx]);
    880  vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
    881  vk.cmdDraw(*m_cmdBuffer, (deUint32)(m_vertices.size() / StencilTest::QUAD_COUNT), 1, 0, 0);
    884  vk.cmdEndRenderPass(*m_cmdBuffer);
    885  VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
    [all...]
vktPipelineCacheTests.cpp
    561  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3836::CacheTestInstance)
    599  m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdAllocateParams);
    651  &m_cmdBuffer.get(),    // const VkCommandBuffer* pCommandBuffers;
    1128 vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    1130 vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
    1132 vk.cmdBindVertexBuffers(*m_cmdBuffer, 0u, 1u, &m_vertexBuffer.get(), &offsets);
    1133 vk.cmdDraw(*m_cmdBuffer, (deUint32)m_vertices.size(), 1u, 0u, 0u);
    1135 vk.cmdEndRenderPass(*m_cmdBuffer);
    1150 VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    1152 vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BI
    [all...]
vktPipelineMultisampleTests.cpp
    252  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3849::MultisampleRenderer)
    1789 m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
    1791 VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    1793 vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
    1796 vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    1800 vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
    1801 vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
    1802 vk.cmdDraw(*m_cmdBuffer, (deUint32)vertices.size(), 1, 0, 0);
    1804 vk.cmdEndRenderPass(*m_cmdBuffer);
    1806 VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
    [all...]
vktPipelineVertexInputTests.cpp
    241  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::pipeline::__anon3861::VertexInputInstance)
    1070 m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
    1072 VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    1074 vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
    1077 vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    1079 vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
    1088 vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, (deUint32)vertexBuffers.size(), vertexBuffers.data(), bindingOffsets.data());
    1098 vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, firstHalfLength, vertexBuffers.data(), bindingOffsets.data());
    1101 vk.cmdBindVertexBuffers(*m_cmdBuffer, firstHalfLength, secondHalfLength,
    1106 vk.cmdDraw(*m_cmdBuffer,
    [all...]
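Note: the vertex-input case binds its attribute buffers either with one cmdBindVertexBuffers call or split across two calls covering the first and second half of the bindings (line 1088 versus lines 1098/1101). A sketch of both variants; the split point is an assumption:

    #include <vulkan/vulkan.h>
    #include <vector>
    #include <cassert>

    // Sketch: bind N vertex buffers either with a single call or split into two
    // halves, which is the kind of variation the vertex-input test exercises.
    void bindVertexBuffers (VkCommandBuffer cmdBuffer, const std::vector<VkBuffer>& buffers,
                            const std::vector<VkDeviceSize>& offsets, bool splitInHalf)
    {
        assert(buffers.size() == offsets.size());

        const uint32_t count = static_cast<uint32_t>(buffers.size());

        if (!splitInHalf)
        {
            vkCmdBindVertexBuffers(cmdBuffer, 0u, count, buffers.data(), offsets.data());
        }
        else
        {
            const uint32_t firstHalf  = count / 2u;
            const uint32_t secondHalf = count - firstHalf;

            // Bindings [0, firstHalf), then the remaining bindings starting at firstHalf.
            vkCmdBindVertexBuffers(cmdBuffer, 0u, firstHalf, buffers.data(), offsets.data());
            vkCmdBindVertexBuffers(cmdBuffer, firstHalf, secondHalf,
                                   buffers.data() + firstHalf, offsets.data() + firstHalf);
        }
    }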
/external/deqp/external/vulkancts/modules/vulkan/api/
vktApiBufferViewAccessTests.cpp
    105  Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::api::__anon3755::BufferViewTestInstance)
    673  m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
    675  VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    697  vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &initialImageBarrier);
    699  vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
    703  vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines);
    704  vk.cmdBindDescriptorSets(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1, &*m_descriptorSet, 0u, DE_NULL);
    705  vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), vertexBufferOffset);
    706  vk.cmdDraw(*m_cmdBuffer, (deUint32)m_vertices.size(), 1, 0, 0);
    707  vk.cmdEndRenderPass(*m_cmdBuffer);
    [all...]
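Note: compared with the draw-only cases, the buffer-view case also binds a descriptor set (holding the buffer view) between the pipeline bind and the draw. A minimal sketch of that sequence inside an already-begun render pass:

    #include <vulkan/vulkan.h>

    // Sketch: bind pipeline, its descriptor set (e.g. a buffer view) and the
    // vertex buffer, then draw. All handles are assumed to be created elsewhere.
    void drawWithDescriptorSet (VkCommandBuffer cmdBuffer, VkPipeline pipeline, VkPipelineLayout pipelineLayout,
                                VkDescriptorSet descriptorSet, VkBuffer vertexBuffer, uint32_t vertexCount)
    {
        const VkDeviceSize vertexBufferOffset = 0u;

        vkCmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
        vkCmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout,
                                0u, 1u, &descriptorSet, 0u, nullptr);
        vkCmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
        vkCmdDraw(cmdBuffer, vertexCount, 1u, 0u, 0u);
    }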
vktApiCopiesAndBlittingTests.cpp
    99   Move<VkCommandBuffer> m_cmdBuffer;    (member in class vkt::api::__anon3760::CopiesAndBlittingTestInstance)
    164  m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
    758  VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    759  vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(imageBarriers), imageBarriers);
    760  vk.cmdCopyImage(*m_cmdBuffer, m_source.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_destination.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)m_params.regions.size(), imageCopies);
    761  VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
    771  &m_cmdBuffer.get(),    // const VkCommandBuffer* pCommandBuffers;
    940  VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
    941  vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &srcBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
    942  vk.cmdCopyBuffer(*m_cmdBuffer, m_sourc
    [all...]
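Note: the copy tests record a barrier that puts source and destination into transfer layouts and then a single cmdCopyImage (or cmdCopyBuffer) into m_cmdBuffer. A hedged sketch of the image-copy variant against the C API; the access masks mirror the usual transfer-src/transfer-dst pattern and the full-extent single region is an assumption:

    #include <vulkan/vulkan.h>

    // Sketch: transition both images, then record one full-extent vkCmdCopyImage.
    void recordImageCopy (VkCommandBuffer cmdBuffer, VkImage src, VkImage dst, VkExtent3D extent)
    {
        const VkImageSubresourceRange range       = { VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u };
        const VkImageMemoryBarrier    barriers[2] =
        {
            {   // source image: previous writes -> transfer reads
                VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, nullptr,
                VK_ACCESS_MEMORY_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
                VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, src, range
            },
            {   // destination image: prepare for transfer writes
                VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, nullptr,
                0u, VK_ACCESS_TRANSFER_WRITE_BIT,
                VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, dst, range
            },
        };

        vkCmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
                             0u, 0u, nullptr, 0u, nullptr, 2u, barriers);

        const VkImageSubresourceLayers layers = { VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u };
        const VkImageCopy region =
        {
            layers, { 0, 0, 0 },    // srcSubresource, srcOffset
            layers, { 0, 0, 0 },    // dstSubresource, dstOffset
            extent
        };
        vkCmdCopyImage(cmdBuffer, src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                       dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &region);
    }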
/external/deqp/external/vulkancts/modules/vulkan/shaderrender/
vktShaderRender.hpp
    469  vk::Move<vk::VkCommandBuffer> m_cmdBuffer;    (member in class vkt::sr::ShaderRenderCaseInstance)
/external/deqp/external/vulkancts/modules/vulkan/
vktShaderLibrary.cpp
    1377 const Unique<vk::VkCommandBuffer> m_cmdBuffer;    (member in class vkt::__anon3943::ShaderCaseInstance)
    1419 , m_cmdBuffer (allocateCommandBuffer(context, *m_cmdPool))
    1490 VK_CHECK(vkd.beginCommandBuffer(*m_cmdBuffer, &beginInfo));
    1521 vkd.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, (vk::VkDependencyFlags)0,
    1540 vkd.cmdBeginRenderPass(*m_cmdBuffer, &passBeginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
    1543 vkd.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
    1546 vkd.cmdBindDescriptorSets(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &*m_descriptorSet, 0u, DE_NULL);
    1552 vkd.cmdBindVertexBuffers(*m_cmdBuffer, 0u, numBuffers, buffers, offsets);
    1555 vkd.cmdBindIndexBuffer(*m_cmdBuffer, *m_posNdxBuffer, (vk::VkDeviceSize)INDICES_OFFSET, vk::VK_INDEX_TYPE_UINT16);
    1556 vkd.cmdDrawIndexed(*m_cmdBuffer,
    [all...]
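Note: the shader-library case keeps positions and 16-bit indices in a single buffer (m_posNdxBuffer) and, after its host-to-graphics barrier, binds that buffer both as vertex and index data before an indexed draw. A sketch of that bind-and-draw tail; the buffer layout and the indicesOffset parameter are assumptions:

    #include <vulkan/vulkan.h>

    // Sketch: positions live at the start of the buffer, 16-bit indices at indicesOffset.
    void recordIndexedDraw (VkCommandBuffer cmdBuffer, VkPipeline pipeline, VkBuffer posNdxBuffer,
                            VkDeviceSize indicesOffset, uint32_t indexCount)
    {
        const VkDeviceSize vertexOffset = 0u;

        vkCmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);

        // Bind the same buffer once as vertex data and once as index data.
        vkCmdBindVertexBuffers(cmdBuffer, 0u, 1u, &posNdxBuffer, &vertexOffset);
        vkCmdBindIndexBuffer(cmdBuffer, posNdxBuffer, indicesOffset, VK_INDEX_TYPE_UINT16);

        vkCmdDrawIndexed(cmdBuffer, indexCount, 1u, 0u, 0, 0u);
    }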