Snap for 5455383 from db1f80ba923b9e89177562203c21192f8e7340c3 to pie-cts-release

Change-Id: I3952f757a9e5a84fa525346e47d5f7b2a972aca4
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.cpp b/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.cpp
index 053d59a..2a8bba2 100644
--- a/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.cpp
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.cpp
@@ -217,6 +217,34 @@
 	ACCESS_LAST
 };
 
+Access accessFlagToAccess (vk::VkAccessFlagBits flag)
+{
+	switch (flag)
+	{
+	case vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT:			return ACCESS_INDIRECT_COMMAND_READ_BIT;
+	case vk::VK_ACCESS_INDEX_READ_BIT:						return ACCESS_INDEX_READ_BIT;
+	case vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:			return ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
+	case vk::VK_ACCESS_UNIFORM_READ_BIT:					return ACCESS_UNIFORM_READ_BIT;
+	case vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT:			return ACCESS_INPUT_ATTACHMENT_READ_BIT;
+	case vk::VK_ACCESS_SHADER_READ_BIT:						return ACCESS_SHADER_READ_BIT;
+	case vk::VK_ACCESS_SHADER_WRITE_BIT:					return ACCESS_SHADER_WRITE_BIT;
+	case vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT:			return ACCESS_COLOR_ATTACHMENT_READ_BIT;
+	case vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT:			return ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+	case vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT:	return ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
+	case vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:	return ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+	case vk::VK_ACCESS_TRANSFER_READ_BIT:					return ACCESS_TRANSFER_READ_BIT;
+	case vk::VK_ACCESS_TRANSFER_WRITE_BIT:					return ACCESS_TRANSFER_WRITE_BIT;
+	case vk::VK_ACCESS_HOST_READ_BIT:						return ACCESS_HOST_READ_BIT;
+	case vk::VK_ACCESS_HOST_WRITE_BIT:						return ACCESS_HOST_WRITE_BIT;
+	case vk::VK_ACCESS_MEMORY_READ_BIT:						return ACCESS_MEMORY_READ_BIT;
+	case vk::VK_ACCESS_MEMORY_WRITE_BIT:					return ACCESS_MEMORY_WRITE_BIT;
+
+	default:
+		DE_FATAL("Unknown access flags");
+		return ACCESS_LAST;
+	}
+}
+
 // Sequential stage enums
 enum PipelineStage
 {
@@ -239,9 +267,9 @@
 	PIPELINESTAGE_LAST
 };
 
-PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flags)
+PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flag)
 {
-	switch (flags)
+	switch (flag)
 	{
 		case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT:						return PIPELINESTAGE_TOP_OF_PIPE_BIT;
 		case vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT:					return PIPELINESTAGE_BOTTOM_OF_PIPE_BIT;
@@ -7488,9 +7516,9 @@
 	const vk::VkPipelineStageFlags	m_allowedStages;
 	const vk::VkAccessFlags			m_allowedAccesses;
 
-	// [dstStage][srcStage] = srcAccesses
-	// In stage dstStage write srcAccesses from srcStage are not yet available
-	vk::VkAccessFlags				m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
+	// [dstStage][srcStage][dstAccess] = srcAccesses
+	// In stage dstStage, writes with srcAccesses from srcStage are not yet available to dstAccess
+	vk::VkAccessFlags				m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST][ACCESS_LAST];
 	// Latest pipeline transition is not available in stage
 	bool							m_unavailableLayoutTransition[PIPELINESTAGE_LAST];
 	// [dstStage] = dstAccesses
@@ -7531,7 +7559,15 @@
 
 			// There are no write operations that are not yet available
 			// initially.
-			m_unavailableWriteOperations[dstStage][srcStage] = 0;
+			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
+			{
+				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
+
+				if ((dstAccess_ & m_allowedAccesses) == 0)
+					continue;
+
+				m_unavailableWriteOperations[dstStage][srcStage][dstAccess] = 0;
+			}
 		}
 	}
 }
@@ -7582,8 +7618,16 @@
 			// Mark all accesses from all stages invisible
 			m_invisibleOperations[dstStage] |= m_allowedAccesses;
 
-			// Mark write access from srcStage unavailable to all stages
-			m_unavailableWriteOperations[dstStage][srcStage] |= access;
+			// Mark the write access from srcStage as unavailable to all stages, for every destination access
+			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
+			{
+				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
+
+				if ((dstAccess_ & m_allowedAccesses) == 0)
+					continue;
+
+				m_unavailableWriteOperations[dstStage][srcStage][dstAccess] |= access;
+			}
 		}
 	}
 }
@@ -7643,7 +7687,7 @@
 			dstAccesses |= m_invisibleOperations[dstStage];
 		}
 
-		// Make sure all write operations fro mall stages are available
+		// Make sure all write operations from all stages are available
 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
 		{
 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
@@ -7651,11 +7695,19 @@
 			if ((srcStage_ & m_allowedStages) == 0)
 				continue;
 
-			if (m_unavailableWriteOperations[dstStage][srcStage])
+			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
 			{
-				dstStages |= dstStage_;
-				srcStages |= dstStage_;
-				srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage];
+				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
+
+				if ((dstAccess_ & m_allowedAccesses) == 0)
+					continue;
+
+				if (m_unavailableWriteOperations[dstStage][srcStage][dstAccess])
+				{
+					dstStages |= dstStage_;
+					srcStages |= dstStage_;
+					srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage][dstAccess];
+				}
 			}
 
 			if (m_unavailableLayoutTransition[dstStage] && !m_unavailableLayoutTransition[srcStage])
@@ -7675,9 +7727,9 @@
 }
 
 void CacheState::checkImageLayoutBarrier (vk::VkPipelineStageFlags	srcStages,
-										 vk::VkAccessFlags			srcAccesses,
-										 vk::VkPipelineStageFlags	dstStages,
-										 vk::VkAccessFlags			dstAccesses)
+										  vk::VkAccessFlags			srcAccesses,
+										  vk::VkPipelineStageFlags	dstStages,
+										  vk::VkAccessFlags			dstAccesses)
 {
 	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
 	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
@@ -7727,10 +7779,18 @@
 				if ((srcStage_ & m_allowedStages) == 0)
 					continue;
 
-				if (m_unavailableWriteOperations[dstStage][srcStage] != (getWriteAccessFlags() & m_allowedAccesses))
+				for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
 				{
-					anyWriteAvailable = true;
-					break;
+					const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
+
+					if ((dstAccess_ & m_allowedAccesses) == 0)
+						continue;
+
+					if (m_unavailableWriteOperations[dstStage][srcStage][dstAccess] != (getWriteAccessFlags() & m_allowedAccesses))
+					{
+						anyWriteAvailable = true;
+						break;
+					}
 				}
 			}
 		}
@@ -7771,7 +7831,15 @@
 				continue;
 
 			// All write operations are available after layout transition
-			m_unavailableWriteOperations[dstStage][srcStage] = 0;
+			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
+			{
+				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
+
+				if ((dstAccess_ & m_allowedAccesses) == 0)
+					continue;
+
+				m_unavailableWriteOperations[dstStage][srcStage][dstAccess] = 0;
+			}
 		}
 	}
 }
@@ -7789,7 +7857,7 @@
 	// Transitivity
 	{
 		vk::VkPipelineStageFlags		oldIncompleteOperations[PIPELINESTAGE_LAST];
-		vk::VkAccessFlags				oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
+		vk::VkAccessFlags				oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST][ACCESS_LAST];
 		bool							oldUnavailableLayoutTransition[PIPELINESTAGE_LAST];
 
 		deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
@@ -7824,7 +7892,15 @@
 						continue;
 
 					// Writes that are available in srcStage are also available in dstStage
-					m_unavailableWriteOperations[dstStage][sharedStage] &= oldUnavailableWriteOperations[srcStage][sharedStage];
+					for (vk::VkAccessFlags sharedAccess_ = 1; sharedAccess_ <= m_allowedAccesses; sharedAccess_ <<= 1)
+					{
+						const Access sharedAccess = accessFlagToAccess((vk::VkAccessFlagBits)sharedAccess_);
+
+						if ((sharedAccess_ & m_allowedAccesses) == 0)
+							continue;
+
+						m_unavailableWriteOperations[dstStage][sharedStage][sharedAccess] &= oldUnavailableWriteOperations[srcStage][sharedStage][sharedAccess];
+					}
 				}
 			}
 		}
@@ -7849,12 +7925,20 @@
 			if ((srcStage_ & m_allowedStages) == 0)
 				continue;
 
-			// Make srcAccesses from srcStage available in dstStage
-			if ((srcStage_ & srcStages) != 0)
-				m_unavailableWriteOperations[dstStage][srcStage] &= ~srcAccesses;
+			// Make srcAccesses from srcStage available in dstStage for dstAccess
+			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
+			{
+				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
 
-			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
-				allWritesAvailable = false;
+				if ((dstAccess_ & m_allowedAccesses) == 0)
+					continue;
+
+				if (((srcStage_ & srcStages) != 0) && ((dstAccess_ & dstAccesses) != 0))
+					m_unavailableWriteOperations[dstStage][srcStage][dstAccess] &= ~srcAccesses;
+
+				if (m_unavailableWriteOperations[dstStage][srcStage][dstAccess] != 0)
+					allWritesAvailable = false;
+			}
 		}
 
 		// If all writes are available in dstStage make dstAccesses also visible
@@ -7891,9 +7975,17 @@
 			if ((srcStage_ & m_allowedStages) == 0)
 				continue;
 
-			// Some write operations are not available yet
-			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
-				return false;
+			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
+			{
+				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
+
+				if ((dstAccess_ & m_allowedAccesses) == 0)
+					continue;
+
+				// Some write operations are not available yet
+				if (m_unavailableWriteOperations[dstStage][srcStage][dstAccess] != 0)
+					return false;
+			}
 		}
 	}
 
@@ -8157,7 +8249,7 @@
 		{
 			ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
 
-			if (state.hasImage)
+			if (state.hasImage && (state.imageLayout != vk::VK_IMAGE_LAYOUT_UNDEFINED))
 				ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
 
 			if (state.hasBuffer)
diff --git a/external/vulkancts/modules/vulkan/subgroups/vktSubgroupsTestsUtils.cpp b/external/vulkancts/modules/vulkan/subgroups/vktSubgroupsTestsUtils.cpp
index 410089f..f6f5d98 100644
--- a/external/vulkancts/modules/vulkan/subgroups/vktSubgroupsTestsUtils.cpp
+++ b/external/vulkancts/modules/vulkan/subgroups/vktSubgroupsTestsUtils.cpp
@@ -573,7 +573,7 @@
 								context.getDevice(), &bufferCreateInfo);
 		vk::VkMemoryRequirements req = getBufferMemoryRequirements(
 										   context.getDeviceInterface(), context.getDevice(), *m_buffer);
-		req.size *= 2;
+		req.size *= 4;
 		m_allocation = context.getDefaultAllocator().allocate(
 						   req, MemoryRequirement::HostVisible);
 		VK_CHECK(context.getDeviceInterface().bindBufferMemory(
diff --git a/external/vulkancts/modules/vulkan/synchronization/vktSynchronizationOperationSingleQueueTests.cpp b/external/vulkancts/modules/vulkan/synchronization/vktSynchronizationOperationSingleQueueTests.cpp
index 979f269..22f3a53 100644
--- a/external/vulkancts/modules/vulkan/synchronization/vktSynchronizationOperationSingleQueueTests.cpp
+++ b/external/vulkancts/modules/vulkan/synchronization/vktSynchronizationOperationSingleQueueTests.cpp
@@ -232,6 +232,12 @@
 				writeSync.imageLayout, readSync.imageLayout, m_resource->getImage().handle, m_resource->getImage().subresourceRange);
 			vk.cmdPipelineBarrier(cmdBuffers[WRITE],  writeSync.stageMask, readSync.stageMask, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &barrier);
 		}
+		else
+		{
+			const VkBufferMemoryBarrier barrier = makeBufferMemoryBarrier(writeSync.accessMask, readSync.accessMask,
+				m_resource->getBuffer().handle, 0, VK_WHOLE_SIZE);
+			vk.cmdPipelineBarrier(cmdBuffers[WRITE],  writeSync.stageMask, readSync.stageMask, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &barrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
+		}
 
 		endCommandBuffer(vk, cmdBuffers[WRITE]);
 
diff --git a/modules/egl/teglGetFrameTimestampsTests.cpp b/modules/egl/teglGetFrameTimestampsTests.cpp
index 3c0c02f..f369e83 100644
--- a/modules/egl/teglGetFrameTimestampsTests.cpp
+++ b/modules/egl/teglGetFrameTimestampsTests.cpp
@@ -636,7 +636,7 @@
 			check_lt<EGLnsecsANDROID>(m_result, 1000000, frame.compositeInterval, "Reported refresh rate greater than 1kHz.");
 			check_lt<EGLnsecsANDROID>(m_result, frame.compositeInterval, 1000000000, "Reported refresh rate less than 1Hz.");
 			check_lt<EGLnsecsANDROID>(m_result, 0, frame.compositeToPresentLatency, "Composite to present latency must be greater than 0.");
-			check_lt(m_result, frame.compositeToPresentLatency, frame.compositeInterval * 3, "Composite to present latency is more than 3 vsyncs.");
+			check_lt(m_result, frame.compositeToPresentLatency, frame.compositeInterval * 4, "Composite to present latency is more than 4 vsyncs.");
 			const EGLnsecsANDROID minDeadline = now;
 			check_lt(m_result, minDeadline, frame.compositeDeadline, "Next composite deadline is in the past.");
 			const EGLnsecsANDROID maxDeadline = now + frame.compositeInterval * 2;
@@ -655,25 +655,25 @@
 			frame.swapBufferBeginNs = getNanoseconds();
 			EGLU_CHECK_CALL(egl, swapBuffers(display, *surface));
 
-			// All timestamps from 5 frames ago should definitely be available.
-			const size_t frameDelay = 5;
+			// All timestamps from 6 frames ago should definitely be available.
+			const size_t frameDelay = 6;
 			if (i >= frameDelay)
 			{
 				// \todo [2017-01-25 brianderson] Remove this work around once reads done is fixed.
 				const bool verifyReadsDone	=	i > (frameDelay + 3);
-				FrameTimes&		frame5ago	=	frameTimes[i-frameDelay];
+				FrameTimes&		frame6ago	=	frameTimes[i-frameDelay];
 				std::vector<EGLnsecsANDROID> supportedValues(supportedNames.size(), 0);
 
 				CHECK_NAKED_EGL_CALL(egl, m_eglGetFrameTimestampsANDROID(
-					display, *surface, frame5ago.frameId, static_cast<eglw::EGLint>(supportedNames.size()),
+					display, *surface, frame6ago.frameId, static_cast<eglw::EGLint>(supportedNames.size()),
 					&supportedNames[0], &supportedValues[0]));
-				populateFrameTimes(&frame5ago, timestamps, supportedValues);
+				populateFrameTimes(&frame6ago, timestamps, supportedValues);
 
-				verifySingleFrame(frame5ago, m_result, verifyReadsDone);
+				verifySingleFrame(frame6ago, m_result, verifyReadsDone);
 				if (i >= frameDelay + 1)
 				{
-					FrameTimes& frame6ago = frameTimes[i-frameDelay-1];
-					verifyNeighboringFrames(frame6ago, frame5ago, m_result);
+					FrameTimes& frame7ago = frameTimes[i-frameDelay-1];
+					verifyNeighboringFrames(frame7ago, frame6ago, m_result);
 				}
 			}
 		}
diff --git a/modules/gles31/functional/es31fSSBOLayoutCase.cpp b/modules/gles31/functional/es31fSSBOLayoutCase.cpp
index 226c9eb..3665383 100644
--- a/modules/gles31/functional/es31fSSBOLayoutCase.cpp
+++ b/modules/gles31/functional/es31fSSBOLayoutCase.cpp
@@ -1282,12 +1282,17 @@
 	}
 }
 
-string generateComputeShader (glu::GLSLVersion glslVersion, const ShaderInterface& interface, const BufferLayout& layout, const vector<BlockDataPtr>& comparePtrs, const vector<BlockDataPtr>& writePtrs)
+string generateComputeShader (const glw::Functions& gl, glu::GLSLVersion glslVersion, const ShaderInterface& interface, const BufferLayout& layout, const vector<BlockDataPtr>& comparePtrs, const vector<BlockDataPtr>& writePtrs)
 {
 	std::ostringstream src;
+	glw::GLint maxShaderStorageBufferBindings;
+	glw::GLint maxComputeShaderStorageBlocks;
 
 	DE_ASSERT(glslVersion == glu::GLSL_VERSION_310_ES || glslVersion == glu::GLSL_VERSION_430);
 
+	gl.getIntegerv(GL_MAX_SHADER_STORAGE_BUFFER_BINDINGS, &maxShaderStorageBufferBindings);
+	gl.getIntegerv(GL_MAX_COMPUTE_SHADER_STORAGE_BLOCKS, &maxComputeShaderStorageBlocks);
+
 	src << glu::getGLSLVersionDeclaration(glslVersion) << "\n";
 	src << "layout(local_size_x = 1) in;\n";
 	src << "\n";
@@ -1307,6 +1312,15 @@
 
 			bindingPoint += block.isArray() ? block.getArraySize() : 1;
 		}
+
+		if (bindingPoint > maxShaderStorageBufferBindings)
+		{
+			throw tcu::NotSupportedError("Test requires support for more SSBO bindings than implementation exposes");
+		}
+		if (bindingPoint > maxComputeShaderStorageBlocks)
+		{
+			throw tcu::NotSupportedError("Test requires support for more compute shader storage blocks than implementation exposes");
+		}
 	}
 
 	// Atomic counter for counting passed invocations.
@@ -2109,7 +2123,7 @@
 	generateValues			(refLayout, writeData.pointers, deStringHash(getName()) ^ 0x25ca4e7);
 	copyNonWrittenData		(m_interface, refLayout, initialData.pointers, writeData.pointers);
 
-	const glu::ShaderProgram program(m_renderCtx, glu::ProgramSources() << glu::ComputeSource(generateComputeShader(m_glslVersion, m_interface, refLayout, initialData.pointers, writeData.pointers)));
+	const glu::ShaderProgram program(m_renderCtx, glu::ProgramSources() << glu::ComputeSource(generateComputeShader(gl, m_glslVersion, m_interface, refLayout, initialData.pointers, writeData.pointers)));
 	log << program;
 
 	if (!program.isOk())