Merge two implementations of ValidateMemorySemantics (#2175)

Fixes #2170
This commit is contained in:
Ryan Harrison 2018-12-06 14:38:15 -05:00 committed by GitHub
parent aa270e568a
commit cf37ab7213
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 294 additions and 344 deletions

View File

@ -62,6 +62,7 @@ SPVTOOLS_SRC_FILES := \
source/val/validate_interfaces.cpp \
source/val/validate_instruction.cpp \
source/val/validate_memory.cpp \
source/val/validate_memory_semantics.cpp \
source/val/validate_mode_setting.cpp \
source/val/validate_layout.cpp \
source/val/validate_literals.cpp \

View File

@ -421,6 +421,7 @@ static_library("spvtools_val") {
"source/val/validate_literals.cpp",
"source/val/validate_logicals.cpp",
"source/val/validate_memory.cpp",
"source/val/validate_memory_semantics.cpp",
"source/val/validate_mode_setting.cpp",
"source/val/validate_non_uniform.cpp",
"source/val/validate_primitives.cpp",

View File

@ -316,6 +316,7 @@ set(SPIRV_SOURCES
${CMAKE_CURRENT_SOURCE_DIR}/val/validate_literals.cpp
${CMAKE_CURRENT_SOURCE_DIR}/val/validate_logicals.cpp
${CMAKE_CURRENT_SOURCE_DIR}/val/validate_memory.cpp
${CMAKE_CURRENT_SOURCE_DIR}/val/validate_memory_semantics.cpp
${CMAKE_CURRENT_SOURCE_DIR}/val/validate_mode_setting.cpp
${CMAKE_CURRENT_SOURCE_DIR}/val/validate_non_uniform.cpp
${CMAKE_CURRENT_SOURCE_DIR}/val/validate_primitives.cpp

View File

@ -21,175 +21,13 @@
#include "source/spirv_target_env.h"
#include "source/util/bitutils.h"
#include "source/val/instruction.h"
#include "source/val/validate_memory_semantics.h"
#include "source/val/validate_scopes.h"
#include "source/val/validation_state.h"
namespace spvtools {
namespace val {
// Validates a Memory Semantics operand.
spv_result_t ValidateMemorySemantics(ValidationState_t& _,
const Instruction* inst,
uint32_t operand_index) {
const SpvOp opcode = inst->opcode();
bool is_int32 = false, is_const_int32 = false;
uint32_t flags = 0;
auto memory_semantics_id = inst->GetOperandAs<const uint32_t>(operand_index);
std::tie(is_int32, is_const_int32, flags) =
_.EvalInt32IfConst(memory_semantics_id);
if (!is_int32) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to be 32-bit int";
}
if (!is_const_int32) {
if (_.HasCapability(SpvCapabilityShader)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Memory Semantics ids must be OpConstant when Shader "
"capability is present";
}
return SPV_SUCCESS;
}
if (_.memory_model() == SpvMemoryModelVulkanKHR &&
flags & SpvMemorySemanticsSequentiallyConsistentMask) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "SequentiallyConsistent memory "
"semantics cannot be used with "
"the VulkanKHR memory model.";
}
if (spvtools::utils::CountSetBits(
flags &
(SpvMemorySemanticsAcquireMask | SpvMemorySemanticsReleaseMask |
SpvMemorySemanticsAcquireReleaseMask |
SpvMemorySemanticsSequentiallyConsistentMask)) > 1) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": no more than one of the following Memory Semantics bits can "
"be set at the same time: Acquire, Release, AcquireRelease or "
"SequentiallyConsistent";
}
if (flags & SpvMemorySemanticsUniformMemoryMask &&
!_.HasCapability(SpvCapabilityShader)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics UniformMemory requires capability Shader";
}
if (flags & SpvMemorySemanticsAtomicCounterMemoryMask &&
!_.HasCapability(SpvCapabilityAtomicStorage)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics UniformMemory requires capability "
"AtomicStorage";
}
if (flags & SpvMemorySemanticsOutputMemoryKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics OutputMemoryKHR requires capability "
<< "VulkanMemoryModelKHR";
}
if (flags & SpvMemorySemanticsMakeAvailableKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics MakeAvailableKHR requires capability "
<< "VulkanMemoryModelKHR";
}
if (flags & SpvMemorySemanticsMakeVisibleKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics MakeVisibleKHR requires capability "
<< "VulkanMemoryModelKHR";
}
if (opcode == SpvOpAtomicFlagClear &&
(flags & SpvMemorySemanticsAcquireMask ||
flags & SpvMemorySemanticsAcquireReleaseMask)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Memory Semantics Acquire and AcquireRelease cannot be used with "
<< spvOpcodeString(opcode);
}
if (opcode == SpvOpAtomicCompareExchange && operand_index == 5 &&
(flags & SpvMemorySemanticsReleaseMask ||
flags & SpvMemorySemanticsAcquireReleaseMask)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics Release and AcquireRelease cannot be used "
"for operand Unequal";
}
if (spvIsVulkanEnv(_.context()->target_env)) {
if (opcode == SpvOpAtomicLoad &&
(flags & SpvMemorySemanticsReleaseMask ||
flags & SpvMemorySemanticsAcquireReleaseMask ||
flags & SpvMemorySemanticsSequentiallyConsistentMask)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Vulkan spec disallows OpAtomicLoad with Memory Semantics "
"Release, AcquireRelease and SequentiallyConsistent";
}
if (opcode == SpvOpAtomicStore &&
(flags & SpvMemorySemanticsAcquireMask ||
flags & SpvMemorySemanticsAcquireReleaseMask ||
flags & SpvMemorySemanticsSequentiallyConsistentMask)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Vulkan spec disallows OpAtomicStore with Memory Semantics "
"Acquire, AcquireRelease and SequentiallyConsistent";
}
}
if (flags & SpvMemorySemanticsMakeAvailableKHRMask &&
!(flags & (SpvMemorySemanticsReleaseMask |
SpvMemorySemanticsAcquireReleaseMask))) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": MakeAvailableKHR Memory Semantics also requires either "
"Release or AcquireRelease Memory Semantics";
}
if (flags & SpvMemorySemanticsMakeVisibleKHRMask &&
!(flags & (SpvMemorySemanticsAcquireMask |
SpvMemorySemanticsAcquireReleaseMask))) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": MakeVisibleKHR Memory Semantics also requires either Acquire "
"or AcquireRelease Memory Semantics";
}
if (flags & (SpvMemorySemanticsMakeAvailableKHRMask |
SpvMemorySemanticsMakeVisibleKHRMask)) {
const bool includes_storage_class =
flags & (SpvMemorySemanticsUniformMemoryMask |
SpvMemorySemanticsSubgroupMemoryMask |
SpvMemorySemanticsWorkgroupMemoryMask |
SpvMemorySemanticsCrossWorkgroupMemoryMask |
SpvMemorySemanticsAtomicCounterMemoryMask |
SpvMemorySemanticsImageMemoryMask |
SpvMemorySemanticsOutputMemoryKHRMask);
if (!includes_storage_class) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to include a storage class";
}
}
// TODO(atgoo@github.com) Add checks for OpenCL and OpenGL environments.
return SPV_SUCCESS;
}
// Validates correctness of atomic instructions.
spv_result_t AtomicsPass(ValidationState_t& _, const Instruction* inst) {
const SpvOp opcode = inst->opcode();

View File

@ -24,155 +24,12 @@
#include "source/spirv_target_env.h"
#include "source/util/bitutils.h"
#include "source/val/instruction.h"
#include "source/val/validate_memory_semantics.h"
#include "source/val/validate_scopes.h"
#include "source/val/validation_state.h"
namespace spvtools {
namespace val {
namespace {
// Validates Memory Semantics operand.
spv_result_t ValidateMemorySemantics(ValidationState_t& _,
const Instruction* inst, uint32_t id) {
const SpvOp opcode = inst->opcode();
bool is_int32 = false, is_const_int32 = false;
uint32_t value = 0;
std::tie(is_int32, is_const_int32, value) = _.EvalInt32IfConst(id);
if (!is_int32) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to be a 32-bit int";
}
if (!is_const_int32) {
if (_.HasCapability(SpvCapabilityShader)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Memory Semantics ids must be OpConstant when Shader "
"capability is present";
}
return SPV_SUCCESS;
}
if (_.memory_model() == SpvMemoryModelVulkanKHR &&
value & SpvMemorySemanticsSequentiallyConsistentMask) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "SequentiallyConsistent memory "
"semantics cannot be used with "
"the VulkanKHR memory model.";
}
if (value & SpvMemorySemanticsOutputMemoryKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics OutputMemoryKHR requires capability "
<< "VulkanMemoryModelKHR";
}
if (value & SpvMemorySemanticsMakeAvailableKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics MakeAvailableKHR requires capability "
<< "VulkanMemoryModelKHR";
}
if (value & SpvMemorySemanticsMakeVisibleKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics MakeVisibleKHR requires capability "
<< "VulkanMemoryModelKHR";
}
const size_t num_memory_order_set_bits = spvtools::utils::CountSetBits(
value & (SpvMemorySemanticsAcquireMask | SpvMemorySemanticsReleaseMask |
SpvMemorySemanticsAcquireReleaseMask |
SpvMemorySemanticsSequentiallyConsistentMask));
if (num_memory_order_set_bits > 1) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics can have at most one of the following bits "
"set: Acquire, Release, AcquireRelease or SequentiallyConsistent";
}
if (value & SpvMemorySemanticsMakeAvailableKHRMask &&
!(value & (SpvMemorySemanticsReleaseMask |
SpvMemorySemanticsAcquireReleaseMask))) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": MakeAvailableKHR Memory Semantics also requires either "
"Release or AcquireRelease Memory Semantics";
}
if (value & SpvMemorySemanticsMakeVisibleKHRMask &&
!(value & (SpvMemorySemanticsAcquireMask |
SpvMemorySemanticsAcquireReleaseMask))) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": MakeVisibleKHR Memory Semantics also requires either Acquire "
"or AcquireRelease Memory Semantics";
}
if (spvIsVulkanEnv(_.context()->target_env)) {
const bool includes_storage_class =
value & (SpvMemorySemanticsUniformMemoryMask |
SpvMemorySemanticsWorkgroupMemoryMask |
SpvMemorySemanticsImageMemoryMask |
SpvMemorySemanticsOutputMemoryKHRMask);
if (opcode == SpvOpMemoryBarrier && !num_memory_order_set_bits) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Vulkan specification requires Memory Semantics to have one "
"of the following bits set: Acquire, Release, AcquireRelease "
"or SequentiallyConsistent";
}
if (opcode == SpvOpMemoryBarrier && !includes_storage_class) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to include a Vulkan-supported "
"storage class";
}
#if 0
// TODO(atgoo@github.com): this check fails Vulkan CTS, reenable once fixed.
if (opcode == SpvOpControlBarrier && value && !includes_storage_class) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to include a Vulkan-supported "
"storage class if Memory Semantics is not None";
}
#endif
}
if (value & (SpvMemorySemanticsMakeAvailableKHRMask |
SpvMemorySemanticsMakeVisibleKHRMask)) {
const bool includes_storage_class =
value & (SpvMemorySemanticsUniformMemoryMask |
SpvMemorySemanticsSubgroupMemoryMask |
SpvMemorySemanticsWorkgroupMemoryMask |
SpvMemorySemanticsCrossWorkgroupMemoryMask |
SpvMemorySemanticsAtomicCounterMemoryMask |
SpvMemorySemanticsImageMemoryMask |
SpvMemorySemanticsOutputMemoryKHRMask);
if (!includes_storage_class) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to include a storage class";
}
}
// TODO(atgoo@github.com) Add checks for OpenCL and OpenGL environments.
return SPV_SUCCESS;
}
} // namespace
// Validates correctness of barrier instructions.
spv_result_t BarriersPass(ValidationState_t& _, const Instruction* inst) {
@ -205,7 +62,6 @@ spv_result_t BarriersPass(ValidationState_t& _, const Instruction* inst) {
const uint32_t execution_scope = inst->word(1);
const uint32_t memory_scope = inst->word(2);
const uint32_t memory_semantics = inst->word(3);
if (auto error = ValidateExecutionScope(_, inst, execution_scope)) {
return error;
@ -215,7 +71,7 @@ spv_result_t BarriersPass(ValidationState_t& _, const Instruction* inst) {
return error;
}
if (auto error = ValidateMemorySemantics(_, inst, memory_semantics)) {
if (auto error = ValidateMemorySemantics(_, inst, 2)) {
return error;
}
break;
@ -223,13 +79,12 @@ spv_result_t BarriersPass(ValidationState_t& _, const Instruction* inst) {
case SpvOpMemoryBarrier: {
const uint32_t memory_scope = inst->word(1);
const uint32_t memory_semantics = inst->word(2);
if (auto error = ValidateMemoryScope(_, inst, memory_scope)) {
return error;
}
if (auto error = ValidateMemorySemantics(_, inst, memory_semantics)) {
if (auto error = ValidateMemorySemantics(_, inst, 1)) {
return error;
}
break;
@ -261,13 +116,12 @@ spv_result_t BarriersPass(ValidationState_t& _, const Instruction* inst) {
}
const uint32_t memory_scope = inst->word(2);
const uint32_t memory_semantics = inst->word(3);
if (auto error = ValidateMemoryScope(_, inst, memory_scope)) {
return error;
}
if (auto error = ValidateMemorySemantics(_, inst, memory_semantics)) {
if (auto error = ValidateMemorySemantics(_, inst, 2)) {
return error;
}
break;

View File

@ -0,0 +1,227 @@
// Copyright (c) 2018 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "source/val/validate_memory_semantics.h"
#include "source/diagnostic.h"
#include "source/spirv_target_env.h"
#include "source/util/bitutils.h"
#include "source/val/instruction.h"
#include "source/val/validation_state.h"
namespace spvtools {
namespace val {
// Validates a Memory Semantics operand.
//
// |operand_index| is the logical operand index of the Memory Semantics id in
// |inst| (so the same routine serves barriers and the various atomics, which
// carry the operand at different positions).  Returns SPV_SUCCESS when the
// value is acceptable, otherwise SPV_ERROR_INVALID_DATA with a diagnostic.
// The checks run in a fixed order, so the first violated rule determines
// which message is reported.
spv_result_t ValidateMemorySemantics(ValidationState_t& _,
const Instruction* inst,
uint32_t operand_index) {
const SpvOp opcode = inst->opcode();
const auto id = inst->GetOperandAs<const uint32_t>(operand_index);
bool is_int32 = false, is_const_int32 = false;
uint32_t value = 0;
// Determine whether the id names a 32-bit int, whether it is a constant,
// and — if constant — its value.
std::tie(is_int32, is_const_int32, value) = _.EvalInt32IfConst(id);
// The operand must be of 32-bit integer type.
if (!is_int32) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to be a 32-bit int";
}
// A non-constant value can only be rejected under the Shader capability;
// otherwise nothing further about the value can be checked statically.
if (!is_const_int32) {
if (_.HasCapability(SpvCapabilityShader)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Memory Semantics ids must be OpConstant when Shader "
"capability is present";
}
return SPV_SUCCESS;
}
// At most one of the four memory-order bits may be set.
const size_t num_memory_order_set_bits = spvtools::utils::CountSetBits(
value & (SpvMemorySemanticsAcquireMask | SpvMemorySemanticsReleaseMask |
SpvMemorySemanticsAcquireReleaseMask |
SpvMemorySemanticsSequentiallyConsistentMask));
if (num_memory_order_set_bits > 1) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics can have at most one of the following "
"bits "
"set: Acquire, Release, AcquireRelease or "
"SequentiallyConsistent";
}
// SequentiallyConsistent is forbidden under the VulkanKHR memory model.
if (_.memory_model() == SpvMemoryModelVulkanKHR &&
value & SpvMemorySemanticsSequentiallyConsistentMask) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "SequentiallyConsistent memory "
"semantics cannot be used with "
"the VulkanKHR memory model.";
}
// The three *KHR semantics bits each require VulkanMemoryModelKHR.
if (value & SpvMemorySemanticsMakeAvailableKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics MakeAvailableKHR requires capability "
<< "VulkanMemoryModelKHR";
}
if (value & SpvMemorySemanticsMakeVisibleKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics MakeVisibleKHR requires capability "
<< "VulkanMemoryModelKHR";
}
if (value & SpvMemorySemanticsOutputMemoryKHRMask &&
!_.HasCapability(SpvCapabilityVulkanMemoryModelKHR)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics OutputMemoryKHR requires capability "
<< "VulkanMemoryModelKHR";
}
// Storage-class semantics bits have their own capability requirements.
if (value & SpvMemorySemanticsUniformMemoryMask &&
!_.HasCapability(SpvCapabilityShader)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics UniformMemory requires capability Shader";
}
if (value & SpvMemorySemanticsAtomicCounterMemoryMask &&
!_.HasCapability(SpvCapabilityAtomicStorage)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics AtomicCounterMemory requires capability "
"AtomicStorage";
}
// MakeAvailableKHR/MakeVisibleKHR must name at least one storage class.
if (value & (SpvMemorySemanticsMakeAvailableKHRMask |
SpvMemorySemanticsMakeVisibleKHRMask)) {
const bool includes_storage_class =
value & (SpvMemorySemanticsUniformMemoryMask |
SpvMemorySemanticsSubgroupMemoryMask |
SpvMemorySemanticsWorkgroupMemoryMask |
SpvMemorySemanticsCrossWorkgroupMemoryMask |
SpvMemorySemanticsAtomicCounterMemoryMask |
SpvMemorySemanticsImageMemoryMask |
SpvMemorySemanticsOutputMemoryKHRMask);
if (!includes_storage_class) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to include a storage class";
}
}
// MakeVisibleKHR only makes sense on the acquire side, MakeAvailableKHR
// only on the release side.
if (value & SpvMemorySemanticsMakeVisibleKHRMask &&
!(value & (SpvMemorySemanticsAcquireMask |
SpvMemorySemanticsAcquireReleaseMask))) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": MakeVisibleKHR Memory Semantics also requires either Acquire "
"or AcquireRelease Memory Semantics";
}
if (value & SpvMemorySemanticsMakeAvailableKHRMask &&
!(value & (SpvMemorySemanticsReleaseMask |
SpvMemorySemanticsAcquireReleaseMask))) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": MakeAvailableKHR Memory Semantics also requires either "
"Release or AcquireRelease Memory Semantics";
}
// Vulkan-only restrictions on OpMemoryBarrier: it must carry a memory
// order and a Vulkan-supported storage class.
if (spvIsVulkanEnv(_.context()->target_env)) {
const bool includes_storage_class =
value & (SpvMemorySemanticsUniformMemoryMask |
SpvMemorySemanticsWorkgroupMemoryMask |
SpvMemorySemanticsImageMemoryMask |
SpvMemorySemanticsOutputMemoryKHRMask);
if (opcode == SpvOpMemoryBarrier && !num_memory_order_set_bits) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Vulkan specification requires Memory Semantics to have "
"one "
"of the following bits set: Acquire, Release, "
"AcquireRelease "
"or SequentiallyConsistent";
}
if (opcode == SpvOpMemoryBarrier && !includes_storage_class) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to include a Vulkan-supported "
"storage class";
}
#if 0
// TODO(atgoo@github.com): this check fails Vulkan CTS, reenable once fixed.
if (opcode == SpvOpControlBarrier && value && !includes_storage_class) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": expected Memory Semantics to include a Vulkan-supported "
"storage class if Memory Semantics is not None";
}
#endif
}
// Per-opcode restrictions on which memory orders are permitted.
if (opcode == SpvOpAtomicFlagClear &&
(value & SpvMemorySemanticsAcquireMask ||
value & SpvMemorySemanticsAcquireReleaseMask)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Memory Semantics Acquire and AcquireRelease cannot be used "
"with "
<< spvOpcodeString(opcode);
}
// Operand index 5 of OpAtomicCompareExchange is the Unequal semantics,
// which must not have release behavior.
if (opcode == SpvOpAtomicCompareExchange && operand_index == 5 &&
(value & SpvMemorySemanticsReleaseMask ||
value & SpvMemorySemanticsAcquireReleaseMask)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< spvOpcodeString(opcode)
<< ": Memory Semantics Release and AcquireRelease cannot be "
"used "
"for operand Unequal";
}
// Vulkan-only restrictions on atomic loads and stores.
if (spvIsVulkanEnv(_.context()->target_env)) {
if (opcode == SpvOpAtomicLoad &&
(value & SpvMemorySemanticsReleaseMask ||
value & SpvMemorySemanticsAcquireReleaseMask ||
value & SpvMemorySemanticsSequentiallyConsistentMask)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Vulkan spec disallows OpAtomicLoad with Memory Semantics "
"Release, AcquireRelease and SequentiallyConsistent";
}
if (opcode == SpvOpAtomicStore &&
(value & SpvMemorySemanticsAcquireMask ||
value & SpvMemorySemanticsAcquireReleaseMask ||
value & SpvMemorySemanticsSequentiallyConsistentMask)) {
return _.diag(SPV_ERROR_INVALID_DATA, inst)
<< "Vulkan spec disallows OpAtomicStore with Memory Semantics "
"Acquire, AcquireRelease and SequentiallyConsistent";
}
}
// TODO(atgoo@github.com) Add checks for OpenCL and OpenGL environments.
return SPV_SUCCESS;
}
} // namespace val
} // namespace spvtools

View File

@ -0,0 +1,28 @@
// Copyright (c) 2018 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Validates correctness of memory semantics for SPIR-V instructions.
#include "source/opcode.h"
#include "source/val/validate.h"
namespace spvtools {
namespace val {
// Validates the Memory Semantics operand found at logical operand index
// |operand_index| of |inst|.  Returns SPV_SUCCESS when the operand is valid,
// otherwise an error code with a diagnostic attached to |inst|.
spv_result_t ValidateMemorySemantics(ValidationState_t& _,
const Instruction* inst,
uint32_t operand_index);
} // namespace val
} // namespace spvtools

View File

@ -419,7 +419,7 @@ TEST_F(ValidateAtomics, AtomicLoadWrongMemorySemanticsType) {
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicLoad: expected Memory Semantics to be 32-bit int"));
HasSubstr("AtomicLoad: expected Memory Semantics to be a 32-bit int"));
}
TEST_F(ValidateAtomics, AtomicStoreKernelSuccess) {
@ -552,7 +552,7 @@ OpAtomicStore %f32_var %device %f32_1 %f32_1
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicStore: expected Memory Semantics to be 32-bit int"));
HasSubstr("AtomicStore: expected Memory Semantics to be a 32-bit int"));
}
TEST_F(ValidateAtomics, AtomicStoreWrongValueType) {
@ -668,7 +668,8 @@ OpAtomicStore %f32_var %device %relaxed %f32_1
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicExchange: expected Memory Semantics to be 32-bit int"));
HasSubstr(
"AtomicExchange: expected Memory Semantics to be a 32-bit int"));
}
TEST_F(ValidateAtomics, AtomicExchangeWrongValueType) {
@ -781,10 +782,9 @@ OpAtomicStore %f32_var %device %relaxed %f32_1
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicCompareExchange: expected Memory Semantics to be 32-bit int"));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
"be a 32-bit int"));
}
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType2) {
@ -795,10 +795,9 @@ OpAtomicStore %f32_var %device %relaxed %f32_1
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicCompareExchange: expected Memory Semantics to be 32-bit int"));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
"be a 32-bit int"));
}
TEST_F(ValidateAtomics, AtomicCompareExchangeUnequalRelease) {
@ -966,7 +965,7 @@ TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongMemorySemanticsType) {
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicFlagTestAndSet: "
"expected Memory Semantics to be 32-bit int"));
"expected Memory Semantics to be a 32-bit int"));
}
TEST_F(ValidateAtomics, AtomicFlagClearAcquire) {
@ -1040,7 +1039,8 @@ OpAtomicFlagClear %u32_var %device %u64_1
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicFlagClear: expected Memory Semantics to be 32-bit int"));
HasSubstr(
"AtomicFlagClear: expected Memory Semantics to be a 32-bit int"));
}
TEST_F(ValidateAtomics, AtomicIIncrementAcquireAndRelease) {
@ -1051,11 +1051,11 @@ OpAtomicStore %u32_var %device %relaxed %u32_1
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicIIncrement: no more than one of the following Memory "
"Semantics bits can be set at the same time: Acquire, Release, "
"AcquireRelease or SequentiallyConsistent"));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicIIncrement: Memory Semantics can have at most "
"one of the following bits set: Acquire, Release, "
"AcquireRelease or SequentiallyConsistent\n %40 = "
"OpAtomicIIncrement %uint %30 %uint_1_0 %uint_6\n"));
}
TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsShader) {
@ -1089,9 +1089,11 @@ OpAtomicStore %u32_var %device %relaxed %u32_1
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicIIncrement: Memory Semantics UniformMemory "
"requires capability AtomicStorage"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicIIncrement: Memory Semantics AtomicCounterMemory "
"requires capability AtomicStorage\n %40 = OpAtomicIIncrement "
"%uint %30 %uint_1_0 %uint_1288\n"));
}
TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsWithCapability) {

View File

@ -175,9 +175,7 @@ OpMemoryModel Physical32 OpenCL
%acquire_release = OpConstant %u32 8
%acquire_and_release = OpConstant %u32 6
%sequentially_consistent = OpConstant %u32 16
%acquire_release_uniform_workgroup = OpConstant %u32 328
%acquire_and_release_uniform = OpConstant %u32 70
%uniform = OpConstant %u32 64
%acquire_release_workgroup = OpConstant %u32 264
%named_barrier = OpTypeNamedBarrier
@ -215,7 +213,7 @@ OpControlBarrier %workgroup %workgroup %acquire
OpControlBarrier %workgroup %device %release
OpControlBarrier %cross_device %cross_device %acquire_release
OpControlBarrier %cross_device %cross_device %sequentially_consistent
OpControlBarrier %cross_device %cross_device %acquire_release_uniform_workgroup
OpControlBarrier %cross_device %cross_device %acquire_release_workgroup
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_1);
@ -615,8 +613,8 @@ OpMemoryBarrier %device %uniform
TEST_F(ValidateBarriers, OpMemoryBarrierKernelSuccess) {
const std::string body = R"(
OpMemoryBarrier %cross_device %acquire_release_uniform_workgroup
OpMemoryBarrier %device %uniform
OpMemoryBarrier %cross_device %acquire_release_workgroup
OpMemoryBarrier %device %none
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_1);
@ -804,7 +802,7 @@ TEST_F(ValidateBarriers, OpNamedBarrierInitializeU64SubgroupCount) {
TEST_F(ValidateBarriers, OpMemoryNamedBarrierSuccess) {
const std::string body = R"(
%barrier = OpNamedBarrierInitialize %named_barrier %u32_4
OpMemoryNamedBarrier %barrier %workgroup %acquire_release_uniform_workgroup
OpMemoryNamedBarrier %barrier %workgroup %acquire_release_workgroup
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_1);
@ -813,7 +811,7 @@ OpMemoryNamedBarrier %barrier %workgroup %acquire_release_uniform_workgroup
TEST_F(ValidateBarriers, OpMemoryNamedBarrierNotNamedBarrier) {
const std::string body = R"(
OpMemoryNamedBarrier %u32_1 %workgroup %acquire_release_uniform_workgroup
OpMemoryNamedBarrier %u32_1 %workgroup %acquire_release_workgroup
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_1);
@ -827,7 +825,7 @@ OpMemoryNamedBarrier %u32_1 %workgroup %acquire_release_uniform_workgroup
TEST_F(ValidateBarriers, OpMemoryNamedBarrierFloatMemoryScope) {
const std::string body = R"(
%barrier = OpNamedBarrierInitialize %named_barrier %u32_4
OpMemoryNamedBarrier %barrier %f32_1 %acquire_release_uniform_workgroup
OpMemoryNamedBarrier %barrier %f32_1 %acquire_release_workgroup
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_1);
@ -857,7 +855,7 @@ OpMemoryNamedBarrier %barrier %workgroup %f32_0
TEST_F(ValidateBarriers, OpMemoryNamedBarrierAcquireAndRelease) {
const std::string body = R"(
%barrier = OpNamedBarrierInitialize %named_barrier %u32_4
OpMemoryNamedBarrier %barrier %workgroup %acquire_and_release_uniform
OpMemoryNamedBarrier %barrier %workgroup %acquire_and_release
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_1);