8217417: Decorator name typo: C2_TIGHLY_COUPLED_ALLOC

Fixed typo in decorator name, variables, and comments.

Reviewed-by: tschatzl
Kim Barrett 2019-03-11 02:05:07 -04:00
parent ac03c04256
commit c7ae2d10b7
6 changed files with 10 additions and 10 deletions

View File

@@ -1,5 +1,5 @@
/*
-* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
+* Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -51,7 +51,7 @@ const DecoratorSet C2_WRITE_ACCESS = DECORATOR_LAST << 7;
// This denotes that the access reads state.
const DecoratorSet C2_READ_ACCESS = DECORATOR_LAST << 8;
// A nearby allocation?
-const DecoratorSet C2_TIGHLY_COUPLED_ALLOC = DECORATOR_LAST << 9;
+const DecoratorSet C2_TIGHTLY_COUPLED_ALLOC = DECORATOR_LAST << 9;
// Loads and stores from an arraycopy being optimized
const DecoratorSet C2_ARRAY_COPY = DECORATOR_LAST << 10;

View File

@@ -1,5 +1,5 @@
/*
-* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
+* Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -40,9 +40,9 @@ Node* ModRefBarrierSetC2::store_at_resolved(C2Access& access, C2AccessValue& val
bool anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
bool in_heap = (decorators & IN_HEAP) != 0;
bool use_precise = is_array || anonymous;
-bool tighly_coupled_alloc = (decorators & C2_TIGHLY_COUPLED_ALLOC) != 0;
+bool tightly_coupled_alloc = (decorators & C2_TIGHTLY_COUPLED_ALLOC) != 0;
-if (!access.is_oop() || tighly_coupled_alloc || (!in_heap && !anonymous)) {
+if (!access.is_oop() || tightly_coupled_alloc || (!in_heap && !anonymous)) {
return BarrierSetC2::store_at_resolved(access, val);
}

View File

@@ -657,7 +657,7 @@ Node* ShenandoahBarrierSetC2::store_at_resolved(C2Access& access, C2AccessValue&
static_cast<const TypeOopPtr*>(val.type()), NULL /* pre_val */, access.type());
} else {
assert(access.is_opt_access(), "only for optimization passes");
-assert(((decorators & C2_TIGHLY_COUPLED_ALLOC) != 0 || !ShenandoahSATBBarrier) && (decorators & C2_ARRAY_COPY) != 0, "unexpected caller of this code");
+assert(((decorators & C2_TIGHTLY_COUPLED_ALLOC) != 0 || !ShenandoahSATBBarrier) && (decorators & C2_ARRAY_COPY) != 0, "unexpected caller of this code");
C2OptAccess& opt_access = static_cast<C2OptAccess&>(access);
PhaseGVN& gvn = opt_access.gvn();
MergeMemNode* mm = opt_access.mem();

View File

@@ -1,5 +1,5 @@
/*
-* Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
+* Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -160,7 +160,7 @@ Node* ArrayCopyNode::load(BarrierSetC2* bs, PhaseGVN *phase, Node*& ctl, MergeMe
void ArrayCopyNode::store(BarrierSetC2* bs, PhaseGVN *phase, Node*& ctl, MergeMemNode* mem, Node* adr, const TypePtr* adr_type, Node* val, const Type *type, BasicType bt) {
DecoratorSet decorators = C2_WRITE_ACCESS | IN_HEAP | C2_ARRAY_COPY;
if (is_alloc_tightly_coupled()) {
-decorators |= C2_TIGHLY_COUPLED_ALLOC;
+decorators |= C2_TIGHTLY_COUPLED_ALLOC;
}
C2AccessValuePtr addr(adr, adr_type);
C2AccessValue value(val, type);

View File

@@ -47,7 +47,7 @@ private:
static const char* _kind_names[CopyOfRange+1];
#endif
// Is the alloc obtained with
-// AllocateArrayNode::Ideal_array_allocation() tighly coupled
+// AllocateArrayNode::Ideal_array_allocation() tightly coupled
// (arraycopy follows immediately the allocation)?
// We cache the result of LibraryCallKit::tightly_coupled_allocation
// here because it's much easier to find whether there's a tightly

View File

@@ -4461,7 +4461,7 @@ bool LibraryCallKit::inline_native_clone(bool is_virtual) {
return true;
}
-// If we have a tighly coupled allocation, the arraycopy may take care
+// If we have a tightly coupled allocation, the arraycopy may take care
// of the array initialization. If one of the guards we insert between
// the allocation and the arraycopy causes a deoptimization, an
// uninitialized array will escape the compiled method. To prevent that