6863155: Server compiler generates incorrect code (x86, long, bitshift, bitmask)

Code compiled with the server compiler generates an incorrect result.

Reviewed-by: cfang, never, kvn
This commit is contained in:
Christian Thalinger 2009-07-28 09:02:30 +02:00
parent 714db48492
commit 6dd47535cd
2 changed files with 51 additions and 10 deletions

View file

@ -608,16 +608,14 @@ Node *AndLNode::Ideal(PhaseGVN *phase, bool can_reshape) {
}
// Are we masking a long that was converted from an int with a mask
// that fits in 32-bits? Commute them and use an AndINode.
if (op == Op_ConvI2L && (mask & CONST64(0xFFFFFFFF00000000)) == 0) {
// If we are doing an UI2L conversion (i.e. the mask is
// 0x00000000FFFFFFFF) we cannot convert the AndL to an AndI
// because the AndI would be optimized away later in Identity.
if (mask != CONST64(0x00000000FFFFFFFF)) {
Node* andi = new (phase->C, 3) AndINode(in1->in(1), phase->intcon(mask));
andi = phase->transform(andi);
return new (phase->C, 2) ConvI2LNode(andi);
}
// that fits in 32-bits? Commute them and use an AndINode. Don't
// convert masks which would cause a sign extension of the integer
// value. This check includes UI2L masks (0x00000000FFFFFFFF) which
// would be optimized away later in Identity.
if (op == Op_ConvI2L && (mask & CONST64(0xFFFFFFFF80000000)) == 0) {
Node* andi = new (phase->C, 3) AndINode(in1->in(1), phase->intcon(mask));
andi = phase->transform(andi);
return new (phase->C, 2) ConvI2LNode(andi);
}
// Masking off sign bits? Dont make them!