author     Andreas Gampe <agampe@google.com>                          2014-07-22 06:51:47 +0000
committer  Gerrit Code Review <noreply-gerritcodereview@google.com>  2014-07-21 15:45:44 +0000
commit     9d9fec65366ea4996b17141c97ff94416239b63e (patch)
tree       aab1773c63806c77f165822178d424486dfc8ef9
parent     7ff831f63111de69c71bc8c5cb1042d698b72070 (diff)
parent     7ea6f79bbddd69d5db86a8656a31aaaf64ae2582 (diff)
download   art-9d9fec65366ea4996b17141c97ff94416239b63e.zip
           art-9d9fec65366ea4996b17141c97ff94416239b63e.tar.gz
           art-9d9fec65366ea4996b17141c97ff94416239b63e.tar.bz2
Merge "ART: Throw StackOverflowError in native code"
-rw-r--r--  compiler/dex/quick/arm/call_arm.cc        |  4
-rw-r--r--  compiler/dex/quick/arm64/call_arm64.cc    |  4
-rw-r--r--  compiler/dex/quick/mir_to_lir.h           | 30
-rw-r--r--  runtime/arch/arm/fault_handler_arm.cc     |  4
-rw-r--r--  runtime/entrypoints/entrypoint_utils.cc   | 97
-rw-r--r--  runtime/instruction_set.cc                | 38
-rw-r--r--  runtime/instruction_set.h                 | 28
-rw-r--r--  runtime/thread.cc                         | 10
-rw-r--r--  runtime/thread.h                          |  5
-rw-r--r--  runtime/well_known_classes.cc             |  4
-rw-r--r--  runtime/well_known_classes.h              |  2
-rw-r--r--  test/018-stack-overflow/expected.txt      |  9
-rwxr-xr-x  test/018-stack-overflow/run               | 23
-rw-r--r--  test/018-stack-overflow/src/Main.java     | 30
14 files changed, 211 insertions, 77 deletions
diff --git a/compiler/dex/quick/arm/call_arm.cc b/compiler/dex/quick/arm/call_arm.cc
index 6b96e92..5059c5f 100644
--- a/compiler/dex/quick/arm/call_arm.cc
+++ b/compiler/dex/quick/arm/call_arm.cc
@@ -358,7 +358,7 @@ void ArmMir2Lir::GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method) {
    */
   bool skip_overflow_check = mir_graph_->MethodIsLeaf() && !IsLargeFrame(frame_size_, kArm);
   NewLIR0(kPseudoMethodEntry);
-  constexpr size_t kStackOverflowReservedUsableBytes = kArmStackOverflowReservedBytes -
+  const size_t kStackOverflowReservedUsableBytes = GetStackOverflowReservedBytes(kArm) -
       Thread::kStackOverflowSignalReservedBytes;
   bool large_frame = (static_cast<size_t>(frame_size_) > kStackOverflowReservedUsableBytes);
   if (!skip_overflow_check) {
@@ -381,7 +381,7 @@ void ArmMir2Lir::GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method) {
       // This is done before the callee save instructions to avoid any possibility
       // of these overflowing. This uses r12 and that's never saved in a callee
       // save.
-      OpRegRegImm(kOpSub, rs_r12, rs_rARM_SP, kArmStackOverflowReservedBytes);
+      OpRegRegImm(kOpSub, rs_r12, rs_rARM_SP, GetStackOverflowReservedBytes(kArm));
       Load32Disp(rs_r12, 0, rs_r12);
       MarkPossibleStackOverflowException();
     }
diff --git a/compiler/dex/quick/arm64/call_arm64.cc b/compiler/dex/quick/arm64/call_arm64.cc
index d946ee3..5e95500 100644
--- a/compiler/dex/quick/arm64/call_arm64.cc
+++ b/compiler/dex/quick/arm64/call_arm64.cc
@@ -330,8 +330,8 @@ void Arm64Mir2Lir::GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method)
 
   NewLIR0(kPseudoMethodEntry);
 
-  constexpr size_t kStackOverflowReservedUsableBytes = kArm64StackOverflowReservedBytes -
-      Thread::kStackOverflowSignalReservedBytes;
+  const size_t kStackOverflowReservedUsableBytes = GetStackOverflowReservedBytes(kArm64) -
+      Thread::kStackOverflowSignalReservedBytes;
   const bool large_frame = static_cast<size_t>(frame_size_) > kStackOverflowReservedUsableBytes;
   const int spill_count = num_core_spills_ + num_fp_spills_;
   const int spill_size = (spill_count * kArm64PointerSize + 15) & ~0xf;  // SP 16 byte alignment.
diff --git a/compiler/dex/quick/mir_to_lir.h b/compiler/dex/quick/mir_to_lir.h
index 512849f..c7fafcb 100644
--- a/compiler/dex/quick/mir_to_lir.h
+++ b/compiler/dex/quick/mir_to_lir.h
@@ -214,32 +214,20 @@ static constexpr size_t kLargeFrameSize = 2 * KB;
 
 // The constant is from experience with frameworks code.
 static constexpr size_t kSmallFrameSize = 1 * KB;
 
-// Determine whether a frame is small or large, used in the decision on whether to elide a
-// stack overflow check on method entry.
-//
-// A frame is considered large when it's either above kLargeFrameSize, or a quarter of the
-// overflow-usable stack space.
-static constexpr bool IsLargeFrame(size_t size, InstructionSet isa) {
-  return size >= kLargeFrameSize || size >= GetStackOverflowReservedBytes(isa) / 4;
-}
-
-// We want to ensure that on all systems kSmallFrameSize will lead to false in IsLargeFrame.
-COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kArm),
-               kSmallFrameSize_is_not_a_small_frame_arm);
-COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kArm64),
-               kSmallFrameSize_is_not_a_small_frame_arm64);
-COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kMips),
-               kSmallFrameSize_is_not_a_small_frame_mips);
-COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kX86),
-               kSmallFrameSize_is_not_a_small_frame_x86);
-COMPILE_ASSERT(!IsLargeFrame(kSmallFrameSize, kX86_64),
-               kSmallFrameSize_is_not_a_small_frame_x64_64);
-
 class Mir2Lir : public Backend {
   public:
     static constexpr bool kFailOnSizeError = true && kIsDebugBuild;
     static constexpr bool kReportSizeError = true && kIsDebugBuild;
+    // Determine whether a frame is small or large, used in the decision on whether to elide a
+    // stack overflow check on method entry.
+    //
+    // A frame is considered large when it's either above kLargeFrameSize, or a quarter of the
+    // overflow-usable stack space.
+    static bool IsLargeFrame(size_t size, InstructionSet isa) {
+      return size >= kLargeFrameSize || size >= GetStackOverflowReservedBytes(isa) / 4;
+    }
+
     /*
      * Auxiliary information describing the location of data embedded in the Dalvik
      * byte code stream.
diff --git a/runtime/arch/arm/fault_handler_arm.cc b/runtime/arch/arm/fault_handler_arm.cc
index e22c56e..48582f4 100644
--- a/runtime/arch/arm/fault_handler_arm.cc
+++ b/runtime/arch/arm/fault_handler_arm.cc
@@ -61,7 +61,7 @@ void FaultManager::GetMethodAndReturnPCAndSP(siginfo_t* siginfo, void* context,
   // get the method from the top of the stack.  However it's in r0.
   uintptr_t* fault_addr = reinterpret_cast<uintptr_t*>(sc->fault_address);
   uintptr_t* overflow_addr = reinterpret_cast<uintptr_t*>(
-      reinterpret_cast<uint8_t*>(*out_sp) - kArmStackOverflowReservedBytes);
+      reinterpret_cast<uint8_t*>(*out_sp) - GetStackOverflowReservedBytes(kArm));
   if (overflow_addr == fault_addr) {
     *out_method = reinterpret_cast<mirror::ArtMethod*>(sc->arm_r0);
   } else {
@@ -192,7 +192,7 @@ bool StackOverflowHandler::Action(int sig, siginfo_t* info, void* context) {
   VLOG(signals) << "checking for stack overflow, sp: " << std::hex << sp <<
     ", fault_addr: " << fault_addr;
 
-  uintptr_t overflow_addr = sp - kArmStackOverflowReservedBytes;
+  uintptr_t overflow_addr = sp - GetStackOverflowReservedBytes(kArm);
 
   Thread* self = reinterpret_cast<Thread*>(sc->arm_r9);
   CHECK_EQ(self, Thread::Current());
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index 0fa0e41..c1c7631 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -110,8 +110,8 @@ mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx, mirror:
 
 void ThrowStackOverflowError(Thread* self) {
   if (self->IsHandlingStackOverflow()) {
-      LOG(ERROR) << "Recursive stack overflow.";
-      // We don't fail here because SetStackEndForStackOverflow will print better diagnostics.
+    LOG(ERROR) << "Recursive stack overflow.";
+    // We don't fail here because SetStackEndForStackOverflow will print better diagnostics.
   }
 
   if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
@@ -123,15 +123,90 @@ void ThrowStackOverflowError(Thread* self) {
   JNIEnvExt* env = self->GetJniEnv();
   std::string msg("stack size ");
   msg += PrettySize(self->GetStackSize());
-  // Use low-level JNI routine and pre-baked error class to avoid class linking operations that
-  // would consume more stack.
-  int rc = ::art::ThrowNewException(env, WellKnownClasses::java_lang_StackOverflowError,
-                                    msg.c_str(), NULL);
-  if (rc != JNI_OK) {
-    // TODO: ThrowNewException failed presumably because of an OOME, we continue to throw the OOME
-    //       or die in the CHECK below. We may want to throw a pre-baked StackOverflowError
-    //       instead.
-    LOG(ERROR) << "Couldn't throw new StackOverflowError because JNI ThrowNew failed.";
+
+  // Avoid running Java code for exception initialization.
+  // TODO: Checks to make this a bit less brittle.
+
+  std::string error_msg;
+
+  // Allocate an uninitialized object.
+  ScopedLocalRef<jobject> exc(env,
+                              env->AllocObject(WellKnownClasses::java_lang_StackOverflowError));
+  if (exc.get() != nullptr) {
+    // "Initialize".
+    // StackOverflowError -> VirtualMachineError -> Error -> Throwable -> Object.
+    // Only Throwable has "custom" fields:
+    //   String detailMessage.
+    //   Throwable cause (= this).
+    //   List<Throwable> suppressedExceptions (= Collections.emptyList()).
+    //   Object stackState;
+    //   StackTraceElement[] stackTrace;
+    // Only Throwable has a non-empty constructor:
+    //   this.stackTrace = EmptyArray.STACK_TRACE_ELEMENT;
+    //   fillInStackTrace();
+
+    // detailMessage.
+    // TODO: Use String::FromModifiedUTF...?
+    ScopedLocalRef<jstring> s(env, env->NewStringUTF(msg.c_str()));
+    if (s.get() != nullptr) {
+      jfieldID detail_message_id = env->GetFieldID(WellKnownClasses::java_lang_Throwable,
+                                                   "detailMessage", "Ljava/lang/String;");
+      env->SetObjectField(exc.get(), detail_message_id, s.get());
+
+      // cause.
+      jfieldID cause_id = env->GetFieldID(WellKnownClasses::java_lang_Throwable,
+                                          "cause", "Ljava/lang/Throwable;");
+      env->SetObjectField(exc.get(), cause_id, exc.get());
+
+      // suppressedExceptions.
+      jfieldID emptylist_id = env->GetStaticFieldID(WellKnownClasses::java_util_Collections,
+                                                    "EMPTY_LIST", "Ljava/util/List;");
+      ScopedLocalRef<jobject> emptylist(env, env->GetStaticObjectField(
+          WellKnownClasses::java_util_Collections, emptylist_id));
+      CHECK(emptylist.get() != nullptr);
+      jfieldID suppressed_id = env->GetFieldID(WellKnownClasses::java_lang_Throwable,
+                                               "suppressedExceptions", "Ljava/util/List;");
+      env->SetObjectField(exc.get(), suppressed_id, emptylist.get());
+
+      // stackState is set as result of fillInStackTrace. fillInStackTrace calls
+      // nativeFillInStackTrace.
+      ScopedLocalRef<jobject> stack_state_val(env, nullptr);
+      {
+        ScopedObjectAccessUnchecked soa(env);
+        stack_state_val.reset(soa.Self()->CreateInternalStackTrace<false>(soa));
+      }
+      if (stack_state_val.get() != nullptr) {
+        jfieldID stackstateID = env->GetFieldID(WellKnownClasses::java_lang_Throwable,
+                                                "stackState", "Ljava/lang/Object;");
+        env->SetObjectField(exc.get(), stackstateID, stack_state_val.get());
+
+        // stackTrace.
+        jfieldID stack_trace_elem_id = env->GetStaticFieldID(
+            WellKnownClasses::libcore_util_EmptyArray, "STACK_TRACE_ELEMENT",
+            "[Ljava/lang/StackTraceElement;");
+        ScopedLocalRef<jobject> stack_trace_elem(env, env->GetStaticObjectField(
+            WellKnownClasses::libcore_util_EmptyArray, stack_trace_elem_id));
+        jfieldID stacktrace_id = env->GetFieldID(
+            WellKnownClasses::java_lang_Throwable, "stackTrace", "[Ljava/lang/StackTraceElement;");
+        env->SetObjectField(exc.get(), stacktrace_id, stack_trace_elem.get());
+
+        // Throw the exception.
+        ThrowLocation throw_location = self->GetCurrentLocationForThrow();
+        self->SetException(throw_location,
+            reinterpret_cast<mirror::Throwable*>(self->DecodeJObject(exc.get())));
+      } else {
+        error_msg = "Could not create stack trace.";
+      }
+    } else {
+      // Could not allocate a string object.
+      error_msg = "Couldn't throw new StackOverflowError because JNI NewStringUTF failed.";
+    }
+  } else {
+    error_msg = "Could not allocate StackOverflowError object.";
+  }
+
+  if (!error_msg.empty()) {
+    LOG(ERROR) << error_msg;
     CHECK(self->IsExceptionPending());
   }
 
diff --git a/runtime/instruction_set.cc b/runtime/instruction_set.cc
index 5b60396..d7e358c 100644
--- a/runtime/instruction_set.cc
+++ b/runtime/instruction_set.cc
@@ -83,6 +83,44 @@ size_t GetInstructionSetAlignment(InstructionSet isa) {
   }
 }
 
+static constexpr size_t kDefaultStackOverflowReservedBytes = 16 * KB;
+static constexpr size_t kMipsStackOverflowReservedBytes = kDefaultStackOverflowReservedBytes;
+
+// TODO: Lower once implicit stack-overflow checks can work with less than 16K.
+static constexpr size_t kArmStackOverflowReservedBytes = (kIsDebugBuild ? 16 : 16) * KB;
+static constexpr size_t kArm64StackOverflowReservedBytes = (kIsDebugBuild ? 16 : 16) * KB;
+static constexpr size_t kX86StackOverflowReservedBytes = (kIsDebugBuild ? 16 : 16) * KB;
+static constexpr size_t kX86_64StackOverflowReservedBytes = (kIsDebugBuild ? 16 : 16) * KB;
+
+size_t GetStackOverflowReservedBytes(InstructionSet isa) {
+  switch (isa) {
+    case kArm:      // Intentional fall-through.
+    case kThumb2:
+      return kArmStackOverflowReservedBytes;
+
+    case kArm64:
+      return kArm64StackOverflowReservedBytes;
+
+    case kMips:
+      return kMipsStackOverflowReservedBytes;
+
+    case kX86:
+      return kX86StackOverflowReservedBytes;
+
+    case kX86_64:
+      return kX86_64StackOverflowReservedBytes;
+
+    case kNone:
+      LOG(FATAL) << "kNone has no stack overflow size";
+      return 0;
+
+    default:
+      LOG(FATAL) << "Unknown instruction set" << isa;
+      return 0;
+  }
+}
+
 std::string InstructionSetFeatures::GetFeatureString() const {
   std::string result;
   if ((mask_ & kHwDiv) != 0) {
diff --git a/runtime/instruction_set.h b/runtime/instruction_set.h
index dce1c15..f212811 100644
--- a/runtime/instruction_set.h
+++ b/runtime/instruction_set.h
@@ -169,33 +169,7 @@ static inline size_t GetBytesPerFprSpillLocation(InstructionSet isa) {
   }
 }
 
-static constexpr size_t kDefaultStackOverflowReservedBytes = 16 * KB;
-static constexpr size_t kArmStackOverflowReservedBytes = kDefaultStackOverflowReservedBytes;
-static constexpr size_t kMipsStackOverflowReservedBytes = kDefaultStackOverflowReservedBytes;
-
-// TODO: shrink reserved space, in particular for 64bit.
-
-// Worst-case, we would need about 2.6x the amount of x86_64 for many more registers.
-// But this one works rather well.
-static constexpr size_t kArm64StackOverflowReservedBytes = 32 * KB;
-// TODO: Bumped to workaround regression (http://b/14982147) Specifically to fix:
-// test-art-host-run-test-interpreter-018-stack-overflow
-// test-art-host-run-test-interpreter-107-int-math2
-static constexpr size_t kX86StackOverflowReservedBytes = (kIsDebugBuild ? 32 : 24) * KB;
-static constexpr size_t kX86_64StackOverflowReservedBytes = 32 * KB;
-
-static constexpr size_t GetStackOverflowReservedBytes(InstructionSet isa) {
-  return (isa == kArm || isa == kThumb2) ? kArmStackOverflowReservedBytes :
-           isa == kArm64 ? kArm64StackOverflowReservedBytes :
-           isa == kMips ? kMipsStackOverflowReservedBytes :
-           isa == kX86 ? kX86StackOverflowReservedBytes :
-           isa == kX86_64 ? kX86_64StackOverflowReservedBytes :
-           isa == kNone ? (LOG(FATAL) << "kNone has no stack overflow size", 0) :
-           (LOG(FATAL) << "Unknown instruction set" << isa, 0);
-}
-
-static constexpr size_t kRuntimeStackOverflowReservedBytes =
-    GetStackOverflowReservedBytes(kRuntimeISA);
+size_t GetStackOverflowReservedBytes(InstructionSet isa);
 
 enum InstructionFeatures {
   kHwDiv = 0x1,  // Supports hardware divide.
diff --git a/runtime/thread.cc b/runtime/thread.cc
index ba5f9d4..8151464 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -76,6 +76,8 @@ namespace art {
 bool Thread::is_started_ = false;
 pthread_key_t Thread::pthread_key_self_;
 ConditionVariable* Thread::resume_cond_ = nullptr;
+const size_t Thread::kStackOverflowImplicitCheckSize = kStackOverflowProtectedSize +
+    GetStackOverflowReservedBytes(kRuntimeISA);
 
 static const char* kThreadNameDuringStartup = "<native thread without managed peer>";
 
@@ -219,7 +221,7 @@ static size_t FixStackSize(size_t stack_size) {
     // It's likely that callers are trying to ensure they have at least a certain amount of
     // stack space, so we should add our reserved space on top of what they requested, rather
     // than implicitly take it away from them.
-    stack_size += kRuntimeStackOverflowReservedBytes;
+    stack_size += GetStackOverflowReservedBytes(kRuntimeISA);
   } else {
     // If we are going to use implicit stack checks, allocate space for the protected
     // region at the bottom of the stack.
@@ -308,7 +310,7 @@ void Thread::InstallImplicitProtection(bool is_main_stack) {
 
   if (mprotect(pregion, kStackOverflowProtectedSize, PROT_NONE) == -1) {
     LOG(FATAL) << "Unable to create protected region in stack for implicit overflow check. Reason:"
-        << strerror(errno);
+        << strerror(errno) << kStackOverflowProtectedSize;
   }
 
   // Tell the kernel that we won't be needing these pages any more.
@@ -536,7 +538,7 @@ void Thread::InitStackHwm() {
   tlsPtr_.stack_begin = reinterpret_cast<byte*>(read_stack_base);
   tlsPtr_.stack_size = read_stack_size;
 
-  if (read_stack_size <= kRuntimeStackOverflowReservedBytes) {
+  if (read_stack_size <= GetStackOverflowReservedBytes(kRuntimeISA)) {
     LOG(FATAL) << "Attempt to attach a thread with a too-small stack (" << read_stack_size
                << " bytes)";
   }
@@ -2247,7 +2249,7 @@ void Thread::SetStackEndForStackOverflow() {
   if (tlsPtr_.stack_end == tlsPtr_.stack_begin) {
     // However, we seem to have already extended to use the full stack.
     LOG(ERROR) << "Need to increase kStackOverflowReservedBytes (currently "
-               << kRuntimeStackOverflowReservedBytes << ")?";
+               << GetStackOverflowReservedBytes(kRuntimeISA) << ")?";
     DumpStack(LOG(ERROR));
     LOG(FATAL) << "Recursive stack overflow.";
   }
diff --git a/runtime/thread.h b/runtime/thread.h
index d08c2fc..c555034 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -104,8 +104,7 @@ class Thread {
   // is protected against reads and the lower is available for use while
   // throwing the StackOverflow exception.
   static constexpr size_t kStackOverflowProtectedSize = 16 * KB;
-  static constexpr size_t kStackOverflowImplicitCheckSize = kStackOverflowProtectedSize +
-      kRuntimeStackOverflowReservedBytes;
+  static const size_t kStackOverflowImplicitCheckSize;
 
   // Creates a new native thread corresponding to the given managed peer.
   // Used to implement Thread.start.
@@ -567,7 +566,7 @@ class Thread {
       // overflow region.
       tlsPtr_.stack_end = tlsPtr_.stack_begin + kStackOverflowImplicitCheckSize;
     } else {
-      tlsPtr_.stack_end = tlsPtr_.stack_begin + kRuntimeStackOverflowReservedBytes;
+      tlsPtr_.stack_end = tlsPtr_.stack_begin + GetStackOverflowReservedBytes(kRuntimeISA);
     }
   }
 
diff --git a/runtime/well_known_classes.cc b/runtime/well_known_classes.cc
index fdc6e3f..3a6a72b 100644
--- a/runtime/well_known_classes.cc
+++ b/runtime/well_known_classes.cc
@@ -47,6 +47,8 @@ jclass WellKnownClasses::java_lang_Thread$UncaughtExceptionHandler;
 jclass WellKnownClasses::java_lang_ThreadGroup;
 jclass WellKnownClasses::java_lang_Throwable;
 jclass WellKnownClasses::java_nio_DirectByteBuffer;
+jclass WellKnownClasses::java_util_Collections;
+jclass WellKnownClasses::libcore_util_EmptyArray;
 
 jclass WellKnownClasses::org_apache_harmony_dalvik_ddmc_Chunk;
 jclass WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer;
@@ -150,6 +152,8 @@ void WellKnownClasses::Init(JNIEnv* env) {
   java_lang_ThreadGroup = CacheClass(env, "java/lang/ThreadGroup");
   java_lang_Throwable = CacheClass(env, "java/lang/Throwable");
   java_nio_DirectByteBuffer = CacheClass(env, "java/nio/DirectByteBuffer");
+  java_util_Collections = CacheClass(env, "java/util/Collections");
+  libcore_util_EmptyArray = CacheClass(env, "libcore/util/EmptyArray");
 
   org_apache_harmony_dalvik_ddmc_Chunk = CacheClass(env, "org/apache/harmony/dalvik/ddmc/Chunk");
   org_apache_harmony_dalvik_ddmc_DdmServer = CacheClass(env, "org/apache/harmony/dalvik/ddmc/DdmServer");
diff --git a/runtime/well_known_classes.h b/runtime/well_known_classes.h
index f6c2930..7639f50 100644
--- a/runtime/well_known_classes.h
+++ b/runtime/well_known_classes.h
@@ -60,7 +60,9 @@ struct WellKnownClasses {
   static jclass java_lang_ThreadGroup;
   static jclass java_lang_Thread$UncaughtExceptionHandler;
   static jclass java_lang_Throwable;
+  static jclass java_util_Collections;
   static jclass java_nio_DirectByteBuffer;
+  static jclass libcore_util_EmptyArray;
 
   static jclass org_apache_harmony_dalvik_ddmc_Chunk;
   static jclass org_apache_harmony_dalvik_ddmc_DdmServer;
diff --git a/test/018-stack-overflow/expected.txt b/test/018-stack-overflow/expected.txt
index 98b45b7..cc10c0c 100644
--- a/test/018-stack-overflow/expected.txt
+++ b/test/018-stack-overflow/expected.txt
@@ -1,3 +1,10 @@
-caught SOE in testSelfRecursion
+libartd run.
+caught SOE3 in testSelfRecursion
+caught SOE10 in testSelfRecursion
+caught SOE in testMutualRecursion
+SOE test done
+libart run.
+caught SOE3 in testSelfRecursion
+caught SOE10 in testSelfRecursion
 caught SOE in testMutualRecursion
 SOE test done
diff --git a/test/018-stack-overflow/run b/test/018-stack-overflow/run
new file mode 100755
index 0000000..1a71a1a
--- /dev/null
+++ b/test/018-stack-overflow/run
@@ -0,0 +1,23 @@
+#!/bin/bash
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Run normal. This will be the debug build.
+echo "libartd run."
+${RUN} "${@}"
+
+# Run non-debug.
+echo "libart run."
+${RUN} "${@/#libartd.so/libart.so}"
diff --git a/test/018-stack-overflow/src/Main.java b/test/018-stack-overflow/src/Main.java
index 41adabc..0961226 100644
--- a/test/018-stack-overflow/src/Main.java
+++ b/test/018-stack-overflow/src/Main.java
@@ -25,16 +25,38 @@ public class Main {
   }
 
   private static void testSelfRecursion() {
+//    try {
+//      stackOverflowTestSub0();
+//    }
+//    catch (StackOverflowError soe) {
+//      System.out.println("caught SOE0 in testSelfRecursion");
+//    }
     try {
-      stackOverflowTestSub(0.0, 0.0, 0.0);
+      stackOverflowTestSub3(0.0, 1.0, 2.0);
     }
     catch (StackOverflowError soe) {
-      System.out.println("caught SOE in testSelfRecursion");
+      System.out.println("caught SOE3 in testSelfRecursion");
     }
+    try {
+      stackOverflowTestSub10(0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0);
+    }
+    catch (StackOverflowError soe) {
+      System.out.println("caught SOE10 in testSelfRecursion");
+    }
+  }
+
+  private static void stackOverflowTestSub0() {
+    stackOverflowTestSub0();
+  }
+
+  private static void stackOverflowTestSub3(double pad1, double pad2, double pad3) {
+    stackOverflowTestSub3(pad1, pad2, pad3);
   }
 
-  private static void stackOverflowTestSub(double pad1, double pad2, double pad3) {
-    stackOverflowTestSub(pad1, pad2, pad3);
+  private static void stackOverflowTestSub10(double pad1, double pad2, double pad3, double pad4,
+                                             double pad5, double pad6, double pad7, double pad8,
+                                             double pad9, double pad10) {
+    stackOverflowTestSub10(pad1, pad2, pad3, pad4, pad5, pad6, pad7, pad8, pad9, pad10);
   }
 
   private static void testMutualRecursion() {