@@ -436,31 +436,26 @@ int pthread_attr_destroy(pthread_attr_t *a)
 
 #endif
 
-
-void
-_Py_InitializeRecursionLimits(PyThreadState *tstate)
+static void
+hardware_stack_limits(uintptr_t *top, uintptr_t *base)
 {
-    _PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate;
 #ifdef WIN32
     ULONG_PTR low, high;
     GetCurrentThreadStackLimits(&low, &high);
-    _tstate->c_stack_top = (uintptr_t)high;
+    *top = (uintptr_t)high;
     ULONG guarantee = 0;
     SetThreadStackGuarantee(&guarantee);
-    _tstate->c_stack_hard_limit = ((uintptr_t)low) + guarantee + _PyOS_STACK_MARGIN_BYTES;
-    _tstate->c_stack_soft_limit = _tstate->c_stack_hard_limit + _PyOS_STACK_MARGIN_BYTES;
+    *base = (uintptr_t)low + guarantee;
 #elif defined(__APPLE__)
     pthread_t this_thread = pthread_self();
     void *stack_addr = pthread_get_stackaddr_np(this_thread);  // top of the stack
     size_t stack_size = pthread_get_stacksize_np(this_thread);
-    _tstate->c_stack_top = (uintptr_t)stack_addr;
-    _tstate->c_stack_hard_limit = _tstate->c_stack_top - stack_size;
-    _tstate->c_stack_soft_limit = _tstate->c_stack_hard_limit + _PyOS_STACK_MARGIN_BYTES;
+    *top = (uintptr_t)stack_addr;
+    *base = ((uintptr_t)stack_addr) - stack_size;
 #else
-    uintptr_t here_addr = _Py_get_machine_stack_pointer();
-    /// XXX musl supports HAVE_PTHRED_GETATTR_NP, but the resulting stack size
-    /// (on alpine at least) is much smaller than expected and imposes undue limits
-    /// compared to the old stack size estimation. (We assume musl is not glibc.)
+    /// XXX musl supports HAVE_PTHRED_GETATTR_NP, but the resulting stack size
+    /// (on alpine at least) is much smaller than expected and imposes undue limits
+    /// compared to the old stack size estimation. (We assume musl is not glibc.)
 # if defined(HAVE_PTHREAD_GETATTR_NP) && !defined(_AIX) && \
        !defined(__NetBSD__) && (defined(__GLIBC__) || !defined(__linux__))
     size_t stack_size, guard_size;
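For context on the pthread path used in this branch: pthread_getattr_np() fills a pthread_attr_t describing the current thread, and pthread_attr_getstack() then reports the lowest stack address and the total size, so the usable range runs from stack_addr + guard_size up to stack_addr + stack_size. A minimal standalone sketch, not part of the patch and illustrative only:

/* Illustrative only: query the current thread's stack bounds the same way
 * the pthread_getattr_np branch above does.  Build with -pthread on glibc. */
#define _GNU_SOURCE
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    pthread_attr_t attr;
    void *stack_addr;               /* lowest address of the stack region */
    size_t stack_size, guard_size;

    if (pthread_getattr_np(pthread_self(), &attr) != 0) {
        return 1;
    }
    pthread_attr_getstack(&attr, &stack_addr, &stack_size);
    pthread_attr_getguardsize(&attr, &guard_size);
    pthread_attr_destroy(&attr);

    uintptr_t base = (uintptr_t)stack_addr + guard_size;   /* skip the guard region */
    uintptr_t top = (uintptr_t)stack_addr + stack_size;    /* stack grows down from here */
    printf("usable stack: %p..%p (%zu bytes)\n",
           (void *)base, (void *)top, (size_t)(top - base));
    return 0;
}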
@@ -473,38 +468,35 @@ _Py_InitializeRecursionLimits(PyThreadState *tstate)
         err |= pthread_attr_destroy(&attr);
     }
     if (err == 0) {
-        uintptr_t base = ((uintptr_t)stack_addr) + guard_size;
-        uintptr_t top = base + stack_size;
-# ifdef _Py_THREAD_SANITIZER
-        // Thread sanitizer crashes if we use a bit more than half the stack.
-# if _Py_STACK_GROWS_DOWN
-        base += stack_size / 2;
-# else
-        top -= stack_size / 2;
-# endif
-# endif
-# if _Py_STACK_GROWS_DOWN
-        _tstate->c_stack_top = top;
-        _tstate->c_stack_hard_limit = base + _PyOS_STACK_MARGIN_BYTES;
-        _tstate->c_stack_soft_limit = base + _PyOS_STACK_MARGIN_BYTES * 2;
-        assert(_tstate->c_stack_soft_limit < here_addr);
-        assert(here_addr < _tstate->c_stack_top);
-# else
-        _tstate->c_stack_top = base;
-        _tstate->c_stack_hard_limit = top - _PyOS_STACK_MARGIN_BYTES;
-        _tstate->c_stack_soft_limit = top - _PyOS_STACK_MARGIN_BYTES * 2;
-        assert(here_addr > base);
-        assert(here_addr < _tstate->c_stack_soft_limit);
-# endif
+        *base = ((uintptr_t)stack_addr) + guard_size;
+        *top = (uintptr_t)stack_addr + stack_size;
         return;
     }
 # endif
-    _tstate->c_stack_top = _Py_SIZE_ROUND_UP(here_addr, 4096);
-    _tstate->c_stack_soft_limit = _tstate->c_stack_top - Py_C_STACK_SIZE;
-    _tstate->c_stack_hard_limit = _tstate->c_stack_top - (Py_C_STACK_SIZE + _PyOS_STACK_MARGIN_BYTES);
+    uintptr_t here_addr = _Py_get_machine_stack_pointer();
+    uintptr_t top_addr = _Py_SIZE_ROUND_UP(here_addr, 4096);
+    *top = top_addr;
+    *base = top_addr - Py_C_STACK_SIZE;
 #endif
 }
 
+void
+_Py_InitializeRecursionLimits(PyThreadState *tstate)
+{
+    uintptr_t top;
+    uintptr_t base;
+    hardware_stack_limits(&top, &base);
+#ifdef _Py_THREAD_SANITIZER
+    // Thread sanitizer crashes if we use more than half the stack.
+    uintptr_t stacksize = top - base;
+    base += stacksize / 2;
+#endif
+    _PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate;
+    _tstate->c_stack_top = top;
+    _tstate->c_stack_hard_limit = base + _PyOS_STACK_MARGIN_BYTES;
+    _tstate->c_stack_soft_limit = base + _PyOS_STACK_MARGIN_BYTES * 2;
+}
+
 /* The function _Py_EnterRecursiveCallTstate() only calls _Py_CheckRecursiveCall()
    if the recursion_depth reaches recursion_limit. */
 int
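The refactor does not change how the limits are consumed: c_stack_top, c_stack_soft_limit and c_stack_hard_limit still describe a downward-growing stack, with the soft limit one margin above the hard limit. A hedged sketch of the kind of check a caller performs; the function name is hypothetical, and only the fields and _Py_get_machine_stack_pointer() come from the code above:

/* Hypothetical illustration, not CPython API: classify the current stack
 * depth against the per-thread limits set by _Py_InitializeRecursionLimits(). */
static inline int
stack_pressure_example(_PyThreadStateImpl *_tstate)
{
    uintptr_t here_addr = _Py_get_machine_stack_pointer();
    if (here_addr > _tstate->c_stack_soft_limit) {
        return 0;   /* plenty of headroom */
    }
    if (here_addr > _tstate->c_stack_hard_limit) {
        return 1;   /* soft limit crossed: still room to raise RecursionError */
    }
    return 2;       /* hard limit crossed: unwind immediately */
}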