Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion compiler/src/dotty/tools/dotc/Driver.scala
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,17 @@ class Driver {
compileCtx.setReporter(reporter)
if (callback != null)
compileCtx.setCompilerCallback(callback)
process(args, compileCtx)
try
process(args, compileCtx)
catch
// This should be the ONLY point in the compiler where we catch stack overflows.
// The JVM cannot be assumed to function 100% properly after a stack overflow is caught.
// This is a pure best-effort attempt at helping the user.
case so: StackOverflowError =>
report.error("Stack overflow in the compiler.\n"
+ "See https://docs.scala-lang.org/overviews/compiler-options/compiling-deeply-nested-code.html\n"
+ s"Stack trace:\n${so.getStackTrace.mkString("\n ")}")(using compileCtx)
compileCtx.reporter
}

/** Entry point to the compiler with no optional arguments.
Expand Down
2 changes: 2 additions & 0 deletions compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
Original file line number Diff line number Diff line change
Expand Up @@ -462,6 +462,8 @@ private sealed trait XSettings:
val XjarCompressionLevel: Setting[Int] = IntChoiceSetting(AdvancedSetting, "Xjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION)
val XkindProjector: Setting[String] = ChoiceSetting(AdvancedSetting, "Xkind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. When invoked as -Xkind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable", legacyArgs = true)

/** Upper bound ("fuel") on the nesting depth of `handleRecursive` operations before the
 *  compiler reports a recursion overflow instead of risking a JVM stack overflow.
 */
val XmaxFuel: Setting[Int] = IntSetting(AdvancedSetting, "Xmax-fuel", "Max fuel for recursive operations before abandoning.", 400)

/** Documentation related settings */
val XdropComments: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xdrop-docs", "Drop documentation when scanning source files.", aliases = List("-Xdrop-comments"))
val XcookComments: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xcook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Xcook-comments"))
Expand Down
4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@ trait ConstraintRunInfo { self: Run =>
if maxSize > 0 then
val printer = if ctx.settings.YdetailedStats.value then default else typr
printer.println(s"max constraint size: $maxSize")
try printer.println(s"max constraint = ${maxConstraint.nn.show}")
catch case ex: StackOverflowError => printer.println("max constraint cannot be printed due to stack overflow")
// Fix: the details argument was previously the plain literal "$maxSize" (missing the
// `s` interpolator), so the error details would show the text "$maxSize" instead of
// the actual constraint size. Use an interpolated string, consistent with the
// `s"max constraint size: $maxSize"` println just above.
ctx.handleRecursive("printing max constraint of size", s"$maxSize"):
  printer.println(s"max constraint = ${maxConstraint.nn.show}")

/** Forgets the recorded maximum constraint so a subsequent run starts fresh. */
protected def reset(): Unit =
  maxConstraint = null
}
16 changes: 16 additions & 0 deletions compiler/src/dotty/tools/dotc/core/Contexts.scala
Original file line number Diff line number Diff line change
Expand Up @@ -523,6 +523,19 @@ object Contexts {
final def withUncommittedTyperState: Context =
withTyperState(typerState.uncommittedAncestor)

/** Ensures recursive operations obey the fuel limit, and throws user-friendly errors when they do not.
 *
 *  Runs `block` while tracking it as a [[RecursiveOperation]] on the per-run stack kept in
 *  `base`. If the tracked depth has already reached the `-Xmax-fuel` setting, a
 *  [[RecursionOverflow]] carrying the current operation stack is thrown *before* pushing,
 *  so the depth counter and the operation stack always stay consistent.
 *
 *  @param name    short title of the operation (used to group operations in the error message)
 *  @param details by-name extra detail, only evaluated if an error message is produced
 *  @param weight  relative weight used to pick the most likely culprit group when reporting
 *  @param block   the possibly deeply-recursive computation to run
 */
inline final def handleRecursive[T](name: String, details: => String, weight: Int = 1)(inline block: T): T =
  val op = RecursiveOperation(name, details, weight)
  if base.recursiveDepth >= settings.XmaxFuel.value then
    throw new RecursionOverflow(op :: base.recursiveOperations)
  base.recursiveOperations = op :: base.recursiveOperations
  base.recursiveDepth += 1
  try
    block
  finally
    // Pop exactly what was pushed, whether `block` returned or threw.
    base.recursiveDepth -= 1
    base.recursiveOperations = base.recursiveOperations.tail

final def withProperty[T](key: Key[T], value: Option[T]): Context =
if (property(key) == value) this
else value match {
Expand Down Expand Up @@ -1021,6 +1034,9 @@ object Contexts {
*/
private[dotc] var coverage: Coverage | Null = null

/** Current nesting depth of `handleRecursive` calls; compared against `-Xmax-fuel`. */
private[dotc] var recursiveDepth: Int = 0
/** In-flight recursive operations, most recent first; maintained by `handleRecursive`. */
private[dotc] var recursiveOperations: List[RecursiveOperation] = Nil

// Types state
/** A table for hash consing unique types */
private[core] val uniques: Uniques = Uniques()
Expand Down
25 changes: 12 additions & 13 deletions compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
Original file line number Diff line number Diff line change
Expand Up @@ -346,20 +346,19 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
if newSet.isEmpty then deps.remove(referenced)
else deps.updated(referenced, newSet)

def traverse(t: Type) = try
def traverse(t: Type) = ctx.handleRecursive("adjust", t.show):
Copy link
Copy Markdown
Member

@bishabosha bishabosha Apr 29, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this still relies on stack recursion no? (mutual recursion via traverseChildren)

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, this PR doesn't change the use of recursion, it changes how stack overflows are detected.

t match
case param: TypeParamRef =>
if hasBounds(param) then
if variance >= 0 then coDeps = update(coDeps, param)
if variance <= 0 then contraDeps = update(contraDeps, param)
else
traverse(entry(param))
case tp: LazyRef =>
if !seen.contains(tp) then
seen += tp
traverse(tp.ref)
case _ => traverseChildren(t)
catch case ex: Throwable => handleRecursive("adjust", t.show, ex)
case param: TypeParamRef =>
if hasBounds(param) then
if variance >= 0 then coDeps = update(coDeps, param)
if variance <= 0 then contraDeps = update(contraDeps, param)
else
traverse(entry(param))
case tp: LazyRef =>
if !seen.contains(tp) then
seen += tp
traverse(tp.ref)
case _ => traverseChildren(t)
end Adjuster

/** Adjust dependencies to account for the delta of previous entry `prevEntry`
Expand Down
44 changes: 44 additions & 0 deletions compiler/src/dotty/tools/dotc/core/RecursiveOperation.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
package dotty.tools
package dotc
package core

import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.Decorators.*
import dotty.tools.dotc.reporting.Message

/**
 * Operation that may cause unbounded recursion depending on user input.
 *
 * @param title   the operation title
 * @param details the operation details; by-name, so it is only evaluated when
 *                `explanation` is called (and re-evaluated on each call)
 * @param weight  the operation weight, used to prioritize some operations when
 *                displaying error messages
 */
class RecursiveOperation(val title: String, details: => String, val weight: Int):
  /** Human-readable description: the title followed by the details. */
  def explanation: String = s"$title $details"

/**
 * Thrown when recursing too deep, as an alternative to triggering a stack overflow.
 *
 * @param ops the recursive operations, most recent first, i.e., in reverse order
 */
class RecursionOverflow(val ops: List[RecursiveOperation])(using Context) extends TypeError:
  // Skip filling in the JVM stack trace: it is unrelated to the logical
  // recursion being reported, and capturing it would be wasted work.
  override def fillInStackTrace(): Throwable =
    this

  // Renders operations one per line; when more than `maxShown` are present,
  // shows the first and last `maxShown / 2` with an ellipsis in between.
  // The `|` margins are stripped later via `stripMargin` in `toMessage`.
  private def opsString(rs: List[RecursiveOperation]): String = {
    val maxShown = 20
    if (rs.lengthCompare(maxShown) > 0)
      i"""${opsString(rs.take(maxShown / 2))}
         | ...
         |${opsString(rs.takeRight(maxShown / 2))}"""
    else
      rs.map(_.explanation).mkString("\n ", "\n| ", "")
  }

  override def toMessage(using Context): Message =
    // Group operations by title and report the group with the largest total
    // weight, as the most likely culprit of the runaway recursion.
    val mostCommon = ops.groupBy(_.title).toList.maxBy(_._2.map(_.weight).sum)._2
    em"""Recursion limit exceeded.
        |Maybe there is an illegal cyclic reference?
        |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option.
        |For the unprocessed stack trace, compile with -Xno-enrich-error-messages.
        |A recurring operation is (inner to outer):
        |${opsString(mostCommon).stripMargin}"""
9 changes: 2 additions & 7 deletions compiler/src/dotty/tools/dotc/core/SymDenotations.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2361,7 +2361,7 @@ object SymDenotations {
tp.derivedCapturingType(recur(parent), refs)

case tp: TypeProxy =>
def computeTypeProxy = {
def computeTypeProxy = ctx.handleRecursive("type proxy of", i"$tp"):
val superTp = tp.superType
val baseTp = recur(superTp)
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

each call to recur still increases stack depth right?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah there are ~20 calls to handleRecursive that try and detect potential stack overflows, it'd be nice to rewrite the entire compiler to use an amount of stack space not dependent on user input but that's for (much) later.

tp match {
Expand All @@ -2370,7 +2370,6 @@ object SymDenotations {
case _ =>
}
baseTp
}
computeTypeProxy

case tp: AndOrType =>
Expand Down Expand Up @@ -2430,7 +2429,7 @@ object SymDenotations {
def computeMemberNames(keepOnly: NameFilter)(implicit onBehalf: MemberNames, ctx: Context): Set[Name] = {
var names = Set[Name]()
def maybeAdd(name: Name) = if (keepOnly(thisType, name)) names += name
try {
ctx.handleRecursive("member names", i"of $this"):
for ptype <- parentTypes do
ptype.classSymbol match
case pcls: ClassSymbol =>
Expand All @@ -2449,10 +2448,6 @@ object SymDenotations {
else info.decls.iterator
for (sym <- ownSyms) maybeAdd(sym.name)
names
}
catch {
case ex: Throwable => handleRecursive("member names", i"of $this", ex)
}
}

override final def fullNameSeparated(kind: QualifiedNameKind)(using Context): Name = {
Expand Down
62 changes: 28 additions & 34 deletions compiler/src/dotty/tools/dotc/core/TypeApplications.scala
Original file line number Diff line number Diff line change
Expand Up @@ -178,43 +178,40 @@ class TypeApplications(val self: Type) extends AnyVal {
case NoPrefix => true
case _ => false
}
try self match {
case self: TypeRef =>
val tsym = self.symbol
if (tsym.isClass) tsym.typeParams
else tsym.infoOrCompleter match {
case info: LazyType if isTrivial(self.prefix, tsym) =>
val tparams = info.completerTypeParams(tsym)
if tsym.isCompleted then tsym.info.typeParams
ctx.handleRecursive("type parameters of", self.show):
self match
case self: TypeRef =>
val tsym = self.symbol
if (tsym.isClass) tsym.typeParams
else tsym.infoOrCompleter match {
case info: LazyType if isTrivial(self.prefix, tsym) =>
val tparams = info.completerTypeParams(tsym)
if tsym.isCompleted then tsym.info.typeParams
// Completers sometimes represent parameters as symbols where
// the completed type represents them as paramrefs. Make sure we get
// a stable result by calling `typeParams` recursively. Test case
// is pos/i19942.scala, where parameter F0 has initially a Namer#TypeDefCompleter.
// After calling its completerTypeParams, we get a list of parameter symbols
// and as a side effect F0 is completed. Calling typeParams on the completed
// type gives a list of paramrefs.
else tparams
case _ => self.info.typeParams
}
case self: AppliedType =>
if (self.tycon.typeSymbol.isClass) Nil
else self.superType.typeParams
case self: ClassInfo =>
self.cls.typeParams
case self: HKTypeLambda =>
self.typeParams
case _: SingletonType | _: RefinedType | _: RecType =>
Nil
case self: WildcardType =>
self.optBounds.typeParams
case self: TypeProxy =>
self.superType.typeParams
case _ =>
Nil
}
catch {
case ex: Throwable => handleRecursive("type parameters of", self.show, ex)
}
else tparams
case _ => self.info.typeParams
}
case self: AppliedType =>
if (self.tycon.typeSymbol.isClass) Nil
else self.superType.typeParams
case self: ClassInfo =>
self.cls.typeParams
case self: HKTypeLambda =>
self.typeParams
case _: SingletonType | _: RefinedType | _: RecType =>
Nil
case self: WildcardType =>
self.optBounds.typeParams
case self: TypeProxy =>
self.superType.typeParams
case _ =>
Nil
}

/** Substitute in `self` the type parameters of `tycon` by some other types. */
Expand Down Expand Up @@ -399,12 +396,9 @@ class TypeApplications(val self: Type) extends AnyVal {
if hasParamsWithoutArg then
AppliedType(self, args)
else
try
ctx.handleRecursive("try to instantiate", i"$dealiased[$args%, %]"):
val instantiated = dealiased.instantiate(args)
if (followAlias) instantiated.normalized else instantiated
catch
case ex: Throwable => handleRecursive("try to instantiate", i"$dealiased[$args%, %]", ex)

else AppliedType(self, args)
}
else dealiased.resType match {
Expand Down
35 changes: 17 additions & 18 deletions compiler/src/dotty/tools/dotc/core/TypeComparer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -223,14 +223,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
this.leftRoot = tp1
}
else this.approx = a
try recur(tp1, tp2)
catch {
case ex: Throwable => handleRecursive("subtype", i"$tp1 <:< $tp2", ex, weight = 2)
}
finally {
try
ctx.handleRecursive("subtype", i"$tp1 <:< $tp2", weight = 2):
recur(tp1, tp2)
finally
this.approx = savedApprox
this.leftRoot = savedLeftRoot
}
}

def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, ApproxState.Fresh)
Expand Down Expand Up @@ -3342,18 +3340,19 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling
|| (cannotBeNothing(tp1) || cannotBeNothing(tp2))
}

args1.lazyZip(args2).lazyZip(cls.typeParams).exists {
(arg1, arg2, tparam) =>
val v = tparam.paramVarianceSign
if (v > 0)
covariantDisjoint(arg1, arg2, tparam)
else if (v < 0)
// Contravariant case: a value where this type parameter is
// instantiated to `Any` belongs to both types.
false
else
invariantDisjoint(arg1, arg2, tparam)
}
ctx.handleRecursive("are args provably disjoint for", i"$cls"):
args1.lazyZip(args2).lazyZip(cls.typeParams).exists {
(arg1, arg2, tparam) =>
val v = tparam.paramVarianceSign
if (v > 0)
covariantDisjoint(arg1, arg2, tparam)
else if (v < 0)
// Contravariant case: a value where this type parameter is
// instantiated to `Any` belongs to both types.
false
else
invariantDisjoint(arg1, arg2, tparam)
}
end provablyDisjointTypeArgs

protected def explainingTypeComparer(short: Boolean) = ExplainingTypeComparer(comparerContext, short)
Expand Down
8 changes: 4 additions & 4 deletions compiler/src/dotty/tools/dotc/core/TypeErasure.scala
Original file line number Diff line number Diff line change
Expand Up @@ -930,10 +930,10 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst
else if sourceLanguage.isScala2 && (elemtp.hiBound.isNullType || elemtp.hiBound.isNothingType) then
JavaArrayType(defn.ObjectType)
else
try erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, inSigName)(elemtp) match
case _: WildcardType => WildcardType
case elem => JavaArrayType(elem)
catch case ex: Throwable => handleRecursive("erase array type", tp.show, ex)
ctx.handleRecursive("erase array type", tp.show):
erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, inSigName)(elemtp) match
case _: WildcardType => WildcardType
case elem => JavaArrayType(elem)
}

private def erasePair(tp: Type)(using Context): Type = {
Expand Down
Loading
Loading