Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions Sources/SwiftDriver/Driver/CompilerMode.swift
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,15 @@ extension CompilerMode {
}
}

/// Whether this mode compiles the input sources in batches
/// (i.e. `-enable-batch-mode` was in effect).
var isBatchCompile: Bool {
  // Exhaustive switch (no `default:`) so that adding a new compiler mode
  // forces this property to be revisited.
  switch self {
  case .singleCompile, .standardCompile, .compilePCM, .immediate, .repl:
    return false
  case .batchCompile:
    return true
  }
}

// Whether this compilation mode supports the use of bridging pre-compiled
// headers.
public var supportsBridgingPCH: Bool {
Expand Down
25 changes: 25 additions & 0 deletions Sources/SwiftDriver/Driver/Driver.swift
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,9 @@ public struct Driver {
/// The specified maximum number of parallel jobs to execute.
@_spi(Testing) public let numParallelJobs: Int?

/// Whether jobs should continue to be executed after a failure.
@_spi(Testing) public let continueBuildingAfterErrors: Bool
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We'll need to drop the @_spi here until the related build issues get sorted out

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's being used a lot in this file. Not sure what the issue is, but it's still being used in a lot of places.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I have a set of changes I have not pushed in a PR (yet) that comments out all the spi's so we can use the older compilers. Not sure what to do about this issue.


/// The set of sanitizers that were requested
let enabledSanitizers: Set<Sanitizer>

Expand Down Expand Up @@ -408,6 +411,8 @@ public struct Driver {
self.numThreads = Self.determineNumThreads(&parsedOptions, compilerMode: compilerMode, diagnosticsEngine: diagnosticEngine)
self.numParallelJobs = Self.determineNumParallelJobs(&parsedOptions, diagnosticsEngine: diagnosticEngine, env: env)

self.continueBuildingAfterErrors = Self.computeContinueBuildingAfterErrors(&parsedOptions, compilerMode: compilerMode)

Self.validateWarningControlArgs(&parsedOptions, diagnosticEngine: diagnosticEngine)
Self.validateProfilingArgs(&parsedOptions,
fileSystem: fileSystem,
Expand Down Expand Up @@ -896,6 +901,7 @@ extension Driver {
workload: .init(allJobs, incrementalCompilationState),
delegate: createToolExecutionDelegate(),
numParallelJobs: numParallelJobs ?? 1,
continueBuildingAfterErrors: continueBuildingAfterErrors,
forceResponseFiles: forceResponseFiles,
recordedInputModificationDates: recordedInputModificationDates)
}
Expand Down Expand Up @@ -1357,6 +1363,25 @@ extension Driver {

return numJobs
}

/// Computes whether the driver should keep scheduling jobs after one fails.
///
/// - Parameters:
///   - parsedOptions: The parsed command-line options; consulted for
///     `-continue-building-after-errors`.
///   - compilerMode: The compilation mode chosen for this invocation.
/// - Returns: `true` if jobs should continue to run after a failure.
static func computeContinueBuildingAfterErrors(
  _ parsedOptions: inout ParsedOptions,
  compilerMode: CompilerMode
) -> Bool {
  // Batch mode's handling of serialized diagnostics requires that every
  // batch gets to run, so that each diagnostic emitted during compilation
  // ends up in at least one serialized diagnostics file. Batch mode therefore
  // implies -continue-building-after-errors. (This could be limited to
  // compilations that actually emit serialized diagnostics, but applying it
  // unconditionally is more consistent and less surprising for users.)
  if compilerMode.isBatchCompile {
    return true
  }
  // FIXME: Full continue-building-after-errors semantics are stronger than
  // we really need or want. For example, if precompiling a bridging header
  // fails there is no point compiling the source files, and if compilation
  // fails we should not attempt to link. Ideally we would let in-flight jobs
  // finish but schedule no new ones.
  return parsedOptions.contains(.continueBuildingAfterErrors)
}
}

extension Diagnostic.Message {
Expand Down
4 changes: 4 additions & 0 deletions Sources/SwiftDriver/Execution/DriverExecutor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ public protocol DriverExecutor {
func execute(workload: DriverExecutorWorkload,
delegate: JobExecutionDelegate,
numParallelJobs: Int,
continueBuildingAfterErrors: Bool,
forceResponseFiles: Bool,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This will require a corresponding update to SPM's executor

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That's why in my version, I put this bit in the workload parameter. Seemed like one could defend that the decision to run jobs after an error was a workload parameter.

recordedInputModificationDates: [TypedVirtualPath: Date]
) throws
Expand All @@ -35,6 +36,7 @@ public protocol DriverExecutor {
func execute(jobs: [Job],
delegate: JobExecutionDelegate,
numParallelJobs: Int,
continueBuildingAfterErrors: Bool,
forceResponseFiles: Bool,
recordedInputModificationDates: [TypedVirtualPath: Date]
) throws
Expand Down Expand Up @@ -92,13 +94,15 @@ extension DriverExecutor {
jobs: [Job],
delegate: JobExecutionDelegate,
numParallelJobs: Int,
continueBuildingAfterErrors: Bool,
forceResponseFiles: Bool,
recordedInputModificationDates: [TypedVirtualPath: Date]
) throws {
try execute(
workload: .all(jobs),
delegate: delegate,
numParallelJobs: numParallelJobs,
continueBuildingAfterErrors: continueBuildingAfterErrors,
forceResponseFiles: forceResponseFiles,
recordedInputModificationDates: recordedInputModificationDates)
}
Expand Down
28 changes: 27 additions & 1 deletion Sources/SwiftDriverExecution/MultiJobExecutor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,9 @@ public final class MultiJobExecutor {
/// Operation queue for executing tasks in parallel.
let jobQueue: OperationQueue

/// Whether jobs should continue being scheduled after one fails.
let continueBuildingAfterErrors: Bool

/// The process set to use when launching new processes.
let processSet: ProcessSet?

Expand All @@ -87,6 +90,9 @@ public final class MultiJobExecutor {
/// any newly-required job. Set only once.
private(set) var executeAllJobsTaskBuildEngine: LLTaskBuildEngine? = nil

/// Should new jobs immediately fail.
private(set) var isCancelled: Bool = false


init(
argsResolver: ArgsResolver,
Expand All @@ -95,6 +101,7 @@ public final class MultiJobExecutor {
workload: DriverExecutorWorkload,
executorDelegate: JobExecutionDelegate,
jobQueue: OperationQueue,
continueBuildingAfterErrors: Bool,
processSet: ProcessSet?,
forceResponseFiles: Bool,
recordedInputModificationDates: [TypedVirtualPath: Date],
Expand All @@ -114,6 +121,7 @@ public final class MultiJobExecutor {
self.fileSystem = fileSystem
self.executorDelegate = executorDelegate
self.jobQueue = jobQueue
self.continueBuildingAfterErrors = continueBuildingAfterErrors
self.processSet = processSet
self.forceResponseFiles = forceResponseFiles
self.recordedInputModificationDates = recordedInputModificationDates
Expand Down Expand Up @@ -199,6 +207,10 @@ public final class MultiJobExecutor {
executeAllJobsTaskBuildEngine = engine
}

/// Marks the build as cancelled: jobs that have not yet started will be
/// completed as failures instead of being executed (checked via
/// `isCancelled` before a job is scheduled and again before it runs).
// NOTE(review): `isCancelled` is written here and read from operations on
// `jobQueue`; confirm this access is adequately synchronized.
fileprivate func cancel() {
isCancelled = true
}

/// After a job finishes, an incremental build may discover more jobs are needed, or if all compilations
/// are done, will need to then add in the post-compilation rules.
fileprivate func addRuleBeyondMandatoryCompiles(
Expand Down Expand Up @@ -239,6 +251,9 @@ public final class MultiJobExecutor {
/// The number of jobs to run in parallel.
private let numParallelJobs: Int

/// Whether jobs should continue being scheduled after one fails.
private let continueBuildingAfterErrors: Bool

/// The process set to use when launching new processes.
private let processSet: ProcessSet?

Expand All @@ -260,6 +275,7 @@ public final class MultiJobExecutor {
executorDelegate: JobExecutionDelegate,
diagnosticsEngine: DiagnosticsEngine,
numParallelJobs: Int? = nil,
continueBuildingAfterErrors: Bool = false,
processSet: ProcessSet? = nil,
forceResponseFiles: Bool = false,
recordedInputModificationDates: [TypedVirtualPath: Date] = [:],
Expand All @@ -270,6 +286,7 @@ public final class MultiJobExecutor {
self.executorDelegate = executorDelegate
self.diagnosticsEngine = diagnosticsEngine
self.numParallelJobs = numParallelJobs ?? 1
self.continueBuildingAfterErrors = continueBuildingAfterErrors
self.processSet = processSet
self.forceResponseFiles = forceResponseFiles
self.recordedInputModificationDates = recordedInputModificationDates
Expand Down Expand Up @@ -304,6 +321,7 @@ public final class MultiJobExecutor {
workload: workload,
executorDelegate: executorDelegate,
jobQueue: jobQueue,
continueBuildingAfterErrors: continueBuildingAfterErrors,
processSet: processSet,
forceResponseFiles: forceResponseFiles,
recordedInputModificationDates: recordedInputModificationDates,
Expand Down Expand Up @@ -428,11 +446,14 @@ class ExecuteJobRule: LLBuildRule {

/// Called by llbuild once all of this job's inputs have results.
/// Completes the task as a failure — without running the job — when any
/// input failed or the build has been cancelled by an earlier failure.
override func inputsAvailable(_ engine: LLTaskBuildEngine) {
// Return early if any of the inputs failed or the build was cancelled.
guard allInputsSucceeded, !context.isCancelled else {
return engine.taskIsComplete(DriverBuildValue.jobExecution(success: false))
}

context.jobQueue.addOperation {
// Re-check cancellation here: another job may have failed (and cancelled
// the build) between this operation being enqueued and it starting to run.
guard !self.context.isCancelled else {
return engine.taskIsComplete(DriverBuildValue.jobExecution(success: false))
}
self.executeJob(engine)
}
}
Comment on lines 447 to 459
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If these changes work, this part of the PR is more elegant than mine.

Expand Down Expand Up @@ -532,6 +553,11 @@ class ExecuteJobRule: LLBuildRule {
}

engine.taskIsComplete(value)

if !value.success, !context.continueBuildingAfterErrors {
// Allow currently running jobs to finish, but don't execute anymore
context.cancel()
}
}
}

Expand Down
2 changes: 2 additions & 0 deletions Sources/SwiftDriverExecution/SwiftDriverExecutor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ public final class SwiftDriverExecutor: DriverExecutor {
public func execute(workload: DriverExecutorWorkload,
delegate: JobExecutionDelegate,
numParallelJobs: Int = 1,
continueBuildingAfterErrors: Bool,
forceResponseFiles: Bool = false,
recordedInputModificationDates: [TypedVirtualPath: Date] = [:]
) throws {
Expand All @@ -69,6 +70,7 @@ public final class SwiftDriverExecutor: DriverExecutor {
executorDelegate: delegate,
diagnosticsEngine: diagnosticsEngine,
numParallelJobs: numParallelJobs,
continueBuildingAfterErrors: continueBuildingAfterErrors,
processSet: processSet,
forceResponseFiles: forceResponseFiles,
recordedInputModificationDates: recordedInputModificationDates)
Expand Down
88 changes: 86 additions & 2 deletions Tests/SwiftDriverTests/JobExecutorTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -306,14 +306,98 @@ final class JobExecutorTests: XCTestCase {
$0 <<< "let bar = 3"
}

// FIXME: It's unfortunate we diagnose this twice, once for each job which uses the input.
verifier.expect(.error("input file '\(other.description)' was modified during the build"))
Comment on lines -309 to -310
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nice!

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this still needed if -driver-continue-building... is set? Or can we 86 it?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nope not needed. It was getting emitted twice because the second job was running even though the first one failed.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oh @davidungar I misunderstood your question. Yes, if continue building after errors is set, then we would get 2 errors, but since it's not and the driver defaults to 1 job at a time, it only outputs once.

Is this not causing a test failure in #387 ?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thank you, @cltnschlosser . Apparently not causing a failure there. As soon as I've gotten my PRs in, I'll push forward on the legacy tests and see what pops up.

verifier.expect(.error("input file '\(other.description)' was modified during the build"))
XCTAssertThrowsError(try driver.run(jobs: jobs))
}
}
}

/// By default (no -continue-building-after-errors), a failure in the first
/// compile job must prevent the second one from running: only the first
/// file's error may appear in the captured stderr.
func testStopAfterError() throws {
  try withTemporaryDirectory { path in
    // Two sources, each with a deliberate compile error.
    let fooFile = path.appending(component: "foo.swift")
    try localFileSystem.writeFileContents(fooFile) {
      $0 <<< """
      struct Foo {
        let bar: Int
        init() {
          self.bar = self.bar
        }
      }
      """
    }
    let barFile = path.appending(component: "bar.swift")
    try localFileSystem.writeFileContents(barFile) {
      $0 <<< """
      struct Bar {
        let baz: Int
        init() {
          self.baz = self.baz
        }
      }
      """
    }

    // Redirect stderr into a file we can inspect afterwards.
    let savedStderr = stderrStream
    let capturedErrors = path.appending(component: "dummy_error_stream")
    TSCBasic.stderrStream = try! ThreadSafeOutputByteStream(LocalFileOutputByteStream(capturedErrors))
    // Restore the original stream on every exit path.
    defer { TSCBasic.stderrStream = savedStderr }

    try assertNoDriverDiagnostics(args: "swiftc", fooFile.pathString, barFile.pathString) { driver in
      let jobs = try driver.planBuild()
      XCTAssertThrowsError(try driver.run(jobs: jobs))
    }

    // The first job's error must be present; the second job must never
    // have run, so its error must be absent.
    let capturedText = try localFileSystem.readFileContents(capturedErrors).description
    XCTAssertTrue(capturedText.contains("self.bar = self.bar"))
    XCTAssertFalse(capturedText.contains("self.baz = self.baz"))
  }
}

/// With -continue-building-after-errors, a failure in the first compile job
/// must not stop the second one: both files' errors must appear in the
/// captured stderr.
func testContinueAfterError() throws {
  try withTemporaryDirectory { path in
    // Two sources, each with a deliberate compile error.
    let fooFile = path.appending(component: "foo.swift")
    try localFileSystem.writeFileContents(fooFile) {
      $0 <<< """
      struct Foo {
        let bar: Int
        init() {
          self.bar = self.bar
        }
      }
      """
    }
    let barFile = path.appending(component: "bar.swift")
    try localFileSystem.writeFileContents(barFile) {
      $0 <<< """
      struct Bar {
        let baz: Int
        init() {
          self.baz = self.baz
        }
      }
      """
    }

    // Redirect stderr into a file we can inspect afterwards.
    let savedStderr = stderrStream
    let capturedErrors = path.appending(component: "dummy_error_stream")
    TSCBasic.stderrStream = try! ThreadSafeOutputByteStream(LocalFileOutputByteStream(capturedErrors))
    // Restore the original stream on every exit path.
    defer { TSCBasic.stderrStream = savedStderr }

    try assertNoDriverDiagnostics(args: "swiftc", "-continue-building-after-errors", fooFile.pathString, barFile.pathString) { driver in
      let jobs = try driver.planBuild()
      XCTAssertThrowsError(try driver.run(jobs: jobs))
    }

    // Both jobs ran despite the first failure, so both errors are present.
    let capturedText = try localFileSystem.readFileContents(capturedErrors).description
    XCTAssertTrue(capturedText.contains("self.bar = self.bar"))
    XCTAssertTrue(capturedText.contains("self.baz = self.baz"))
  }
}

func testTemporaryFileWriting() throws {
try withTemporaryDirectory { path in
let resolver = try ArgsResolver(fileSystem: localFileSystem, temporaryDirectory: .absolute(path))
Expand Down
6 changes: 6 additions & 0 deletions Tests/SwiftDriverTests/SwiftDriverTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -1466,6 +1466,11 @@ final class SwiftDriverTests: XCTestCase {
}
}

/// Batch mode must imply -continue-building-after-errors, so that every
/// batch runs and all diagnostics reach a serialized diagnostics file.
func testBatchModeContinueAfterErrors() throws {
  let driver = try Driver(args: ["swiftc", "foo1.swift", "bar1.swift", "-enable-batch-mode"])
  // Use the specific boolean assertion (rather than generic XCTAssert) and
  // attach a message so a failure explains which invariant broke.
  XCTAssertTrue(driver.continueBuildingAfterErrors,
                "-enable-batch-mode should imply -continue-building-after-errors")
}

func testSingleThreadedWholeModuleOptimizationCompiles() throws {
var driver1 = try Driver(args: ["swiftc", "-whole-module-optimization", "foo.swift", "bar.swift", "-module-name", "Test", "-target", "x86_64-apple-macosx10.15", "-emit-module-interface", "-emit-objc-header-path", "Test-Swift.h", "-emit-private-module-interface-path", "Test.private.swiftinterface"])
let plannedJobs = try driver1.planBuild()
Expand Down Expand Up @@ -2861,6 +2866,7 @@ final class SwiftDriverTests: XCTestCase {
func execute(workload: DriverExecutorWorkload,
delegate: JobExecutionDelegate,
numParallelJobs: Int,
continueBuildingAfterErrors: Bool,
forceResponseFiles: Bool,
recordedInputModificationDates: [TypedVirtualPath : Date]) throws {
fatalError()
Expand Down