@@ -459,6 +459,19 @@ pub fn span_of_attrs(attrs: &Attributes) -> syntax_pos::Span {
     start.to(end)
 }
 
+/// Reports a resolution failure diagnostic.
+///
+/// Ideally we can report the diagnostic with the actual span in the source where the link failure
+/// occurred. However, there's a mismatch between the span in the source code and the span in the
+/// markdown, so we have to do a bit of work to figure out the correspondence.
+///
+/// It's not too hard to find the span for sugared doc comments (`///` and `/**`), because the
+/// source will match the markdown exactly, excluding the comment markers. However, it's much more
+/// difficult to calculate the spans for unsugared docs, because we have to deal with escaping and
+/// other source features. So, we attempt to find the exact source span of the resolution failure
+/// in sugared docs, but use the span of the documentation attributes themselves for unsugared
+/// docs. Because this span might be overly large, we display the markdown line containing the
+/// failure as a note.
 fn resolution_failure(
     cx: &DocContext,
     attrs: &Attributes,
@@ -469,47 +482,75 @@ fn resolution_failure(
     let sp = span_of_attrs(attrs);
     let msg = format!("`[{}]` cannot be resolved, ignoring it...", path_str);
 
-    let code_dox = sp.to_src(cx);
-
-    let doc_comment_padding = 3;
     let mut diag = if let Some(link_range) = link_range {
-        // blah blah blah\nblah\nblah [blah] blah blah\nblah blah
-        //                       ^     ~~~~~~
-        //                       |     link_range
-        //                       last_new_line_offset
-
-        let mut diag;
-        if dox.lines().count() == code_dox.lines().count() {
-            let line_offset = dox[..link_range.start].lines().count();
-            // The span starts in the `///`, so we don't have to account for the leading whitespace.
-            let code_dox_len = if line_offset <= 1 {
-                doc_comment_padding
-            } else {
-                // The first `///`.
-                doc_comment_padding +
-                // Each subsequent leading whitespace and `///`.
-                code_dox.lines().skip(1).take(line_offset - 1).fold(0, |sum, line| {
-                    sum + doc_comment_padding + line.len() - line.trim_start().len()
-                })
-            };
+        let src = cx.sess().source_map().span_to_snippet(sp);
+        let is_all_sugared_doc = attrs.doc_strings.iter().all(|frag| match frag {
+            DocFragment::SugaredDoc(..) => true,
+            _ => false,
+        });
+
+        if let (Ok(src), true) = (src, is_all_sugared_doc) {
+            // The number of markdown lines up to and including the resolution failure.
+            let num_lines = dox[..link_range.start].lines().count();
+
+            // We use `split_terminator('\n')` instead of `lines()` when counting bytes to ensure
+            // that DOS-style line endings do not cause the spans to be calculated incorrectly.
+            let mut src_lines = src.split_terminator('\n');
+            let mut md_lines = dox.split_terminator('\n').take(num_lines).peekable();
+
+            // The number of bytes from the start of the source span to the resolution failure that
+            // are *not* part of the markdown, like comment markers.
+            let mut extra_src_bytes = 0;
+
+            while let Some(md_line) = md_lines.next() {
+                loop {
+                    let source_line = src_lines
+                        .next()
+                        .expect("could not find markdown line in source");
+
+                    match source_line.find(md_line) {
+                        Some(offset) => {
+                            extra_src_bytes += if md_lines.peek().is_some() {
+                                source_line.len() - md_line.len()
+                            } else {
+                                offset
+                            };
+                            break;
+                        }
+                        None => {
+                            // Since this is a source line that doesn't include a markdown line,
+                            // we have to count the newline that we split from earlier.
+                            extra_src_bytes += source_line.len() + 1;
+                        }
+                    }
+                }
+            }
 
-            // Extract the specific span.
             let sp = sp.from_inner_byte_pos(
-                link_range.start + code_dox_len,
-                link_range.end + code_dox_len,
+                link_range.start + extra_src_bytes,
+                link_range.end + extra_src_bytes,
             );
 
-            diag = cx.tcx.struct_span_lint_node(lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE,
-                                                NodeId::from_u32(0),
-                                                sp,
-                                                &msg);
+            let mut diag = cx.tcx.struct_span_lint_node(
+                lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE,
+                NodeId::from_u32(0),
+                sp,
+                &msg,
+            );
             diag.span_label(sp, "cannot be resolved, ignoring");
+            diag
         } else {
-            diag = cx.tcx.struct_span_lint_node(lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE,
-                                                NodeId::from_u32(0),
-                                                sp,
-                                                &msg);
+            let mut diag = cx.tcx.struct_span_lint_node(
+                lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE,
+                NodeId::from_u32(0),
+                sp,
+                &msg,
+            );
 
+            // blah blah blah\nblah\nblah [blah] blah blah\nblah blah
+            //                       ^     ~~~~
+            //                       |     link_range
+            //                       last_new_line_offset
             let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1);
             let line = dox[last_new_line_offset..].lines().next().unwrap_or("");
 
@@ -522,8 +563,8 @@ fn resolution_failure(
                 before=link_range.start - last_new_line_offset,
                 found=link_range.len(),
             ));
+            diag
         }
-        diag
     } else {
         cx.tcx.struct_span_lint_node(lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE,
                                      NodeId::from_u32(0),
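For reference, here is a small self-contained sketch of the offset calculation that the new sugared-doc branch performs: it walks the source lines and markdown lines in parallel and counts the bytes (comment markers, indentation, lines without markdown) that precede the failing link in the source but not in the markdown. This is illustrative only, not the rustdoc code itself; the function name `extra_source_bytes` and the sample inputs in `main` are made up for the example, and the real implementation works on `Span`s and the compiler's `SourceMap` rather than plain strings.

```rust
/// Illustrative only: count the source bytes (comment markers, indentation,
/// lines without markdown) that appear before the failing link in the source
/// snippet `src` but not in the extracted markdown `dox`. `link_start` is the
/// byte offset of the link within `dox`.
fn extra_source_bytes(src: &str, dox: &str, link_start: usize) -> usize {
    // Number of markdown lines up to and including the one with the failure.
    let num_lines = dox[..link_start].lines().count();

    // `split_terminator('\n')` is used instead of `lines()` so that a trailing
    // '\r' from DOS-style line endings is still counted as a source byte.
    let mut src_lines = src.split_terminator('\n');
    let mut md_lines = dox.split_terminator('\n').take(num_lines).peekable();

    let mut extra = 0;
    while let Some(md_line) = md_lines.next() {
        loop {
            let source_line = src_lines.next().expect("markdown line not found in source");
            match source_line.find(md_line) {
                Some(offset) => {
                    extra += if md_lines.peek().is_some() {
                        // Lines before the failing one contribute everything
                        // that is not markdown (e.g. `///` plus indentation).
                        source_line.len() - md_line.len()
                    } else {
                        // The failing line contributes only the prefix before
                        // its markdown begins.
                        offset
                    };
                    break;
                }
                // A source line with no matching markdown: count it whole,
                // plus the '\n' removed by `split_terminator`.
                None => extra += source_line.len() + 1,
            }
        }
    }
    extra
}

fn main() {
    let src = "/// Some docs.\n/// See [missing] for details.";
    let dox = " Some docs.\n See [missing] for details.";
    let link_start = dox.find("[missing]").unwrap();
    // Each line carries a 3-byte `///` marker, so this prints 6; adding 6 to
    // `link_start` lands exactly on `[missing]` within `src`.
    println!("{}", extra_source_bytes(src, dox, link_start));
}
```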