3 files changed: +38 −4 lines changed
lines changed Original file line number Diff line number Diff line change @@ -75,10 +75,6 @@ def _has_excuse(self, node):
7575 """Iterates through the tokenize tokens above the passed node to evaluate whether a
7676 doc-missing excuse has been placed (right) above this nodes begin"""
7777 node_start = node .lineno
78- assert node_start < len (self .tokens ), (
79- "An unexpected context occurred during parsing of {} "
80- "It seems not all file lines were tokenized for comment checking."
81- ).format (self .filename )
8278
8379 # Find the index of first token which starts at the same line as the node
8480 token_index = - 1
# Sample fixture for docstring-coverage analysis: a long multi-line module
# docstring followed by a class whose members are only partially documented.
"""
this is a very long docstring

this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
"""


class A:
    """This is the first class in the alphabeth."""

    # docstr-coverage:excused `test ignore after long docstrings`
    def ignored(self):
        # Intentionally undocumented; the excuse comment directly above the
        # `def` line suppresses the missing-docstring report for this method.
        pass

    def missing(self):
        # Intentionally left without a docstring so the analyzer reports it
        # as missing.
        pass
# Directory holding extra sample files exercised by the tests below.
SAMPLES_C_DIRECTORY = os.path.join("tests", "extra_samples")
# Sample module containing only private, undocumented definitions
# (presumably used by the skip-private tests — name suggests so).
PRIVATE_NO_DOCS_PATH = os.path.join(SAMPLES_C_DIRECTORY, "private_undocumented.py")

# Directory of standalone sample files, each targeted by a single test.
INDIVIDUAL_SAMPLES_DIR = os.path.join("tests", "individual_samples")
2426
2527def test_should_report_for_an_empty_file ():
2628 result = analyze ([EMPTY_FILE_PATH ])
@@ -261,3 +263,16 @@ def test_skip_private():
261263 "empty" : False ,
262264 }
263265 assert total_results == {"missing_count" : 1 , "needed_count" : 2 , "coverage" : 50.0 }
266+
267+
def test_long_doc():
    """Regression test on issue 79.

    Multiline docstrings can trip up the tokenize-based comment
    checking (which relies on line numbers), so verify that a file
    starting with a long module docstring is analyzed correctly.
    """
    result = analyze([os.path.join(INDIVIDUAL_SAMPLES_DIR, "long_doc.py")])
    # Aggregate once instead of recomputing for every assertion.
    counts = result.count_aggregate()
    # long_doc.py needs 4 docstrings (module, class, 2 methods); 2 are
    # present and 1 more is excused via an inline ignore -> 3 found, 75%.
    assert counts.coverage() == 75.0
    assert counts.num_files == 1
    # 2 + 1 inline ignore
    assert counts.found == 3
    assert counts.needed == 4
You can’t perform that action at this time.
0 commit comments