Skip to content

Commit f01cb2b

Browse files
authored
Remove wrong assertion (HunterMcGushion#82)
1 parent c8d5268 commit f01cb2b

File tree

3 files changed

+38
-4
lines changed

3 files changed

+38
-4
lines changed

docstr_coverage/visitor.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -75,10 +75,6 @@ def _has_excuse(self, node):
7575
"""Iterates through the tokenize tokens above the passed node to evaluate whether a
7676
doc-missing excuse has been placed (right) above this nodes begin"""
7777
node_start = node.lineno
78-
assert node_start < len(self.tokens), (
79-
"An unexpected context occurred during parsing of {} "
80-
"It seems not all file lines were tokenized for comment checking."
81-
).format(self.filename)
8278

8379
# Find the index of first token which starts at the same line as the node
8480
token_index = -1
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
"""
2+
this is a very long docstring
3+
4+
this is a very long docstring
5+
this is a very long docstring
6+
this is a very long docstring
7+
this is a very long docstring
8+
this is a very long docstring
9+
this is a very long docstring
10+
this is a very long docstring
11+
this is a very long docstring
12+
"""
13+
14+
15+
class A:
16+
"""This is the first class in the alphabet."""
17+
18+
# docstr-coverage:excused `test ignore after long docstrings`
19+
def ignored(self):
20+
pass
21+
22+
def missing(self):
23+
pass

tests/test_coverage.py

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,8 @@
2121
SAMPLES_C_DIRECTORY = os.path.join("tests", "extra_samples")
2222
PRIVATE_NO_DOCS_PATH = os.path.join(SAMPLES_C_DIRECTORY, "private_undocumented.py")
2323

24+
INDIVIDUAL_SAMPLES_DIR = os.path.join("tests", "individual_samples")
25+
2426

2527
def test_should_report_for_an_empty_file():
2628
result = analyze([EMPTY_FILE_PATH])
@@ -261,3 +263,16 @@ def test_skip_private():
261263
"empty": False,
262264
}
263265
assert total_results == {"missing_count": 1, "needed_count": 2, "coverage": 50.0}
266+
267+
268+
def test_long_doc():
269+
"""Regression test on issue 79
270+
271+
Multiline docstrings can be a smoke test when checking
272+
the tokenize tokens (which is based on line numbers)."""
273+
result = analyze([os.path.join(INDIVIDUAL_SAMPLES_DIR, "long_doc.py")])
274+
assert result.count_aggregate().coverage() == 75.0
275+
assert result.count_aggregate().num_files == 1
276+
# 2 + 1 inline ignore
277+
assert result.count_aggregate().found == 3
278+
assert result.count_aggregate().needed == 4

0 commit comments

Comments
 (0)