Coverage for genbadge/utils_flake8.py: 73%
57 statements
coverage.py v7.2.7, created at 2023-11-10 20:37 +0000
# Authors: Sylvain MARIE <sylvain.marie@se.com>
#          + All contributors to <https://github.com/smarie/python-genbadge>
#
# License: 3-clause BSD, <https://github.com/smarie/python-genbadge/blob/master/LICENSE>
from __future__ import division

from warnings import warn
import re

from .utils_badge import Badge

try:
    # flake8-html is an optional dependency, do not fail too soon if it can't be loaded
    import flake8_html
except ImportError as e:
    ee = e  # save it
    class FakeFlake8HtmlImport(object):  # noqa
        def __getattribute__(self, item):
            raise ImportError("Could not import `flake8_html` module, please install it. "
                              "Note that all dependencies for the flake8 command can be installed with "
                              "`pip install genbadge[flake8]`. Caught: %r" % ee)
    flake8_html = FakeFlake8HtmlImport()

class Flake8Stats(object):
    """
    Contains the results from parsing the flake8 report.
    The severity levels are defined by flake8-html.
    """
    def __init__(self,
                 nb_critical=0, nb_warning=0, nb_info=0
                 ):
        # severities 1, 2, 3
        self.nb_critical = nb_critical
        self.nb_warning = nb_warning
        self.nb_info = nb_info

    def add(self,
            nb,   # type: int
            code  # type: str
            ):
        """
        Add `nb` errors with the same code to the statistics.
        """
        severity = flake8_html.plugin.find_severity(code)
        if severity == 1:
            self.nb_critical += nb
        elif severity == 2:
            self.nb_warning += nb
        elif severity == 3:  # coverage: condition was never false in tests, the `else` below was never reached
            self.nb_info += nb
        else:
            raise ValueError("Unknown severity: %r for code %r" % (severity, code))

    @property
    def nb_total(self):
        return self.nb_critical + self.nb_warning + self.nb_info

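# A minimal usage sketch (assuming `flake8_html` is installed so that
# `find_severity` can resolve the violation codes below):
#
#     stats = Flake8Stats()
#     stats.add(3, "E501")  # three occurrences of one code
#     stats.add(1, "F401")  # one occurrence of another
#     stats.nb_total        # -> 4, whichever severity bucket each code falls into
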
def get_color(
        flake8_stats  # type: Flake8Stats
):
    """ Returns the badge color to use depending on the flake8 results """

    if flake8_stats.nb_critical > 0:  # coverage: condition was never false in tests, the branches below were not exercised
        color = 'red'
    elif flake8_stats.nb_warning > 0:
        color = 'orange'
    elif flake8_stats.nb_info > 0:
        color = 'green'
    else:
        color = 'brightgreen'

    return color

def get_flake8_badge(
        flake8_stats,      # type: Flake8Stats
        left_txt="flake8"  # type: str
):
    # type: (...) -> Badge
    """Return the badge built from the flake8 results """

    color = get_color(flake8_stats)

    right_txt = "%s C, %s W, %s I" % (flake8_stats.nb_critical, flake8_stats.nb_warning, flake8_stats.nb_info)

    return Badge(left_txt=left_txt, right_txt=right_txt, color=color)

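# For example (assuming `Badge` simply stores its constructor arguments as
# attributes), stats with 2 critical issues, 1 warning and no info-level issues
# yield a red badge reading "2 C, 1 W, 0 I":
#
#     badge = get_flake8_badge(Flake8Stats(nb_critical=2, nb_warning=1))
#     badge.right_txt, badge.color  # -> ('2 C, 1 W, 0 I', 'red')
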
def get_flake8_stats(flake8_stats_file):
    # type: (...) -> Flake8Stats
    """
    Reads a flake8 statistics report (the plain-text summary produced with
    `flake8 --statistics`), given either as a file path or as an open stream.
    """
    if isinstance(flake8_stats_file, str):
        # assume a file path
        with open(flake8_stats_file) as f:
            flake8_stats_txt = f.read()
    else:
        # assume a stream already
        flake8_stats_txt = flake8_stats_file.read()

    return parse_flake8_stats(flake8_stats_txt)

RE_TO_MATCH = re.compile(r"([0-9]+)\s+([A-Z0-9]+)\s.*")
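# For illustration, a typical `flake8 --statistics` summary line (the line
# below is made up) is matched like this, capturing the count and the code:
#
#     RE_TO_MATCH.match("3     E501 line too long (100 > 79 characters)").groups()
#     # -> ('3', 'E501')
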
def parse_flake8_stats(stats_txt  # type: str
                       ):
    # type: (...) -> Flake8Stats
    """
    Parses the text of a flake8 statistics report into a `Flake8Stats` object.
    """
    stats = Flake8Stats()
    for line in stats_txt.splitlines():
        match = RE_TO_MATCH.match(line)
        if not match:  # coverage: this branch was never taken in tests (all lines matched)
            warn("Line in Flake8 statistics report does not match template and will be ignored: %r" % line)
        else:
            nb, code = match.groups()
            stats.add(int(nb), code)

    return stats

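# A hedged end-to-end sketch (the report content is made up, and `Badge` is
# assumed to keep its constructor arguments as attributes): parse a small
# statistics report and turn it into a badge. Requires `flake8_html` for the
# severity lookup.
#
#     stats_txt = ("3     E501 line too long (100 > 79 characters)\n"
#                  "1     F401 'os' imported but unused\n")
#     stats = parse_flake8_stats(stats_txt)
#     badge = get_flake8_badge(stats)
#     badge.left_txt, badge.color  # -> ('flake8', <color chosen by get_color>)
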
# def parse_flake8_html(html  # type: str
#                       ):
#     #
#     """Reads the flake8 html report"""
#     soup = bs4.BeautifulSoup(html, "html.parser")
#
#     # check title
#     title = soup.head.title.get_text()
#     assert title == 'flake8 violations', "Invalid flake8 html report found, unexpected title: %s" % title
#
#     # get page div
#     pagediv = soup.body.find("div", {"id": "page"})
#     assert pagediv.h1.get_text() == 'flake8 violations'
#
#     results_dct = dict()
#     ul_violations = pagediv.ul
#     for li in ul_violations.find_all('li'):
#         # synthesis
#         typ_str, severity_str = li.a.span['class']
#         assert typ_str == 'count'
#         assert severity_str.startswith('sev-')
#         count = int(li.a.span.get_text().strip())
#         worst_severity_nb = int(severity_str[4:])
#
#         count2, file_name = list(li.stripped_strings)
#         assert int(count2) == count
#
#         # we need to access the details because the count is not correct
#         li_href = li.a['href']
#         child_results_dct = parse_child_html(path, li_href)
#         for c_severity_nb, c_count in child_results_dct.items():
#             try:
#                 results_dct[c_severity_nb] += c_count
#             except KeyError:
#                 results_dct[c_severity_nb] = c_count
#
#         assert worst_severity_nb == min(child_results_dct.keys())
#         assert count == sum(child_results_dct.values())
#
#     return results_dct
#
#
# def parse_child_html(path,   # type: str
#                      suffix  # type: str
#                      ):
#     with open(path + suffix) as f:
#         html_child = f.read()
#     soup_child = bs4.BeautifulSoup(html_child, "html.parser")
#
#     # check title
#     title = soup_child.head.title.get_text()
#     assert title.startswith('flake8 violations'), "Invalid flake8 html report found, unexpected title: %s" % title
#
#     # get page div
#     pagediv = soup_child.body.find("div", {"id": "page"})
#     # assert pagediv.h1.get_text() == 'flake8 violations'
#
#     results_dct = dict()
#     ul_violations = pagediv.ul
#     for li in ul_violations.find_all('li', recursive=False):
#         code = li.a['data-code']  # F401, etc.
#         typ_str, severity_str = li.a.span['class']
#         assert typ_str == 'count'
#         assert severity_str.startswith('sev-')
#         count = int(li.a.span.get_text().strip())
#         severity_nb = int(severity_str[4:])
#
#         try:
#             results_dct[severity_nb] += count
#         except KeyError:
#             results_dct[severity_nb] = count
#
#     return results_dct