Mirror of https://github.com/ejwa/gitinspector.git, synced 2025-03-19 23:08:15 +01:00

Fixed some pylint violations.

This commit is contained in:
parent bc9fd8b207
commit bc6be1c56f
4 changed files with 40 additions and 36 deletions
gitinspector

@@ -61,7 +61,9 @@ class BlameThread(threading.Thread):
         self.blames = blames
         self.filename = filename

-    def __clear_blamechunk_information__(self):
+        self.is_inside_comment = False
+
+    def __clear_blamechunk_info__(self):
         self.blamechunk_email = None
         self.blamechunk_is_last = False
         self.blamechunk_is_prior = False
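Moving self.is_inside_comment = False into __init__ (it was previously first assigned inside run(), as the next hunk shows) is presumably aimed at pylint's attribute-defined-outside-init warning (W0201). A minimal sketch of the pattern, with hypothetical class names:

    class Flagged:
        def reset(self):
            self.state = False   # W0201: first assignment happens outside __init__

    class Clean:
        def __init__(self):
            self.state = False   # attribute declared up front

        def reset(self):
            self.state = False   # later re-assignments are unproblematic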
@@ -101,26 +103,25 @@ class BlameThread(threading.Thread):
         rows = git_blame_r.readlines()
         git_blame_r.close()

-        self.is_inside_comment = False
-        self.__clear_blamechunk_information__()
+        self.__clear_blamechunk_info__()

         for j in range(0, len(rows)):
             row = rows[j].decode("utf-8", "replace").strip()
-            lr = row.split(" ", 2)
+            keyval = row.split(" ", 2)

             if self.blamechunk_is_last:
                 self.__handle_blamechunk_content__(row)
-                self.__clear_blamechunk_information__()
-            elif lr[0] == "boundary":
+                self.__clear_blamechunk_info__()
+            elif keyval[0] == "boundary":
                 self.blamechunk_is_prior = True
-            elif lr[0] == "author-mail":
-                self.blamechunk_email = lr[1].lstrip("<").rstrip(">")
-            elif lr[0] == "author-time":
-                self.blamechunk_time = datetime.date.fromtimestamp(int(lr[1]))
-            elif lr[0] == "filename":
+            elif keyval[0] == "author-mail":
+                self.blamechunk_email = keyval[1].lstrip("<").rstrip(">")
+            elif keyval[0] == "author-time":
+                self.blamechunk_time = datetime.date.fromtimestamp(int(keyval[1]))
+            elif keyval[0] == "filename":
                 self.blamechunk_is_last = True
-            elif Blame.is_revision(lr[0]):
-                self.blamechunk_revision = lr[0]
+            elif Blame.is_revision(keyval[0]):
+                self.blamechunk_revision = keyval[0]

         __thread_lock__.release() # Lock controlling the number of threads running

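The loop above consumes the key/value header rows of git blame's porcelain-style output: each row is split with split(" ", 2) and dispatched on the key, and the rename from lr to keyval just gives that pair a pylint-friendly name. A standalone sketch of the same parsing pattern, using a few illustrative porcelain-like lines rather than real repository output:

    import datetime

    # Illustrative header rows in the key/value format (not taken from a real repository).
    SAMPLE_ROWS = [
        "author-mail <alice@example.com>",
        "author-time 1360281600",
        "filename some/file.py",
    ]

    def parse_porcelain_rows(rows):
        info = {}
        for row in rows:
            keyval = row.strip().split(" ", 2)
            if keyval[0] == "author-mail":
                info["email"] = keyval[1].lstrip("<").rstrip(">")
            elif keyval[0] == "author-time":
                info["time"] = datetime.date.fromtimestamp(int(keyval[1]))
            elif keyval[0] == "filename":
                info["filename"] = keyval[1]
        return info

    print(parse_porcelain_rows(SAMPLE_ROWS))  # prints the collected email, date and filename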
@@ -275,7 +276,8 @@ class BlameOutput(Outputable):
         terminal.clear_row()

         print(textwrap.fill(_(BLAME_INFO_TEXT) + ":", width=terminal.get_size()[0]) + "\n")
-        terminal.printb(_("Author").ljust(21) + _("Rows").rjust(10) + _("Stability").rjust(15) + _("Age").rjust(13) + _("% in comments").rjust(20))
+        terminal.printb(_("Author").ljust(21) + _("Rows").rjust(10) + _("Stability").rjust(15) + _("Age").rjust(13) +
+                        _("% in comments").rjust(20))

         for i in sorted(__blame__.get_summed_blames().items()):
             print(i[0].ljust(20)[0:20], end=" ")
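Splitting the terminal.printb(...) argument across two lines looks like a straight line-length fix (pylint C0301): because the expression is still inside the call's parentheses, Python keeps reading it on the next line and the concatenated header string is unchanged. A trivial check of that:

    # Both expressions build exactly the same header string.
    single_line = "Author".ljust(21) + "Rows".rjust(10) + "% in comments".rjust(20)
    wrapped = ("Author".ljust(21) + "Rows".rjust(10) +
               "% in comments".rjust(20))
    assert single_line == wrapped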
@@ -43,7 +43,5 @@ def create(url):
     return url

 def delete():
-    global __cloned_path__
-
     if __cloned_path__:
         shutil.rmtree(__cloned_path__, ignore_errors=True)
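Dropping global __cloned_path__ from delete() is safe because the function only reads the module-level name; a global statement is needed only when a function assigns to the variable, and pylint flags the redundant declaration (W0602, global-variable-not-assigned, if memory serves). A minimal illustration with a hypothetical module variable:

    _CLONED_PATH = None  # hypothetical module-level state

    def delete_clone():
        # Reading a module-level name needs no 'global' statement.
        if _CLONED_PATH:
            print("would remove", _CLONED_PATH)

    def set_path(path):
        # Assignment is the case that actually requires 'global'.
        global _CLONED_PATH
        _CLONED_PATH = path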
@@ -75,8 +75,10 @@ def set_filtered(string, filter_type="file"):

 FILTERING_INFO_TEXT = N_("The following files were excluded from the statistics due to the specified exclusion patterns")
 FILTERING_AUTHOR_INFO_TEXT = N_("The following authors were excluded from the statistics due to the specified exclusion patterns")
-FILTERING_EMAIL_INFO_TEXT = N_("The authors with the following emails were excluded from the statistics due to the specified exclusion patterns")
-FILTERING_EMAIL_INFO_TEXT = N_("The following commit revisions were excluded from the statistics due to the specified exclusion patterns")
+FILTERING_EMAIL_INFO_TEXT = N_("The authors with the following emails were excluded from the statistics due to the specified " \
+                               "exclusion patterns")
+FILTERING_EMAIL_INFO_TEXT = N_("The following commit revisions were excluded from the statistics due to the specified " \
+                               "exclusion patterns")

 class Filtering(Outputable):
     @staticmethod
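The long translatable strings are broken up with implicit adjacent-literal concatenation, so the message handed to N_() stays byte-for-byte identical and only the source line gets shorter (the trailing backslash is redundant inside the parentheses, but harmless). A quick check of the mechanism:

    ONE_LINE = "excluded from the statistics due to the specified exclusion patterns"
    SPLIT = ("excluded from the statistics due to the specified "
             "exclusion patterns")
    assert ONE_LINE == SPLIT  # adjacent string literals are joined at compile time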
@@ -31,7 +31,8 @@ import subprocess
 __metric_eloc__ = {"java": 500, "c": 500, "cpp": 500, "h": 300, "hpp": 300, "php": 500, "py": 500, "glsl": 1000,
                    "rb": 500, "js": 500, "sql": 1000, "xml": 1000}

-__metric_cc_tokens__ = [[["java", "js", "c", "cc", "cpp"], ["else", "for\s+\(.*\)", "if\s+\(.*\)", "case\s+\w+:", "default:", "while\s+\(.*\)"],
+__metric_cc_tokens__ = [[["java", "js", "c", "cc", "cpp"], ["else", "for\s+\(.*\)", "if\s+\(.*\)", "case\s+\w+:",
+                         "default:", "while\s+\(.*\)"],
                          ["assert", "break", "continue", "return"]],
                         [["py"], ["^\s+elif .*:$", "^\s+else:$", "^\s+for .*:", "^\s+if .*:$", "^\s+while .*:$"],
                          ["^\s+assert", "break", "continue", "return"]]]
@@ -59,16 +60,16 @@ class MetricsLogic:

             extension = FileDiff.get_extension(i)
             lines = MetricsLogic.get_eloc(file_r, extension)
-            cc = MetricsLogic.get_cyclomatic_complexity(file_r, extension)
+            cycc = MetricsLogic.get_cyclomatic_complexity(file_r, extension)

             if __metric_eloc__.get(extension, None) != None and __metric_eloc__[extension] < lines:
                 self.eloc[i.strip()] = lines

-            if METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD < cc:
-                self.cyclomatic_complexity[i.strip()] = cc
+            if METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD < cycc:
+                self.cyclomatic_complexity[i.strip()] = cycc

-            if lines > 0 and METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD < cc / float(lines):
-                self.cyclomatic_complexity_density[i.strip()] = cc / float(lines)
+            if lines > 0 and METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD < cycc / float(lines):
+                self.cyclomatic_complexity_density[i.strip()] = cycc / float(lines)

     @staticmethod
     def get_cyclomatic_complexity(file_r, extension):
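Several renames in this commit (lr to keyval, cc to cycc, t to j, n to num) read like fixes for pylint's invalid-name check (C0103): the defaults of that era, as far as I recall, required local names to match roughly [a-z_][a-z0-9_]{2,30}$ while whitelisting a handful of short "good names" such as i, j, k and _ (which is why t can become j but cc cannot stay). A small sketch that replays those assumed defaults against the names touched here:

    import re

    # Assumed pylint defaults; adjust to the project's actual pylintrc.
    VARIABLE_RGX = re.compile(r"[a-z_][a-z0-9_]{2,30}$")
    GOOD_NAMES = {"i", "j", "k", "ex", "Run", "_"}

    def acceptable(name):
        return name in GOOD_NAMES or bool(VARIABLE_RGX.match(name))

    for old, new in [("lr", "keyval"), ("cc", "cycc"), ("t", "j"), ("n", "num")]:
        print(old, acceptable(old), "->", new, acceptable(new))
    # Each old name fails the check; each replacement passes.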
@@ -89,13 +90,13 @@ class MetricsLogic:
                 (_, is_inside_comment) = comment.handle_comment_block(is_inside_comment, extension, i)

                 if not is_inside_comment and not comment.is_comment(extension, i):
-                    for t in entry_tokens:
-                        if re.search(t, i, re.DOTALL):
+                    for j in entry_tokens:
+                        if re.search(j, i, re.DOTALL):
                             cc_counter += 2
-                    for t in exit_tokens:
-                        if re.search(t, i, re.DOTALL):
+                    for j in exit_tokens:
+                        if re.search(j, i, re.DOTALL):
                             cc_counter += 1
-            return cc_counter;
+            return cc_counter

         return -1

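get_cyclomatic_complexity() estimates complexity with regular expressions: each match of an "entry" token (branches and loops) adds 2 to the counter and each "exit" token (assert/break/continue/return) adds 1, and the dropped trailing semicolon on return cc_counter additionally silences pylint's unnecessary-semicolon warning (W0301). A self-contained sketch of that counting scheme for Python sources, reusing the "py" token lists from __metric_cc_tokens__ above (written as raw strings here):

    import re

    ENTRY_TOKENS = [r"^\s+elif .*:$", r"^\s+else:$", r"^\s+for .*:", r"^\s+if .*:$", r"^\s+while .*:$"]
    EXIT_TOKENS = [r"^\s+assert", "break", "continue", "return"]

    def estimate_cyclomatic_complexity(lines):
        counter = 0
        for line in lines:
            for token in ENTRY_TOKENS:
                if re.search(token, line, re.DOTALL):
                    counter += 2
            for token in EXIT_TOKENS:
                if re.search(token, line, re.DOTALL):
                    counter += 1
        return counter

    SAMPLE = [
        "def f(x):",
        "    if x > 0:",
        "        return x",
        "    return -x",
    ]
    print(estimate_cyclomatic_complexity(SAMPLE))  # 2 (one if) + 2 (two returns) = 4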
@@ -115,7 +116,8 @@ class MetricsLogic:

 ELOC_INFO_TEXT = N_("The following files are suspiciously big (in order of severity)")
 CYCLOMATIC_COMPLEXITY_TEXT = N_("The following files have an elevated cyclomatic complexity (in order of severity)")
-CYCLOMATIC_COMPLEXITY_DENSITY_TEXT = N_("The following files have an elevated cyclomatic complexity density (in order of severity)")
+CYCLOMATIC_COMPLEXITY_DENSITY_TEXT = N_("The following files have an elevated cyclomatic complexity density " \
+                                        "(in order of severity)")
 METRICS_MISSING_INFO_TEXT = N_("No metrics violations were found in the repository")

 METRICS_VIOLATION_SCORES = [[1.0, "minimal"], [1.25, "minor"], [1.5, "medium"], [2.0, "bad"], [3.0, "severe"]]
@@ -156,25 +158,25 @@ class Metrics(Outputable):

         if metrics_logic.eloc:
             metrics_xml += "<div><h4>" + _(ELOC_INFO_TEXT) + ".</h4>"
-            for n, i in enumerate(sorted(set([(j, i) for (i, j) in metrics_logic.eloc.items()]), reverse = True)):
+            for num, i in enumerate(sorted(set([(j, i) for (i, j) in metrics_logic.eloc.items()]), reverse = True)):
                 metrics_xml += "<div class=\"" + __get_metrics_score__(__metric_eloc__[FileDiff.get_extension(i[1])], i[0]) + \
-                               (" odd\">" if n % 2 == 1 else "\">") + \
+                               (" odd\">" if num % 2 == 1 else "\">") + \
                                _("{0} ({1} estimated lines of code)").format(i[1], str(i[0])) + "</div>"
             metrics_xml += "</div>"

         if metrics_logic.cyclomatic_complexity:
             metrics_xml += "<div><h4>" + _(CYCLOMATIC_COMPLEXITY_TEXT) + "</h4>"
-            for n, i in enumerate(sorted(set([(j, i) for (i, j) in metrics_logic.cyclomatic_complexity.items()]), reverse = True)):
+            for num, i in enumerate(sorted(set([(j, i) for (i, j) in metrics_logic.cyclomatic_complexity.items()]), reverse = True)):
                 metrics_xml += "<div class=\"" + __get_metrics_score__(METRIC_CYCLOMATIC_COMPLEXITY_THRESHOLD, i[0]) + \
-                               (" odd\">" if n % 2 == 1 else "\">") + \
+                               (" odd\">" if num % 2 == 1 else "\">") + \
                                _("{0} ({1} in cyclomatic complexity)").format(i[1], str(i[0])) + "</div>"
             metrics_xml += "</div>"

         if metrics_logic.cyclomatic_complexity_density:
             metrics_xml += "<div><h4>" + _(CYCLOMATIC_COMPLEXITY_DENSITY_TEXT) + "</h4>"
-            for n, i in enumerate(sorted(set([(j, i) for (i, j) in metrics_logic.cyclomatic_complexity_density.items()]), reverse = True)):
+            for num, i in enumerate(sorted(set([(j, i) for (i, j) in metrics_logic.cyclomatic_complexity_density.items()]), reverse = True)):
                 metrics_xml += "<div class=\"" + __get_metrics_score__(METRIC_CYCLOMATIC_COMPLEXITY_DENSITY_THRESHOLD, i[0]) + \
-                               (" odd\">" if n % 2 == 1 else "\">") + \
+                               (" odd\">" if num % 2 == 1 else "\">") + \
                                _("{0} ({1:.3f} in cyclomatic complexity density)").format(i[1], i[0]) + "</div>"
             metrics_xml += "</div>"

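The three loops above only rename the enumerate counter from n to num; the counter's sole job is to alternate an " odd" CSS class on every other generated <div> row. A stripped-down sketch of that zebra-striping pattern, with made-up row data and a plain class name standing in for __get_metrics_score__():

    ROWS = [("fileA.py", 512), ("fileB.c", 987), ("fileC.js", 1501)]  # hypothetical (name, eloc) pairs

    xml = ""
    for num, (name, eloc) in enumerate(sorted(ROWS, key=lambda r: r[1], reverse=True)):
        xml += ("<div class=\"score" + (" odd\">" if num % 2 == 1 else "\">") +
                "{0} ({1} estimated lines of code)".format(name, eloc) + "</div>")
    print(xml)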