raw_metrics.py 4.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125
  1. # This program is free software; you can redistribute it and/or modify it under
  2. # the terms of the GNU General Public License as published by the Free Software
  3. # Foundation; either version 2 of the License, or (at your option) any later
  4. # version.
  5. #
  6. # This program is distributed in the hope that it will be useful, but WITHOUT
  7. # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
  8. # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
  9. #
  10. # You should have received a copy of the GNU General Public License along with
  11. # this program; if not, write to the Free Software Foundation, Inc.,
  12. # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
  13. """ Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
  14. http://www.logilab.fr/ -- mailto:contact@logilab.fr
  15. Raw metrics checker
  16. """
  17. import tokenize
  18. # pylint now requires python >= 2.2, so this is no longer necessary
  19. #if not hasattr(tokenize, 'NL'):
  20. # raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
  21. from logilab.common.ureports import Table
  22. from pylint.interfaces import IRawChecker
  23. from pylint.checkers import BaseRawChecker, EmptyReport
  24. from pylint.reporters import diff_string
  25. def report_raw_stats(sect, stats, old_stats):
  26. """calculate percentage of code / doc / comment / empty
  27. """
  28. total_lines = stats['total_lines']
  29. if not total_lines:
  30. raise EmptyReport()
  31. sect.description = '%s lines have been analyzed' % total_lines
  32. lines = ('type', 'number', '%', 'previous', 'difference')
  33. for node_type in ('code', 'docstring', 'comment', 'empty'):
  34. key = node_type + '_lines'
  35. total = stats[key]
  36. percent = float(total * 100) / total_lines
  37. old = old_stats.get(key, None)
  38. if old is not None:
  39. diff_str = diff_string(old, total)
  40. else:
  41. old, diff_str = 'NC', 'NC'
  42. lines += (node_type, str(total), '%.2f' % percent,
  43. str(old), diff_str)
  44. sect.append(Table(children=lines, cols=5, rheaders=1))
  45. class RawMetricsChecker(BaseRawChecker):
  46. """does not check anything but gives some raw metrics :
  47. * total number of lines
  48. * total number of code lines
  49. * total number of docstring lines
  50. * total number of comments lines
  51. * total number of empty lines
  52. """
  53. __implements__ = (IRawChecker,)
  54. # configuration section name
  55. name = 'metrics'
  56. # configuration options
  57. options = ( )
  58. # messages
  59. msgs = {}
  60. # reports
  61. reports = ( ('RP0701', 'Raw metrics', report_raw_stats), )
  62. def __init__(self, linter):
  63. BaseRawChecker.__init__(self, linter)
  64. self.stats = None
  65. def open(self):
  66. """init statistics"""
  67. self.stats = self.linter.add_stats(total_lines=0, code_lines=0,
  68. empty_lines=0, docstring_lines=0,
  69. comment_lines=0)
  70. def process_tokens(self, tokens):
  71. """update stats"""
  72. i = 0
  73. tokens = list(tokens)
  74. while i < len(tokens):
  75. i, lines_number, line_type = get_type(tokens, i)
  76. self.stats['total_lines'] += lines_number
  77. self.stats[line_type] += lines_number
  78. JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)
  79. def get_type(tokens, start_index):
  80. """return the line type : docstring, comment, code, empty"""
  81. i = start_index
  82. tok_type = tokens[i][0]
  83. start = tokens[i][2]
  84. pos = start
  85. line_type = None
  86. while i < len(tokens) and tokens[i][2][0] == start[0]:
  87. tok_type = tokens[i][0]
  88. pos = tokens[i][3]
  89. if line_type is None:
  90. if tok_type == tokenize.STRING:
  91. line_type = 'docstring_lines'
  92. elif tok_type == tokenize.COMMENT:
  93. line_type = 'comment_lines'
  94. elif tok_type in JUNK:
  95. pass
  96. else:
  97. line_type = 'code_lines'
  98. i += 1
  99. if line_type is None:
  100. line_type = 'empty_lines'
  101. elif i < len(tokens) and tok_type == tokenize.NEWLINE:
  102. i += 1
  103. return i, pos[0] - start[0] + 1, line_type
  104. def register(linter):
  105. """ required method to auto register this checker """
  106. linter.register_checker(RawMetricsChecker(linter))