diff --git a/tools/report-converter/codechecker_report_converter/report/__init__.py b/tools/report-converter/codechecker_report_converter/report/__init__.py index a426eb2d11..cfa257d1da 100644 --- a/tools/report-converter/codechecker_report_converter/report/__init__.py +++ b/tools/report-converter/codechecker_report_converter/report/__init__.py @@ -291,7 +291,8 @@ def __init__( bug_path_events: Optional[List[BugPathEvent]] = None, bug_path_positions: Optional[List[BugPathPosition]] = None, notes: Optional[List[BugPathEvent]] = None, - macro_expansions: Optional[List[MacroExpansion]] = None + macro_expansions: Optional[List[MacroExpansion]] = None, + annotations: Optional[Dict[str, str]] = None ): self.analyzer_result_file_path = analyzer_result_file_path self.file = file @@ -304,6 +305,7 @@ def __init__( self.analyzer_name = analyzer_name self.category = category self.type = type + self.annotations = annotations self.bug_path_events = bug_path_events \ if bug_path_events is not None else \ diff --git a/tools/report-converter/codechecker_report_converter/report/parser/plist.py b/tools/report-converter/codechecker_report_converter/report/parser/plist.py index ebe941558d..4e279dae82 100644 --- a/tools/report-converter/codechecker_report_converter/report/parser/plist.py +++ b/tools/report-converter/codechecker_report_converter/report/parser/plist.py @@ -244,6 +244,9 @@ def __create_report( analyzer_name = self.__get_analyzer_name(checker_name, metadata) severity = self.get_severity(checker_name) + report_annotation = diag["report-annotation"] \ + if "report-annotation" in diag else None + return Report( analyzer_result_file_path=analyzer_result_file_path, file=files[location['file']], @@ -259,7 +262,8 @@ def __create_report( bug_path_events=self.__get_bug_path_events(diag, files), bug_path_positions=self.__get_bug_path_positions(diag, files), notes=self.__get_notes(diag, files), - macro_expansions=self.__get_macro_expansions(diag, files)) + macro_expansions=self.__get_macro_expansions(diag, files), + annotations=report_annotation) def __get_analyzer_name( self, diff --git a/web/api/codechecker_api_shared.thrift b/web/api/codechecker_api_shared.thrift index ac88b857f7..167f8ab40b 100644 --- a/web/api/codechecker_api_shared.thrift +++ b/web/api/codechecker_api_shared.thrift @@ -12,6 +12,7 @@ enum ErrorCode { UNAUTHORIZED, // Authorization denied. User does not have right to perform an action. API_MISMATCH, // The client attempted to query an API version that is not supported by the server. SOURCE_FILE, // The client sent a source code which contains errors (e.g.: source code comment errors). + REPORT_FORMAT, // The client sent a report with wrong format (e.g. 
report annotation has bad type in a .plist) } exception RequestFailed { diff --git a/web/api/js/codechecker-api-node/dist/codechecker-api-6.52.0.tgz b/web/api/js/codechecker-api-node/dist/codechecker-api-6.52.0.tgz deleted file mode 100644 index 376e5ca1dd..0000000000 Binary files a/web/api/js/codechecker-api-node/dist/codechecker-api-6.52.0.tgz and /dev/null differ diff --git a/web/api/js/codechecker-api-node/dist/codechecker-api-6.53.0.tgz b/web/api/js/codechecker-api-node/dist/codechecker-api-6.53.0.tgz new file mode 100644 index 0000000000..416c878d13 Binary files /dev/null and b/web/api/js/codechecker-api-node/dist/codechecker-api-6.53.0.tgz differ diff --git a/web/api/js/codechecker-api-node/package.json b/web/api/js/codechecker-api-node/package.json index 2ddf5aac4a..e876b60239 100644 --- a/web/api/js/codechecker-api-node/package.json +++ b/web/api/js/codechecker-api-node/package.json @@ -1,6 +1,6 @@ { "name": "codechecker-api", - "version": "6.52.0", + "version": "6.53.0", "description": "Generated node.js compatible API stubs for CodeChecker server.", "main": "lib", "homepage": "https://github.com/Ericsson/codechecker", diff --git a/web/api/py/codechecker_api/dist/codechecker_api.tar.gz b/web/api/py/codechecker_api/dist/codechecker_api.tar.gz index 7adb8ce65d..fe714e89f6 100644 Binary files a/web/api/py/codechecker_api/dist/codechecker_api.tar.gz and b/web/api/py/codechecker_api/dist/codechecker_api.tar.gz differ diff --git a/web/api/py/codechecker_api/setup.py b/web/api/py/codechecker_api/setup.py index e873d11dd1..f5627f0eed 100644 --- a/web/api/py/codechecker_api/setup.py +++ b/web/api/py/codechecker_api/setup.py @@ -8,7 +8,7 @@ with open('README.md', encoding='utf-8', errors="ignore") as f: long_description = f.read() -api_version = '6.52.0' +api_version = '6.53.0' setup( name='codechecker_api', diff --git a/web/api/py/codechecker_api_shared/dist/codechecker_api_shared.tar.gz b/web/api/py/codechecker_api_shared/dist/codechecker_api_shared.tar.gz index 5388f01ff0..098e124d0a 100644 Binary files a/web/api/py/codechecker_api_shared/dist/codechecker_api_shared.tar.gz and b/web/api/py/codechecker_api_shared/dist/codechecker_api_shared.tar.gz differ diff --git a/web/api/py/codechecker_api_shared/setup.py b/web/api/py/codechecker_api_shared/setup.py index 1e1bdcc8dd..f16016b4e3 100644 --- a/web/api/py/codechecker_api_shared/setup.py +++ b/web/api/py/codechecker_api_shared/setup.py @@ -8,7 +8,7 @@ with open('README.md', encoding='utf-8', errors="ignore") as f: long_description = f.read() -api_version = '6.52.0' +api_version = '6.53.0' setup( name='codechecker_api_shared', diff --git a/web/api/report_server.thrift b/web/api/report_server.thrift index 896425596f..c8ba910969 100644 --- a/web/api/report_server.thrift +++ b/web/api/report_server.thrift @@ -72,6 +72,7 @@ enum SortType { REVIEW_STATUS, DETECTION_STATUS, BUG_PATH_LENGTH, + TIMESTAMP, } enum RunSortType { @@ -98,6 +99,11 @@ enum CommentKind { SYSTEM // System events. } +struct Pair { + 1: string first, + 2: string second +} + struct SourceFileData { 1: i64 fileId, 2: string filePath, @@ -315,7 +321,10 @@ struct ReportData { 15: i64 bugPathLength, // Length of the bug path. 16: optional ReportDetails details, // Details of the report. 17: optional string analyzerName, // Analyzer name. - 18: optional string timeStamp, // Timestamp for dynamic analyzers. + // Report annotations are key-value pairs attached to a report. This is a set + // of custom labels that describe some properties of a report. 
For example, the
+  // timestamp when the report was actually emitted, in case of dynamic
+  // analyzers.
+  18: optional map<string, string> annotations,
 }
 
 typedef list<ReportData> ReportDataList
@@ -364,6 +373,12 @@ struct ReportFilter {
   20: optional bool              fileMatchesAnyPoint,
   // Similar to fileMatchesAnyPoint but for component filtering.
   21: optional bool              componentMatchesAnyPoint,
+  // Filter reports that match certain annotations. Annotations are key-value
+  // pairs; as a filter field, however, a list of pairs is expected so that
+  // several possible values can be listed for the same key. For example:
+  // [(key1, value1), (key1, value2), (key2, value3)] returns reports which
+  // have "value1" OR "value2" for "key1" AND have "value3" for "key2".
+  22: optional list<Pair>        annotations,
 }
 
 struct RunReportCount {
@@ -575,7 +590,6 @@ service codeCheckerDBAccess {
                             7: optional bool getDetails)
                             throws (1: codechecker_api_shared.RequestFailed requestError),
 
-
   // Count the results separately for multiple runs.
   // If an empty run id list is provided the report
   // counts will be calculated for all of the available runs.
diff --git a/web/codechecker_web/shared/version.py b/web/codechecker_web/shared/version.py
index 60867c84d7..52c8d760d9 100644
--- a/web/codechecker_web/shared/version.py
+++ b/web/codechecker_web/shared/version.py
@@ -18,7 +18,7 @@
 # The newest supported minor version (value) for each supported major version
 # (key) in this particular build.
 SUPPORTED_VERSIONS = {
-    6: 52
+    6: 53
 }
 
 # Used by the client to automatically identify the latest major and minor
diff --git a/web/server/codechecker_server/api/mass_store_run.py b/web/server/codechecker_server/api/mass_store_run.py
index 13c00f13c4..fa260722d1 100644
--- a/web/server/codechecker_server/api/mass_store_run.py
+++ b/web/server/codechecker_server/api/mass_store_run.py
@@ -38,8 +38,9 @@
 from ..database.config_db_model import Product
 from ..database.database import DBSession
 from ..database.run_db_model import AnalysisInfo, AnalyzerStatistic, \
-    BugPathEvent, BugReportPoint, ExtendedReportData, File, FileContent, \
-    Report as DBReport, ReviewStatus, Run, RunHistory, RunLock
+    BugPathEvent, BugReportPoint, ReportAnnotations, ExtendedReportData, \
+    File, FileContent, Report as DBReport, ReviewStatus, Run, RunHistory, \
+    RunLock
 
 from ..metadata import checker_is_unavailable, MetadataInfoParser
 from .report_server import ThriftRequestHandler
@@ -941,6 +942,10 @@ def get_missing_file_ids(report: Report) -> List[str]:
                 review_status, scc, detection_status, detected_at,
                 run_history_time, analysis_info, analyzer_name, fixed_at)
 
+            if report.annotations:
+                self.__validate_and_add_report_annotations(
+                    session, report_id, report.annotations)
+
             self.__new_report_hashes.add(report.report_hash)
             self.__already_added_report_hashes.add(report_path_hash)
 
@@ -948,6 +953,53 @@ def get_missing_file_ids(report: Report) -> List[str]:
 
         return True
 
+    def __validate_and_add_report_annotations(
+        self,
+        session: DBSession,
+        report_id: int,
+        report_annotation: Dict
+    ):
+        """
+        Check the format of the report annotations. For example, a
+        "timestamp" annotation must be in datetime format. If the format
+        doesn't match, an exception is thrown; otherwise the annotation is
+        added to the database.
+        """
+        ALLOWED_TYPES = {
+            "datetime": {
+                "func": datetime.fromisoformat,
+                "display": "date-time in ISO format"
+            },
+            "string": {
+                "func": str,
+                "display": "string"
+            }
+        }
+
+        ALLOWED_ANNOTATIONS = {
+            "timestamp": ALLOWED_TYPES["datetime"],
+            "testsuite": ALLOWED_TYPES["string"],
+            "testcase": ALLOWED_TYPES["string"]
+        }
+
+        for key, value in report_annotation.items():
+            try:
+                ALLOWED_ANNOTATIONS[key]["func"](value)
+                session.add(ReportAnnotations(report_id, key, value))
+            except KeyError:
+                raise codechecker_api_shared.ttypes.RequestFailed(
+                    codechecker_api_shared.ttypes.ErrorCode.REPORT_FORMAT,
+                    f"'{key}' is not an allowed report annotation.",
+                    ALLOWED_ANNOTATIONS.keys())
+            except ValueError:
+                raise codechecker_api_shared.ttypes.RequestFailed(
+                    codechecker_api_shared.ttypes.ErrorCode.REPORT_FORMAT,
+                    f"'{value}' has wrong format. '{key}' annotations must be "
+                    f"'{ALLOWED_ANNOTATIONS[key]['display']}'."
+                )
+
+        session.flush()
+
     def __store_reports(
         self,
         session: DBSession,
diff --git a/web/server/codechecker_server/api/report_server.py b/web/server/codechecker_server/api/report_server.py
index 5c17c003f3..905e82a61f 100644
--- a/web/server/codechecker_server/api/report_server.py
+++ b/web/server/codechecker_server/api/report_server.py
@@ -52,9 +52,10 @@
 from ..database.database import conv, DBSession, escape_like
 from ..database.run_db_model import \
     AnalysisInfo, AnalyzerStatistic, BugPathEvent, BugReportPoint, \
-    CleanupPlan, CleanupPlanReportHash, Comment, ExtendedReportData, File, \
-    FileContent, Report, ReportAnalysisInfo, ReviewStatus, Run, RunHistory, \
-    RunHistoryAnalysisInfo, RunLock, SourceComponent
+    CleanupPlan, CleanupPlanReportHash, Comment, ReportAnnotations, \
+    ExtendedReportData, File, FileContent, Report, ReportAnalysisInfo, \
+    ReviewStatus, Run, RunHistory, RunHistoryAnalysisInfo, RunLock, \
+    SourceComponent
 
 from .thrift_enum_helper import detection_status_enum, \
     detection_status_str, review_status_enum, review_status_str, \
@@ -953,7 +954,8 @@ def get_sort_map(sort_types, is_unique=False):
         SortType.CHECKER_NAME: [(Report.checker_id, 'checker_id')],
         SortType.SEVERITY: [(Report.severity, 'severity')],
         SortType.REVIEW_STATUS: [(Report.review_status, 'rw_status')],
-        SortType.DETECTION_STATUS: [(Report.detection_status, 'dt_status')]}
+        SortType.DETECTION_STATUS: [(Report.detection_status, 'dt_status')],
+        SortType.TIMESTAMP: [('annotation_timestamp', 'annotation_timestamp')]}
 
     if is_unique:
         sort_type_map[SortType.FILENAME] = [(File.filename, 'filename')]
@@ -1745,6 +1747,71 @@ def getRunResults(self, run_ids, limit, offset, sort_types,
             filter_expression, join_tables = process_report_filter(
                 session, run_ids, report_filter, cmp_data)
 
+            # Extending "reports" table with report annotation columns.
+            #
+            # Suppose that we have these tables in the database:
+            #
+            # reports
+            # =================
+            # id, severity, ...
+            # -----------------
+            # 1, HIGH, ...
+            # 2, MEDIUM, ...
+            #
+            # report_annotations
+            # =======================
+            # report_id, key, value
+            # -----------------------
+            # 1, key1, value1
+            # 1, key2, value2
+            # 2, key1, value3
+            #
+            # The resulting table should look like this:
+            #
+            # reports extended
+            # ===================================================
+            # id, severity, ..., annotation_key1, annotation_key2
+            # ---------------------------------------------------
+            # 1, HIGH, ..., value1, value2
+            # 2, MEDIUM, ..., value3, NULL
+            #
+            # The SQL query which produces this table is similar to this:
+            #
+            # SELECT
+            #   <columns of the reports table>,
+            #   MAX(CASE WHEN report_annotations.key == <key1> THEN
+            #     report_annotations.value END) AS annotation_<key1>,
+            #   MAX(CASE WHEN report_annotations.key == <key2> THEN
+            #     report_annotations.value END) AS annotation_<key2>
+            # FROM
+            #   reports
+            # LEFT OUTER JOIN report_annotations ON
+            #   report_annotations.report_id = reports.id
+            # GROUP BY
+            #   reports.id;
+            #
+            # <key1>, <key2>, ... are the distinct keys in table
+            # "report_annotations". These are collected in a previous query.
+            #
+            # Since the join produces one row for every matching
+            # (report, annotation) pair, a report occurs as many times in the
+            # joined result as it has annotations. These rows have to be
+            # collapsed per report ID, which is the reason for the aggregating
+            # MAX() functions.
+            #
+            # TODO: The creation of this extended table should be produced by
+            # a helper function and it could be used as a sub-query in every
+            # other query which originally works on the "reports" table.
+
+            annotation_keys = list(map(
+                lambda x: x[0],
+                session.query(ReportAnnotations.key).distinct().all()))
+
+            annotation_cols = [
+                func.max(sqlalchemy.case([(
+                    ReportAnnotations.key == col,
+                    ReportAnnotations.value)])).label(f"annotation_{col}")
+                for col in annotation_keys]
+
             is_unique = report_filter is not None and report_filter.isUnique
             if is_unique:
                 sort_types, sort_type_map, order_type_map = \
@@ -1779,18 +1846,24 @@ def getRunResults(self, run_ids, limit, offset, sort_types,
                 sorted_reports = sorted_reports \
                     .limit(limit).offset(offset).subquery()
 
-                q = session.query(Report, File.filename) \
-                    .outerjoin(File, Report.file_id == File.id) \
+                q = session.query(Report, File.filename, *annotation_cols) \
+                    .outerjoin(
+                        ReportAnnotations,
+                        Report.id == ReportAnnotations.report_id) \
                     .outerjoin(sorted_reports, sorted_reports.c.id == Report.id) \
                     .filter(sorted_reports.c.id.isnot(None))
 
+                if File not in join_tables:
+                    q = q.outerjoin(File, Report.file_id == File.id)
+
                 # We have to sort the results again because an ORDER BY in a
                 # subtable is broken by the JOIN.
q = sort_results_query(q, sort_types, sort_type_map, order_type_map) + q = q.group_by(Report.id, File.id) query_result = q.all() @@ -1800,7 +1873,13 @@ def getRunResults(self, run_ids, limit, offset, sort_types, report_ids = [r[0].id for r in query_result] report_details = get_report_details(session, report_ids) - for report, filename in query_result: + for row in query_result: + report = row[0] + filename = row[1] + annotations = { + k: v for k, v in zip(annotation_keys, row[2:]) + if v is not None} + review_data = create_review_data( report.review_status, report.review_status_message, @@ -1826,15 +1905,28 @@ def getRunResults(self, run_ids, limit, offset, sort_types, fixedAt=str(report.fixed_at), bugPathLength=report.path_length, details=report_details.get(report.id), - analyzerName=report.analyzer_name)) + analyzerName=report.analyzer_name, + annotations=annotations)) else: # not is_unique sort_types, sort_type_map, order_type_map = \ get_sort_map(sort_types) - if SortType.FILENAME in map(lambda x: x.type, sort_types): - join_tables.append(File) + q = session.query(Report, File.filepath, *annotation_cols) \ + .outerjoin( + ReportAnnotations, + Report.id == ReportAnnotations.report_id) + + if File not in join_tables: + q = q.outerjoin(File, Report.file_id == File.id) + + # Grouping by "reports.id" is described at the beginning of + # this function. Grouping by "files.id" is necessary, because + # "files" table is joined for gathering file names belonging to + # the given report. According to SQL syntax if there is a group + # by report IDs then files should also be either grouped or an + # aggregate function must be applied on them. + q = q.group_by(Report.id, File.id) - q = session.query(Report) q = apply_report_filter(q, filter_expression, join_tables) q = sort_results_query(q, sort_types, sort_type_map, @@ -1847,25 +1939,16 @@ def getRunResults(self, run_ids, limit, offset, sort_types, # Get report details if it is required. report_details = {} if get_details: - report_ids = [r.id for r in query_result] + report_ids = [r[0].id for r in query_result] report_details = get_report_details(session, report_ids) - # Earlier file table was joined to the query of reports. - # However, that created an SQL strategy in PostgreSQL which - # resulted in timeout. Based on heuristics the query strategy - # (which not only depends on the query statement but the table - # size and many other things) may contain an inner loop on - # "reports" table which is one of the largest tables. This - # separate query of file table results a query strategy for the - # previous query which doesn't contain such an inner loop. See - # EXPLAIN SELECT columns FROM ... 
- file_ids = set(map(lambda r: r.file_id, query_result)) - all_files = dict() - for chunk in util.chunks(file_ids, SQLITE_MAX_VARIABLE_NUMBER): - all_files.update(dict(session.query(File.id, File.filepath) - .filter(File.id.in_(chunk)).all())) - - for report in query_result: + for row in query_result: + report = row[0] + filepath = row[1] + annotations = { + k: v for k, v in zip(annotation_keys, row[2:]) + if v is not None} + review_data = create_review_data( report.review_status, report.review_status_message, @@ -1876,7 +1959,7 @@ def getRunResults(self, run_ids, limit, offset, sort_types, results.append( ReportData(runId=report.run_id, bugHash=report.bug_id, - checkedFile=all_files[report.file_id], + checkedFile=filepath, checkerMsg=report.checker_message, reportId=report.id, fileId=report.file_id, @@ -1892,7 +1975,8 @@ def getRunResults(self, run_ids, limit, offset, sort_types, report.fixed_at else None, bugPathLength=report.path_length, details=report_details.get(report.id), - analyzerName=report.analyzer_name)) + analyzerName=report.analyzer_name, + annotations=annotations)) return results diff --git a/web/server/codechecker_server/database/run_db_model.py b/web/server/codechecker_server/database/run_db_model.py index 0ed58c720e..1faba6c088 100644 --- a/web/server/codechecker_server/database/run_db_model.py +++ b/web/server/codechecker_server/database/run_db_model.py @@ -401,6 +401,8 @@ class Report(Base): 'confirm_deleted_rows': False } + annotations = relationship("ReportAnnotations") + # Priority/severity etc... def __init__(self, run_id, bug_id, file_id, checker_message, checker_id, checker_cat, bug_type, line, column, severity, review_status, @@ -429,6 +431,23 @@ def __init__(self, run_id, bug_id, file_id, checker_message, checker_id, self.analyzer_name = analyzer_name +class ReportAnnotations(Base): + __tablename__ = "report_annotations" + + def __init__(self, report_id: int, key: str, value: str): + self.report_id = report_id + self.key = key + self.value = value + + report_id = Column( + Integer, + ForeignKey("reports.id", ondelete="CASCADE"), + primary_key=True, + index=True) + key = Column(String, primary_key=True, nullable=False) + value = Column(String, nullable=False) + + class Comment(Base): __tablename__ = 'comments' diff --git a/web/server/codechecker_server/migrations/report/versions/9d956a0fae8d_report_annotations.py b/web/server/codechecker_server/migrations/report/versions/9d956a0fae8d_report_annotations.py new file mode 100644 index 0000000000..85dc5dc376 --- /dev/null +++ b/web/server/codechecker_server/migrations/report/versions/9d956a0fae8d_report_annotations.py @@ -0,0 +1,32 @@ +"""Report annotations + +Revision ID: 9d956a0fae8d +Revises: 75ae226b5d88 +Create Date: 2023-02-09 17:45:56.162040 + +""" + +# revision identifiers, used by Alembic. 
+revision = '9d956a0fae8d' +down_revision = '75ae226b5d88' +branch_labels = None +depends_on = None + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + op.create_table('report_annotations', + sa.Column('report_id', sa.Integer(), nullable=False), + sa.Column('key', sa.String(), nullable=False), + sa.Column('value', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['report_id'], ['reports.id'], name=op.f('fk_report_annotations_report_id_reports'), ondelete='CASCADE'), + sa.PrimaryKeyConstraint('report_id', 'key', name=op.f('pk_report_annotations')) + ) + op.create_index(op.f('ix_report_annotations_report_id'), 'report_annotations', ['report_id'], unique=False) + + +def downgrade(): + op.drop_index(op.f('ix_report_annotations_report_id'), table_name='report_annotations') + op.drop_table('report_annotations') diff --git a/web/server/vue-cli/package-lock.json b/web/server/vue-cli/package-lock.json index 50acde7c48..0e4ffb32db 100644 --- a/web/server/vue-cli/package-lock.json +++ b/web/server/vue-cli/package-lock.json @@ -11,7 +11,7 @@ "@mdi/font": "^6.5.95", "chart.js": "^2.9.4", "chartjs-plugin-datalabels": "^0.7.0", - "codechecker-api": "file:../../api/js/codechecker-api-node/dist/codechecker-api-6.52.0.tgz", + "codechecker-api": "file:../../api/js/codechecker-api-node/dist/codechecker-api-6.53.0.tgz", "codemirror": "^5.65.0", "date-fns": "^2.28.0", "js-cookie": "^3.0.1", @@ -4869,9 +4869,9 @@ } }, "node_modules/codechecker-api": { - "version": "6.52.0", - "resolved": "file:../../api/js/codechecker-api-node/dist/codechecker-api-6.52.0.tgz", - "integrity": "sha512-AveYaEuNpNsGVU6O56jIELG0yM9m3rCbqvf5WOT1EWgpHnpjzHm/WTS1VKxbWcn2prDhfizgnM9Z2+Qqlaw4kw==", + "version": "6.53.0", + "resolved": "file:../../api/js/codechecker-api-node/dist/codechecker-api-6.53.0.tgz", + "integrity": "sha512-156RuFb5HCRhvxVbvJDbNKUsjxuga4DdjaiC0FbIORhQlIt/Wm1RtBxx9mPVp+YTJCpWh3MxFTrvxH8JlIb6zA==", "license": "SEE LICENSE IN LICENSE", "dependencies": { "thrift": "0.13.0-hotfix.1" @@ -19853,8 +19853,8 @@ "dev": true }, "codechecker-api": { - "version": "file:../../api/js/codechecker-api-node/dist/codechecker-api-6.52.0.tgz", - "integrity": "sha512-AveYaEuNpNsGVU6O56jIELG0yM9m3rCbqvf5WOT1EWgpHnpjzHm/WTS1VKxbWcn2prDhfizgnM9Z2+Qqlaw4kw==", + "version": "file:../../api/js/codechecker-api-node/dist/codechecker-api-6.53.0.tgz", + "integrity": "sha512-156RuFb5HCRhvxVbvJDbNKUsjxuga4DdjaiC0FbIORhQlIt/Wm1RtBxx9mPVp+YTJCpWh3MxFTrvxH8JlIb6zA==", "requires": { "thrift": "0.13.0-hotfix.1" } diff --git a/web/server/vue-cli/package.json b/web/server/vue-cli/package.json index 646a038eba..a748087451 100644 --- a/web/server/vue-cli/package.json +++ b/web/server/vue-cli/package.json @@ -27,20 +27,20 @@ }, "dependencies": { "@mdi/font": "^6.5.95", - "codechecker-api": "file:../../api/js/codechecker-api-node/dist/codechecker-api-6.52.0.tgz", "chart.js": "^2.9.4", "chartjs-plugin-datalabels": "^0.7.0", + "codechecker-api": "file:../../api/js/codechecker-api-node/dist/codechecker-api-6.53.0.tgz", "codemirror": "^5.65.0", "date-fns": "^2.28.0", - "jsplumb": "^2.15.6", "js-cookie": "^3.0.1", + "jsplumb": "^2.15.6", "lodash": "^4.17.21", "marked": "^4.0.10", "splitpanes": "^2.3.8", "vue": "^2.6.14", - "vuetify": "^2.6.2", "vue-chartjs": "^3.5.1", "vue-router": "^3.5.3", + "vuetify": "^2.6.2", "vuex": "^3.6.2" }, "devDependencies": { @@ -48,8 +48,8 @@ "@babel/core": "^7.16.7", "@babel/preset-env": "^7.16.8", "@types/jest": "^27.4.0", - "@vue/vue2-jest": "^28.0.0", "@vue/test-utils": "^1.3.0", + "@vue/vue2-jest": 
"^28.0.0", "babel-core": "^7.0.0-bridge.0", "babel-eslint": "^10.1.0", "babel-jest": "^28.1.1", @@ -57,8 +57,8 @@ "buffer": "^6.0.3", "copy-webpack-plugin": "^10.2.0", "core-js": "^3.20.3", - "css-loader": "^6.5.1", "cross-env": "^7.0.3", + "css-loader": "^6.5.1", "deepmerge": "^4.2.2", "eslint": "^8.7.0", "eslint-plugin-vue": "^8.3.0", diff --git a/web/server/vue-cli/src/components/Report/ReportFilter/ReportFilter.vue b/web/server/vue-cli/src/components/Report/ReportFilter/ReportFilter.vue index a3be1486b2..43edd33809 100644 --- a/web/server/vue-cli/src/components/Report/ReportFilter/ReportFilter.vue +++ b/web/server/vue-cli/src/components/Report/ReportFilter/ReportFilter.vue @@ -283,6 +283,18 @@ + + + + + + + + + +int main() +{ + int* p = (int*)malloc(sizeof(int)); + *p = 42; + sizeof(42); +} diff --git a/web/tests/projects/dynamic/main.c_clang-tidy_0212cbc2c7194b7a5d431a18ff51bb1c.plist b/web/tests/projects/dynamic/main.c_clang-tidy_0212cbc2c7194b7a5d431a18ff51bb1c.plist new file mode 100644 index 0000000000..9d3fe83f5c --- /dev/null +++ b/web/tests/projects/dynamic/main.c_clang-tidy_0212cbc2c7194b7a5d431a18ff51bb1c.plist @@ -0,0 +1,115 @@ + + + + + diagnostics + + + category + bugprone + check_name + bugprone-sizeof-expression + description + suspicious usage of 'sizeof(K)'; did you mean 'K'? + issue_hash_content_of_line_in_context + 34f8bd4635566816af99c68456d2efc0 + report-annotation + + timestamp + 2000-01-01 09:01 + testsuite + TS-1 + testcase + TC-1 + + location + + col + 3 + file + 0 + line + 7 + + path + + + depth + 0 + kind + event + location + + col + 3 + file + 0 + line + 7 + + message + suspicious usage of 'sizeof(K)'; did you mean 'K'? + + + + + category + clang + check_name + clang-diagnostic-unused-value + description + expression result unused + issue_hash_content_of_line_in_context + 6adc9f369b933c9f6eb1b4ed2f3eea6b + location + + col + 3 + file + 0 + line + 7 + + path + + + depth + 0 + kind + event + location + + col + 3 + file + 0 + line + 7 + + message + expression result unused + + + + + files + + $FILE_PATH$/main.c + + metadata + + analyzer + + name + clang-tidy + + generated_by + + name + CodeChecker + version + 6.21.0 + + + + diff --git a/web/tests/projects/dynamic/main.c_clangsa_0212cbc2c7194b7a5d431a18ff51bb1c.plist b/web/tests/projects/dynamic/main.c_clangsa_0212cbc2c7194b7a5d431a18ff51bb1c.plist new file mode 100644 index 0000000000..62fbe7d6d7 --- /dev/null +++ b/web/tests/projects/dynamic/main.c_clangsa_0212cbc2c7194b7a5d431a18ff51bb1c.plist @@ -0,0 +1,182 @@ + + + + + diagnostics + + + category + Memory error + check_name + unix.Malloc + description + Potential leak of memory pointed to by 'p' + issue_hash_content_of_line_in_context + 3f6e85f87a4f12bb917d4e63bd152f9e + report-annotation + + timestamp + 2000-01-01 09:00 + testsuite + TS-1 + testcase + TC-1 + + location + + col + 3 + file + 0 + line + 7 + + path + + + edges + + + end + + + col + 3 + file + 0 + line + 7 + + + col + 8 + file + 0 + line + 7 + + + start + + + col + 3 + file + 0 + line + 5 + + + col + 5 + file + 0 + line + 5 + + + + + kind + control + + + depth + 0 + kind + event + location + + col + 18 + file + 0 + line + 5 + + message + Memory is allocated + ranges + + + + col + 18 + file + 0 + line + 5 + + + col + 36 + file + 0 + line + 5 + + + + + + depth + 0 + kind + event + location + + col + 3 + file + 0 + line + 7 + + message + Potential leak of memory pointed to by 'p' + ranges + + + + col + 3 + file + 0 + line + 7 + + + col + 3 + file + 0 + line + 7 + + + + + + + + files + + 
$FILE_PATH$/main.c + + metadata + + analyzer + + name + clangsa + + generated_by + + name + CodeChecker + version + 6.21.0 + + + + diff --git a/web/tests/projects/dynamic/main.c_cppcheck_0212cbc2c7194b7a5d431a18ff51bb1c.plist b/web/tests/projects/dynamic/main.c_cppcheck_0212cbc2c7194b7a5d431a18ff51bb1c.plist new file mode 100644 index 0000000000..9d8d600ce7 --- /dev/null +++ b/web/tests/projects/dynamic/main.c_cppcheck_0212cbc2c7194b7a5d431a18ff51bb1c.plist @@ -0,0 +1,97 @@ + + + + + diagnostics + + + category + warning + check_name + cppcheck-sizeofwithnumericparameter + description + Suspicious usage of 'sizeof' with a numeric constant as parameter. + issue_hash_content_of_line_in_context + 25776c4906250250b0eaa947ef04cfd2 + report-annotation + + timestamp + 2000-01-01 09:02 + testsuite + TS-1 + testcase + TC-1 + + location + + col + 3 + file + 0 + line + 7 + + path + + + depth + 0 + kind + event + location + + col + 3 + file + 0 + line + 7 + + message + Suspicious usage of 'sizeof' with a numeric constant as parameter. + ranges + + + + col + 3 + file + 0 + line + 7 + + + col + 3 + file + 0 + line + 7 + + + + + + + + files + + $FILE_PATH$/main.c + + metadata + + analyzer + + name + cppcheck + + generated_by + + name + CodeChecker + version + 6.21.0 + + + +
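
Note (not part of the patch): the "annotation pivot" built in getRunResults() above can be tried out in isolation. The following is a minimal, self-contained sketch of that query under stated assumptions: the Report and ReportAnnotations models below are simplified stand-ins that only mirror the table and column names used by the patch, the data is made up, and SQLAlchemy 1.4+ with an in-memory SQLite database is assumed.

# Standalone sketch of the per-key annotation columns described in the
# getRunResults() comment: MAX(CASE WHEN key == <key> THEN value END).
from sqlalchemy import (Column, ForeignKey, Integer, String, case,
                        create_engine, func)
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Report(Base):
    __tablename__ = "reports"
    id = Column(Integer, primary_key=True)
    severity = Column(String)


class ReportAnnotations(Base):
    __tablename__ = "report_annotations"
    report_id = Column(Integer, ForeignKey("reports.id"), primary_key=True)
    key = Column(String, primary_key=True)
    value = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add_all([
    Report(id=1, severity="HIGH"),
    Report(id=2, severity="MEDIUM"),
    ReportAnnotations(report_id=1, key="timestamp", value="2000-01-01 09:00"),
    ReportAnnotations(report_id=1, key="testsuite", value="TS-1"),
    ReportAnnotations(report_id=2, key="timestamp", value="2000-01-01 09:01"),
])
session.commit()

# One aggregated column per distinct annotation key (2.0-style case()).
keys = [k for (k,) in session.query(ReportAnnotations.key).distinct()]
annotation_cols = [
    func.max(case((ReportAnnotations.key == k, ReportAnnotations.value)))
        .label(f"annotation_{k}")
    for k in keys]

rows = (session.query(Report, *annotation_cols)
        .outerjoin(ReportAnnotations,
                   ReportAnnotations.report_id == Report.id)
        .group_by(Report.id)
        .all())

for report, *values in rows:
    # A report with no value for a key gets NULL/None in that column,
    # which is why the server code filters out None values when it builds
    # the annotations dictionary of a ReportData.
    print(report.id, report.severity, dict(zip(keys, values)))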