[refactor] Rename and fix load_json_or_empty
"load_json_or_empty" is a too long name for this function, and the
second part doesn't carry any information.

This function is only used for opening JSON files, so the "kind"
parameter is not necessary.

Also, move this load_json() function to the codechecker_common.util
module: it originally lived in the report converter tool, but most of
its usages were in other parts of CodeChecker, and many times this was
the only dependency on report_converter.
bruntib committed Jan 6, 2023
1 parent 95a9534 commit 696e179
Showing 20 changed files with 122 additions and 117 deletions.
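
Most of the diff is a mechanical swap at each call site: the old import from codechecker_report_converter.util is replaced by the new one from codechecker_common.util. A minimal before/after sketch of that swap (the "config.json" path and the cfg name are illustrative, not taken from the diff):

# Before: the helper lived in the report-converter tool
from codechecker_report_converter.util import load_json_or_empty
cfg = load_json_or_empty("config.json", {})

# After: the helper lives in codechecker_common.util under a shorter name
from codechecker_common.util import load_json
cfg = load_json("config.json", {})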
9 changes: 4 additions & 5 deletions analyzer/codechecker_analyzer/analyzer_context.py
@@ -16,11 +16,10 @@
import os
import sys

from codechecker_report_converter.util import load_json_or_empty

from codechecker_common import logger
from codechecker_common.checker_labels import CheckerLabels
from codechecker_common.singleton import Singleton
from codechecker_common.util import load_json

from . import env

@@ -80,7 +79,7 @@ def __get_package_config(self):
self._data_files_dir_path, "config", "config.json")

LOG.debug('Reading config: %s', pckg_config_file)
cfg_dict = load_json_or_empty(pckg_config_file)
cfg_dict = load_json(pckg_config_file)

if not cfg_dict:
raise ValueError(f"No configuration file '{pckg_config_file}' can "
@@ -95,7 +94,7 @@ def __get_package_layout(self):
self._data_files_dir_path, "config", "package_layout.json")

LOG.debug('Reading config: %s', layout_cfg_file)
lcfg_dict = load_json_or_empty(layout_cfg_file)
lcfg_dict = load_json(layout_cfg_file)

if not lcfg_dict:
raise ValueError(f"No configuration file '{layout_cfg_file}' can "
@@ -117,7 +116,7 @@ def __set_version(self):
"""
Get the package version from the version config file.
"""
vfile_data = load_json_or_empty(self.version_file)
vfile_data = load_json(self.version_file)

if not vfile_data:
sys.exit(1)
5 changes: 2 additions & 3 deletions analyzer/codechecker_analyzer/buildlog/log_parser.py
@@ -23,11 +23,10 @@
import traceback
from typing import Dict, List, Optional

from codechecker_report_converter.util import load_json_or_empty

from codechecker_analyzer.analyzers import clangsa

from codechecker_common.logger import get_logger
from codechecker_common.util import load_json

from .. import gcc_toolchain
from .build_action import BuildAction
@@ -542,7 +541,7 @@ def load_compiler_info(file_path: str):
ICI = ImplicitCompilerInfo
ICI.compiler_info = {}

contents = load_json_or_empty(file_path, {})
contents = load_json(file_path, {})
for k, v in contents.items():
k = json.loads(k)
ICI.compiler_info[
6 changes: 3 additions & 3 deletions analyzer/codechecker_analyzer/cmd/analyze.py
@@ -22,7 +22,6 @@

from typing import List

from codechecker_report_converter.util import load_json_or_empty
from tu_collector import tu_collector

from codechecker_analyzer import analyzer, analyzer_context, env
@@ -34,6 +33,7 @@
from codechecker_common import arg, logger, cmd_config
from codechecker_common.skiplist_handler import SkipListHandler, \
SkipListHandlers
from codechecker_common.util import load_json


LOG = logger.get_logger('system')
@@ -933,7 +933,7 @@ def main(args):
sys.exit(1)
compiler_info_file = args.compiler_info_file

compile_commands = load_json_or_empty(args.logfile)
compile_commands = load_json(args.logfile)
if compile_commands is None:
sys.exit(1)
__change_args_to_command_in_comp_db(compile_commands)
@@ -1052,7 +1052,7 @@ def main(args):
metadata_file = os.path.join(args.output_path, 'metadata.json')
metadata_prev = None
if os.path.exists(metadata_file):
metadata_prev = load_json_or_empty(metadata_file)
metadata_prev = load_json(metadata_file)
metadata_tool['result_source_files'] = \
__get_result_source_files(metadata_prev)

6 changes: 3 additions & 3 deletions analyzer/codechecker_analyzer/cmd/parse.py
@@ -16,8 +16,7 @@
import sys
from typing import Dict, Optional, Set

from codechecker_report_converter.util import dump_json_output, \
load_json_or_empty
from codechecker_report_converter.util import dump_json_output
from codechecker_report_converter.report import report_file, \
reports as reports_helper
from codechecker_report_converter.report.output import baseline, codeclimate, \
@@ -34,6 +33,7 @@
from codechecker_common import arg, logger, cmd_config
from codechecker_common.skiplist_handler import SkipListHandler, \
SkipListHandlers
from codechecker_common.util import load_json


LOG = logger.get_logger('system')
@@ -257,7 +257,7 @@ def get_metadata(dir_path: str) -> Optional[Dict]:
""" Get metadata from the given dir path or None if not exists. """
metadata_file = os.path.join(dir_path, "metadata.json")
if os.path.exists(metadata_file):
return load_json_or_empty(metadata_file)
return load_json(metadata_file)

return None

31 changes: 15 additions & 16 deletions analyzer/tests/unit/test_log_parser.py
@@ -15,11 +15,10 @@
import tempfile
import unittest

from codechecker_report_converter.util import load_json_or_empty

from codechecker_analyzer.buildlog import log_parser
from codechecker_common.skiplist_handler import SkipListHandler, \
SkipListHandlers
from codechecker_common.util import load_json


class LogParserTest(unittest.TestCase):
@@ -77,7 +76,7 @@ def test_old_ldlogger(self):
# define being considered a file and ignored, for now.

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(build_action.source, r'/tmp/a.cpp')
@@ -97,7 +96,7 @@ def test_new_ldlogger(self):
# and --target=x86_64-linux-gnu.

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(build_action.source, r'/tmp/a.cpp')
@@ -110,7 +109,7 @@ def test_new_ldlogger(self):
logfile = os.path.join(self.__test_files, "ldlogger-new-space.json")

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(build_action.source, r'/tmp/a b.cpp')
@@ -120,7 +119,7 @@ def test_new_ldlogger(self):
logfile = os.path.join(self.__test_files, "ldlogger-new-at.json")

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(len(build_action.analyzer_options), 1)
@@ -129,7 +128,7 @@ def test_new_ldlogger(self):

# Test the same stuff with response files.
logfile = os.path.join(self.__test_files, "ldlogger-new-response.json")
logjson = load_json_or_empty(logfile)
logjson = load_json(logfile)
# Make it relative to the response file.
logjson[0]['directory'] = self.__test_files

@@ -154,7 +153,7 @@ def test_old_intercept_build(self):
logfile = os.path.join(self.__test_files, "intercept-old.json")

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(build_action.source, r'/tmp/a.cpp')
@@ -169,7 +168,7 @@ def test_old_intercept_build(self):
logfile = os.path.join(self.__test_files, "intercept-old-space.json")

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(build_action.source, '/tmp/a b.cpp')
@@ -191,7 +190,7 @@ def test_new_intercept_build(self):
# The define is passed to the analyzer properly.

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(build_action.source, r'/tmp/a.cpp')
@@ -204,7 +203,7 @@ def test_new_intercept_build(self):
logfile = os.path.join(self.__test_files, "intercept-new-space.json")

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(build_action.source, '/tmp/a b.cpp')
@@ -278,7 +277,7 @@ def test_include_rel_to_abs(self):
logfile = os.path.join(self.__test_files, "include.json")

build_actions, _ = log_parser.\
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(len(build_action.analyzer_options), 4)
@@ -511,7 +510,7 @@ def test_response_file_simple(self):
logfile = os.path.join(self.compile_command_file_path)

build_actions, _ = log_parser. \
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]
self.assertEqual(len(build_action.analyzer_options), 1)
self.assertEqual(build_action.analyzer_options[0],
@@ -537,7 +536,7 @@ def test_response_file_contains_source_file(self):
logfile = os.path.join(self.compile_command_file_path)

build_actions, _ = log_parser. \
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)
build_action = build_actions[0]

self.assertEqual(len(build_action.analyzer_options), 1)
@@ -576,7 +575,7 @@ def test_response_file_contains_multiple_source_files(self):
logfile = os.path.join(self.compile_command_file_path)

build_actions, _ = log_parser. \
parse_unique_log(load_json_or_empty(logfile), self.__this_dir)
parse_unique_log(load_json(logfile), self.__this_dir)

self.assertEqual(len(build_actions), 2)

@@ -610,7 +609,7 @@ def test_source_file_contains_at_sign(self):
file=src_file_path
)]))

build_actions, _ = log_parser.parse_unique_log(load_json_or_empty(
build_actions, _ = log_parser.parse_unique_log(load_json(
self.compile_command_file_path), self.__this_dir)

self.assertEqual(len(build_actions), 1)
6 changes: 3 additions & 3 deletions codechecker_common/checker_labels.py
@@ -4,7 +4,7 @@
from typing import Any, cast, DefaultDict, Dict, Iterable, List, Optional, \
Set, Tuple, Union

from codechecker_report_converter.util import load_json_or_empty
from codechecker_common.util import load_json


# TODO: Most of the methods of this class get an optional analyzer name. If
@@ -31,7 +31,7 @@ def __init__(self, checker_labels_dir: str):
self.__descriptions = {}

if 'descriptions.json' in os.listdir(checker_labels_dir):
self.__descriptions = load_json_or_empty(os.path.join(
self.__descriptions = load_json(os.path.join(
checker_labels_dir, 'descriptions.json'))

label_json_files = map(
@@ -63,7 +63,7 @@ def __union_label_files(
all_labels = {}

for label_file in label_files:
data = load_json_or_empty(label_file)
data = load_json(label_file)
analyzer_labels = defaultdict(list)

for checker, labels in data['labels'].items():
5 changes: 2 additions & 3 deletions codechecker_common/cmd_config.py
@@ -11,9 +11,8 @@

from typing import List

from codechecker_report_converter.util import load_json_or_empty

from codechecker_common import logger
from codechecker_common.util import load_json

LOG = logger.get_logger('system')

@@ -63,7 +62,7 @@ def process_config_file(args, subcommand_name):
with open(config_file, encoding='utf-8', errors='ignore') as f:
cfg = yaml.load(f, Loader=yaml.BaseLoader)
else:
cfg = load_json_or_empty(config_file, default={})
cfg = load_json(config_file, default={})

# The subcommand name is analyze but the
# configuration section name is analyzer.
34 changes: 34 additions & 0 deletions codechecker_common/util.py
@@ -11,6 +11,8 @@


import itertools
import json
import portalocker

from codechecker_common.logger import get_logger

@@ -39,3 +41,35 @@ def chunks(iterator, n):
for first in iterator:
rest_of_chunk = itertools.islice(iterator, 0, n - 1)
yield itertools.chain([first], rest_of_chunk)


def load_json(path: str, default=None, lock=False):
"""
Load the contents of the given file as JSON and return its value,
or default if the file can't be loaded.
"""

ret = default
try:
with open(path, 'r', encoding='utf-8', errors='ignore') as handle:
if lock:
portalocker.lock(handle, portalocker.LOCK_SH)

ret = json.load(handle)

if lock:
portalocker.unlock(handle)
except IOError as ex:
LOG.warning("Failed to open json file: %s", path)
LOG.warning(ex)
except OSError as ex:
LOG.warning("Failed to open json file: %s", path)
LOG.warning(ex)
except ValueError as ex:
LOG.warning("%s is not a valid json file.", path)
LOG.warning(ex)
except TypeError as ex:
LOG.warning('Failed to process json file: %s', path)
LOG.warning(ex)

return ret
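
A minimal usage sketch of the relocated helper; the file names, the default values and the "analyzers" key are illustrative assumptions, not taken from the diff:

from codechecker_common.util import load_json

# Returns the parsed JSON value, or the given default when the file is
# missing, unreadable or not valid JSON.
metadata = load_json("metadata.json", default={})
analyzers = metadata.get("analyzers", {})

# A shared file lock can be requested for the duration of the read.
config = load_json("config.json", default={}, lock=True)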