Revert "build(deps): Bump pylint from 2.9.6 to 2.11.1 (iterative#6636)…
Browse files Browse the repository at this point in the history
…" (iterative#6842)

This reverts commit 4646208.
karajan1001 authored Oct 20, 2021
1 parent 4b2bec8 commit 6de3e73
Showing 40 changed files with 100 additions and 111 deletions.
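
Most hunks below follow the same pattern: the revert drops the explicit encoding argument (encoding="utf-8", or encoding=None for binary modes) that the reverted commit had added to open() calls, presumably to satisfy the unspecified-encoding check (W1514) enforced by newer pylint releases, and it pins pylint back to 2.9.6 in setup.cfg. A minimal before/after sketch of that pattern; "example.json" is a placeholder name, not a path from this diff:

    # Before the revert (as added alongside the pylint 2.11.1 bump):
    # the text encoding is passed explicitly.
    with open("example.json", encoding="utf-8") as fobj:  # placeholder path
        data = fobj.read()

    # After the revert: the call relies on the platform-default text encoding,
    # which is exactly what pylint's W1514 warns about.
    with open("example.json") as fobj:  # placeholder path
        data = fobj.read()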
6 changes: 3 additions & 3 deletions dvc/analytics.py
@@ -68,7 +68,7 @@ def send(path):
url = "https://analytics.dvc.org"
headers = {"content-type": "application/json"}

- with open(path, encoding="utf-8") as fobj:
+ with open(path) as fobj:
report = json.load(fobj)

report.update(_runtime_info())
@@ -173,13 +173,13 @@ def _find_or_create_user_id():
try:
with Lock(lockfile):
try:
- with open(fname, encoding="utf-8") as fobj:
+ with open(fname) as fobj:
user_id = json.load(fobj)["user_id"]

except (FileNotFoundError, ValueError, KeyError):
user_id = str(uuid.uuid4())

with open(fname, "w", encoding="utf-8") as fobj:
with open(fname, "w") as fobj:
json.dump({"user_id": user_id}, fobj)

return user_id
2 changes: 1 addition & 1 deletion dvc/api.py
@@ -113,7 +113,7 @@ def make_checkpoint():
root_dir, Repo.DVC_DIR, "tmp", CheckpointTask.SIGNAL_FILE
)

- with builtins.open(signal_file, "w", encoding="utf-8") as fobj:
+ with builtins.open(signal_file, "w") as fobj:
# NOTE: force flushing/writing empty file to disk, otherwise when
# run in certain contexts (pytest) file may not actually be written
fobj.write("")
2 changes: 1 addition & 1 deletion dvc/config.py
@@ -141,7 +141,7 @@ def init(dvc_dir):
dvc.config.Config: config object.
"""
config_file = os.path.join(dvc_dir, Config.CONFIG)
- open(config_file, "w+", encoding="utf-8").close()
+ open(config_file, "w+").close()
return Config(dvc_dir)

def load(self, validate=True, config=None):
2 changes: 1 addition & 1 deletion dvc/fs/gdrive.py
@@ -189,7 +189,7 @@ def fs(self):
temporary_save_path = self._gdrive_service_credentials_path

if is_credentials_temp:
- with open(temporary_save_path, "w", encoding="utf-8") as cred_file:
+ with open(temporary_save_path, "w") as cred_file:
cred_file.write(
os.getenv(GDriveFileSystem.GDRIVE_CREDENTIALS_DATA)
)
2 changes: 1 addition & 1 deletion dvc/ignore.py
@@ -393,7 +393,7 @@ def init(path):
if os.path.exists(dvcignore):
return dvcignore

- with open(dvcignore, "w", encoding="utf-8") as fobj:
+ with open(dvcignore, "w") as fobj:
fobj.write(
"# Add patterns of files dvc should ignore, which could improve\n"
"# the performance. Learn more at\n"
2 changes: 1 addition & 1 deletion dvc/info.py
@@ -91,7 +91,7 @@ def _get_linktype_support_info(repo):

fname = "." + str(uuid.uuid4())
src = os.path.join(repo.odb.local.cache_dir, fname)
open(src, "w", encoding="utf-8").close()
open(src, "w").close()
dst = os.path.join(repo.root_dir, fname)

cache = []
2 changes: 1 addition & 1 deletion dvc/machine/__init__.py
@@ -183,7 +183,7 @@ def create(self, name: Optional[str]):
"""Create and start the specified machine instance."""
config, backend = self.get_config_and_backend(name)
if "startup_script" in config:
- with open(config["startup_script"], encoding="utf-8") as fobj:
+ with open(config["startup_script"]) as fobj:
startup_script = fobj.read()
else:
startup_script = DEFAULT_STARTUP_SCRIPT
4 changes: 2 additions & 2 deletions dvc/render/html.py
@@ -89,7 +89,7 @@ def write(

page_html = None
if template_path:
- with open(template_path, encoding="utf-8") as fobj:
+ with open(template_path) as fobj:
page_html = fobj.read()

document = HTML(page_html, refresh_seconds=refresh_seconds)
@@ -102,6 +102,6 @@ def write(

index = Path(os.path.join(path, "index.html"))

- with open(index, "w", encoding="utf-8") as fd:
+ with open(index, "w") as fd:
fd.write(document.embed())
return index
2 changes: 1 addition & 1 deletion dvc/repo/experiments/executor/local.py
@@ -64,7 +64,7 @@ def __init__(
def _config(self, cache_dir):
local_config = os.path.join(self.dvc_dir, "config.local")
logger.debug("Writing experiments local config '%s'", local_config)
- with open(local_config, "w", encoding="utf-8") as fobj:
+ with open(local_config, "w") as fobj:
fobj.write(f"[cache]\n dir = {cache_dir}")

def cleanup(self):
8 changes: 2 additions & 6 deletions dvc/repo/plots/template.py
@@ -167,7 +167,7 @@ def load(self, name: str = None) -> Template:
if name is not None:
template_path = self._find_in_project(name)
if template_path:
with open(template_path, "r", encoding="utf-8") as fd:
with open(template_path, "r") as fd:
content = fd.read()
return Template(content, name)
else:
@@ -195,9 +195,5 @@ def init(self):
.joinpath(template)
.read_text()
)
- with open(
-     os.path.join(self.templates_dir, template),
-     "w",
-     encoding="utf-8",
- ) as fd:
+ with open(os.path.join(self.templates_dir, template), "w") as fd:
fd.write(content)
4 changes: 2 additions & 2 deletions dvc/rwlock.py
@@ -35,7 +35,7 @@ def __init__(self, path):
def _edit_rwlock(lock_dir):
path = os.path.join(lock_dir, "rwlock")
try:
- with open(path, encoding="utf-8") as fobj:
+ with open(path) as fobj:
lock = SCHEMA(json.load(fobj))
except FileNotFoundError:
lock = SCHEMA({})
@@ -46,7 +46,7 @@ def _edit_rwlock(lock_dir):
lock["read"] = defaultdict(list, lock["read"])
lock["write"] = defaultdict(dict, lock["write"])
yield lock
with open(path, "w+", encoding="utf-8") as fobj:
with open(path, "w+") as fobj:
json.dump(lock, fobj)


6 changes: 3 additions & 3 deletions dvc/scm/git/__init__.py
@@ -195,7 +195,7 @@ def ignore_remove(self, path):
if not os.path.exists(gitignore):
return

- with open(gitignore, encoding="utf-8") as fobj:
+ with open(gitignore) as fobj:
lines = fobj.readlines()

filtered = list(filter(lambda x: x.strip() != entry.strip(), lines))
@@ -204,14 +204,14 @@ def ignore_remove(self, path):
os.unlink(gitignore)
return

with open(gitignore, "w", encoding="utf-8") as fobj:
with open(gitignore, "w") as fobj:
fobj.writelines(filtered)

self.track_file(relpath(gitignore))

def _install_hook(self, name):
hook = self._hook_path(name)
- with open(hook, "w+", encoding="utf-8") as fobj:
+ with open(hook, "w+") as fobj:
fobj.write(f"#!/bin/sh\nexec dvc git-hook {name} $@\n")

os.chmod(hook, 0o777)
2 changes: 1 addition & 1 deletion dvc/system.py
@@ -66,7 +66,7 @@ def _reflink_linux(src, dst):

try:
ret = 255
with open(src, "rb") as s, open(dst, "wb+") as d:
with open(src) as s, open(dst, "w+") as d:
ret = fcntl.ioctl(d.fileno(), FICLONE, s.fileno())
finally:
if ret != 0:
4 changes: 2 additions & 2 deletions dvc/updater.py
@@ -68,7 +68,7 @@ def _check(self):
self.fetch()
return

- with open(self.updater_file, encoding="utf-8") as fobj:
+ with open(self.updater_file) as fobj:
import json

try:
@@ -105,7 +105,7 @@ def _get_latest_version(self):
logger.debug(msg.format(exc))
return

with open(self.updater_file, "w+", encoding="utf-8") as fobj:
with open(self.updater_file, "w+") as fobj:
json.dump(info, fobj)

def _notify(self, latest: str, pkg: Optional[str] = PKG) -> None:
2 changes: 1 addition & 1 deletion setup.cfg
@@ -141,7 +141,7 @@ tests =
pydocstyle==6.1.1
jaraco.windows==5.7.0
# pylint requirements
- pylint==2.11.1
+ pylint==2.9.6
# we use this to suppress pytest-related false positives in our tests.
pylint-pytest==1.0.3
# we use this to suppress some messages in tests, eg: foo/bar naming,
2 changes: 1 addition & 1 deletion tests/dir_helpers.py
@@ -247,7 +247,7 @@ def read_text(self, *args, **kwargs): # pylint: disable=signature-differs
path.name: path.read_text(*args, **kwargs)
for path in self.iterdir()
}
- return super().read_text(*args, encoding="utf-8", **kwargs)
+ return super().read_text(*args, **kwargs)

def hash_to_path_info(self, hash_):
return self / hash_[0:2] / hash_[2:]
4 changes: 2 additions & 2 deletions tests/func/experiments/test_remote.py
@@ -242,7 +242,7 @@ def test_push_pull_cache(
hash_ = digest(str(x))
path = os.path.join(local_remote.url, hash_[:2], hash_[2:])
assert os.path.exists(path)
- assert open(path, encoding="utf-8").read() == str(x)
+ assert open(path).read() == str(x)

remove(dvc.odb.local.cache_dir)

@@ -251,7 +251,7 @@ def test_push_pull_cache(
hash_ = digest(str(x))
path = os.path.join(dvc.odb.local.cache_dir, hash_[:2], hash_[2:])
assert os.path.exists(path)
- assert open(path, encoding="utf-8").read() == str(x)
+ assert open(path).read() == str(x)


def test_auth_error_list(tmp_dir, scm, dvc, http_auth_patch):
2 changes: 1 addition & 1 deletion tests/func/experiments/test_show.py
@@ -477,7 +477,7 @@ def test_show_with_broken_repo(tmp_dir, scm, dvc, exp_stage, caplog):
exp1 = dvc.experiments.run(exp_stage.addressing, params=["foo=2"])
exp2 = dvc.experiments.run(exp_stage.addressing, params=["foo=3"])

with open("dvc.yaml", "a", encoding="utf-8") as fd:
with open("dvc.yaml", "a") as fd:
fd.write("breaking the yaml!")

result = dvc.experiments.show()
2 changes: 1 addition & 1 deletion tests/func/metrics/test_show.py
@@ -277,7 +277,7 @@ def test_log_errors(
)
scm.tag("v1")

with open(file, "a", encoding="utf-8") as fd:
with open(file, "a") as fd:
fd.write("\nMALFORMED!")

result = dvc.metrics.show(revs=["v1"])
2 changes: 1 addition & 1 deletion tests/func/params/test_show.py
@@ -158,7 +158,7 @@ def test_log_errors(tmp_dir, scm, dvc, capsys, file, error_path):
)

rename = (tmp_dir / file).read_text()
- with open(tmp_dir / file, "a", encoding="utf-8") as fd:
+ with open(tmp_dir / file, "a") as fd:
fd.write("\nmalformed!")

scm.add([PIPELINE_FILE, "params_other.yaml"])
4 changes: 2 additions & 2 deletions tests/func/plots/test_show.py
@@ -184,7 +184,7 @@ def test_dir_plots(tmp_dir, dvc, run_copy_metrics):


def test_ignore_parsing_error(tmp_dir, dvc, run_copy_metrics):
with open("file", "wb", encoding=None) as fobj:
with open("file", "wb") as fobj:
fobj.write(b"\xc1")

run_copy_metrics("file", "plot_file.json", plots=["plot_file.json"])
@@ -216,7 +216,7 @@ def test_log_errors(
)
scm.tag("v1")

with open(file, "a", encoding="utf-8") as fd:
with open(file, "a") as fd:
fd.write("\nMALFORMED!")

result = dvc.plots.show(onerror=onerror_collect)
6 changes: 3 additions & 3 deletions tests/func/test_add.py
@@ -87,7 +87,7 @@ def test_add_executable(tmp_dir, dvc):


def test_add_unicode(tmp_dir, dvc):
with open("\xe1", "wb", encoding=None) as fd:
with open("\xe1", "wb") as fd:
fd.write(b"something")

(stage,) = dvc.add("\xe1")
@@ -143,7 +143,7 @@ def test_warn_about_large_directories(self):
# Create a lot of files
for iteration in range(LARGE_DIR_SIZE + 1):
path = os.path.join("large-dir", str(iteration))
with open(path, "w", encoding="utf-8") as fobj:
with open(path, "w") as fobj:
fobj.write(path)

assert main(["add", "--recursive", "large-dir"]) == 0
@@ -581,7 +581,7 @@ def test(self):
foo_stage = relpath(self.FOO + DVC_FILE_SUFFIX)

# corrupt stage file
- with open(foo_stage, "a+", encoding="utf-8") as file:
+ with open(foo_stage, "a+") as file:
file.write("this will break yaml file structure")

self._caplog.clear()
14 changes: 7 additions & 7 deletions tests/func/test_checkout.py
@@ -73,7 +73,7 @@ def test(self):
cache = self.foo_stage.outs[0].cache_path

os.chmod(cache, 0o644)
with open(cache, "a", encoding="utf-8") as fd:
with open(cache, "a") as fd:
fd.write("1")

with pytest.raises(CheckoutError):
@@ -107,7 +107,7 @@ def test(self):
)

os.chmod(cache, 0o644)
with open(cache, "w+", encoding="utf-8") as fobj:
with open(cache, "w+") as fobj:
fobj.write("1")

with pytest.raises(CheckoutError):
@@ -127,7 +127,7 @@ class CheckoutBase(TestDvcGit):
GIT_IGNORE = ".gitignore"

def commit_data_file(self, fname, content="random text"):
with open(fname, "w", encoding="utf-8") as fd:
with open(fname, "w") as fd:
fd.write(content)
stages = self.dvc.add(fname)
self.assertEqual(len(stages), 1)
@@ -136,7 +136,7 @@ def commit_data_file(self, fname, content="random text"):
self.dvc.scm.commit("adding " + fname)

def read_ignored(self):
- with open(self.GIT_IGNORE, encoding="utf-8") as f:
+ with open(self.GIT_IGNORE) as f:
return [s.strip("\n") for s in f.readlines()]

def outs_info(self, stage):
@@ -188,7 +188,7 @@ def test(self, mock_prompt):
stage = stages[0]

working_dir_change = os.path.join(self.DATA_DIR, "not_cached.txt")
- with open(working_dir_change, "w", encoding="utf-8") as f:
+ with open(working_dir_change, "w") as f:
f.write("not_cached")

ret = main(["checkout", stage.relpath])
@@ -204,7 +204,7 @@ def test_force(self, mock_prompt):
stage = stages[0]

working_dir_change = os.path.join(self.DATA_DIR, "not_cached.txt")
- with open(working_dir_change, "w", encoding="utf-8") as f:
+ with open(working_dir_change, "w") as f:
f.write("not_cached")

ret = main(["checkout", stage.relpath])
@@ -610,7 +610,7 @@ def test_checkout_stats_on_failure(tmp_dir, dvc, scm):
# corrupt cache
cache = stage.outs[0].cache_path
os.chmod(cache, 0o644)
with open(cache, "a", encoding="utf-8") as fd:
with open(cache, "a") as fd:
fd.write("destroy cache")

scm.checkout("HEAD~")
4 changes: 2 additions & 2 deletions tests/func/test_data_cloud.py
@@ -112,7 +112,7 @@ def _check_status(status, **kwargs):
dvc.cloud.pull(foo_hashes)
assert os.path.exists(cache)
assert os.path.isfile(cache)
- with open(cache, encoding="utf-8") as fd:
+ with open(cache) as fd:
assert fd.read() == "foo"

dvc.cloud.pull(dir_hashes)
@@ -166,7 +166,7 @@ def test_cloud_cli(tmp_dir, dvc, remote):
assert os.path.isfile("foo")
assert os.path.isdir("data_dir")

- with open(cache, encoding="utf-8") as fd:
+ with open(cache) as fd:
assert fd.read() == "foo"
assert os.path.isfile(cache_dir)

(Diffs for the remaining changed files were not loaded and are not shown here.)
