Skip to content

Commit

Permalink
Use context manager instead of plain open (#1211)
Browse files Browse the repository at this point in the history
Using open as a context manager closes the files after usage.

When the object is already a pathlib.Path, I used the read_text and
write_text functions instead.

In some cases pathlib.Path.open was used in a context manager;
it is basically the same as the builtin open.

Thanks, Konstantin Baikov!
  • Loading branch information
kbaikov authored Mar 11, 2021
1 parent a1bcd2f commit 8e18c79
Show file tree
Hide file tree
Showing 17 changed files with 93 additions and 63 deletions.
13 changes: 6 additions & 7 deletions datasette/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@ def __init__(
and (config_dir / "inspect-data.json").exists()
and not inspect_data
):
inspect_data = json.load((config_dir / "inspect-data.json").open())
inspect_data = json.loads((config_dir / "inspect-data.json").read_text())
if immutables is None:
immutable_filenames = [i["file"] for i in inspect_data.values()]
immutables = [
Expand Down Expand Up @@ -269,7 +269,7 @@ def __init__(
if config_dir and (config_dir / "config.json").exists():
raise StartupError("config.json should be renamed to settings.json")
if config_dir and (config_dir / "settings.json").exists() and not config:
config = json.load((config_dir / "settings.json").open())
config = json.loads((config_dir / "settings.json").read_text())
self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
Expand Down Expand Up @@ -450,11 +450,10 @@ def plugin_config(self, plugin_name, database=None, table=None, fallback=True):

def app_css_hash(self):
if not hasattr(self, "_app_css_hash"):
self._app_css_hash = hashlib.sha1(
open(os.path.join(str(app_root), "datasette/static/app.css"))
.read()
.encode("utf8")
).hexdigest()[:6]
with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp:
self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[
:6
]
return self._app_css_hash

async def get_canned_queries(self, database_name, actor):
Expand Down
13 changes: 7 additions & 6 deletions datasette/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,13 +125,13 @@ def cli():
@sqlite_extensions
def inspect(files, inspect_file, sqlite_extensions):
app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
if inspect_file == "-":
out = sys.stdout
else:
out = open(inspect_file, "w")
loop = asyncio.get_event_loop()
inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
out.write(json.dumps(inspect_data, indent=2))
if inspect_file == "-":
sys.stdout.write(json.dumps(inspect_data, indent=2))
else:
with open(inspect_file, "w") as fp:
fp.write(json.dumps(inspect_data, indent=2))


async def inspect_(files, sqlite_extensions):
Expand Down Expand Up @@ -475,7 +475,8 @@ def serve(

inspect_data = None
if inspect_file:
inspect_data = json.load(open(inspect_file))
with open(inspect_file) as fp:
inspect_data = json.load(fp)

metadata_data = None
if metadata:
Expand Down
6 changes: 4 additions & 2 deletions datasette/publish/cloudrun.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,9 +141,11 @@ def cloudrun(
if show_files:
if os.path.exists("metadata.json"):
print("=== metadata.json ===\n")
print(open("metadata.json").read())
with open("metadata.json") as fp:
print(fp.read())
print("\n==== Dockerfile ====\n")
print(open("Dockerfile").read())
with open("Dockerfile") as fp:
print(fp.read())
print("\n====================\n")

image_id = f"gcr.io/{project}/{name}"
Expand Down
17 changes: 10 additions & 7 deletions datasette/publish/heroku.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,9 +171,11 @@ def temporary_heroku_directory(
os.chdir(tmp.name)

if metadata_content:
open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
with open("metadata.json", "w") as fp:
fp.write(json.dumps(metadata_content, indent=2))

open("runtime.txt", "w").write("python-3.8.7")
with open("runtime.txt", "w") as fp:
fp.write("python-3.8.7")

if branch:
install = [
Expand All @@ -182,11 +184,11 @@ def temporary_heroku_directory(
else:
install = ["datasette"] + list(install)

open("requirements.txt", "w").write("\n".join(install))
with open("requirements.txt", "w") as fp:
fp.write("\n".join(install))
os.mkdir("bin")
open("bin/post_compile", "w").write(
"datasette inspect --inspect-file inspect-data.json"
)
with open("bin/post_compile", "w") as fp:
fp.write("datasette inspect --inspect-file inspect-data.json")

extras = []
if template_dir:
Expand Down Expand Up @@ -218,7 +220,8 @@ def temporary_heroku_directory(
procfile_cmd = "web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}".format(
quoted_files=quoted_files, extras=" ".join(extras)
)
open("Procfile", "w").write(procfile_cmd)
with open("Procfile", "w") as fp:
fp.write(procfile_cmd)

for path, filename in zip(file_paths, file_names):
link_or_copy(path, os.path.join(tmp.name, filename))
Expand Down
6 changes: 4 additions & 2 deletions datasette/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -428,8 +428,10 @@ def temporary_docker_directory(
)
os.chdir(datasette_dir)
if metadata_content:
open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
open("Dockerfile", "w").write(dockerfile)
with open("metadata.json", "w") as fp:
fp.write(json.dumps(metadata_content, indent=2))
with open("Dockerfile", "w") as fp:
fp.write(dockerfile)
for path, filename in zip(file_paths, file_names):
link_or_copy(path, os.path.join(datasette_dir, filename))
if template_dir:
Expand Down
3 changes: 2 additions & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@ def get_version():
os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py"
)
g = {}
exec(open(path).read(), g)
with open(path) as fp:
exec(fp.read(), g)
return g["__version__"]


Expand Down
6 changes: 2 additions & 4 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,10 +75,8 @@ def check_permission_actions_are_documented():
from datasette.plugins import pm

content = (
(pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst")
.open()
.read()
)
pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst"
).read_text()
permissions_re = re.compile(r"\.\. _permissions_([^\s:]+):")
documented_permission_actions = set(permissions_re.findall(content)).union(
UNDOCUMENTED_PERMISSIONS
Expand Down
5 changes: 3 additions & 2 deletions tests/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -789,7 +789,8 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
conn.executescript(GENERATED_COLUMNS_SQL)
print(f"Test tables written to {db_filename}")
if metadata:
open(metadata, "w").write(json.dumps(METADATA, indent=4))
with open(metadata, "w") as fp:
fp.write(json.dumps(METADATA, indent=4))
print(f"- metadata written to {metadata}")
if plugins_path:
path = pathlib.Path(plugins_path)
Expand All @@ -798,7 +799,7 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
test_plugins = pathlib.Path(__file__).parent / "plugins"
for filepath in test_plugins.glob("*.py"):
newpath = path / filepath.name
newpath.write_text(filepath.open().read())
newpath.write_text(filepath.read_text())
print(f" Wrote plugin: {newpath}")
if extra_db_filename:
if pathlib.Path(extra_db_filename).exists():
Expand Down
3 changes: 2 additions & 1 deletion tests/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,8 @@ def test_inspect_cli_writes_to_file(app_client):
cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"]
)
assert 0 == result.exit_code, result.output
data = json.load(open("foo.json"))
with open("foo.json") as fp:
data = json.load(fp)
assert ["fixtures"] == list(data.keys())


Expand Down
3 changes: 2 additions & 1 deletion tests/test_cli_serve_get.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@ def test_serve_with_get(tmp_path_factory):
@hookimpl
def startup(datasette):
open("{}", "w").write("hello")
with open("{}", "w") as fp:
fp.write("hello")
""".format(
str(plugins_dir / "hello.txt")
),
Expand Down
8 changes: 4 additions & 4 deletions tests/test_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,13 @@ def get_headings(content, underline="-"):


def get_labels(filename):
content = (docs_path / filename).open().read()
content = (docs_path / filename).read_text()
return set(label_re.findall(content))


@pytest.fixture(scope="session")
def settings_headings():
return get_headings((docs_path / "settings.rst").open().read(), "~")
return get_headings((docs_path / "settings.rst").read_text(), "~")


@pytest.mark.parametrize("setting", app.SETTINGS)
Expand All @@ -43,7 +43,7 @@ def test_settings_are_documented(settings_headings, setting):
),
)
def test_help_includes(name, filename):
expected = open(str(docs_path / filename)).read()
expected = (docs_path / filename).read_text()
runner = CliRunner()
result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88)
actual = f"$ datasette {name} --help\n\n{result.output}"
Expand All @@ -55,7 +55,7 @@ def test_help_includes(name, filename):

@pytest.fixture(scope="session")
def plugin_hooks_content():
return (docs_path / "plugin_hooks.rst").open().read()
return (docs_path / "plugin_hooks.rst").read_text()


@pytest.mark.parametrize(
Expand Down
6 changes: 4 additions & 2 deletions tests/test_package.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@ def test_package(mock_call, mock_which):
capture = CaptureDockerfile()
mock_call.side_effect = capture
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"])
assert 0 == result.exit_code
mock_call.assert_has_calls([mock.call(["docker", "build", "."])])
Expand All @@ -47,7 +48,8 @@ def test_package_with_port(mock_call, mock_which):
mock_call.side_effect = capture
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"]
)
Expand Down
3 changes: 2 additions & 1 deletion tests/test_plugins.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,8 @@ def test_plugin_config_env_from_list(app_client):


def test_plugin_config_file(app_client):
open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
with open(TEMP_PLUGIN_SECRET_FILE, "w") as fp:
fp.write("FROM_FILE")
assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
# Ensure secrets aren't visible in /-/metadata.json
metadata = app_client.get("/-/metadata.json")
Expand Down
32 changes: 20 additions & 12 deletions tests/test_publish_cloudrun.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@ def test_publish_cloudrun_requires_gcloud(mock_which):
mock_which.return_value = False
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
assert result.exit_code == 1
assert "Publishing to Google Cloud requires gcloud" in result.output
Expand Down Expand Up @@ -40,7 +41,8 @@ def test_publish_cloudrun_prompts_for_service(
mock_which.return_value = True
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli, ["publish", "cloudrun", "test.db"], input="input-service"
)
Expand Down Expand Up @@ -81,7 +83,8 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
mock_which.return_value = True
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
)
Expand Down Expand Up @@ -120,7 +123,8 @@ def test_publish_cloudrun_memory(
mock_which.return_value = True
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli,
["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory],
Expand Down Expand Up @@ -152,17 +156,19 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):

runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
open("metadata.yml", "w").write(
textwrap.dedent(
"""
with open("test.db", "w") as fp:
fp.write("data")
with open("metadata.yml", "w") as fp:
fp.write(
textwrap.dedent(
"""
title: Hello from metadata YAML
plugins:
datasette-auth-github:
foo: bar
"""
).strip()
)
).strip()
)
result = runner.invoke(
cli.cli,
[
Expand Down Expand Up @@ -228,7 +234,8 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which):

runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli,
[
Expand Down Expand Up @@ -295,7 +302,8 @@ def test_publish_cloudrun_extra_options(

runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli,
[
Expand Down
12 changes: 8 additions & 4 deletions tests/test_publish_heroku.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ def test_publish_heroku_requires_heroku(mock_which):
mock_which.return_value = False
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"])
assert result.exit_code == 1
assert "Publishing to Heroku requires heroku" in result.output
Expand All @@ -22,7 +23,8 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which
mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)]
runner = CliRunner()
with runner.isolated_filesystem():
open("t.db", "w").write("data")
with open("t.db", "w") as fp:
fp.write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n")
assert 0 != result.exit_code
mock_check_output.assert_has_calls(
Expand Down Expand Up @@ -54,7 +56,8 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
}[repr(s)]
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"]
)
Expand Down Expand Up @@ -88,7 +91,8 @@ def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which)
}[repr(s)]
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli,
[
Expand Down
Loading

0 comments on commit 8e18c79

Please sign in to comment.