Skip to content

Commit

Permalink
Still fixing windows temp file issues
Browse files Browse the repository at this point in the history
  • Loading branch information
mcqueary committed Dec 15, 2023
1 parent 4a67a60 commit 51674b4
Showing 1 changed file with 32 additions and 48 deletions.
80 changes: 32 additions & 48 deletions tests/test_cli.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import importlib.metadata
import os
import tempfile

import pytest
import vcr
Expand All @@ -9,6 +8,7 @@
from graver.constants import APP_NAME

live_urls = [
"https://secure.findagrave.com/cgi-bin/fg.cgi?page=gr&GRid=1784",
"https://www.findagrave.com/memorial/1075/george-washington",
"https://secure.findagrave.com/cgi-bin/fg.cgi?page=gr&GRid=534",
"https://secure.findagrave.com/cgi-bin/fg.cgi?page=gr&GRid=574",
Expand All @@ -31,41 +31,6 @@ def silence_tqdm():
del os.environ["TQDM_MININTERVAL"]


@pytest.fixture
def text_file_with_bad_url():
    """Create a text file containing a single invalid (non-URL) line.

    Publishes the file's path via the BAD_DATA_FILENAME environment
    variable, yields the (closed) temp-file object, and removes both the
    file and the environment variable on teardown.
    """
    # Close the handle before re-opening the file by name: on Windows a
    # file still held open by NamedTemporaryFile cannot be opened again.
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()
    os.environ["BAD_DATA_FILENAME"] = tf.name
    with open(tf.name, "w") as f:
        f.write("this-does-not-exist")
    yield tf
    # Teardown: delete=False means we must unlink ourselves, and the env
    # var must not leak into other tests.
    os.unlink(tf.name)
    del os.environ["BAD_DATA_FILENAME"]


@pytest.fixture
def single_line_text_file():
    """Create a text file containing a single memorial URL.

    Publishes the file's path via the SINGLE_LINE_FILENAME environment
    variable and cleans up the file and the variable on teardown.
    """
    # Close the handle before re-opening the file by name: on Windows a
    # file still held open by NamedTemporaryFile cannot be opened again.
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()
    os.environ["SINGLE_LINE_FILENAME"] = tf.name
    with open(tf.name, "w") as f:
        f.write(live_urls[0])
    yield
    # Teardown: delete=False means we must unlink ourselves, and the env
    # var must not leak into other tests.
    os.unlink(tf.name)
    del os.environ["SINGLE_LINE_FILENAME"]


@pytest.fixture
def multi_line_with_file_urls():
    """Create a text file containing several memorial URLs, one per line.

    Publishes the file's path via the MULTI_LINE_TEST_FILE environment
    variable and cleans up the file and the variable on teardown.
    """
    file_urls = [
        "https://www.findagrave.com/memorial/22633912/john-quincy-adams",
        "https://www.findagrave.com/memorial/1784/grace-brewster-hopper",
        "https://www.findagrave.com/cemetery/3136/crown-hill-memorial-park",
    ]
    # Close the handle before re-opening the file by name: on Windows a
    # file still held open by NamedTemporaryFile cannot be opened again.
    tf = tempfile.NamedTemporaryFile(delete=False)
    tf.close()
    os.environ["MULTI_LINE_TEST_FILE"] = tf.name
    with open(tf.name, "w") as f:
        f.write("\n".join(file_urls))
    yield
    # Teardown: delete=False means we must unlink ourselves, and the env
    # var must not leak into other tests.
    os.unlink(tf.name)
    del os.environ["MULTI_LINE_TEST_FILE"]


@pytest.mark.parametrize("arg", ["-V", "--version"])
def test_cli_version_multiple_ways(helpers, arg):
    """Both the short and long version flags print '<APP_NAME> v<version>'."""
    expected = f"{APP_NAME} v{__version__}"
    assert helpers.graver_cli(arg) == expected
Expand All @@ -88,10 +53,20 @@ def test_cli_scrape_file_does_not_exist(helpers, database):
"name, cassette",
[("grace-brewster-hopper", pytest.vcr_cassettes + "test-cli-scrape-file.yaml")],
)
def test_cli_scrape_file(name, cassette, helpers, database, multi_line_with_file_urls):
def test_cli_scrape_file(name, cassette, helpers, database, tmp_path):
urls = [
"https://www.findagrave.com/memorial/22633912/john-quincy-adams",
"https://www.findagrave.com/memorial/1784/grace-brewster-hopper",
"https://www.findagrave.com/cemetery/3136/crown-hill-memorial-park",
]
with vcr.use_cassette(cassette):
person = pytest.helpers.load_memorial_from_json(name)
url_file = os.getenv("MULTI_LINE_TEST_FILE")

d = tmp_path / "test_cli_scrape_file"
d.mkdir()
url_file = d / "input_urls.txt"
url_file.write_text("\n".join(urls))

db = os.getenv("DATABASE_NAME")
command = "scrape-file {} --db {}".format(url_file, db)
output = helpers.graver_cli(command)
Expand All @@ -102,10 +77,12 @@ def test_cli_scrape_file(name, cassette, helpers, database, multi_line_with_file
assert m.memorial_id == mem_id


def test_cli_scrape_file_with_invalid_url(
helpers, caplog, database, text_file_with_bad_url
):
url_file = os.getenv("BAD_DATA_FILENAME")
def test_cli_scrape_file_with_invalid_url(helpers, caplog, database, tmp_path):
    """An input file whose only line is not a URL gets logged as invalid."""
    work_dir = tmp_path / "test_cli_scrape_file_with_invalid_url"
    work_dir.mkdir()
    input_file = work_dir / "invalid_url.txt"
    input_file.write_text("this-doesn't-exist\n")

    helpers.graver_cli(f"scrape-file {input_file}")
    assert "is not a valid URL" in caplog.text
Expand All @@ -127,8 +104,12 @@ def test_cli_scrape_url(url, helpers, database):
assert m.memorial_id == 49636099


def test_cli_scrape_file_with_bad_urls(helpers, database, text_file_with_bad_url):
url_file = os.getenv("BAD_DATA_FILENAME")
def test_cli_scrape_file_with_bad_urls(helpers, database, tmp_path):
d = tmp_path / "test_cli_scrape_file_with_bad_urls"
d.mkdir()
url_file = d / "invalid_urls.txt"
url_file.write_text("this-does-not-exist\n")

db = os.getenv("DATABASE_NAME")
command = "scrape-file {} --db {}".format(url_file, db)
output = helpers.graver_cli(command)
Expand Down Expand Up @@ -157,13 +138,16 @@ def test_cli_scrape_url_with_bad_url(url, helpers, caplog, database):
"george-washington",
],
)
def test_cli_scrape_file_with_single_url_file(
name, helpers, database, single_line_text_file
):
def test_cli_scrape_file_with_single_url_file(name, helpers, database, tmp_path):
expected = pytest.helpers.load_memorial_from_json(name)
cassette = f"{pytest.vcr_cassettes}{name}.yaml"

d = tmp_path / "test_cli_scrape_file_with_single_url_file"
d.mkdir()
url_file = d / "single_url.txt"
url_file.write_text("https://www.findagrave.com/memorial/1075/george-washington\n")

with vcr.use_cassette(cassette):
url_file = os.getenv("SINGLE_LINE_FILENAME")
db = os.getenv("DATABASE_NAME")
command = "scrape-file {} --db {}".format(url_file, db)
output = helpers.graver_cli(command)
Expand Down

0 comments on commit 51674b4

Please sign in to comment.