File drop-assertpy-dep.patch of Package python-pytest-html (Project openSUSE:Backports:SLE-15-SP4:FactoryCandidates)
Index: pytest_html-4.1.1/testing/test_e2e.py
===================================================================
--- pytest_html-4.1.1.orig/testing/test_e2e.py
+++ pytest_html-4.1.1/testing/test_e2e.py
@@ -5,7 +5,6 @@ import urllib.parse
 
 import pytest
 import selenium.webdriver.support.expected_conditions as ec
-from assertpy import assert_that
 from selenium import webdriver
 from selenium.webdriver.common.by import By
 from selenium.webdriver.support.wait import WebDriverWait
@@ -84,7 +83,7 @@ def test_visible(pytester, path, driver)
         ec.visibility_of_all_elements_located((By.CSS_SELECTOR, "#results-table"))
     )
     result = driver.find_elements(By.CSS_SELECTOR, "tr.collapsible")
-    assert_that(result).is_length(2)
+    assert len(result) == 2
 
     query_params = _encode_query_params({"visible": ""})
     driver.get(f"file:///reports{path()}?{query_params}")
@@ -92,7 +91,7 @@ def test_visible(pytester, path, driver)
         ec.visibility_of_all_elements_located((By.CSS_SELECTOR, "#results-table"))
     )
     result = driver.find_elements(By.CSS_SELECTOR, "tr.collapsible")
-    assert_that(result).is_length(0)
+    assert len(result) == 0
 
 
 def test_custom_sorting(pytester, path, driver):
@@ -121,17 +120,17 @@ def test_custom_sorting(pytester, path,
     )
 
     rows = _parse_result_table(driver)
-    assert_that(rows).is_length(2)
-    assert_that(rows[0]["test"]).contains("AAA")
-    assert_that(rows[0]["alpha"]).is_equal_to("AAA")
-    assert_that(rows[1]["test"]).contains("BBB")
-    assert_that(rows[1]["alpha"]).is_equal_to("BBB")
+    assert len(rows) == 2
+    assert "AAA" in rows[0]["test"]
+    assert "AAA" == rows[0]["alpha"]
+    assert "BBB" in rows[1]["test"]
+    assert "BBB" == rows[1]["alpha"]
 
     driver.find_element(By.CSS_SELECTOR, "th[data-column-type='alpha']").click()
     # we might need some wait here to ensure sorting happened
     rows = _parse_result_table(driver)
-    assert_that(rows).is_length(2)
-    assert_that(rows[0]["test"]).contains("BBB")
-    assert_that(rows[0]["alpha"]).is_equal_to("BBB")
-    assert_that(rows[1]["test"]).contains("AAA")
-    assert_that(rows[1]["alpha"]).is_equal_to("AAA")
+    assert len(rows) == 2
+    assert "BBB" in rows[0]["test"]
+    assert "BBB" == rows[0]["alpha"]
+    assert "AAA" in rows[1]["test"]
+    assert "AAA" == rows[1]["alpha"]
Index: pytest_html-4.1.1/testing/test_integration.py
===================================================================
--- pytest_html-4.1.1.orig/testing/test_integration.py
+++ pytest_html-4.1.1/testing/test_integration.py
@@ -9,7 +9,6 @@ from base64 import b64encode
 from pathlib import Path
 
 import pytest
-from assertpy import assert_that
 from bs4 import BeautifulSoup
 from selenium import webdriver
 
@@ -82,7 +81,7 @@ def assert_results(
         if isinstance(number, int):
             number_of_tests += number
             result = get_text(page, f"span[class={outcome}]")
-            assert_that(result).matches(rf"{number} {OUTCOMES[outcome]}")
+            assert re.match(rf"{number} {OUTCOMES[outcome]}", result)
 
 
 def get_element(page, selector):
@@ -148,13 +147,11 @@ class TestHTML:
         duration = get_text(page, "#results-table td[class='col-duration']")
         total_duration = get_text(page, "p[class='run-count']")
         if pause < 1:
-            assert_that(int(duration.replace("ms", ""))).is_between(
-                expectation, expectation * 2
-            )
-            assert_that(total_duration).matches(r"\d+\s+ms")
+            assert expectation < int(duration.replace("ms", "")) < expectation * 2
+            assert re.match(r"\d+\s+ms", total_duration)
         else:
-            assert_that(duration).matches(expectation)
-            assert_that(total_duration).matches(r"\d{2}:\d{2}:\d{2}")
+            assert re.match(expectation, duration)
+            assert re.match(r"\d{2}:\d{2}:\d{2}", total_duration)
 
     def test_duration_format_hook(self, pytester):
         pytester.makeconftest(
@@ -169,14 +166,14 @@ class TestHTML:
         assert_results(page, passed=1)
 
         duration = get_text(page, "#results-table td[class='col-duration']")
-        assert_that(duration).contains("seconds")
+        assert "seconds" in duration
 
     def test_total_number_of_tests_zero(self, pytester):
         page = run(pytester)
         assert_results(page)
 
         total = get_text(page, "p[class='run-count']")
-        assert_that(total).matches(r"0 test(?!s)")
+        assert re.match(r"0 test(?!s)", total)
 
     def test_total_number_of_tests_singular(self, pytester):
         pytester.makepyfile("def test_pass(): pass")
@@ -184,7 +181,7 @@ class TestHTML:
         assert_results(page, passed=1)
 
         total = get_text(page, "p[class='run-count']")
-        assert_that(total).matches(r"1 test(?!s)")
+        assert re.match(r"1 test(?!s)", total)
 
     def test_total_number_of_tests_plural(self, pytester):
         pytester.makepyfile(
@@ -197,7 +194,7 @@ class TestHTML:
         assert_results(page, passed=2)
 
         total = get_text(page, "p[class='run-count']")
-        assert_that(total).matches(r"2 tests(?!\S)")
+        assert re.match(r"2 tests(?!\S)", total)
 
     def test_pass(self, pytester):
         pytester.makepyfile("def test_pass(): pass")
@@ -217,7 +214,7 @@ class TestHTML:
         assert_results(page, skipped=1, total_tests=0)
 
         log = get_text(page, "div[class='log']")
-        assert_that(log).contains(reason)
+        assert reason in log
 
     def test_skip_function_marker(self, pytester):
         reason = str(random.random())
@@ -233,7 +230,7 @@ class TestHTML:
         assert_results(page, skipped=1, total_tests=0)
 
         log = get_text(page, "div[class='log']")
-        assert_that(log).contains(reason)
+        assert reason in log
 
     def test_skip_class_marker(self, pytester):
         reason = str(random.random())
@@ -250,16 +247,14 @@ class TestHTML:
         assert_results(page, skipped=1, total_tests=0)
 
         log = get_text(page, "div[class='log']")
-        assert_that(log).contains(reason)
+        assert reason in log
 
     def test_fail(self, pytester):
         pytester.makepyfile("def test_fail(): assert False")
         page = run(pytester)
         assert_results(page, failed=1)
-        assert_that(get_log(page)).contains("AssertionError")
-        assert_that(get_text(page, "div[class='log'] span.error")).matches(
-            r"^E\s+assert False$"
-        )
+        assert "AssertionError" in get_log(page)
+        assert re.match(r"^E\s+assert False$", get_text(page, "div[class='log'] span.error"))
 
     def test_xfail(self, pytester):
         reason = str(random.random())
@@ -272,7 +267,7 @@ class TestHTML:
         )
         page = run(pytester)
         assert_results(page, xfailed=1)
-        assert_that(get_log(page)).contains(reason)
+        assert reason in get_log(page)
 
     def test_xfail_function_marker(self, pytester):
         reason = str(random.random())
@@ -286,7 +281,7 @@ class TestHTML:
         )
         page = run(pytester)
         assert_results(page, xfailed=1)
-        assert_that(get_log(page)).contains(reason)
+        assert reason in get_log(page)
 
     def test_xfail_class_marker(self, pytester):
         pytester.makepyfile(
@@ -374,8 +369,8 @@ class TestHTML:
         assert_results(page, error=1, total_tests=0)
 
         col_name = get_text(page, "td[class='col-testId']")
-        assert_that(col_name).contains("::setup")
-        assert_that(get_log(page)).contains("ValueError")
+        assert "::setup" in col_name
+        assert "ValueError" in get_log(page)
 
     @pytest.mark.parametrize("title", ["", "Special Report"])
     def test_report_title(self, pytester, title):
@@ -392,8 +387,8 @@ class TestHTML:
         expected_title = title if title else "report.html"
 
         page = run(pytester)
-        assert_that(get_text(page, "#head-title")).is_equal_to(expected_title)
-        assert_that(get_text(page, "h1[id='title']")).is_equal_to(expected_title)
+        assert expected_title == get_text(page, "#head-title")
+        assert expected_title == get_text(page, "h1[id='title']")
 
     def test_resources_inline_css(self, pytester):
         pytester.makepyfile("def test_pass(): pass")
@@ -401,15 +396,13 @@ class TestHTML:
 
         content = file_content()
 
-        assert_that(get_text(page, "head style").strip()).contains(content)
+        assert content in get_text(page, "head style").strip()
 
     def test_resources_css(self, pytester):
         pytester.makepyfile("def test_pass(): pass")
         page = run(pytester)
 
-        assert_that(page.select_one("head link")["href"]).is_equal_to(
-            str(Path("assets", "style.css"))
-        )
+        assert page.select_one("head link")["href"] == str(Path("assets", "style.css"))
 
     def test_custom_content_in_summary(self, pytester):
         content = {
@@ -435,11 +428,11 @@ class TestHTML:
         elements = page.select(
             ".additional-summary p"
         )  # ".summary__data p:not(.run-count):not(.filter)")
-        assert_that(elements).is_length(3)
+        assert len(elements) == 3
         for element in elements:
             key = re.search(r"(\w+).*", element.string).group(1)
             value = content.pop(key)
-            assert_that(element.string).contains(value)
+            assert value in element.string
 
     def test_extra_html(self, pytester):
         content = str(random.random())
@@ -460,7 +453,7 @@ class TestHTML:
         pytester.makepyfile("def test_pass(): pass")
         page = run(pytester)
 
-        assert_that(page.select_one(".extraHTML").string).is_equal_to(content)
+        assert content == page.select_one(".extraHTML").string
 
     @pytest.mark.parametrize(
        "content, encoded",
@@ -484,10 +477,8 @@ class TestHTML:
         page = run(pytester, cmd_flags=["--self-contained-html"])
 
         element = page.select_one("a[class='col-links__extra text']")
-        assert_that(element.string).is_equal_to("Text")
-        assert_that(element["href"]).is_equal_to(
-            f"data:text/plain;charset=utf-8;base64,{encoded}"
-        )
+        assert "Text" == element.string
+        assert f"data:text/plain;charset=utf-8;base64,{encoded}" == element["href"]
 
     def test_extra_json(self, pytester):
         content = {str(random.random()): str(random.random())}
@@ -512,10 +503,8 @@ class TestHTML:
         data = b64encode(content_str.encode("utf-8")).decode("ascii")
 
         element = page.select_one("a[class='col-links__extra json']")
-        assert_that(element.string).is_equal_to("JSON")
-        assert_that(element["href"]).is_equal_to(
-            f"data:application/json;charset=utf-8;base64,{data}"
-        )
+        assert "JSON" == element.string
+        assert f"data:application/json;charset=utf-8;base64,{data}" == element["href"]
 
     def test_extra_url(self, pytester):
         content = str(random.random())
@@ -536,8 +525,8 @@ class TestHTML:
         page = run(pytester)
 
         element = page.select_one("a[class='col-links__extra url']")
-        assert_that(element.string).is_equal_to("URL")
-        assert_that(element["href"]).is_equal_to(content)
+        assert "URL" == element.string
+        assert content == element["href"]
 
     @pytest.mark.parametrize(
         "mime_type, extension",
@@ -575,7 +564,7 @@ class TestHTML:
         # assert_that(element["href"]).is_equal_to(src)
 
         element = page.select_one(".media img")
-        assert_that(str(element)).is_equal_to(f'<img src="{src}"/>')
+        assert f'<img src="{src}"/>' == str(element)
 
     @pytest.mark.parametrize("mime_type, extension", [("video/mp4", "mp4")])
     def test_extra_video(self, pytester, mime_type, extension):
@@ -603,9 +592,7 @@ class TestHTML:
         # assert_that(element["href"]).is_equal_to(src)
 
         element = page.select_one(".media video")
-        assert_that(str(element)).is_equal_to(
-            f'<video controls="">\n<source src="{src}" type="{mime_type}"/>\n</video>'
-        )
+        assert f'<video controls="">\n<source src="{src}" type="{mime_type}"/>\n</video>' == str(element)
 
     def test_xdist(self, pytester):
         pytester.makepyfile("def test_xdist(): pass")
@@ -634,19 +621,10 @@ class TestHTML:
         description_index = 5
         time_index = 6
 
-        assert_that(get_text(page, header_selector.format(time_index))).is_equal_to(
-            "Time"
-        )
-        assert_that(
-            get_text(page, header_selector.format(description_index))
-        ).is_equal_to("Description")
-
-        assert_that(get_text(page, row_selector.format(time_index))).is_equal_to(
-            "A time"
-        )
-        assert_that(get_text(page, row_selector.format(description_index))).is_equal_to(
-            "A description"
-        )
+        assert "Time" == get_text(page, header_selector.format(time_index))
+        assert "Description" == get_text(page, header_selector.format(description_index))
+        assert "A time" == get_text(page, row_selector.format(time_index))
+        assert "A description" == get_text(page, row_selector.format(description_index))
 
     def test_results_table_hook_insert(self, pytester):
         header_selector = "#results-table-head tr:nth-child(1) th:nth-child({})"
@@ -671,19 +649,10 @@ class TestHTML:
         description_index = 4
         time_index = 2
 
-        assert_that(get_text(page, header_selector.format(time_index))).is_equal_to(
-            "Time"
-        )
-        assert_that(
-            get_text(page, header_selector.format(description_index))
-        ).is_equal_to("Description")
-
-        assert_that(get_text(page, row_selector.format(time_index))).is_equal_to(
-            "A time"
-        )
-        assert_that(get_text(page, row_selector.format(description_index))).is_equal_to(
-            "A description"
-        )
+        assert "Time" == get_text(page, header_selector.format(time_index))
+        assert "Description" == get_text(page, header_selector.format(description_index))
+        assert "A time" == get_text(page, row_selector.format(time_index))
+        assert "A description" == get_text(page, row_selector.format(description_index))
 
     def test_results_table_hook_delete(self, pytester):
         pytester.makeconftest(
@@ -720,10 +689,10 @@ class TestHTML:
         page = run(pytester)
 
         header_columns = page.select("#results-table-head th")
-        assert_that(header_columns).is_length(3)
+        assert len(header_columns) == 3
 
         row_columns = page.select_one(".results-table-row").select("td:not(.extra)")
-        assert_that(row_columns).is_length(3)
+        assert len(row_columns) == 3
 
     @pytest.mark.parametrize("no_capture", ["", "-s"])
     def test_standard_streams(self, pytester, no_capture):
@@ -752,11 +721,11 @@ class TestHTML:
         for when in ["setup", "call", "teardown"]:
             for stream in ["stdout", "stderr"]:
                 if no_capture:
-                    assert_that(log).does_not_match(f"- Captured {stream} {when} -")
-                    assert_that(log).does_not_match(f"this is {when} {stream}")
+                    assert not re.match(f"- Captured {stream} {when} -", log)
+                    assert not re.match(f"this is {when} {stream}", log)
                 else:
-                    assert_that(log).matches(f"- Captured {stream} {when} -")
-                    assert_that(log).matches(f"this is {when} {stream}")
+                    assert re.match(f"- Captured {stream} {when} -", log)
+                    assert re.match(f"this is {when} {stream}", log)
 
     def test_collect_error(self, pytester):
         error_msg = "Non existent module"
@@ -915,7 +884,7 @@ class TestLogCapturing:
 
         log = get_log(page)
         for when in ["setup", "test", "teardown"]:
-            assert_that(log).matches(self.LOG_LINE_REGEX.format(when))
+            assert re.match(self.LOG_LINE_REGEX.format(when), log)
 
     @pytest.mark.usefixtures("log_cli")
     def test_setup_error(self, test_file, pytester):
@@ -924,9 +893,9 @@ class TestLogCapturing:
         assert_results(page, error=1)
 
         log = get_log(page)
-        assert_that(log).matches(self.LOG_LINE_REGEX.format("setup"))
-        assert_that(log).does_not_match(self.LOG_LINE_REGEX.format("test"))
-        assert_that(log).does_not_match(self.LOG_LINE_REGEX.format("teardown"))
+        assert re.match(self.LOG_LINE_REGEX.format("setup"), log)
+        assert not re.match(self.LOG_LINE_REGEX.format("test"), log)
+        assert not re.match(self.LOG_LINE_REGEX.format("teardown"), log)
 
     @pytest.mark.usefixtures("log_cli")
     def test_test_fails(self, test_file, pytester):
@@ -936,7 +905,7 @@ class TestLogCapturing:
 
         log = get_log(page)
         for when in ["setup", "test", "teardown"]:
-            assert_that(log).matches(self.LOG_LINE_REGEX.format(when))
+            assert re.match(self.LOG_LINE_REGEX.format(when), log)
 
     @pytest.mark.usefixtures("log_cli")
     @pytest.mark.parametrize(
@@ -950,7 +919,7 @@ class TestLogCapturing:
         for test_name in ["test_logging", "test_logging::teardown"]:
             log = get_log(page, test_name)
             for when in ["setup", "test", "teardown"]:
-                assert_that(log).matches(self.LOG_LINE_REGEX.format(when))
+                assert re.match(self.LOG_LINE_REGEX.format(when), log)
 
     def test_no_log(self, test_file, pytester):
         pytester.makepyfile(test_file(assertion=True))
@@ -958,9 +927,9 @@ class TestLogCapturing:
         assert_results(page, passed=1)
 
         log = get_log(page, "test_logging")
-        assert_that(log).contains("No log output captured.")
+        assert "No log output captured." in log
         for when in ["setup", "test", "teardown"]:
-            assert_that(log).does_not_match(self.LOG_LINE_REGEX.format(when))
+            assert not re.match(self.LOG_LINE_REGEX.format(when), log)
 
     @pytest.mark.usefixtures("log_cli")
     def test_rerun(self, test_file, pytester):
@@ -971,8 +940,8 @@ class TestLogCapturing:
         assert_results(page, failed=1, rerun=2)
 
         log = get_log(page)
-        assert_that(log.count("Captured log setup")).is_equal_to(3)
-        assert_that(log.count("Captured log teardown")).is_equal_to(5)
+        assert log.count("Captured log setup") == 3
+        assert log.count("Captured log teardown") == 5
 
 
 class TestCollapsedQueryParam:
@@ -999,9 +968,9 @@ class TestCollapsedQueryParam:
         page = run(pytester)
         assert_results(page, passed=1, failed=1, error=1)
 
-        assert_that(is_collapsed(page, "test_pass")).is_true()
-        assert_that(is_collapsed(page, "test_fail")).is_false()
-        assert_that(is_collapsed(page, "test_error::setup")).is_false()
+        assert is_collapsed(page, "test_pass")
+        assert not is_collapsed(page, "test_fail")
+        assert not is_collapsed(page, "test_error::setup")
 
     @pytest.mark.parametrize("param", ["failed,error", "FAILED,eRRoR"])
     def test_specified(self, pytester, test_file, param):
@@ -1009,9 +978,9 @@ class TestCollapsedQueryParam:
         page = run(pytester, query_params={"collapsed": param})
         assert_results(page, passed=1, failed=1, error=1)
 
-        assert_that(is_collapsed(page, "test_pass")).is_false()
-        assert_that(is_collapsed(page, "test_fail")).is_true()
-        assert_that(is_collapsed(page, "test_error::setup")).is_true()
+        assert not is_collapsed(page, "test_pass")
+        assert is_collapsed(page, "test_fail")
+        assert is_collapsed(page, "test_error::setup")
 
     def test_all(self, pytester, test_file):
         pytester.makepyfile(test_file)
@@ -1019,7 +988,7 @@ class TestCollapsedQueryParam:
         assert_results(page, passed=1, failed=1, error=1)
 
         for test_name in ["test_pass", "test_fail", "test_error::setup"]:
-            assert_that(is_collapsed(page, test_name)).is_true()
+            assert is_collapsed(page, test_name)
 
     @pytest.mark.parametrize("param", ["", 'collapsed=""', "collapsed=''"])
     def test_falsy(self, pytester, test_file, param):
@@ -1027,9 +996,9 @@ class TestCollapsedQueryParam:
         page = run(pytester, query_params={"collapsed": param})
         assert_results(page, passed=1, failed=1, error=1)
 
-        assert_that(is_collapsed(page, "test_pass")).is_false()
-        assert_that(is_collapsed(page, "test_fail")).is_false()
-        assert_that(is_collapsed(page, "test_error::setup")).is_false()
+        assert not is_collapsed(page, "test_pass")
+        assert not is_collapsed(page, "test_fail")
+        assert not is_collapsed(page, "test_error::setup")
 
     @pytest.mark.parametrize("param", ["failed,error", "FAILED,eRRoR"])
    def test_render_collapsed(self, pytester, test_file, param):
@@ -1043,9 +1012,9 @@ class TestCollapsedQueryParam:
         page = run(pytester)
         assert_results(page, passed=1, failed=1, error=1)
 
-        assert_that(is_collapsed(page, "test_pass")).is_false()
-        assert_that(is_collapsed(page, "test_fail")).is_true()
-        assert_that(is_collapsed(page, "test_error::setup")).is_true()
+        assert not is_collapsed(page, "test_pass")
+        assert is_collapsed(page, "test_fail")
+        assert is_collapsed(page, "test_error::setup")
 
     def test_render_collapsed_precedence(self, pytester, test_file):
         pytester.makeini(
@@ -1062,7 +1031,7 @@ class TestCollapsedQueryParam:
         page = run(pytester, query_params={"collapsed": "skipped"})
         assert_results(page, passed=1, failed=1, error=1, skipped=1)
 
-        assert_that(is_collapsed(page, "test_pass")).is_false()
-        assert_that(is_collapsed(page, "test_fail")).is_false()
-        assert_that(is_collapsed(page, "test_error::setup")).is_false()
-        assert_that(is_collapsed(page, "test_skip")).is_true()
+        assert not is_collapsed(page, "test_pass")
+        assert not is_collapsed(page, "test_fail")
+        assert not is_collapsed(page, "test_error::setup")
+        assert is_collapsed(page, "test_skip")
Index: pytest_html-4.1.1/testing/test_unit.py
===================================================================
--- pytest_html-4.1.1.orig/testing/test_unit.py
+++ pytest_html-4.1.1/testing/test_unit.py
@@ -5,7 +5,6 @@ from pathlib import Path
 
 import pkg_resources
 import pytest
-from assertpy import assert_that
 
 pytest_plugins = ("pytester",)
 
@@ -134,7 +133,8 @@ def test_custom_css(pytester, css_file_p
     with open(str(path)) as f:
         css = f.read()
 
-    assert_that(css).contains("* " + str(css_file_path)).contains("* two.css")
+    assert "* " + str(css_file_path) in css
+    assert "* two.css" in css
 
 
 def test_custom_css_selfcontained(pytester, css_file_path, expandvar):
@@ -153,4 +153,6 @@ def test_custom_css_selfcontained(pytest
 
     with open(pytester.path / "report.html") as f:
         html = f.read()
-    assert_that(html).contains("* " + str(css_file_path)).contains("* two.css")
+
+    assert "* " + str(css_file_path) in html
+    assert "* two.css" in html
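The rewrite applied throughout the patch is mechanical: each assertpy fluent assertion in the test suite is replaced by an equivalent plain assert statement, so the tests no longer need the assertpy package. A minimal sketch of the mapping follows; the variable names are stand-ins for illustration, not identifiers from the pytest-html tests.

import re

# illustrative data only
rows = ["a", "b"]
log = "1 passed in 0.01s"

# assert_that(rows).is_length(2)               -> plain length check
assert len(rows) == 2

# assert_that(log).contains("passed")          -> membership check
assert "passed" in log

# assert_that(rows[0]).is_equal_to("a")        -> equality check
assert "a" == rows[0]

# assert_that(log).matches(r"\d+ passed")      -> re.match with the same pattern
assert re.match(r"\d+ passed", log)

# assert_that(log).does_not_match(r"failed")   -> negated re.match
assert not re.match(r"failed", log)

The regex patterns and expected values are carried over unchanged; only the assertion style differs. Note that re.match only looks for a match at the beginning of the string.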