forked from mirrors/gecko-dev
Bug 1602544 - Add some basic print tests for WebDriver, r=webdriver-reviewers,whimboo,ato
These currently are Mozilla-only, but they can be trivially moved into the upstream tests once there is agreement to take the feature. Depends on D57472. Differential Revision: https://phabricator.services.mozilla.com/D57473 --HG-- extra : moz-landing-system : lando
This commit is contained in:
parent
65314b0441
commit
899ef0f585
2 changed files with 158 additions and 0 deletions
|
|
@ -0,0 +1,46 @@
|
|||
import base64
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.support.asserts import assert_error, assert_success
|
||||
from tests.support.inline import inline
|
||||
|
||||
|
||||
def do_print(session, options):
    """Invoke the Mozilla-specific print extension command.

    Sends a POST to the session's ``moz/print`` endpoint with *options*
    as the request body and returns the raw transport response.
    """
    endpoint = "session/{session_id}/moz/print".format(**vars(session))
    return session.transport.send("POST", endpoint, options)
|
||||
|
||||
|
||||
def assert_pdf(data):
    """Assert that *data* is a decoded (non-base64) PDF document.

    Checks only the outermost framing: the magic signature at the start
    and the end-of-file marker at the end.
    """
    has_signature = data.startswith("%PDF-")
    assert has_signature, "Decoded data starts with the PDF signature"
    has_eof_marker = data.endswith("%%EOF\n")
    assert has_eof_marker, "Decoded data ends with the EOF flag"
|
||||
|
||||
|
||||
def test_no_browsing_context(session, closed_window):
    """Printing with the current top-level browsing context closed fails."""
    result = do_print(session, {})
    assert_error(result, "no such window")
|
||||
|
||||
|
||||
def test_html_document(session):
    """A simple HTML document prints to a base64-encoded PDF payload."""
    session.url = inline("Test")

    response = do_print(session, {})
    value = assert_success(response)
    # base64.decodestring has been deprecated since Python 3.1 and was
    # removed in 3.9; b64decode is the supported, equivalent replacement.
    # NOTE(review): under Python 3 b64decode returns bytes, while
    # assert_pdf compares against str literals — confirm this suite still
    # runs under Python 2, or update assert_pdf to bytes alongside.
    pdf = base64.b64decode(value)
    # TODO: Test that the output is reasonable
    assert_pdf(pdf)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("options", [
    {"orientation": 0},
    {"orientation": "foo"},
    {"scale": "1"},
    {"scale": 3},
    {"scale": 0.01},
    {"margin": {"top": "1"}},
    {"margin": {"bottom": -1}},
    {"page": {"height": False}},
    {"shrinkToFit": "false"},
])
def test_invalid(session, options):
    """Malformed print options are rejected with "invalid argument"."""
    result = do_print(session, options)
    assert_error(result, "invalid argument")
|
||||
|
|
@ -0,0 +1,112 @@
|
|||
# META: timeout=long
|
||||
import base64
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
|
||||
from tests.support.inline import inline
|
||||
from printcmd import do_print, assert_pdf
|
||||
|
||||
|
||||
@pytest.fixture
def check_user_prompt_closed_without_exception(session, create_dialog):
    """Factory fixture: assert a user prompt is auto-handled and the
    print command still succeeds with a valid PDF result."""
    def check_user_prompt_closed_without_exception(dialog_type, retval):
        session.url = inline("<input/>")

        create_dialog(dialog_type, text=dialog_type)

        response = do_print(session, {})
        value = assert_success(response)

        # base64.decodestring has been deprecated since Python 3.1 and was
        # removed in 3.9; b64decode is the supported, equivalent replacement.
        pdf = base64.b64decode(value)
        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)

        assert_pdf(pdf)

    return check_user_prompt_closed_without_exception
|
||||
|
||||
|
||||
@pytest.fixture
def check_user_prompt_closed_with_exception(session, create_dialog):
    """Factory fixture: assert the prompt is auto-handled but the print
    command itself reports "unexpected alert open"."""
    def check_user_prompt_closed_with_exception(dialog_type, retval):
        session.url = inline("<input/>")

        create_dialog(dialog_type, text=dialog_type)

        result = do_print(session, {})
        assert_error(result, "unexpected alert open")

        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)

    return check_user_prompt_closed_with_exception
|
||||
|
||||
|
||||
@pytest.fixture
def check_user_prompt_not_closed_but_exception(session, create_dialog):
    """Factory fixture: assert the prompt is left open, the print command
    errors out, and the prompt can then be dismissed manually."""
    def check_user_prompt_not_closed_but_exception(dialog_type):
        session.url = inline("<input/>")

        create_dialog(dialog_type, text=dialog_type)

        result = do_print(session, {})
        assert_error(result, "unexpected alert open")

        # The prompt must still be present and unmodified.
        assert session.alert.text == dialog_type
        session.alert.dismiss()

    return check_user_prompt_not_closed_but_exception
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", True),
    ("prompt", ""),
])
def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
    """With "accept", prompts are auto-accepted and printing succeeds."""
    check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", True),
    ("prompt", ""),
])
def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
    """With "accept and notify", prompts are accepted but the command errors."""
    check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", False),
    ("prompt", None),
])
def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
    """With "dismiss", prompts are auto-dismissed and printing succeeds."""
    check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", False),
    ("prompt", None),
])
def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
    """With "dismiss and notify", prompts are dismissed but the command errors."""
    check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
    """With "ignore", prompts stay open and the command errors."""
    check_user_prompt_not_closed_but_exception(dialog_type)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", False),
    ("prompt", None),
])
def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
    """Without a capability, the default is "dismiss and notify"."""
    check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
Loading…
Reference in a new issue