Danny Robson
04249a8320
result.json test cases are only really required when we have default properties that need to be filled. Fall back to using the input.json file for other cases.
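For reference, the per-case layout this implies (paths inferred from the script below; the result file is optional):

    test/json/schema/validation/<group>/good/<case>.input.json
    test/json/schema/validation/<group>/good/<case>.result.json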
#!/usr/bin/env python3
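"""Run the json-schema validation test suite.

Each group directory under the source tree holds a schema.json, a 'good'
directory of documents that must validate, and a 'bad' directory of
documents that must not. Results are printed one per line in a TAP-style
'ok'/'not ok' format, and the process exits non-zero if any test failed.
"""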

from glob import glob

import os
import subprocess
import sys
import tempfile

# paths to the tools under test and to the test data; the @...@
# placeholders are filled in by CMake at configure time.
validate = "@CMAKE_CURRENT_BINARY_DIR@/json-schema"
compare = "@CMAKE_CURRENT_BINARY_DIR@/json-compare"
src = "@CMAKE_CURRENT_SOURCE_DIR@/test/json/schema"


def test_bad(schema: str, dir: str) -> int:
    """Validate every document in `dir`, expecting each one to fail."""
    failures = 0

    for input in glob(os.path.join(dir, "*.json")):
        code = subprocess.call(
            [validate, schema, input],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        )

        # a zero exit status means the document validated, which is the
        # wrong outcome for a 'bad' case.
        prefix = ""
        if code == 0:
            prefix = "not "
            failures += 1

        print(f"{prefix}ok - {input}")

    return failures


def test_good(schema: str, dir: str) -> int:
    """Validate every document in `dir`, expecting success and output that
    matches the ground truth."""
    # extract a list of inputs and truths. the truth list may be incomplete
    # (if we're not dealing with defaults) so we can't use it directly.
    inputs = glob(os.path.join(dir, "*.input.json"))
    results = glob(os.path.join(dir, "*.result.json"))

    unused = [x for x in glob(f"{dir}/*") if x not in inputs and x not in results]
    if unused:
        raise RuntimeError("unused inputs", unused)

    inputs.sort()

    failures = 0

    for test in inputs:
        # check if we have a corresponding .result.json file to act as the
        # ground truth. if not then we assume there shouldn't be any change
        # in the resulting json and use the test file itself as the truth.
        (base, _) = os.path.splitext(test)
        (base, _) = os.path.splitext(base)
        truth = f"{base}.result.json"

        if not os.path.isfile(truth):
            truth = test

        success = False

        # a two stage check:
        # * apply the schema to the test file, capturing the output
        # * check the output matches the truth file
        with tempfile.NamedTemporaryFile(delete=False) as out:
            try:
                subprocess.check_call([validate, schema, test], stdout=out)
                subprocess.check_call([compare, out.name, truth])
                success = True
            except subprocess.CalledProcessError:
                failures += 1
        os.unlink(out.name)

        prefix = "" if success else "not "
        print(f"{prefix}ok - {test}")

    return failures


def validation_group(dir: str) -> int:
    """Run the good and bad tests for a single group directory."""
    schema = os.path.join(dir, "schema.json")
    if not os.path.isfile(schema):
        raise RuntimeError(f"schema is not present, {schema}")

    failures = 0
    failures += test_good(schema, os.path.join(dir, "good"))
    failures += test_bad(schema, os.path.join(dir, "bad"))

    return failures


def test_validation(dir: str) -> int:
    """Run every group directory found under `dir`."""
    failures = 0

    groups = (os.path.join(dir, x) for x in os.listdir(dir))
    groups = (x for x in groups if os.path.isdir(x))

    for path in sorted(groups):
        failures += validation_group(path)

    return failures


if __name__ == "__main__":
    failures = 0

    failures += test_validation(os.path.join(src, "validation"))
    sys.exit(1 if failures else 0)
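The @...@ placeholders mean this script is a template; presumably it is instantiated with CMake's configure_file and registered with CTest. A minimal sketch of the wiring, assuming the template is named schema.py.in and lives next to the CMakeLists.txt:

    configure_file (schema.py.in schema.py @ONLY)
    add_test (
        NAME json-schema-validation
        COMMAND "${CMAKE_CURRENT_BINARY_DIR}/schema.py"
    )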