[Tests] Upload custom test artifacts (#572)
* make_reports * add test utils * style * style
This commit is contained in:
parent
2a8477de5c
commit
0a2c42f3e2
|
@ -41,4 +41,15 @@ jobs:
|
|||
|
||||
- name: Run all non-slow selected tests on CPU
|
||||
run: |
|
||||
python -m pytest -n 2 --max-worker-restart=0 --dist=loadfile -s tests/
|
||||
python -m pytest -n 2 --max-worker-restart=0 --dist=loadfile -s -v --make-reports=tests_torch_cpu tests/
|
||||
|
||||
- name: Failure short reports
|
||||
if: ${{ failure() }}
|
||||
run: cat reports/tests_torch_cpu_failures_short.txt
|
||||
|
||||
- name: Test suite reports artifacts
|
||||
if: ${{ always() }}
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: pr_torch_test_reports
|
||||
path: reports
|
||||
|
|
|
@ -49,4 +49,15 @@ jobs:
|
|||
env:
|
||||
HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
|
||||
run: |
|
||||
python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s tests/
|
||||
python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s -v --make-reports=tests_torch_gpu tests/
|
||||
|
||||
- name: Failure short reports
|
||||
if: ${{ failure() }}
|
||||
run: cat reports/tests_torch_gpu_failures_short.txt
|
||||
|
||||
- name: Test suite reports artifacts
|
||||
if: ${{ always() }}
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: push_torch_test_reports
|
||||
path: reports
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
import os
|
||||
import random
|
||||
import re
|
||||
import unittest
|
||||
from distutils.util import strtobool
|
||||
from pathlib import Path
|
||||
from typing import Union
|
||||
|
||||
import torch
|
||||
|
@ -92,3 +94,157 @@ def load_image(image: Union[str, PIL.Image.Image]) -> PIL.Image.Image:
|
|||
image = PIL.ImageOps.exif_transpose(image)
|
||||
image = image.convert("RGB")
|
||||
return image
|
||||
|
||||
|
||||
# --- pytest conf functions --- #

# Tracks which options have already been registered, so that tests/conftest.py and
# examples/conftest.py can both call the shared registration helper in the same
# pytest session without triggering a duplicate-option error.
pytest_opt_registered = {}


def pytest_addoption_shared(parser):
    """
    Register the custom pytest options shared across conftest files.

    Call this from each `conftest.py` inside its `pytest_addoption` wrapper.
    The module-level registry guarantees each option is added to `parser` at
    most once, even when several conftest files are loaded together.
    """
    flag = "--make-reports"
    if flag in pytest_opt_registered:
        # Already added by another conftest.py in this session - nothing to do.
        return
    parser.addoption(
        flag,
        action="store",
        default=False,
        help="generate report files. The value of this option is used as a prefix to report names",
    )
    pytest_opt_registered[flag] = 1
|
||||
|
||||
|
||||
def pytest_terminal_summary_main(tr, id):
    """
    Generate multiple reports at the end of test suite run - each report goes into a dedicated file in the current
    directory. The report files are prefixed with the test suite name.

    This function emulates --duration and -rA pytest arguments.

    This function is to be called from `conftest.py` via `pytest_terminal_summary` wrapper that has to be defined
    there.

    Args:
    - tr: `terminalreporter` passed from `conftest.py`
    - id: unique id like `tests` or `examples` that will be incorporated into the final reports filenames - this is
      needed as some jobs have multiple runs of pytest, so we can't have them overwrite each other.

    NB: this function taps into a private _pytest API and while unlikely, it could break should
    pytest do internal changes - also it calls default internal methods of terminalreporter which
    can be hijacked by various `pytest-` plugins and interfere.

    """
    from _pytest.config import create_terminal_writer

    # fall back to a generic prefix when the caller passed an empty id
    if not len(id):
        id = "tests"

    config = tr.config
    # save the original writer / traceback style / report chars so they can be
    # restored at the very end - the code below temporarily hijacks all three
    orig_writer = config.get_terminal_writer()
    orig_tbstyle = config.option.tbstyle
    orig_reportchars = tr.reportchars

    dir = "reports"
    Path(dir).mkdir(parents=True, exist_ok=True)
    # one output file per report section, e.g. reports/tests_failures_short.txt
    report_files = {
        k: f"{dir}/{id}_{k}.txt"
        for k in [
            "durations",
            "errors",
            "failures_long",
            "failures_short",
            "failures_line",
            "passes",
            "stats",
            "summary_short",
            "warnings",
        ]
    }

    # custom durations report
    # note: there is no need to call pytest --durations=XX to get this separate report
    # adapted from https://github.com/pytest-dev/pytest/blob/897f151e/src/_pytest/runner.py#L66
    dlist = []
    for replist in tr.stats.values():
        for rep in replist:
            # only test-phase reports carry a `duration` attribute
            if hasattr(rep, "duration"):
                dlist.append(rep)
    if dlist:
        # slowest first; entries faster than `durations_min` are summarized, not listed
        dlist.sort(key=lambda x: x.duration, reverse=True)
        with open(report_files["durations"], "w") as f:
            durations_min = 0.05  # sec
            f.write("slowest durations\n")
            for i, rep in enumerate(dlist):
                if rep.duration < durations_min:
                    f.write(f"{len(dlist)-i} durations < {durations_min} secs were omitted")
                    break
                f.write(f"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}\n")

    def summary_failures_short(tr):
        # Emit a condensed failures report: only the last stack frame of each failure.
        # expecting that the reports were --tb=long (default) so we chop them off here to the last frame
        reports = tr.getreports("failed")
        if not reports:
            return
        tr.write_sep("=", "FAILURES SHORT STACK")
        for rep in reports:
            msg = tr._getfailureheadline(rep)
            tr.write_sep("_", msg, red=True, bold=True)
            # chop off the optional leading extra frames, leaving only the last one
            longrepr = re.sub(r".*_ _ _ (_ ){10,}_ _ ", "", rep.longreprtext, 0, re.M | re.S)
            tr._tw.line(longrepr)
            # note: not printing out any rep.sections to keep the report short

    # use ready-made report funcs, we are just hijacking the filehandle to log to a dedicated file each
    # adapted from https://github.com/pytest-dev/pytest/blob/897f151e/src/_pytest/terminal.py#L814
    # note: some pytest plugins may interfere by hijacking the default `terminalreporter` (e.g.
    # pytest-instafail does that)

    # report failures with line/short/long styles
    config.option.tbstyle = "auto"  # full tb
    with open(report_files["failures_long"], "w") as f:
        tr._tw = create_terminal_writer(config, f)
        tr.summary_failures()

    # config.option.tbstyle = "short" # short tb
    with open(report_files["failures_short"], "w") as f:
        tr._tw = create_terminal_writer(config, f)
        summary_failures_short(tr)

    config.option.tbstyle = "line"  # one line per error
    with open(report_files["failures_line"], "w") as f:
        tr._tw = create_terminal_writer(config, f)
        tr.summary_failures()

    with open(report_files["errors"], "w") as f:
        tr._tw = create_terminal_writer(config, f)
        tr.summary_errors()

    with open(report_files["warnings"], "w") as f:
        tr._tw = create_terminal_writer(config, f)
        tr.summary_warnings()  # normal warnings
        tr.summary_warnings()  # final warnings

    tr.reportchars = "wPpsxXEf"  # emulate -rA (used in summary_passes() and short_test_summary())
    with open(report_files["passes"], "w") as f:
        tr._tw = create_terminal_writer(config, f)
        tr.summary_passes()

    with open(report_files["summary_short"], "w") as f:
        tr._tw = create_terminal_writer(config, f)
        tr.short_test_summary()

    with open(report_files["stats"], "w") as f:
        tr._tw = create_terminal_writer(config, f)
        tr.summary_stats()

    # restore:
    tr._tw = orig_writer
    tr.reportchars = orig_reportchars
    config.option.tbstyle = orig_tbstyle
|
||||
|
|
|
@ -0,0 +1,44 @@
|
|||
# Copyright 2022 The HuggingFace Team. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# tests directory-specific settings - this file is run automatically
|
||||
# by pytest before any tests are run
|
||||
|
||||
import sys
import warnings
from os.path import abspath, dirname, join


# allow having multiple repository checkouts and not needing to remember to rerun
# 'pip install -e .[dev]' when switching between checkouts and running tests.
# Resolves <repo_root>/src relative to this conftest.py and puts it near the
# front of sys.path (index 1, so the script dir at index 0 keeps precedence).
git_repo_path = abspath(join(dirname(dirname(__file__)), "src"))
sys.path.insert(1, git_repo_path)

# silence FutureWarning warnings in tests since often we can't act on them until
# they become normal warnings - i.e. the tests still need to test the current functionality
warnings.simplefilter(action="ignore", category=FutureWarning)
|
||||
|
||||
|
||||
def pytest_addoption(parser):
    """Register the shared custom pytest options (e.g. `--make-reports`) for this test suite."""
    # Deferred import: the package only becomes importable after the sys.path
    # manipulation at the top of this conftest has run.
    from diffusers.testing_utils import pytest_addoption_shared as _register_shared_options

    _register_shared_options(parser)
|
||||
|
||||
|
||||
def pytest_terminal_summary(terminalreporter):
    """Write the per-suite report files at the end of the run when `--make-reports=<prefix>` was given."""
    from diffusers.testing_utils import pytest_terminal_summary_main

    reports_prefix = terminalreporter.config.getoption("--make-reports")
    # The option defaults to False, so reports are only produced on request.
    if reports_prefix:
        pytest_terminal_summary_main(terminalreporter, id=reports_prefix)
|
Loading…
Reference in New Issue