Synchronize the build scripts/config/docs with apache-airflow repo
diff --git a/README.md b/README.md
index 6ac3d23..dc8fd40 100644
--- a/README.md
+++ b/README.md
@@ -571,6 +571,14 @@
)
```
+* Run scheduler (or the dag file processor, if you have set up a standalone dag file processor) for a few
+ parsing loops (you can pass --num-runs parameter to it or keep it running in the background). The script relies
+ on example DAGs being serialized to the DB and this only
+ happens when scheduler runs with ``core/load_examples`` set to True.
+
+* Run webserver - reachable at the host/port for the test script you want to run. Make sure it has had enough
+ time to initialize.
+
Run `python test_python_client.py` and you should see colored output showing attempts to connect and status.
diff --git a/pyproject.toml b/pyproject.toml
index a8a0e5b..59022ef 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -65,12 +65,11 @@
[tool.hatch.envs.test]
dependencies = [
"pytest-cov>=2.8.1",
- "python_dateutil >= 2.5.3",
"urllib3 >= 1.25.3",
]
[tool.hatch.envs.test.scripts]
-run-coverage = "pytest --cov-config=pyproject.toml --cov=airflow_client test --color=yes"
+run-coverage = "pytest test"
run = "run-coverage --no-cov"
[[tool.hatch.envs.test.matrix]]
@@ -103,3 +102,18 @@
include = [
"/airflow_client",
]
+
+[tool.pytest.ini_options]
+# make sure that pytest.ini is not read from pyproject.toml in parent directories
+addopts = "--color=yes --cov-config=pyproject.toml --cov=airflow_client"
+norecursedirs = [
+]
+log_level = "INFO"
+filterwarnings = [
+]
+python_files = [
+ "test_*.py",
+]
+testpaths = [
+ "test",
+]
diff --git a/test_python_client.py b/test_python_client.py
index ea0f42d..d36f6d1 100644
--- a/test_python_client.py
+++ b/test_python_client.py
@@ -26,6 +26,7 @@
from __future__ import annotations
+import sys
import uuid
import airflow_client.client
@@ -124,5 +125,6 @@
if errors:
print("\n[red]There were errors while running the script - see above for details")
+ sys.exit(1)
else:
print("\n[green]Everything went well")