.PHONY: docs
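
# Installation: editable installs of SQLMesh with different sets of optional
# extras (dev tooling, web UI, Slack, CI/CD test deps, engine drivers).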
install-dev:
	pip3 install -e ".[dev,web,slack]"

install-cicd-test:
	pip3 install -e ".[dev,web,slack,cicdtest]"

install-doc:
	pip3 install -r ./docs/requirements.txt

install-engine-test:
	pip3 install -e ".[dev,web,slack,mysql,postgres,databricks,redshift,bigquery,snowflake,trino,mssql,motherduck]"

install-pre-commit:
	pre-commit install
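
# Code style: run the pre-commit hooks over the whole repo. The SKIP variable
# is honored by pre-commit and narrows the run to Python-only or UI-only hooks.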
style:
	pre-commit run --all-files

py-style:
	SKIP=prettier,eslint pre-commit run --all-files

ui-style:
	SKIP=autoflake,isort,black,mypy pre-commit run --all-files
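
# Run the doctests embedded in the core and utils modules.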
doc-test:
	PYTEST_PLUGINS=tests.common_fixtures pytest --doctest-modules sqlmesh/core sqlmesh/utils
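
# Packaging and publishing: build sdist/wheel artifacts and upload them with
# twine (the test package goes to the tobiko-private index).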
package:
	pip3 install wheel && python3 setup.py sdist bdist_wheel

publish: package
	pip3 install twine && python3 -m twine upload dist/*

package-tests:
	pip3 install wheel && python3 tests/setup.py sdist bdist_wheel

publish-tests: package-tests
	pip3 install twine && python3 -m twine upload -r tobiko-private tests/dist/*

develop:
	python3 setup.py develop
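
# Airflow example environment: thin wrappers around the Makefile in
# ./examples/airflow, using the Spark engine operator for init.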
airflow-init:
	export AIRFLOW_ENGINE_OPERATOR=spark && make -C ./examples/airflow init

airflow-run:
	make -C ./examples/airflow run

airflow-stop:
	make -C ./examples/airflow stop

airflow-clean:
	make -C ./examples/airflow clean

airflow-psql:
	make -C ./examples/airflow psql

airflow-spark-sql:
	make -C ./examples/airflow spark-sql
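
# Documentation: serve the mkdocs site locally; build or serve the API docs
# with pdoc.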
docs-serve:
	mkdocs serve

api-docs:
	python pdoc/cli.py -o docs/_readthedocs/html/

api-docs-serve:
	python pdoc/cli.py
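
# Web UI: ui-up builds and starts the containers, then opens
# http://localhost:8001 if the `open` command exists, otherwise it prints
# the URL.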
ui-up:
	docker-compose up --build -d && $(if $(shell which open), open http://localhost:8001, echo "Open http://localhost:8001 in your browser.")

ui-down:
	docker-compose down

ui-build:
	docker-compose -f docker-compose.yml -f docker-compose.build.yml run app
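
# Release helpers: remove build artifacts; dev-publish rebuilds the UI,
# cleans, and publishes in one step.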
clean-build:
	rm -rf build/ && rm -rf dist/ && rm -rf *.egg-info

dev-publish: ui-build clean-build publish
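
# Open the example notebook used by the Jupyter tests.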
jupyter-example:
	jupyter lab tests/slows/jupyter/example_outputs.ipynb
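
# Start/stop the local engine containers used by the engine-adapter tests.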
engine-up:
	docker-compose -f ./tests/core/engine_adapter/docker-compose.yaml up -d

engine-down:
	docker-compose -f ./tests/core/engine_adapter/docker-compose.yaml down
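
# Test suites are selected with pytest markers (-m) and, except for the local
# Airflow test, run in parallel via pytest-xdist (-n auto).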
fast-test:
	pytest -n auto -m "fast and not cicdonly"

slow-test:
	pytest -n auto -m "(fast or slow) and not cicdonly"

cicd-test:
	pytest -n auto -m "fast or slow"

core-fast-test:
	pytest -n auto -m "fast and not web and not github and not dbt and not airflow and not jupyter"

core-slow-test:
	pytest -n auto -m "(fast or slow) and not web and not github and not dbt and not airflow and not jupyter"

airflow-fast-test:
	pytest -n auto -m "fast and airflow"

airflow-test:
	pytest -n auto -m "(fast or slow) and airflow"

airflow-local-test:
	export AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@localhost/airflow && \
	pytest -n 1 -m "docker and airflow"

airflow-docker-test:
	make -C ./examples/airflow docker-test

airflow-local-test-with-env: develop airflow-clean airflow-init airflow-run airflow-local-test airflow-stop

airflow-docker-test-with-env: develop airflow-clean airflow-init airflow-run airflow-docker-test airflow-stop

engine-slow-test:
	pytest -n auto -m "(fast or slow) and engine"

engine-docker-test:
	pytest -n auto -m "docker and engine"

engine-remote-test:
	pytest -n auto -m "remote and engine"

engine-test:
	pytest -n auto -m "engine"

dbt-test:
	pytest -n auto -m "dbt and not cicdonly"

github-test:
	pytest -n auto -m "github"

jupyter-test:
	pytest -n auto -m "jupyter"

web-test:
	pytest -n auto -m "web"
bigquery-test:
	pytest -n auto -m "bigquery"

databricks-test:
	pytest -n auto -m "databricks"

duckdb-test:
	pytest -n auto -m "duckdb"

mssql-test:
	pytest -n auto -m "mssql"

mysql-test:
	pytest -n auto -m "mysql"

postgres-test:
	pytest -n auto -m "postgres"

redshift-test:
	pytest -n auto -m "redshift"

snowflake-test:
	pytest -n auto -m "snowflake"

spark-test:
	pytest -n auto -m "spark"

spark-pyspark-test:
	pytest -n auto -m "spark_pyspark"

trino-test:
	pytest -n auto -m "trino or trino_iceberg or trino_delta"