# config.yml — CircleCI pipeline configuration (forked from airbnb/chronon)
---
# CircleCI 2.1 pipeline: 2.1 enables reusable executors and other modern config features.
version: 2.1

# Shared job defaults, merged into jobs via `<<: *base_defaults`.
# NOTE: `<<:` is a shallow merge; keys declared explicitly in a job override these.
base_defaults: &base_defaults
  resource_class: xlarge
  working_directory: /home/chronon/workspace

# Reusable executor that runs inside the pre-built Chronon dev image
# (contains bazel, Scala/Spark toolchains, and the Python env).
executors:
  docker_baseimg_executor:
    resource_class: xlarge
    working_directory: /home/chronon/workspace
    docker:
      - image: krisnaru/chronon:krish-test-6
# Job definitions. Test jobs run on the shared `docker_baseimg_executor`;
# the image-pull job instead merges `base_defaults` and uses a docker-cli
# image so it can drive the remote docker engine.
jobs:
  "Pull Docker Image":
    <<: *base_defaults
    docker:
      - image: docker:17.05.0-ce-git
    steps:
      - setup_remote_docker:
          # Reuse cached image layers across builds to speed up the pull.
          docker_layer_caching: true
      - checkout
      - run:
          name: Pull existing docker image
          # Best-effort warm-up of the image cache: `|| true` keeps the job
          # green even when the pull fails (e.g. image not yet published).
          command: |
            set +o pipefail
            docker pull krisnaru/chronon:krish-test-6 || true
  "Scala 12 -- Spark 3 Tests":
    executor: docker_baseimg_executor
    steps:
      - checkout
      - run:
          name: Run Spark 3.1.1 tests
          shell: /bin/bash -leuxo pipefail
          command: |
            bazel test --config scala_2.12 --config spark_3.1 //spark:test
  "Scala 13 -- Tests":
    executor: docker_baseimg_executor
    steps:
      - checkout
      - run:
          name: Run Scala 13 tests
          shell: /bin/bash -leuxo pipefail
          command: |
            bazel test --config scala_2.13 --config spark_3.2 //spark:test
  # run these separately as we need an isolated JVM to not have Spark session settings interfere with other runs
  # long term goal is to refactor the current testing spark session builder and avoid adding new single test to CI
  "Scala 13 -- Delta Lake Format Tests":
    executor: docker_baseimg_executor
    steps:
      - checkout
      - run:
          name: Run Scala 13 tests for Delta Lake format
          environment:
            # Switches the test suite's table format to Delta Lake.
            format_test: deltalake
          shell: /bin/bash -leuxo pipefail
          command: |
            bazel test --config scala_2.13 --config spark_3.1 //spark:TableUtilsFormatTest
  # "Chronon Python Lint":
  #   executor: docker_baseimg_executor
  #   steps:
  #     - checkout
  #     - run:
  #         name: Run Chronon Python lint
  #         shell: /bin/bash -leuxo pipefail
  #         command: |
  #           conda activate chronon_py
  #           cd /chronon/api/py/ai/chronon
  #           pip install importlib-metadata==4.11.4 #Install importlib-metadata < 5
  #           flake8 --extend-ignore=W605,Q000,F631,E203
  "Chronon Python Tests":
    executor: docker_baseimg_executor
    steps:
      - checkout
      - run:
          name: Run Chronon Python tests
          shell: /bin/bash -leuxo pipefail
          command: |
            bazel test //api/py:api_test
  # "Scalafmt Check":
  #   executor: docker_baseimg_executor
  #   steps:
  #     - checkout
  #     - run:
  #         name: Run ScalafmtCheck
  #         shell: /bin/bash -leuxo pipefail
  #         command: |
  #           conda activate chronon_py
  #           sbt +scalafmtCheck
# Single workflow: pull/warm the base image once, then fan out the test
# jobs in parallel, each gated on the image-pull job via `requires:`.
workflows:
  build_test_deploy:
    jobs:
      - "Pull Docker Image"
      - "Scala 12 -- Spark 3 Tests":
          requires:
            - "Pull Docker Image"
      - "Scala 13 -- Tests":
          requires:
            - "Pull Docker Image"
      - "Scala 13 -- Delta Lake Format Tests":
          requires:
            - "Pull Docker Image"
      # - "Scalafmt Check":
      #     requires:
      #       - "Pull Docker Image"
      - "Chronon Python Tests":
          requires:
            - "Pull Docker Image"
      # - "Chronon Python Lint":
      #     requires:
      #       - "Pull Docker Image"