-
Notifications
You must be signed in to change notification settings - Fork 379
/
evergreen.yml
5960 lines (5485 loc) · 194 KB
/
evergreen.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#
# This file defines the tasks and platforms for WiredTiger in the
# MongoDB continuous integration system (https://evergreen.mongodb.com).
#
# NOTE(review): leading indentation appears to have been stripped from this
# copy of the file. Evergreen YAML is indentation-sensitive — restore the
# nesting from the canonical source before parsing this copy.
#######################################
# Project Settings #
#######################################
# Stepback: on failure, Evergreen re-runs the task on earlier commits to
# locate the commit that introduced the failure.
stepback: true
# Runs before every task: clean the workspace, then export the shared
# environment expansions (see the "setup environment" function below).
pre:
- func: "cleanup"
- func: "setup environment"
# Runs after every task, pass or fail: collect stack traces and process
# output, upload artifacts (suffixed per execution attempt), save any hang
# analyzer output, and clean up.
post:
- func: "dump stacktraces"
- func: "upload stacktraces"
- func: "dump stderr/stdout"
- func: "upload artifact"
vars:
postfix: -${execution}
- func: "save wt hang analyzer core/debugger files"
- func: "cleanup"
# Runs when a task exceeds its exec timeout: capture hang-analyzer output
# before the task is killed.
timeout:
- func: "run wt hang analyzer"
exec_timeout_secs: 21600 # 6 hrs
#######################################
# Functions #
#######################################
functions:
# Defines two reusable shell snippets as Evergreen expansions:
# PREPARE_TEST_ENV (full test environment: paths, tcmalloc preload,
# sanitizer options) and PREPARE_PATH (toolchain PATH only).
"setup environment":
- command: expansions.update
type: setup
params:
updates:
# The expansion is used for each task that runs a WiredTiger test. The expansions are
# created before each task and is meant to be used at the start each task. All of these
# variables are common among the build variants, if there are any specific variables that
# needs to be set, users can add onto the additional_env_vars in the variant.
- key: PREPARE_TEST_ENV
value: |
export WT_TOPDIR=$(git rev-parse --show-toplevel)
export WT_BUILDDIR=$WT_TOPDIR/cmake_build
if [ "$OS" = "Windows_NT" ]; then
export PATH=/cygdrive/c/python/Python311:/cygdrive/c/python/Python311/Scripts:$PATH
# NOTE(review): the literal parentheses in the first PYTHONPATH component
# look suspicious (they become part of the path) — confirm intended.
export PYTHONPATH="($WT_TOPDIR/lang/python/wiredtiger):$(cygpath -w $WT_TOPDIR/lang/python)"
else
export PATH=/opt/mongodbtoolchain/v4/bin:$PATH
export LD_LIBRARY_PATH=$WT_BUILDDIR
fi
# Utilize tcmalloc through preloading in the environment.
#
if [[ ${ENABLE_TCMALLOC|0} -eq 1 ]]; then
# ASan is not designed to work with tcmalloc:
# https://github.com/gperftools/gperftools/issues/810#issuecomment-230168100
#
if [[ "${CMAKE_BUILD_TYPE|}" =~ ASan ]]; then
echo "Do not enable tcmalloc for ASan builds."
exit 1
fi
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$WT_TOPDIR/TCMALLOC_LIB/lib"
export LD_PRELOAD=$WT_TOPDIR/TCMALLOC_LIB/lib/libtcmalloc.so
fi
# Create the common sanitizer options and export the specific sanitizer environment
# variables.
COMMON_SAN_OPTIONS="abort_on_error=1:disable_coredump=0"
if [[ "${CMAKE_BUILD_TYPE|}" =~ ASan ]]; then
export ASAN_OPTIONS="$COMMON_SAN_OPTIONS:unmap_shadow_on_exit=1"
export TESTUTIL_BYPASS_ASAN=1
elif [[ "${CMAKE_BUILD_TYPE|}" =~ MSan ]]; then
export MSAN_OPTIONS="$COMMON_SAN_OPTIONS:verbosity=3"
export TESTUTIL_MSAN=1
elif [[ "${CMAKE_BUILD_TYPE|}" =~ TSan ]]; then
export TSAN_OPTIONS="$COMMON_SAN_OPTIONS:verbosity=3"
elif [[ "${CMAKE_BUILD_TYPE|}" =~ UBSan ]]; then
export UBSAN_OPTIONS="$COMMON_SAN_OPTIONS:print_stacktrace=1"
fi
# Variant-specific additions are appended here via the Evergreen expansion.
${additional_env_vars}
# The expansion is used for any task that requires the mongodbtoolchain binaries.
- key: PREPARE_PATH
value: |
if [ "$OS" = "Windows_NT" ]; then
export PATH=/cygdrive/c/python/Python311:/cygdrive/c/python/Python311/Scripts:$PATH
else
export PATH=/opt/mongodbtoolchain/v4/bin:$PATH
fi
# Since Bazel (currently used in SCons) uses EngFlow's remote execution system instead of icecream,
# additional credentials need to be setup to maintain efficient compilation speed.
# Downloads the EngFlow TLS key/cert from S3 into mongo/, then (when remote
# build execution is supported on this host) writes a .bazelrc fragment
# wiring the credentials and CI metadata keywords into Bazel invocations.
"get engflow creds": &get_engflow_creds
- command: s3.get
display_name: "get engflow key"
params:
aws_key: ${engflow_key}
aws_secret: ${engflow_secret}
remote_file: engflow/engflow.key
bucket: serverengflow
local_file: "mongo/engflow.key"
- command: s3.get
display_name: "get engflow cert"
params:
aws_key: ${engflow_key}
aws_secret: ${engflow_secret}
remote_file: engflow/engflow.cert
bucket: serverengflow
local_file: "mongo/engflow.cert"
- command: shell.exec
params:
display_name: "generate evergreen engflow bazelrc"
shell: bash
working_dir: mongo
script: |
set -o errexit
set -o verbose
# Pulled from evergreen/generate_evergreen_engflow_bazelrc.sh
# FIXME-SERVER-86966: consider consolidating once prelude.sh is runnable in the perf project.
source ./evergreen/bazel_RBE_supported.sh
if bazel_rbe_supported; then
uri="https://spruce.mongodb.com/task/${task_id}?execution=${execution}"
echo "build --tls_client_certificate=./engflow.cert" > .bazelrc.evergreen_engflow_creds
echo "build --tls_client_key=./engflow.key" >> .bazelrc.evergreen_engflow_creds
echo "build --bes_keywords=engflow:CiCdPipelineName=${build_variant}" >> .bazelrc.evergreen_engflow_creds
echo "build --bes_keywords=engflow:CiCdJobName=${task_name}" >> .bazelrc.evergreen_engflow_creds
echo "build --bes_keywords=engflow:CiCdUri=$uri" >> .bazelrc.evergreen_engflow_creds
echo "build --bes_keywords=evg:project=${project}" >> .bazelrc.evergreen_engflow_creds
echo "build --workspace_status_command=./evergreen/engflow_workspace_status.sh" >> .bazelrc.evergreen_engflow_creds
fi
"get project":
command: git.get_project
type: setup
params:
directory: wiredtiger
"fetch artifacts": &fetch_artifacts
command: s3.get
type: setup
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
remote_file: wiredtiger/${build_variant}/${revision}/artifacts/${dependent_task|compile}_${build_id}.tgz
bucket: build_external
extract_to: ${destination|wiredtiger}
"fetch endian format artifacts" :
- command: s3.get
type: setup
params:
aws_key: ${aws_key}
aws_secret: ${aws_secret}
remote_file: wiredtiger/${endian_format}/${revision}/artifacts/${remote_file}.tgz
bucket: build_external
extract_to: wiredtiger/cmake_build/test/format
"fetch mongo-tests repo" :
command: shell.exec
type: setup
params:
shell: bash
script: |
set -o errexit
set -o verbose
git clone https://github.com/wiredtiger/mongo-tests
"fetch mongo repo" :
command: shell.exec
type: setup
params:
shell: bash
script: |
set -o errexit
set -o verbose
mongo_repo=https://github.com/mongodb/mongo
branch=${branch_name}
if [[ $branch =~ "mongodb-" ]]; then
mongo_branch=v$(echo $branch | cut -d'-' -f 2)
else
mongo_branch=master
fi
git clone $mongo_repo -b $mongo_branch
"import wiredtiger into mongo" :
command: shell.exec
type: setup
params:
shell: bash
script: |
set -o errexit
set -o verbose
cp -a wiredtiger mongo/src/third_party/
"compile mongodb" :
command: shell.exec
params:
shell: bash
working_dir: "mongo"
script: |
set -o errexit
set -o verbose
${PREPARE_PATH}
virtualenv -p python3 venv
source venv/bin/activate
python3 -m pip install 'poetry==1.5.1'
python3 -m poetry install --no-root --sync
./buildscripts/scons.py --variables-files=etc/scons/mongodbtoolchain_stable_gcc.vars --link-model=dynamic --ninja generate-ninja ICECC=icecc CCACHE=ccache
ninja -j$(nproc --all) install-mongod
"configure wiredtiger": &configure_wiredtiger
command: shell.exec
params:
working_dir: "wiredtiger"
shell: bash
script: |
set -o errexit
${PREPARE_PATH}
# Not setting verbose mode as we have sensitive keys that could be logged.
# Define common config flags for the tasks to make it cleaner when configuring the tasks.
# Note that the config flags are resolved prior to changing to cmake_build directory.
DEFINED_EVERGREEN_CONFIG_FLAGS="${CMAKE_BUILD_TYPE|} \
${CMAKE_INSTALL_PREFIX|-DCMAKE_INSTALL_PREFIX=$(pwd)/cmake_build/LOCAL_INSTALL} \
${CMAKE_TOOLCHAIN_FILE|-DCMAKE_TOOLCHAIN_FILE=../cmake/toolchains/mongodbtoolchain_stable_gcc.cmake} \
${ENABLE_LAZYFS|} \
${NONSTANDALONE|} \
${ENABLE_SHARED|} \
${ENABLE_STATIC|} \
${HAVE_BUILTIN_EXTENSION_LZ4|} \
${HAVE_BUILTIN_EXTENSION_SNAPPY|} \
${HAVE_BUILTIN_EXTENSION_ZLIB|} \
${HAVE_BUILTIN_EXTENSION_ZSTD|} \
${HAVE_UNITTEST} \
${CODE_COVERAGE_FLAGS} \
${NON_BARRIER_DIAGNOSTIC_YIELDS|} \
${HAVE_DIAGNOSTIC|} \
${GNU_C_VERSION|} \
${GNU_CXX_VERSION|} \
${CLANG_C_VERSION|} \
${CLANG_CXX_VERSION|} \
${ENABLE_AZURE|} \
${ENABLE_CPPSUITE|} \
${ENABLE_GCP|} \
${ENABLE_S3|} \
${IMPORT_AZURE_SDK|} \
${IMPORT_GCP_SDK|} \
${IMPORT_S3_SDK|} \
${SPINLOCK_TYPE|} \
${ENABLE_COLORIZE_OUTPUT|-DENABLE_COLORIZE_OUTPUT=0} \
${CC_OPTIMIZE_LEVEL|}"
# The RHEL PPC platform does not have ZSTD. Strip it out.
if [ "${build_variant|}" = "rhel8-ppc" ] && [[ "$DEFINED_EVERGREEN_CONFIG_FLAGS" =~ (\-DHAVE_BUILTIN_EXTENSION_ZSTD=1) ]]; then
DEFINED_EVERGREEN_CONFIG_FLAGS=${DEFINED_EVERGREEN_CONFIG_FLAGS/\-DHAVE_BUILTIN_EXTENSION_ZSTD=1/}
fi
if [[ "${build_variant|}" =~ "macos-1300" ]]; then
# For mac builds, we want explicitly tell cmake which python to use, as
# well as the matching library directory and header files. The find_libpython
# module gives us the library.
SYSPY=${python_binary}
$SYSPY -mvenv venv
source venv/bin/activate
pip3 install find_libpython
SYSPYLIB=`find_libpython`
SYSPYINCDEF=
# We have the shared library to link to, it may be named simply 'Python3' or 'Python'.
# If that's the case, use the associated dylib symlink found in an expected relative
# location. Also get the location of the header files. We'll give this all to cmake.
base=$(basename $SYSPYLIB)
if [ "$base" = 'Python3' -o "$base" = 'Python' ]; then
SYSPYDIR=$(dirname $SYSPYLIB)
NSYSPYLIB=$(ls $SYSPYDIR/lib/libpython*.dylib 2>/dev/null | head -1)
if [ -f "$NSYSPYLIB" ]; then
SYSPYLIB=$NSYSPYLIB
fi
if [ -d "$SYSPYDIR/Headers" ]; then
SYSPYINCDEF="$SYSPYDIR/Headers"
fi
fi
if [ "${build_variant|}" = "macos-1300-arm64" ]; then
DEFINED_EVERGREEN_CONFIG_FLAGS="$DEFINED_EVERGREEN_CONFIG_FLAGS -DPython3_EXECUTABLE=$SYSPY -DPython3_LIBRARY=$SYSPYLIB -DPython3_INCLUDE_DIR=$SYSPYINCDEF"
else
DEFINED_EVERGREEN_CONFIG_FLAGS="$DEFINED_EVERGREEN_CONFIG_FLAGS -DPYTHON_EXECUTABLE:FILEPATH=$SYSPY -DPYTHON_LIBRARY=$SYSPYLIB -DPYTHON_INCLUDE_DIR=$SYSPYINCDEF"
fi
fi
if [ "$OS" = "Windows_NT" ]; then
# Use the Windows powershell script to configure the CMake build.
# We execute it in a powershell environment as its easier to detect and source the Visual Studio
# toolchain in a native Windows environment. We can't easily execute the build in a cygwin environment.
echo "Using config flags $DEFINED_EVERGREEN_CONFIG_FLAGS ${windows_configure_flags|}"
powershell.exe -NonInteractive '.\test\evergreen\build_windows.ps1' -configure 1 $DEFINED_EVERGREEN_CONFIG_FLAGS ${windows_configure_flags|}
else
echo "Using config flags $DEFINED_EVERGREEN_CONFIG_FLAGS ${posix_configure_flags|}"
# Fetch the gperftools library if needed. This will also get tcmalloc.
if [[ ${ENABLE_TCMALLOC|0} -eq 1 ]] && [ ! -d "./automation-scripts" ]; then
is_cmake_build=true
git clone git@github.com:wiredtiger/automation-scripts.git
. automation-scripts/evergreen/find_gperftools.sh ${s3_access_key} ${s3_secret_key} ${build_variant} $is_cmake_build
fi
# Compiling with CMake.
echo "Find CMake"
. test/evergreen/find_cmake.sh
# If we've fetched the wiredtiger artifact from a previous compilation/build, it's best to remove
# the previous build directory so we can create a fresh configuration. We can't use the the previous
# CMake Cache configuration as its likely it will have absolute paths related to the previous build machine.
echo "Remove the cmake_build directory, if it already exists"
if [ -d cmake_build ]; then rm -r cmake_build; fi
echo "Create a new cmake_build directory"
mkdir -p cmake_build
cd cmake_build
echo "Call CMake"
$CMAKE $DEFINED_EVERGREEN_CONFIG_FLAGS ${posix_configure_flags|} -G "${cmake_generator|Ninja}" ./..
echo "Completed CMake"
fi
"python config check":
command: shell.exec
type: setup
params:
working_dir: "wiredtiger/cmake_build"
shell: bash
script: |
set -o errexit
# Confirm that the Python binary matches the version of that configured by CMake.
${python_binary|python3} ../test/evergreen/python_version_check.py -v -c ./CMakeCache.txt -s ${python_config_search_string}
"make wiredtiger": &make_wiredtiger
command: shell.exec
params:
working_dir: "wiredtiger"
shell: bash
script: |
set -o errexit
set -o verbose
echo "Starting 'make wiredtiger' step"
${PREPARE_PATH}
if [ "$OS" = "Windows_NT" ]; then
# Use the Windows powershell script to execute Ninja build (can't execute directly in a cygwin environment).
powershell.exe '.\test\evergreen\build_windows.ps1 -build 1'
else
# Compiling with CMake generated Ninja file.
cd cmake_build
if [ "${cmake_generator|Ninja}" = "Ninja" ]; then
ninja -j ${num_jobs} 2>&1
else
make -j ${num_jobs} 2>&1
fi
fi
echo "Ending 'make wiredtiger' step"
"compile wiredtiger":
- *configure_wiredtiger
- *make_wiredtiger
"dump stacktraces": &dump_stacktraces
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build"
shell: bash
script: |
set -o errexit
set -o verbose
${python_binary|python3} ../test/evergreen/print_stack_trace.py
"upload stacktraces": &upload_stacktraces
command: s3.put
type: setup
params:
aws_secret: ${aws_secret}
aws_key: ${aws_key}
local_files_include_filter:
- wiredtiger/cmake_build/*stacktrace.txt
bucket: build_external
permissions: public-read
content_type: text/plain
remote_file: wiredtiger/${build_variant}/${revision}/artifacts/${task_name}_${build_id}/
"run data validation stress test checkpoint":
- *fetch_artifacts
- command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/checkpoint"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
../../../tools/run_parallel.sh 'nice ./recovery-test.sh "${data_validation_stress_test_args} ${run_test_checkpoint_args}" WT_TEST.$t test_checkpoint' 120
"run tiered storage test":
- command: shell.exec
params:
working_dir: "wiredtiger/cmake_build"
shell: bash
include_expansions_in_env:
- aws_sdk_s3_ext_access_key
- aws_sdk_s3_ext_secret_key
script: |
set -o errexit
${PREPARE_TEST_ENV}
# Set the Azure credentials using config variable.
export AZURE_STORAGE_CONNECTION_STRING="${azure_sdk_ext_access_key}"
# GCP requires a path to a credentials file for authorization. To not expose the private
# information within the file, we use a placeholder private variable which are replaced
# in the command line with the evergreen expansion variables and stored in a temporary
# file.
file=$(mktemp --suffix ".json")
# Use '|' as the delimiter instead of default behaviour because the private key contains
# slash characters.
sed -e 's|gcp_project_id|${gcp_sdk_ext_project_id}|' \
-e 's|gcp_private_key|'"${gcp_sdk_ext_private_key}"'|' \
-e 's|gcp_private_id|${gcp_sdk_ext_private_key_id}|' \
-e 's|gcp_client_email|${gcp_sdk_ext_client_email}|' \
-e 's|gcp_client_id|${gcp_sdk_ext_client_id}|' \
-e 's|gcp_client_x509_cert_url|${gcp_sdk_ext_client_x509_cert_url}|' ../test/evergreen/gcp_auth.json > $file
export GOOGLE_APPLICATION_CREDENTIALS="$file"
virtualenv -p python3 venv
source venv/bin/activate
pip3 install boto3
pip3 install azure-storage-blob
pip3 install google-cloud-storage
# Run Python testing for all tiered tests.
python3 ../test/suite/run.py -j $(nproc) ${tiered_storage_test_name}
"compile wiredtiger docs":
- command: shell.exec
params:
working_dir: "wiredtiger"
shell: bash
script: |
set -o errexit
set -o verbose
# Check if specific branches are provided to the function through the expansion variable
# defined in the documentation-update build variant. If none are specified, use the
# current branch.
if [ -z ${doc_update_branches} ]; then
branches=$(git rev-parse --abbrev-ref HEAD)
else
branches=${doc_update_branches}
fi
# Because of Evergreen's expansion syntax, this is used to process each branch separately.
IFS=,
for branch in $branches; do
echo "Checking out branch $branch ..."
git checkout $branch
# Java API is removed in newer branches via WT-6675.
if [ $branch == "mongodb-4.2" ]; then
pushd build_posix
bash reconf
../configure CFLAGS="-DMIGHT_NOT_RUN -Wno-error" --enable-java --enable-python --enable-strict
(cd lang/python && make ../../../lang/python/wiredtiger_wrap.c)
(cd lang/java && make ../../../lang/java/wiredtiger_wrap.c)
elif [ $branch == "mongodb-5.0" ] || [ $branch == "mongodb-4.4" ]; then
pushd build_posix
bash reconf
../configure CFLAGS="-DMIGHT_NOT_RUN -Wno-error" --enable-python --enable-strict
(cd lang/python && make ../../../lang/python/wiredtiger_wrap.c)
else
. test/evergreen/find_cmake.sh
if [ -d cmake_build ]; then rm -r cmake_build; fi
mkdir -p cmake_build
pushd cmake_build
# Adding -DENABLE_PYTHON=1 -DENABLE_STRICT=1 as 6.0 does not default these like develop.
$CMAKE -DCMAKE_C_FLAGS="-DMIGHT_NOT_RUN -Wno-error" -DENABLE_PYTHON=1 -DENABLE_STRICT=1 ../.
make -C lang/python -j ${num_jobs}
fi
# Pop to root project directory.
popd
# Generate WiredTiger documentation.
(cd dist && bash s_docs && echo "The documentation for $branch was successfully generated.")
# Save generated documentation
mv docs docs-$branch
done
# Checkout the default ("develop") branch again to leave wiredtiger/ in the same state we started with
git checkout develop
"update wiredtiger docs":
- command: shell.exec
type: setup
params:
shell: bash
script: |
# Use a single function to update the documentation of each supported WiredTiger branch.
# This is useful as not all branches have a dedicated Evergreen project. Furthermore, the
# documentation-update task is not triggered by every commit. We rely on the activity of
# the develop branch to update the documentation of all supported branches.
set -o errexit
set -o verbose
if [[ "${branch_name}" != "develop" ]]; then
echo "We only run the documentation update task on the WiredTiger (develop) Evergreen project."
exit 0
fi
git clone git@github.com:wiredtiger/wiredtiger.github.com.git
cd wiredtiger.github.com
# Branches to update are defined through an expansion variable.
branches=${doc_update_branches}
# Go through each branch to stage the doc changes.
IFS=,
for branch in $branches; do
# Synchronize the generated documentation with the current one.
echo "Synchronizing documentation for branch $branch ..."
rsync -aq ../wiredtiger/docs-$branch/ $branch/ --delete
# Commit and push the changes if any.
if [[ $(git status "$branch" --porcelain) ]]; then
git add $branch
git commit -m "Update auto-generated docs for $branch" \
--author="doc-build-bot <svc-wiredtiger-doc-build@10gen.com>"
else
echo "No documentation changes for $branch."
fi
done
- command: shell.exec
type: setup
params:
shell: bash
silent: true
script: |
set -o errexit
# We could have exited the previous command for the same reason.
if [[ "${branch_name}" != "develop" ]]; then
echo "We only run the documentation update task on the WiredTiger (develop) Evergreen project."
exit 0
fi
# Push the above-generated commit
${PREPARE_PATH}
virtualenv -p python3 venv
source venv/bin/activate
python -m pip install PyGithub
export GITHUB_OWNER="wiredtiger"
export GITHUB_REPO="wiredtiger.github.com"
export GITHUB_APP_ID="${doc_update_github_app_id}"
export GITHUB_APP_PRIVATE_KEY="${doc_update_github_app_private_key}"
# Make sure the below script is called under the default ("develop") branch.
(cd wiredtiger && git checkout develop)
python wiredtiger/test/evergreen/doc_update.py
"make check directory":
command: shell.exec
params:
working_dir: "wiredtiger"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
. test/evergreen/find_cmake.sh
cd cmake_build/${directory}
$CTEST ${extra_args} --output-on-failure 2>&1
"make check all":
command: shell.exec
params:
working_dir: "wiredtiger"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
. test/evergreen/find_cmake.sh
cd cmake_build
echo "Using number of parallel processes '-j ${num_jobs}' for 'make check all'"
$CTEST -L check -j ${num_jobs} --output-on-failure ${extra_args|} 2>&1
  # The following cppsuite tasks define a greater overall task.
  "cppsuite test run": &cppsuite_test_run
    command: shell.exec
    params:
      # The tests need to be executed in the cppsuite directory as some required libraries have
      # their paths defined relative to this directory.
      # The below script saves the exit code from the test to use it later in this function. By
      # doing this we can define our own custom artifact upload task without it being cancelled by
      # the test failing.
      # Additionally if the test fails perf statistics won't be uploaded as they may be invalid
      # due to the test failure.
      working_dir: "wiredtiger/cmake_build/test/cppsuite"
      shell: bash
      script: |
        # Note: no errexit on purpose — we must capture the test's exit status.
        set -o verbose
        ${PREPARE_TEST_ENV}
        ./run -t ${test_name} -C '${test_config}' -f ${test_config_filename} -l 2
        exit_code=$?
        # Persist the status; "cppsuite remove dir" reads it back after the
        # artifacts have been uploaded and fails the task then, if needed.
        echo "$exit_code" > cppsuite_exit_code
        if [ "$exit_code" != 0 ]; then
          # On failure, write a placeholder perf result so perf.send still has
          # a well-formed JSON file with no metrics.
          echo "[{\"info\":{\"test_name\": \"${test_name}\"},\"metrics\": []}]" > ${test_name}.json
        fi
        # Always succeed here; the saved exit code decides the final result.
        exit 0
  # The following cppsuite tasks define a greater overall task.
  "cppsuite test run all": &cppsuite_test_run_all
    command: shell.exec
    params:
      # The tests need to be executed in the cppsuite directory as some required libraries have
      # their paths defined relative to this directory.
      working_dir: "wiredtiger/cmake_build/test/cppsuite"
      shell: bash
      script: |
        set -o verbose
        ${PREPARE_TEST_ENV}
        # Run the whole suite (no -t selection); the runner's exit status is
        # the last command here and so becomes the command's result.
        ./run -C '${test_config}' -l 2
  # Delete unnecessary data from the upload.
  "cppsuite test remove files": &cppsuite_remove_files
    command: shell.exec
    type: setup
    params:
      shell: bash
      script: |
        # Trim the build tree before archiving: drop examples and benchmarks,
        # and keep only cppsuite under the test directory.
        rm -rf wiredtiger/cmake_build/examples
        rm -rf wiredtiger/cmake_build/bench
        # Move cppsuite out of the way, wipe test/, then move it back so the
        # archived layout stays wiredtiger/cmake_build/test/cppsuite.
        mv wiredtiger/cmake_build/test/cppsuite wiredtiger/cmake_build/
        rm -rf wiredtiger/cmake_build/test/
        mkdir wiredtiger/cmake_build/test/
        mv wiredtiger/cmake_build/cppsuite wiredtiger/cmake_build/test/cppsuite
  # Custom cppsuite archive tasks.
  # Pack the (already trimmed) build tree into archive.tgz for upload.
  "cppsuite archive": &cppsuite_archive
    command: archive.targz_pack
    type: setup
    params:
      target: archive.tgz
      source_dir: wiredtiger/cmake_build/
      include:
        - "./**"
  # Custom cppsuite s3 artifact upload task.
  # Uploads the archive produced by "cppsuite archive" to the public build bucket;
  # ${postfix|} lets callers disambiguate multiple uploads from one task.
  "cppsuite s3 put": &cppsuite_s3_put
    command: s3.put
    type: setup
    params:
      aws_secret: ${aws_secret}
      aws_key: ${aws_key}
      local_file: archive.tgz
      bucket: build_external
      permissions: public-read
      content_type: application/tar
      display_name: cppsuite-test
      remote_file: wiredtiger/${build_variant}/${revision}/artifacts/${task_name}_${build_id}${postfix|}.tgz
# FIXME-WT-8538 This task prevents us from saving the same artifacts to evergreen twice. It can be
# removed when we implement a generalised approach in WT-8538
"cppsuite remove dir": &cppsuite_remove_dir
command: shell.exec
params:
shell: bash
script: |
set -o verbose
if [ -f wiredtiger/cmake_build/test/cppsuite/cppsuite_exit_code ]; then
exit_code=`cat wiredtiger/cmake_build/test/cppsuite/cppsuite_exit_code`
else
exit_code=0
fi
rm -rf wiredtiger
exit "$exit_code"
  # The cppsuite test per task function. Doesn't upload perf statistics to evergreen.
  "cppsuite test":
    - *cppsuite_test_run
    # Since we later remove the WiredTiger folder, we need to check for core dumps now.
    - *dump_stacktraces
    - *upload_stacktraces
    # Cleanup tasks.
    - *cppsuite_remove_files
    - *cppsuite_archive
    - *cppsuite_s3_put
    # Runs last: exits with the exit code the test run saved, so a test failure
    # surfaces only after artifacts have been uploaded.
    - *cppsuite_remove_dir
  # This cppsuite test function uploads perf statistics and should only be used on perf variants.
  "cppsuite perf test":
    - *cppsuite_test_run
    # Since we later remove the WiredTiger folder, we need to check for core dumps now.
    - *dump_stacktraces
    - *upload_stacktraces
    # Upload the per-test perf JSON written by the test run (a placeholder with
    # empty metrics when the test failed).
    - command: perf.send
      type: setup
      params:
        file: ./wiredtiger/cmake_build/test/cppsuite/${test_name}.json
    # Cleanup tasks.
    - *cppsuite_remove_files
    - *cppsuite_archive
    - *cppsuite_s3_put
    - *cppsuite_remove_dir
"wt2853_perf test":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/bench/wt2853_perf"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
./test_wt2853_perf ${wt2853_perf_args}
"csuite test":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
$(pwd)/test/csuite/${test_name}/test_${test_name} ${test_args|} 2>&1
"unit test":
command: shell.exec
params:
working_dir: "wiredtiger"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
cd cmake_build
threads_command=""
if [[ -n "${num_jobs}" ]]; then
echo "Using num_jobs '-j ${num_jobs}' for 'unit test'"
threads_command="-j ${num_jobs}"
fi
if [ ${check_coverage|false} = true ]; then
${python_binary|python3} ../test/suite/run.py ${unit_test_args|-v 2} ${unit_test_variant_args} $threads_command 2>&1 || echo "Ignoring failed test as we are checking test coverage"
else
${python_binary|python3} ../test/suite/run.py ${unit_test_args|-v 2} ${unit_test_variant_args} $threads_command 2>&1
fi
"code coverage analysis":
command: shell.exec
params:
working_dir: ${working_dir}
shell: bash
script: |
set -o errexit
set -o verbose
echo "Performing code coverage analysis in ${working_dir}"
${PREPARE_PATH}
virtualenv -p python3 venv
source venv/bin/activate
pip3 install lxml==4.8.0 Pygments==2.11.2 Jinja2==3.0.3 gcovr==5.0
mkdir -p coverage_report
GCOV=/opt/mongodbtoolchain/v4/bin/gcov gcovr -f src -j 4 --html-self-contained --html-details coverage_report/2_coverage_report.html --json-summary-pretty --json-summary coverage_report/1_coverage_report_summary.json --json coverage_report/full_coverage_report.json
${python_binary|python3} test/evergreen/code_coverage_analysis.py -s coverage_report/1_coverage_report_summary.json -t time.txt
# Generate Atlas compatible format report.
if [ ! -z ${generate_atlas_format} ]; then
${python_binary|python3} test/evergreen/code_coverage_analysis.py -c component_coverage -o coverage_report/atlas_out_code_coverage.json -s coverage_report/1_coverage_report_summary.json -t time.txt
fi
"code coverage publish report":
command: s3.put
type: setup
params:
aws_secret: ${aws_secret}
aws_key: ${aws_key}
local_files_include_filter: wiredtiger/coverage_report/*
bucket: build_external
permissions: public-read
content_type: text/html
remote_file: wiredtiger/${build_variant}/${revision}/${task_name}_${build_id}-${execution}/
"code coverage publish main page":
command: s3.put
type: setup
params:
aws_secret: ${aws_secret}
aws_key: ${aws_key}
local_file: wiredtiger/coverage_report/2_coverage_report.html
bucket: build_external
permissions: public-read
content_type: text/html
display_name: "Coverage report main page"
remote_file: wiredtiger/${build_variant}/${revision}/${task_name}_${build_id}-${execution}/1_coverage_report_main.html
"format test":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/format"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
# Fail, show the configuration file.
fail() {
echo "======= FAILURE =========="
[ -f RUNDIR/CONFIG ] && cat RUNDIR/CONFIG
exit 1
}
for i in $(seq ${times|1}); do
./t -c ${config|../../../test/format/CONFIG.stress} ${trace_args|-T bulk,txn,retain=50} ${extra_args|} || fail
done
"format test predictable":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/format"
shell: bash
script: |
# To test predictable replay, we run test/format three times with the same data seed
# each time, and compare the keys and values found in the WT home directories.
# The first run is a timed one. When it's completed, we get the run's stable timestamp,
# and do the subsequent runs up to that stable timestamp. This, along with predictable
# replay using the same data seed, should guarantee we have equivalent data created.
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
# Get a random value with leading zeroes removed, /bin/sh version.
rando() {
tr -cd 0-9 </dev/urandom | head -c 5 | sed -e 's/0*\(.\)/\1/'
}
# Fail, showing the configuration file.
fail() {
echo "======= FAILURE =========="
for file; do
if [ -f "$file" ]; then
echo Contents of "$file":
cat "$file"
echo "================"
fi
done
exit 1
}
runtime=3 # minutes
config=../../../test/format/CONFIG.replay
for i in $(seq ${times}); do
echo Iteration $i/${times}
x2=$(rando)
x3=$(rando)
rm -rf RUNDIR_1 RUNDIR_2 RUNDIR_3
first_run_args="-c $config runs.timer=$runtime"
./t -h RUNDIR_1 $first_run_args ${extra_args} || fail RUNDIR_1/CONFIG 2>&1
stable_hex=$(../../../tools/wt_timestamps RUNDIR_1 | sed -e '/stable=/!d' -e 's/.*=//')
ops=$(echo $((0x$stable_hex)))
# Do the second run up to the stable timestamp, using the same data seed,
# but with a different extra seed. Compare it when done.
common_args="-c RUNDIR_1/CONFIG runs.timer=0 runs.ops=$ops"
./t -h RUNDIR_2 $common_args random.extra_seed=$x2 || fail RUNDIR_2/CONFIG 2>&1
../../../tools/wt_cmp_dir RUNDIR_1 RUNDIR_2 || fail RUNDIR_1/CONFIG RUNDIR_2/CONFIG 2>&1
# Do the third run up to the stable timestamp, using the same data seed,
# but with a different extra seed. Compare it to the second run when done.
./t -h RUNDIR_3 $common_args random.extra_seed=$x3 || fail RUNDIR_3/CONFIG 2>&1
../../../tools/wt_cmp_dir RUNDIR_2 RUNDIR_3 || fail RUNDIR_2/CONFIG RUNDIR_3/CONFIG 2>&1
done
"format test script":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/format"
shell: bash
add_expansions_to_env: true
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
${additional_san_vars}
${format_test_setting}
for i in $(seq ${times|1}); do
./format.sh -j ${num_jobs} ${format_test_script_args|} 2>&1
done
"format test tiered":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/format"
shell: bash
script: |
# To make sure we have plenty of flush_tier calls, we set the flush frequency high
# and the time between checkpoints low. We specify only using tables, as that's the
# only kind of URI that participates in tiered storage.
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
format_args="tiered_storage.storage_source=dir_store tiered_storage.flush_frequency=60 checkpoint.wait=15 runs.source=table runs.timer=10 runs.in_memory=0"
for i in $(seq ${times}); do
echo Iteration $i/${times}
rm -rf RUNDIR
./t $format_args ${extra_args}
./t -R $format_args ${extra_args}
done
"many dbs test":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/manydbs"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
./test_manydbs ${many_db_args|} 2>&1
"thread test":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/thread"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
./t ${thread_test_args|} 2>&1
"recovery stress test script":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/csuite"
shell: bash
script: |
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
for i in $(seq ${times|1}); do
# Run the various combinations of args. Let time and threads be random. Add a
# timing stress to test_timestamp_abort every other run.
if [ $(( $i % 2 )) -eq 0 ]; then
test_timestamp_abort_args=-s
else
test_timestamp_abort_args=
fi
# Run current version with write-no-sync txns.
./random_abort/test_random_abort 2>&1
./timestamp_abort/test_timestamp_abort $test_timestamp_abort_args 2>&1
# Current version with memory-based txns (MongoDB usage).
./random_abort/test_random_abort -m 2>&1
./timestamp_abort/test_timestamp_abort -m $test_timestamp_abort_args 2>&1
# V1 log compatibility mode with write-no-sync txns.
./random_abort/test_random_abort -C 2>&1
./timestamp_abort/test_timestamp_abort -C $test_timestamp_abort_args 2>&1
# V1 log compatibility mode with memory-based txns.
./random_abort/test_random_abort -C -m 2>&1
./timestamp_abort/test_timestamp_abort -C -m $test_timestamp_abort_args 2>&1
./truncated_log/test_truncated_log ${truncated_log_args|} 2>&1
# Just let the system take a breath
sleep 10s
done
"schema abort predictable":
command: shell.exec
params:
working_dir: "wiredtiger/cmake_build/test/csuite/schema_abort"
shell: bash
script: |
# Get a random value with leading zeroes removed, /bin/sh version.
rando() {
tr -cd 0-9 </dev/urandom | head -c 5 | sed -e 's/0*\(.\)/\1/'
}
# Run schema_abort in a way that can test predictable replay.
set -o errexit
set -o verbose
${PREPARE_TEST_ENV}
runtime=20 # seconds
nthreads=5
toolsdir=../../../../tools
wtutil=../../../wt
r=$(rando)$(rando)
x0=$(rando)$(rando)
rm -rf RUNDIR_0
# The first run is for calibration only. We just want to run for the designated
# time and get an appropriate stop timestamp that can be used in later runs.
calibration_run_args="-PSD$r,E$x0 -T $nthreads -t $runtime"
./test_schema_abort -p -h RUNDIR_0 $calibration_run_args || exit 1
echo "Finished calibration run"
stable_hex=$($toolsdir/wt_timestamps RUNDIR_0/WT_HOME | sed -e '/stable=/!d' -e 's/.*=//')
op_count=$(echo $((0x$stable_hex)))
for i in $(seq ${times}); do
echo Iteration $i/${times}
x1=$(rando)$(rando)
x2=$(rando)$(rando)
rm -rf RUNDIR_1 RUNDIR_2
# Run with up to a slightly different timestamp for each iteration.
ops=$(($op_count + $(rando) % 100))
# Do two runs up to the stable timestamp, using the same data seed,
# but with a different extra seed. Compare it when done.