# Newer
# Older
workflow:
  # See https://docs.gitlab.com/ee/ci/jobs/job_control.html#avoid-duplicate-pipelines
  rules:
    # To avoid duplicate pipelines we disable merge request events,
    # leaving only pushes and manual triggering.
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: never
    - if: $CI_PIPELINE_SOURCE == "push"
    - if: $CI_PIPELINE_SOURCE == "web"
REGISTRY: docker-public.binary.picodata.io
BASE_IMAGE: ${REGISTRY}/picodata-build-base
CARGO_HOME: /shared-storage/picodata/.cargo
CACHE_ARCHIVE: /shared-storage/picodata/cache.tar
# Helps to tolerate spurious network failures
GET_SOURCES_ATTEMPTS: 3
# job:rules explained:
#
# - if build-base changes on master branch (compared to HEAD~1)
#   * build-base-image (with tag latest) and push
#   * test (on base-image:latest)
# - if build-base changes on development branch (compared to master)
#   * build-base-image (with tag sha)
#   * test (on base-image:sha)
# - else (if build-base doesn't change)
#   * skip build-base-image
#   * just test (on base-image:latest)
#
# Anchor syntax explained here:
# https://docs.gitlab.com/ee/ci/yaml/yaml_optimization.html
#
.rules:
  # Rule: the build-base inputs changed on the default branch.
  - &if-build-base-changes-on-master-branch
    if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    changes:
      # implies compare_to HEAD~1
      paths: &build-base-changes-paths
        - docker-build-base/**
        - .gitlab-ci.yml
  # Rule: the same paths changed on a development branch (vs master).
  - &if-build-base-changes-on-dev-branch
    if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
    changes:
      compare_to: master
      paths: *build-base-changes-paths
  # Catch-all rule: matches unconditionally.
  - &else {}
rules:
- <<: *if-build-base-changes-on-master-branch
variables:
BASE_IMAGE_TAG: latest
- <<: *if-build-base-changes-on-dev-branch
variables:
BASE_IMAGE_TAG: ${CI_COMMIT_SHA}
variables:
GIT_DEPTH: 1
GIT_STRATEGY: fetch
GIT_SUBMODULE_STRATEGY: none
-t ${BASE_IMAGE}:${BASE_IMAGE_TAG}
-f ./docker-build-base/Dockerfile
./docker-build-base
- |
# Push image to registry
if [ "${CI_COMMIT_BRANCH}" == "${CI_DEFAULT_BRANCH}" ]; then
echo "Pushing ${BASE_IMAGE}:${BASE_IMAGE_TAG}"
mkdir -p $CI_PROJECT_DIR/.docker
echo $DOCKER_AUTH_RW > $CI_PROJECT_DIR/.docker/config.json
docker --config $CI_PROJECT_DIR/.docker/ push ${BASE_IMAGE}:${BASE_IMAGE_TAG}
else
echo "Skip pushing image on a non-master branch"
variables:
GIT_DEPTH: 100
GIT_SUBMODULE_STRATEGY: recursive
before_script:
# Gitlab CI implicitly clones specific refs (e.g. `refs/pipelines/xxxxxxx`),
# but it doesn't imply fetching tags. We clone them manually with the
# `git fetch` command.
#
# Tags in `tarantool-sys` and `luajit` submodules are necessary for
# the build scripts. Without them the job fails.
# Fetch tags
ci-log-section start "fetch-submodule-tags" Fetching tags for submodules
for s in tarantool-sys tarantool-sys/third_party/luajit; do
echo "Fetching tag for $s"
pushd $s
until git describe; do git fetch --deepen 100; done
popd
done
ci-log-section end "fetch-submodule-tags"
# Gitlab CI caching is shit. So we implement it manually
- |
# Restore cache
if [ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" ]; then
echo "Skip restoring cache on the master branch"
elif git diff origin/"$CI_DEFAULT_BRANCH" "$CI_COMMIT_SHA" --submodule=short | grep '^[+-]Subproject commit'; then
echo "Skip restoring cache because submodule(s) changed"
elif [ -f "${CACHE_ARCHIVE}" ]; then
ci-log-section start "restore-cache" Restoring cache from ${CACHE_ARCHIVE} ...
tar -xf ${CACHE_ARCHIVE}
echo "Ok"
du -sh ${CACHE_PATHS} || true
ci-log-section end "restore-cache"
else
echo "No cache found"
fi
test-linux:
extends: .test
tags:
rules:
- <<: *if-build-base-changes-on-master-branch
variables:
BASE_IMAGE_TAG: latest
- <<: *if-build-base-changes-on-dev-branch
variables:
BASE_IMAGE_TAG: ${CI_COMMIT_SHA}
- <<: *else
variables:
BASE_IMAGE_TAG: latest
image:
name: ${BASE_IMAGE}:${BASE_IMAGE_TAG}
pull_policy: if-not-present
- |
# Check rust version consistency
ci-log-section start "rust-version" Checking rust version consistency ...
exec 3>&1 # duplicate stdout fd
grep_rust_version() { echo "$1: $(sed -nr "s/.*rust-version = \"(\S+)\".*/\1/p" $1)"; }
grep_toolchain() { echo "$1: $(sed -nr "s/.*--default-toolchain (\S+).*/\1/p" $1)"; }
UNIQUE_VERSIONS=$(
{
grep_rust_version Cargo.toml;
grep_toolchain Makefile;
grep_toolchain docker-build-base/Dockerfile;
grep_toolchain helm/picodata.Dockerfile;
} \
| tee /dev/fd/3 \
| cut -d: -f2- | sort | uniq | wc -l
);
ci-log-section end "rust-version"
if [ "$UNIQUE_VERSIONS" != "1" ]; then
echo "Error: checking rust version consistency failed"
exit 1
fi
- |
# Pipenv install
ci-log-section start "pipenv-install" Installing pip dependencies ...
PIPENV_VENV_IN_PROJECT=1 PIP_NO_CACHE_DIR=true python3.10 -m pipenv install --deploy
ci-log-section end "pipenv-install"
# There are no Rust tests for `webui` feature, it's checked in pytest
- cargo build --locked
- cargo test --locked
- cargo build --features webui,error_injection --locked
- >
pipenv run pytest -v
--numprocesses auto
--junitxml=junit_pytest.xml
--with-webui
- |
# Save cache
if [ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" ]; then
ci-log-section start "save-cache" Saving cache to ${CACHE_ARCHIVE} ...
du -sh ${CACHE_PATHS} || true
TMPEXT=$RANDOM
tar -cf "${CACHE_ARCHIVE}.${TMPEXT}" ${CACHE_PATHS}
mv -f "${CACHE_ARCHIVE}.${TMPEXT}" "${CACHE_ARCHIVE}"
echo Ok
du -sh ${CACHE_ARCHIVE}
ci-log-section end "save-cache"
else
echo "Skip saving cache on a non-master branch"
fi
artifacts:
when: always
paths:
- junit_pytest.xml
reports:
junit: junit_pytest.xml
# Build and lint picodata on the Apple Silicon runner.
test-mac-m1:
  extends: .test
  tags:
    - mac-dev-m1
  script:
    - export CARGO_HOME=$HOME/.cargo
    - cargo -V
    - cargo build --locked
    - cargo build --features webui --locked
    # There are no Rust tests for `webui` feature.
    # It will be checked during integration tests.
    - cargo fmt -- -v --check
    - cargo clippy --version
    - cargo clippy --all-features -- --deny clippy::all --no-deps
    # Python integration tests are currently disabled on this runner:
    # - |
    #   # Pipenv install
    #   ci-log-section start "pipenv-install" Installing pip dependencies ...
    #   PIPENV_VENV_IN_PROJECT=1 PIP_NO_CACHE_DIR=true python3.10 -m pipenv install --deploy
    #   ci-log-section end "pipenv-install"
    # - pipenv run pytest --numprocesses auto -v
    # - pipenv run lint
    # - |
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
# Verify that the helm Dockerfiles still build (no push).
test-docker:
  stage: test
  tags:
    - shell
  rules:
    # Run on dev branches only when the Dockerfiles changed (vs master),
    # always on the default branch, never otherwise.
    - if: $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
      changes:
        compare_to: master
        paths:
          - helm/picodata.Dockerfile
          - helm/picodata-diag.Dockerfile
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: on_success
    - # else
      when: never
  variables:
    GIT_DEPTH: 100
    GIT_STRATEGY: fetch
    GIT_SUBMODULE_STRATEGY: recursive
  before_script:
    - export PATH=docker-build-base:$PATH
    # alias to a `&fetch-tags` anchor defined elsewhere in this file
    - *fetch-tags
  script:
    - |
      # Build docker images
      for image in picodata picodata-diag; do
        ci-log-section start "test-docker-${image}" Building docker image ${image}
        docker build \
          --label GIT_COMMIT=${CI_COMMIT_SHA} \
          -f helm/${image}.Dockerfile .
        ci-log-section end "test-docker-${image}"
      done
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
# Produce a clippy.json report and stash it for the gamayun-run job.
gamayun-prepare:
  extends: .test
  # NOTE(review): job-level `when: manual` combined with `rules:` is rejected
  # by newer GitLab versions (`when` should live inside each rule) — confirm
  # this still validates on the target GitLab instance.
  when: manual
  rules:
    - <<: *if-build-base-changes-on-master-branch
      variables:
        BASE_IMAGE_TAG: latest
    - <<: *if-build-base-changes-on-dev-branch
      variables:
        BASE_IMAGE_TAG: ${CI_COMMIT_SHA}
    - <<: *else
      variables:
        BASE_IMAGE_TAG: latest
  image:
    name: ${BASE_IMAGE}:${BASE_IMAGE_TAG}
    pull_policy: if-not-present
  tags:
    - picodata-docker
  script:
    - export CARGO_HOME=$HOME/.cargo
    - cargo -V
    - cargo clippy --locked --message-format=json > clippy.json
    - |
      # Save cache
      CACHE_ARCHIVE="${CACHE_ARCHIVE}_clippy"
      ci-log-section start "save-cache" Saving cache to ${CACHE_ARCHIVE} ...
      du -sh ${CACHE_PATHS} || true
      TMPEXT=$RANDOM
      tar -cf "${CACHE_ARCHIVE}.${TMPEXT}" clippy.json
      mv -f "${CACHE_ARCHIVE}.${TMPEXT}" "${CACHE_ARCHIVE}"
      echo Ok
      du -sh ${CACHE_ARCHIVE}
      ci-log-section end "save-cache"
# Run the gamayun sonar scanner over the pruned, patched source tree.
gamayun-run:
  extends: .test
  needs: ["gamayun-prepare"]
  tags:
    - picodata-shell
  script:
    - CACHE_ARCHIVE="/data/gitlab-runner/${CACHE_ARCHIVE}_clippy"
    - ls -la ${CACHE_ARCHIVE}
    - |
      # Restore cache
      if [ -f "${CACHE_ARCHIVE}" ]; then
        ci-log-section start "restore-cache" Restoring cache from ${CACHE_ARCHIVE} ...
        tar -xf ${CACHE_ARCHIVE}
        echo "Ok"
        ci-log-section end "restore-cache"
      else
        echo "No cache found"
      fi
    # create an ssh tunnel to gamayun server to allow the report uploading
    # NOTE(review): TUNNEL is assigned but not invoked in the visible lines —
    # verify whether a later step (or the scanner image) uses it.
    - TUNNEL="ssh -4 -L 9000:localhost:9000 sonar-reports -N -f"
    - |
      # Prune files/directories that must not be scanned.
      find . -type d -name 'test*' -o -name 'doc' -o -name 'docs' | xargs -n1 rm -rvf
      find tarantool-sys/third_party/ -type f -name '*.htm*' | xargs -n 1 rm -rfv
      find tarantool-sys/vendor/ -type f -name '*.htm*' | xargs -n 1 rm -rfv
      find . -type d -name stress-test | xargs -n 1 rm -rvf
      find . -type d -name integration-tests | xargs -n 1 rm -rvf
      find . -type d -name 'example*' | xargs -n 1 rm -rvf
      find . -type d -name 'docsrc' | xargs -n 1 rm -rvf
      find . -name '*.md' | xargs -n 1 rm -rvf
      find http tarantool-sys vshard -type d -name .github | xargs -n 1 rm -rfv
    - PATCH_DIR=$(pwd)/certification_patches
    - |
      # Apply certification patches to the vendored sources.
      (cd tarantool-sys; find ${PATCH_DIR} -name "tarantool-sys_*" | xargs -n 1 git apply)
      (cd tarantool-sys/third_party/curl; find ${PATCH_DIR} -name "curl_*" | xargs -n 1 git apply)
      (cd tarantool-sys/third_party/zstd; find ${PATCH_DIR} -name "zstd_*" | xargs -n 1 git apply)
      (cd tarantool-sys/third_party/luajit; find ${PATCH_DIR} -name "luajit_*" | xargs -n 1 git apply)
      (cd tarantool-sys/third_party/nghttp2; find ${PATCH_DIR} -name "nghttp2_*" | xargs -n 1 git apply)
      (cd tarantool-sys/third_party/checks; find ${PATCH_DIR} -name "checks_*" | xargs -n 1 git apply)
      (cd http; find ${PATCH_DIR} -name "http_*" | xargs -n 1 git apply)
      (cd vshard; find ${PATCH_DIR} -name "vshard_*" | xargs -n 1 git apply)
    - |
      # Launch the scanner container against the prepared tree.
      docker run --rm -t \
        -v $PWD:/tmp/src:rw \
        -e "SONAR_OPS=-Dsonar.python.version=3 -Dsonar.login=${SONAR} -Dsonar.projectKey=Picodata-CI -Dsonar.exclusions=**/*.mod" \
        -e "SONAR_SCANNER_OPTS="-Xmx4096m"" \
        -e "CARGO_CLIPPY_FILE=clippy.json" \
        -u $(id -u):$(id -g) --ulimit nofile=100000:100000 --network=host \
        docker.binary.picodata.io/gamayun
# Build the web UI bundle; consumed by the pack-* jobs via `needs`.
build-webui-bundle:
  stage: pack
  tags:
    - docker
  image: docker-proxy.binary.picodata.io/node:21
  only:
    - web
    - tags
  variables:
    GIT_SUBMODULE_STRATEGY: recursive
  script:
    - make build-webui-bundle
  artifacts:
    paths:
      - picodata-webui/dist/bundle.json
# Relative paths are interpreted relative to `src/`
# Escape it with extra `../`
WEBUI_BUNDLE: ../picodata-webui/dist/bundle.json
PRESERVE_ENVVARS: VER_TNT,WEBUI_BUNDLE,GIT_DESCRIBE
GIT_DEPTH: 100
GIT_SUBMODULE_STRATEGY: recursive
DOCKER_AUTH_CONFIG: $DOCKER_AUTH_RO
before_script:
- git clone https://github.com/packpack/packpack.git packpack
- |
# Describe tarantool-sys
pushd tarantool-sys
until git describe; do git fetch --deepen 100; done
export VER_TNT=$(
# 2.10.5-17-gf818b9108 -> '2.10.5.17'
# 2.11.0-entrypoint-1137-g4686b909 -> '2.11.0.1137'
# 3.0.0-alpha1-14-g342c242-dev -> '3.0.0.14'
$SED -n 's/-[a-z]\+[a-z0-9]*//p' | $SED -n 's/^\([0-9\.]*\)-\([0-9]*\)\(-\([a-z0-9]*\)\)*/\1-\2/p'
popd
- |
# Describe picodata
until git describe; do git fetch --deepen 100; done
export GIT_DESCRIBE=$(git describe --long)
echo $GIT_DESCRIBE
needs:
- build-webui-bundle
# Package a source tarball via packpack.
pack-sources:
  extends: .pack
  script:
    - OS=centos DIST=7 BUILDDIR=$PWD/build_sources packpack/packpack tarball
  artifacts:
    paths:
      - build_sources/picodata*.tar.xz
- sed -i "s/(id -u)/(id -u) -o/g" packpack/packpack
- OS=centos DIST=7 BUILDDIR=$PWD/build_centos packpack/packpack
- OS=centos DIST=8 BUILDDIR=$PWD/build_centos packpack/packpack
- OS=ubuntu DIST=focal BUILDDIR=$PWD/build_${DIST}/ RELEASE=${DIST} packpack/packpack
- OS=ubuntu DIST=jammy BUILDDIR=$PWD/build_${DIST}/ RELEASE=${DIST} packpack/packpack
- build_focal/*.deb
- build_jammy/*.deb
- OS=debian DIST=bullseye BUILDDIR=$PWD/build_debian/ RELEASE=${DIST} packpack/packpack
- DOCKER_REPO=docker-picodata.binary.picodata.io/packpack/alt DOCKER_IMAGE=p10 BUILDDIR=$PWD/build_alt packpack/packpack
- DOCKER_REPO=docker-picodata.binary.picodata.io/packpack/alt DOCKER_IMAGE=p9 BUILDDIR=$PWD/build_alt packpack/packpack
# Build the RedOS 7.3 rpm via packpack.
pack-redos:
  extends: .pack
  script:
    - OS=redos DIST=7.3 BUILDDIR=$PWD/build_redos/ packpack/packpack
  artifacts:
    paths:
      - build_redos/picodata*.rpm
# Build the Astra Linux (orel 2.12) deb via packpack.
pack-astralinux:
  extends: .pack
  script:
    - DOCKER_REPO=docker-picodata.binary.picodata.io/packpack/astra DOCKER_IMAGE=orel-2.12 BUILDDIR=$PWD/build_astra packpack/packpack
  artifacts:
    paths:
      - build_astra/*.deb
# Build the ROSA Linux (chrome) rpm via packpack.
pack-rosalinux:
  extends: .pack
  script:
    - DOCKER_REPO=docker-picodata.binary.picodata.io/packpack/rosa DOCKER_IMAGE=chrome BUILDDIR=$PWD/build_rosa packpack/packpack
  artifacts:
    paths:
      - build_rosa/picodata*.rpm
# Build the macOS (arm64) Homebrew bottle and a source tarball.
pack-macos-arm:
  extends: .pack
  tags:
    - mac-dev-m1
  variables:
    PKGNAME: 'picodata'
    BUILDDIR: 'build_mac_arm'
    MACOS: 'ventura'
  script: |
    export CARGO_HOME=$HOME/.cargo
    VER=$(git describe --long | $SED -n 's/^\([0-9\.]*\)-\([0-9]*\)-\([a-z0-9]*\)/\1.\2/p')
    MAJOR=$(echo $VER | cut -f 1-2 -d '.')
    echo "-------------------------------------------------------------------"
    echo "Prepare bottle"
    echo "-------------------------------------------------------------------"
    make build
    export DESTDIR="$BUILDDIR/$PKGNAME@$MAJOR/$VER"
    make install
    cp AUTHORS README.md LICENSE $DESTDIR
    mv $DESTDIR/usr/bin $DESTDIR/ && rm -rf $DESTDIR/usr
    mkdir -p $DESTDIR/.bottle/{etc,var}
    pushd $BUILDDIR
    tar -caf $PKGNAME@$MAJOR-$VER.arm64_$MACOS.bottle.tar.gz $PKGNAME@$MAJOR/$VER
    popd
    echo "-------------------------------------------------------------------"
    echo "Prepare tarball with sources"
    echo "-------------------------------------------------------------------"
    mkdir $BUILDDIR/$PKGNAME-$VER
    rsync -r -l \
      --exclude=.git --exclude='.gitignore' --exclude='.gitmodules' \
      --exclude=FreeBSD --exclude=debian --exclude=rpm --exclude=rump \
      --exclude=apk --exclude=$BUILDDIR --exclude=packpack \
      * $BUILDDIR/$PKGNAME-$VER/
    cd $BUILDDIR
    tar --uid=0 --gid=0 -caPf $PKGNAME-$VER.tar.gz $PKGNAME-$VER
    shasum -a 256 $PKGNAME*$VER*.tar.gz
  needs:
    - build-webui-bundle
  artifacts:
    paths:
      - $BUILDDIR/*.tar.gz
sign-rpm-packages:
variables:
DOCKER_AUTH_CONFIG: $DOCKER_AUTH_RO
stage: sign
tags:
- shell
only:
- web
- tags
before_script:
- echo "$GPG_KEY_KDY" | base64 -d > build_centos/kdy.asc
- echo "$GPG_KEY_KDY" | base64 -d > build_alt/kdy.asc
- echo "$GPG_KEY_KDY" | base64 -d > build_redos/kdy.asc
- echo "$GPG_KEY_KDY" | base64 -d > build_rosa/kdy.asc
- docker run --rm -e KEY_FILE=kdy.asc -v $PWD/build_centos:/build docker-picodata.binary.picodata.io/rpmsign:centos7
- docker run --rm -e KEY_FILE=kdy.asc -v $PWD/build_alt:/build docker-picodata.binary.picodata.io/rpmsign:centos7
- docker run --rm -e KEY_FILE=kdy.asc -v $PWD/build_redos:/build docker-picodata.binary.picodata.io/rpmsign:centos7
- docker run --rm -e KEY_FILE=kdy.asc -v $PWD/build_rosa:/build docker-picodata.binary.picodata.io/rpmsign:centos7
- build_centos/picodata*.rpm
- build_alt/picodata*.rpm
# GPG-sign the Astra Linux packages produced by pack-astralinux.
sign-astralinux-packages:
  variables:
    DOCKER_AUTH_CONFIG: $DOCKER_AUTH_RO
  stage: sign
  tags:
    - shell
  only:
    - web
    - tags
  before_script:
    - echo "$GPG_KEY_ASTRA" | base64 -d > build_astra/pico.asc
    - echo "$GPG_PASS_ASTRA" > build_astra/pico.pass
  script:
    - docker run --rm -e KEY_FILE=pico.asc -e PASS_FILE=pico.pass -e SIGNER="5A7D5C9D749260B6CCD24D72A45397D5554CBECD" -v $PWD/build_astra:/build docker-picodata.binary.picodata.io/astrasign:orel-2.12
  artifacts:
    paths:
      - build_astra/picodata*_signed.deb
  dependencies:
    - pack-astralinux
stage: deploy
tags:
- shell
only:
- web
- tags
before_script:
- eval $(ssh-agent -s)
- echo "$DEPLOY_PROD_SSH_KEY" | base64 -d | ssh-add -
script:
# Sources
- echo "Deploying sources..."
- scp -o stricthostkeychecking=no build_sources/picodata*.tar.xz ansible@picodata.io:/data/nginx/www/packrepo/tarantool-picodata/sources/
- echo "Deploying rpm-centos7-package..."
- scp -o stricthostkeychecking=no build_centos/picodata*.el7.*rpm ansible@picodata.io:/data/nginx/www/packrepo/tarantool-picodata/el/7/x86_64/
- ssh -o stricthostkeychecking=no ansible@picodata.io "cd /data/nginx/www/packrepo/tarantool-picodata/el/7/ && createrepo --update x86_64 && gpg --no-tty --yes -u kdy@picodata.io --detach-sign --armor x86_64/repodata/repomd.xml"
- echo "rpm-centos7-package successfully deployed."
- echo "Deploying rpm-centos8-package..."
- scp -o stricthostkeychecking=no build_centos/picodata*.el8.*rpm ansible@picodata.io:/data/nginx/www/packrepo/tarantool-picodata/el/8/x86_64/
- ssh -o stricthostkeychecking=no ansible@picodata.io "cd /data/nginx/www/packrepo/tarantool-picodata/el/8/ && createrepo --update x86_64 && gpg --no-tty --yes -u kdy@picodata.io --detach-sign --armor x86_64/repodata/repomd.xml"
- echo "rpm-centos8-package successfully deployed."
- echo "Deploying ubuntu focal deb-packages..."
- ssh -o stricthostkeychecking=no ansible@picodata.io "mkdir -p ~/.deb/ubuntu"
- scp -o stricthostkeychecking=no build_focal/picodata*deb ansible@picodata.io:.deb/ubuntu/
- ssh -o stricthostkeychecking=no ansible@picodata.io "reprepro --keepunreferencedfiles -b /data/nginx/www/packrepo/tarantool-picodata/ubuntu/ -C main includedeb focal ~/.deb/ubuntu/picodata*focal*deb; rm ~/.deb/ubuntu/picodata*focal*deb"
- echo "ubuntu focal deb-packages successfully deployed."
- echo "Deploying ubuntu jammy deb-packages..."
- ssh -o stricthostkeychecking=no ansible@picodata.io "mkdir -p ~/.deb/ubuntu"
- scp -o stricthostkeychecking=no build_jammy/picodata*deb ansible@picodata.io:.deb/ubuntu/
- ssh -o stricthostkeychecking=no ansible@picodata.io "reprepro --keepunreferencedfiles -b /data/nginx/www/packrepo/tarantool-picodata/ubuntu/ -C main includedeb jammy ~/.deb/ubuntu/picodata*jammy*deb; rm ~/.deb/ubuntu/picodata*jammy*deb"
- echo "ubuntu jammy deb-packages successfully deployed."
- ssh -o stricthostkeychecking=no ansible@picodata.io "mkdir -p ~/.deb/debian"
- scp -o stricthostkeychecking=no build_debian/picodata*deb ansible@picodata.io:.deb/debian/
- ssh -o stricthostkeychecking=no ansible@picodata.io "reprepro --keepunreferencedfiles -b /data/nginx/www/packrepo/tarantool-picodata/debian/ -C main includedeb bullseye ~/.deb/debian/picodata*bullseye*deb; rm ~/.deb/debian/picodata*bullseye*deb"
- echo "debian packages successfully deployed."
- ssh -o stricthostkeychecking=no ansible@picodata.io "mkdir -p /tmp/altlinux/"
- echo "Deploying altlinux-p9 package..."
- scp -o stricthostkeychecking=no build_alt/picodata*.p9.*rpm ansible@picodata.io:/tmp/altlinux/
- echo "altlinux-p9 package successfully deployed."
- echo "Deploying altlinux-p10 package..."
- scp -o stricthostkeychecking=no build_alt/picodata*.p10.*rpm ansible@picodata.io:/tmp/altlinux/
- ssh -o stricthostkeychecking=no ansible@picodata.io "/usr/local/bin/repogen.sh"
- echo "altlinux-p10 package successfully deployed."
- scp -o stricthostkeychecking=no build_redos/picodata*.el7.*rpm ansible@picodata.io:/data/nginx/www/packrepo/tarantool-picodata/redos/7/x86_64/
- ssh -o stricthostkeychecking=no ansible@picodata.io "cd /data/nginx/www/packrepo/tarantool-picodata/redos/7/ && createrepo --update x86_64 && gpg --no-tty --yes -u kdy@picodata.io --detach-sign --armor x86_64/repodata/repomd.xml"
- echo "RedOS 7 package successfully deployed."
- echo "Deploying Astralinux package..."
- ssh -o stricthostkeychecking=no ansible@picodata.io "mkdir -p ~/.deb/astra"
- scp -o stricthostkeychecking=no build_astra/picodata*_signed.deb ansible@picodata.io:.deb/astra/
- ssh -o stricthostkeychecking=no ansible@picodata.io "reprepro --keepunreferencedfiles -b /data/nginx/www/packrepo/tarantool-picodata/astra -C main includedeb orel ~/.deb/astra/picodata*_signed.deb; rm -rf ~/.deb/astra"
- echo "Astralinux-packages successfully deployed."
- echo
# ROSAlinux
- echo "Deploying Rosa Linux package..."
- scp -o stricthostkeychecking=no build_rosa/picodata*-rosa*rpm ansible@picodata.io:/data/nginx/www/packrepo/tarantool-picodata/rosa/chrome/x86_64
- ssh -o stricthostkeychecking=no ansible@picodata.io "cd /data/nginx/www/packrepo/tarantool-picodata/rosa/chrome && createrepo --update x86_64 && gpg --no-tty --yes -u kdy@picodata.io --detach-sign --armor x86_64/repodata/repomd.xml"
- echo "ROSA Linux package successfully deployed."
# Upload the macOS bottle and source tarball to the raw registry.
deploy-macos-arm:
  stage: deploy
  tags:
    - mac-dev-m1
  only:
    - web
    - tags
  when: manual
  variables:
    PKGNAME: 'picodata'
    BUILDDIR: 'build_mac_arm'
  script: |
    until git describe; do git fetch --deepen 100; done
    VER=$(git describe --long | gsed -n 's/^\([0-9\.]*\)-\([0-9]*\)-\([a-z0-9]*\)/\1.\2/p')
    MAJOR=$(echo $VER | cut -f 1-2 -d '.')
    curl -v -H "Authorization: Basic $RAW_AUTH_RW" --upload-file $BUILDDIR/$PKGNAME-$VER.tar.gz $RAW_REGISTRY/brew/packages/
    curl -v -H "Authorization: Basic $RAW_AUTH_RW" --upload-file $BUILDDIR/$PKGNAME@$MAJOR-$VER.*.bottle.tar.gz $RAW_REGISTRY/brew/bottle/
  dependencies:
    - pack-macos-arm
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
# Rebuild the helm images and push them to the registry with tag `latest`.
deploy-docker:
  stage: deploy
  tags:
    - shell
  only:
    - web
    - tags
  variables:
    # Merged from two duplicate `variables:` keys this job used to carry:
    # YAML duplicate keys are invalid, and most parsers keep only the last
    # mapping, which silently dropped the GIT_* settings.
    GIT_DEPTH: 100
    GIT_STRATEGY: fetch
    GIT_SUBMODULE_STRATEGY: recursive
    DOCKER_AUTH_CONFIG: $DOCKER_AUTH_RO
  before_script:
    - export PATH=docker-build-base:$PATH
    # alias to a `&fetch-tags` anchor defined elsewhere in this file
    - *fetch-tags
    - mkdir -p $CI_PROJECT_DIR/.docker
    - echo $DOCKER_AUTH_RW > $CI_PROJECT_DIR/.docker/config.json
  script:
    - |
      # Rebuild and push docker images
      for image in picodata picodata-diag; do
        ci-log-section start "deploy-docker-${image}" Building and pushing docker image ${image}
        docker build \
          --label GIT_COMMIT=${CI_COMMIT_SHA} \
          -t ${REGISTRY}/${image}:latest \
          -f helm/${image}.Dockerfile .
        docker --config $CI_PROJECT_DIR/.docker push ${REGISTRY}/${image}:latest
        ci-log-section end "deploy-docker-${image}"
      done
  needs:
    - deploy-packages
check-deployment-rpm:
extends: .check-deployment
parallel:
matrix:
- BASE_IMAGE: centos:7
PACKAGE: el/7/x86_64/picodata-release-1.1.2.0-1.el7.x86_64.rpm
PACKAGE: el/8/x86_64/picodata-release-1.1.2.0-1.el8.x86_64.rpm
PACKAGE: redos/7/x86_64/picodata-release-1.1.2.0-1.el7.x86_64.rpm
GIT_FETCH_PARAM: deepen
- BASE_IMAGE: docker-picodata.binary.picodata.io/rosa/rosa-chrome:2021.1
PACKAGE: rosa/chrome/x86_64/picodata-release-1.1.2.0-1-rosa2021.1.x86_64.rpm
GIT_FETCH_PARAM: deepen
before_script:
- until git describe; do git fetch --${GIT_FETCH_PARAM} 100; done
- export VER=$(git describe --long | sed -n 's/^\([0-9\.]*\)-\([0-9]*\)-\([a-z0-9]*\)/\1.\2/p')
script:
- rpm --import https://download.picodata.io/tarantool-picodata/el/RPM-GPG-KEY-kdy
- yum install -y https://download.picodata.io/tarantool-picodata/${PACKAGE}
# Smoke-test installing the published deb packages on Debian/Ubuntu images.
check-deployment-deb:
  extends: .check-deployment
  variables:
    DEBIAN_FRONTEND: noninteractive
    TZ: Europe/Moscow
  parallel:
    matrix:
      - BASE_IMAGE: debian:bullseye
      - BASE_IMAGE: ubuntu:focal
      - BASE_IMAGE: ubuntu:jammy
  before_script:
    - apt update
    - apt install -y curl gpg software-properties-common git
    - export DIST=$(lsb_release -si | tr [:upper:] [:lower:])
    - export CODENAME=$(lsb_release -sc)
    - until git describe; do git fetch --deepen 100; done
    - export VER=$(git describe --long | sed -n 's/^\([0-9\.]*\)-\([0-9]*\)-\([a-z0-9]*\)/\1.\2/p')
  script:
    - curl -s https://download.picodata.io/tarantool-picodata/ubuntu/picodata.gpg.key | gpg --no-default-keyring --keyring gnupg-ring:/etc/apt/trusted.gpg.d/picodata.gpg --import
    - chmod 644 /etc/apt/trusted.gpg.d/picodata.gpg
    - add-apt-repository -y "deb [arch=amd64] https://download.picodata.io/tarantool-picodata/${DIST}/ ${CODENAME} main"
    - apt update
    - apt install -y picodata=${VER}-${CODENAME}
# Smoke-test installing the published Alt Linux rpm (direct file and repo).
check-deployment-alt:
  extends: .check-deployment
  parallel:
    matrix:
      - DIST: p10
      - DIST: p9
  image: docker.binary.picodata.io/altlinux/base:${DIST}
  before_script:
    - apt-get update
    - apt-get install -y curl git apt-https
    - until git describe; do git fetch --deepen 100; done
    - export VER=$(git describe --long | sed -n 's/^\([0-9\.]*\)-\([0-9]*\)-\([a-z0-9]*\)/\1.\2/p')
  script:
    - curl https://download.picodata.io/tarantool-picodata/altlinux/${DIST}/x86_64/RPMS.main/picodata-${VER}-1.${DIST}.x86_64.rpm -o picodata.rpm
    - apt-get install -y ./picodata.rpm
    - apt-get remove -y picodata
    # Checking for install from repo
    - apt-get install -y https://download.picodata.io/tarantool-picodata/altlinux/${DIST}/picodata-release-1.0.2.7-1.${DIST}.x86_64.rpm
    - apt-get update
    - apt-get install -y picodata=${VER}
# Smoke-test installing the published Astra Linux deb from the repo.
check-deployment-astra:
  extends: .check-deployment
  image: docker-picodata.binary.picodata.io/astra/orel:2.12
  before_script:
    - apt-get update
    - apt-get install -y curl git apt-transport-https
    - until git describe; do git fetch --deepen 100; done
    - export VER=$(git describe --long | sed -n 's/^\([0-9\.]*\)-\([0-9]*\)-\([a-z0-9]*\)/\1.\2/p')
  script:
    - curl -s https://download.picodata.io/tarantool-picodata/ubuntu/picodata.gpg.key | gpg --no-default-keyring --keyring gnupg-ring:/etc/apt/trusted.gpg.d/picodata.gpg --import
    - chmod 644 /etc/apt/trusted.gpg.d/picodata.gpg
    - echo "deb [arch=amd64] https://download.picodata.io/tarantool-picodata/astra/ orel main" > /etc/apt/sources.list.d/picodata.list
    - apt-get update
    - apt-get install -y picodata=${VER}-1