Merge branch 'cirrus'

Julian Ospald 2022-12-16 19:12:49 +08:00
commit a7e6e7c27d
Signed by: hasufell
GPG Key ID: 3786C5262ECB4A3F
4 changed files with 14 additions and 30 deletions


@@ -14,9 +14,10 @@ task:
 CIRRUS_CLONE_SUBMODULES: true
 AWS_ACCESS_KEY_ID: ENCRYPTED[3e99c4ac040871f213abd616ec66952d954dc289cdd97772f88e58a74d08a2250133437780fe98b7aedf7ef1fb32f5eb]
 AWS_SECRET_ACCESS_KEY: ENCRYPTED[5910cfd77a922ff7fc06eeb6a6b9f79d4867863e541f06eb2c4cfecae0613650e3e0588373fa8d9249d295d76cf9cb3b]
+S3_HOST: ENCRYPTED[ce961780a33159f7d1d8046956b5ac6ebc3bfc8149428e5f538576cda51d9f3d0c35b79cdd1e325793639ff6e31f889d]
 install_script: pkg install -y ghc hs-cabal-install git bash misc/compat10x misc/compat11x misc/compat12x gmake
 script:
 - bash .github/scripts/build.sh
 - bash .github/scripts/test.sh
 binaries_artifacts:
-path: "out/x86_64-portbld-freebsd-ghcup-*"
+path: "out/*"


@@ -47,10 +47,6 @@ git_describe
 # build
 ecabal update
-if ! cabal-cache version ; then
-build_cabal_cache "$HOME/.local/bin"
-fi
 if [ "${RUNNER_OS}" = "Linux" ] ; then
 if [ "${ARCH}" = "32" ] ; then
 build_with_cache -w "${GHC}" --ghc-options='-split-sections -optl-static' -ftui --enable-tests
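
With this change build.sh no longer falls back to compiling cabal-cache from source when the binary is missing; it assumes a prebuilt cabal-cache is provided (see download_cabal_cache in the next file). A hedged sketch of that assumption; the destination directory and the exact call site are illustrative, only the helper and command names come from the diff:

    # Illustrative only: rely on a prebuilt cabal-cache instead of building one.
    download_cabal_cache "$HOME/.local/bin"   # destination argument is an assumption
    export PATH="$HOME/.local/bin:$PATH"
    if ! cabal-cache version ; then
        echo "warning: cabal-cache unavailable, continuing without a remote cache" >&2
    fi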


@@ -16,7 +16,7 @@ sync_from() {
 fi
 cabal-cache sync-from-archive \
---host-name-override=s3.us-west-004.backblazeb2.com \
+--host-name-override=${S3_HOST} \
 --host-port-override=443 \
 --host-ssl-override=True \
 --region us-west-2 \
@@ -30,7 +30,7 @@ sync_to() {
 fi
 cabal-cache sync-to-archive \
---host-name-override=s3.us-west-004.backblazeb2.com \
+--host-name-override=${S3_HOST} \
 --host-port-override=443 \
 --host-ssl-override=True \
 --region us-west-2 \
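
Both sync helpers now take the cache endpoint from S3_HOST instead of hard-coding the Backblaze B2 hostname, so the same scripts can target any S3-compatible store. A sketch of the resulting call shape, assuming S3_HOST is exported by the CI environment; the override flags are the ones shown above, while --archive-uri and the bucket layout are assumptions for illustration:

    # Illustrative push of the local store to whatever endpoint S3_HOST names.
    cabal-cache sync-to-archive \
        --host-name-override="${S3_HOST}" \
        --host-port-override=443 \
        --host-ssl-override=True \
        --region us-west-2 \
        --archive-uri "s3://example-bucket/cache"
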
@@ -95,6 +95,7 @@ download_cabal_cache() {
 esac
 ;;
 "FreeBSD")
+url=https://downloads.haskell.org/~ghcup/unofficial-bindists/cabal-cache/1.0.5.1/x86_64-freebsd-cabal-cache-1.0.5.1
 ;;
 "Windows")
 exe=".exe"
@@ -125,29 +126,6 @@ download_cabal_cache() {
 )
 }
-build_cabal_cache() {
-(
-set -e
-GHC_CABAL_CACHE=ghc-8.10.7
-dest="$1"
-if [ "${GHC_CABAL_CACHE}" != "ghc" ] && [ "${GHC_CABAL_CACHE}" != "ghc-${GHC_VER}" ] ; then
-if ! ${GHC_CABAL_CACHE} --numeric-version ; then
-ghcup -v install ghc --force ${GHC_CABAL_CACHE#ghc-}
-fi
-fi
-cd /tmp
-ecabal install -w "${GHC_CABAL_CACHE}" --overwrite-policy=always --install-method=copy --installdir="$dest" cabal-cache
-mkdir -p "${CI_PROJECT_DIR}/out"
-cp "$dest/cabal-cache" "${CI_PROJECT_DIR}/out/cabal-cache-${RUNNER_OS}-${DISTRO}-${ARCH}"
-if [ "${GHC_CABAL_CACHE}" != "ghc" ] && [ "${GHC_CABAL_CACHE}" != "ghc-${GHC_VER}" ] ; then
-ghcup -v rm ghc ${GHC_CABAL_CACHE#ghc-}
-fi
-)
-}
 build_with_cache() {
 ecabal configure "$@"
 ecabal build --dependencies-only "$@" --dry-run
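
build_with_cache is only partially visible here: it configures the project and dry-runs a dependencies-only build, the usual prelude to restoring and later repopulating the remote cache around the real build. A hedged sketch of that overall shape, assuming sync_from and sync_to are the helpers defined earlier in this file and that the remaining steps follow the common cabal-cache pattern; this is not the literal function body:

    build_with_cache_sketch() {
        ecabal configure "$@"
        ecabal build --dependencies-only "$@" --dry-run   # compute the build plan
        sync_from                                         # pull cached packages for that plan
        ecabal build "$@"                                 # real build, reusing the cache
        sync_to                                           # push newly built packages back
    }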


@@ -21,6 +21,7 @@ jobs:
 JSON_VERSION: "0.0.7"
 AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ secrets.S3_HOST }}
 strategy:
 fail-fast: true
 matrix:
@@ -51,6 +52,7 @@ jobs:
 DISTRO: Alpine
 AWS_SECRET_ACCESS_KEY: ${{ env.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ env.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ env.S3_HOST }}
 - if: matrix.ARCH == '64'
 name: Run build (64 bit linux)
@@ -64,6 +66,7 @@ jobs:
 DISTRO: Alpine
 AWS_SECRET_ACCESS_KEY: ${{ env.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ env.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ env.S3_HOST }}
 - if: always()
 name: Upload artifact
@@ -82,6 +85,7 @@ jobs:
 JSON_VERSION: "0.0.7"
 AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ secrets.S3_HOST }}
 strategy:
 fail-fast: true
 matrix:
@@ -122,6 +126,7 @@ jobs:
 DISTRO: Ubuntu
 AWS_SECRET_ACCESS_KEY: ${{ env.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ env.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ env.S3_HOST }}
 - if: matrix.ARCH == 'ARM64'
 uses: docker://hasufell/arm64v8-ubuntu-haskell:focal
@@ -135,6 +140,7 @@ jobs:
 DISTRO: Ubuntu
 AWS_SECRET_ACCESS_KEY: ${{ env.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ env.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ env.S3_HOST }}
 - if: always()
 name: Upload artifact
@@ -153,6 +159,7 @@ jobs:
 JSON_VERSION: "0.0.7"
 AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ secrets.S3_HOST }}
 strategy:
 fail-fast: false
 matrix:
@@ -184,6 +191,7 @@ jobs:
 DISTRO: na
 AWS_SECRET_ACCESS_KEY: ${{ env.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ env.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ env.S3_HOST }}
 HOMEBREW_CHANGE_ARCH_TO_ARM: 1
 - if: always()
@@ -383,6 +391,7 @@ jobs:
 ARCH: 64
 AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
 AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+S3_HOST: ${{ secrets.S3_HOST }}
 steps:
 - name: Checkout code
 uses: actions/checkout@v3
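
Each job forwards the new S3_HOST secret the same way as the existing AWS credentials: from repository secrets into the job environment and from there into the container or step environment that runs the build scripts, where common.sh reads it. A minimal sketch of the consuming side, assuming the helpers from common.sh above; the graceful skip (for example on forks without the secret) is illustrative:

    # Illustrative: only attempt cache synchronisation when the secret is present.
    if [ -n "${S3_HOST:-}" ]; then
        sync_from
    else
        echo "S3_HOST not set; building without a remote cabal-cache" >&2
    fi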