Make it possible to import large datasets in batches #585

Workflow file for this run

name: CI
on:
  pull_request:
  push:
env:
  SMTP_PORT: 2500
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v1
      - name: Setup Node
        uses: actions/setup-node@v2
        with:
          node-version: '18.12.0'
      - name: Setup Scala
        uses: olafurpg/setup-scala@v10
        with:
          java-version: "adopt@1.8"
      - name: Build the docker-compose stack
        run: docker-compose up -d
      - name: Cache node modules
        uses: actions/cache@v2
        env:
          cache-name: cache-node-modules
        with:
          path: '**/node_modules'
          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-build-${{ env.cache-name }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-
      - name: Install NPM deps
        run: npm install
      - name: Run Jest tests
        run: npm test
      - name: Coursier cache
        uses: coursier/cache-action@v5
      - name: Build and test
        run: |
          sbt -v -Dfile.encoding=UTF-8 "+test"
          # Clean up the local publish dir, lock files, and transient Ivy metadata
          # so the saved Coursier/Ivy/sbt caches stay small
          rm -rf "$HOME/.ivy2/local" || true
          find $HOME/.ivy2/cache -name "ivydata-*.properties" -delete || true
          find $HOME/.cache/coursier/v1 -name "ivydata-*.properties" -delete || true
          find $HOME/.sbt -name "*.lock" -delete || true