name: Build
on:
pull_request:
push:
branches: ["main", "release/*", "project/*", "test/*"]
tags: ["Second_Life*"]
jobs:
# The whole point of the setup job is that we want to set variables once
# that will be consumed by multiple subsequent jobs.
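# Downstream jobs read these as, e.g., ${{ needs.setup.outputs.config }} --
# see AUTOBUILD_CONFIGURATION in the build job's env below.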
setup:
runs-on: ubuntu-latest
outputs:
release_run: ${{ steps.setvar.outputs.release_run }}
config: ${{ steps.setvar.outputs.config }}
bugsplat_db: ${{ steps.setvar.outputs.bugsplat_db }}
env:
# Build with a tag like "Second_Life#abcdef0" to generate a release page
# (used for builds we are planning to deploy).
# Even though inputs.release_run is specified with type boolean, which
# correctly presents a checkbox, its *value* is a GH workflow string
# 'true' or 'false'. If you simply test github.event.inputs.release_run,
# it always evaluates as true because it's a non-empty string either way.
# When you want to use a string variable as a workflow YAML boolean, it's
# important to ensure it's the empty string when false. If you omit || '',
# its value when false is "false", which is interpreted as true.
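# For example, the release job later gates on
#   if: needs.setup.outputs.release_run
# which is skipped only when RELEASE_RUN is the empty string; a literal
# string "false" would still count as true.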
RELEASE_RUN: ${{ (github.event.inputs.release_run || github.ref_type == 'tag' && startsWith(github.ref_name, 'Second_Life')) && 'Y' || '' }}
FROM_FORK: ${{ github.event.pull_request.head.repo.fork }}
steps:
- name: Set Variables
id: setvar
shell: bash
run: |
echo "release_run=$RELEASE_RUN" >> "$GITHUB_OUTPUT"
# Build the open-source ReleaseOS configuration (as for a PR from a fork):
# no Bugsplat, no proprietary libs.
echo "config=ReleaseOS" >> "$GITHUB_OUTPUT"
echo "bugsplat_db=" >> "$GITHUB_OUTPUT"
build:
needs: setup
continue-on-error: ${{ matrix.experimental }}
strategy:
matrix:
runner: [ubuntu-24.04]
experimental: [false]
runs-on: ${{ matrix.runner }}
outputs:
viewer_channel: ${{ steps.build.outputs.viewer_channel }}
viewer_version: ${{ steps.build.outputs.viewer_version }}
viewer_branch: ${{ steps.which-branch.outputs.branch }}
relnotes: ${{ steps.which-branch.outputs.relnotes }}
imagename: ${{ steps.build.outputs.imagename }}
config: ${{ needs.setup.outputs.config }}
env:
AUTOBUILD_ADDRSIZE: 64
AUTOBUILD_BUILD_ID: ${{ github.run_id }}
AUTOBUILD_CONFIGURATION: ${{ needs.setup.outputs.config }}
# authorizes fetching private constituent packages
AUTOBUILD_GITHUB_TOKEN: ${{ secrets.SHARED_AUTOBUILD_GITHUB_TOKEN }}
AUTOBUILD_INSTALLABLE_CACHE: ${{ github.workspace }}/.autobuild-installables
AUTOBUILD_VARIABLES_FILE: ${{ github.workspace }}/.build-variables/variables
# Direct autobuild to store vcs_url, vcs_branch and vcs_revision in
# autobuild-package.xml.
AUTOBUILD_VCS_INFO: "true"
AUTOBUILD_VSVER: "170"
DEVELOPER_DIR: "/Applications/Xcode_16.0.app/Contents/Developer"
# Ensure that Linden viewer builds engage Bugsplat.
BUGSPLAT_DB: ${{ needs.setup.outputs.bugsplat_db }}
build_coverity: false
build_log_dir: ${{ github.workspace }}/.logs
build_viewer: true
BUILDSCRIPTS_SHARED: ${{ github.workspace }}/.shared
# extracted and committed to viewer repo
BUILDSCRIPTS_SUPPORT_FUNCTIONS: ${{ github.workspace }}/buildscripts_support_functions
GIT_REF: ${{ github.head_ref || github.ref }}
LL_SKIP_REQUIRE_SYSROOT: 1
# Setting this variable directs Linden's TUT test driver code to capture
# test-program log output at the specified level, but to display it only if
# the individual test fails.
LOGFAIL: DEBUG
master_message_template_checkout: ${{ github.workspace }}/.master-message-template
# Only set variants to the one configuration: don't let build.sh loop
# over variants, let GitHub distribute variants over multiple hosts.
variants: ${{ needs.setup.outputs.config }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
- name: Setup python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Checkout build variables
uses: actions/checkout@v4
with:
repository: secondlife/build-variables
ref: master
path: .build-variables
- name: Checkout master-message-template
uses: actions/checkout@v4
with:
repository: secondlife/master-message-template
path: .master-message-template
- name: Install autobuild and python dependencies
run: pip3 install autobuild llsd
- name: Cache autobuild packages
id: cache-installables
uses: actions/cache@v4
with:
path: .autobuild-installables
key: ${{ runner.os }}-64-${{ needs.setup.outputs.config }}-${{ hashFiles('autobuild.xml') }}
restore-keys: |
${{ runner.os }}-64-${{ needs.setup.outputs.config }}-
${{ runner.os }}-64-
- name: Install Linux dependencies
if: runner.os == 'Linux'
run: |
sudo apt update
sudo apt install -y \
libpulse-dev libunwind-dev \
libgl1-mesa-dev libglu1-mesa-dev libxinerama-dev \
libxcursor-dev libxfixes-dev libgstreamer1.0-dev \
libgstreamer-plugins-base1.0-dev ninja-build libxft-dev \
llvm mold libpipewire-0.3-dev libdbus-1-dev
sudo locale-gen en_US.UTF-8
sudo locale-gen en_GB.UTF-8
sudo locale-gen fr_FR.UTF-8
- name: Install Windows dependencies
if: runner.os == 'Windows'
run: choco install nsis-unicode
- name: Determine source branch
id: which-branch
uses: secondlife/viewer-build-util/which-branch@v2
with:
token: ${{ github.token }}
- name: Build
id: build
shell: bash
env:
AUTOBUILD_VCS_BRANCH: ${{ steps.which-branch.outputs.branch }}
RUNNER_OS: ${{ runner.os }}
run: |
# set up things the viewer's build.sh script expects
set -x
mkdir -p "$build_log_dir"
mkdir -p "$BUILDSCRIPTS_SHARED/packages/lib/python"
source "$BUILDSCRIPTS_SUPPORT_FUNCTIONS"
if [[ "$OSTYPE" =~ cygwin|msys ]]
then
native_path() { cygpath --windows "$1"; }
shell_path() { cygpath --unix "$1"; }
else
native_path() { echo "$1"; }
shell_path() { echo "$1"; }
fi
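# e.g. on a Windows runner, native_path /c/Users/foo -> C:\Users\foo and
# shell_path C:\Users\foo -> /c/Users/foo; on other platforms both are no-ops.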
finalize()
{
case "$1" in
true|0)
record_success "Build Succeeded"
;;
*)
record_failure "Build Failed with $1"
;;
esac
}
initialize_build()
{
echo "initialize_build"
}
initialize_version()
{
export revision="$AUTOBUILD_BUILD_ID"
}
python_cmd()
{
if [[ "x${1:0:1}" == "x-" ]] # -m, -c, etc.
then # if $1 is a switch, don't try to twiddle paths
"$(shell_path "$PYTHON_COMMAND")" "$@"
elif [[ "$(basename "$1")" == "codeticket.py" ]]
then # ignore any attempt to contact codeticket
echo "## $@"
else # running a script at an explicit path: fix path for Python
local script="$1"
shift
"$(shell_path "$PYTHON_COMMAND")" "$(native_path "$script")" "$@"
fi
}
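# e.g. python_cmd -c 'print(1)' runs the interpreter directly;
# python_cmd .../codeticket.py ... is only echoed, never run;
# python_cmd some/script.py args reruns the script via a native path on Windows.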
repo_branch()
{
echo "$AUTOBUILD_VCS_BRANCH"
}
record_dependencies_graph()
{
echo "TODO: generate and post dependency graph"
}
# Since we're not uploading to codeticket, DO NOT sleep for minutes.
sleep()
{
echo "Not sleeping for $1 seconds"
}
export -f native_path shell_path finalize initialize_build initialize_version
export -f python_cmd repo_branch record_dependencies_graph sleep
## Useful for diagnosing Windows LLProcess/LLLeap test failures
##export APR_LOG="${RUNNER_TEMP}/apr.log"
export arch=$(uname | cut -b-6)
# Surprise! The GH Windows runner's uname reports MINGW64_NT-*, which the
# cut above truncates to "MINGW6" -- an $arch value numerous tests don't
# know about, so map it to CYGWIN.
[[ "$arch" == "MINGW6" ]] && arch=CYGWIN
export AUTOBUILD="$(which autobuild)"
# determine the viewer channel from the branch or tag name
# trigger an EDU build by including "edu" in the tag
edu=${{ github.ref_type == 'tag' && contains(github.ref_name, 'edu') }}
echo "ref_type=${{ github.ref_type }}, ref_name=${{ github.ref_name }}, edu='$edu'"
branch=$AUTOBUILD_VCS_BRANCH
if [[ "$edu" == "true" ]]
then
export viewer_channel="Second Life Release edu"
elif [[ "$branch" == "develop" ]];
then
export viewer_channel="Second Life Develop"
else
IFS='/' read -ra ba <<< "$branch"
prefix=${ba[0]}
if [ "$prefix" == "project" ]; then
IFS='_' read -ra prj <<< "${ba[1]}"
# uppercase first letter of each word
export viewer_channel="Second Life Project ${prj[*]^}"
elif [[ "$prefix" == "release" || "$prefix" == "main" ]];
then
export viewer_channel="Second Life Release"
else
export viewer_channel="Second Life Test"
fi
fi
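# Examples (hypothetical branch/tag names):
#   tag Second_Life_edu#abcdef0     -> "Second Life Release edu"
#   branch develop                  -> "Second Life Develop"
#   branch project/gltf_development -> "Second Life Project Gltf Development"
#   branch release/maint-c or main  -> "Second Life Release"
#   anything else                   -> "Second Life Test"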
echo "::notice::$RUNNER_OS viewer_channel=$viewer_channel"
echo "viewer_channel=$viewer_channel" >> "$GITHUB_OUTPUT"
# On Windows we need to point the build to the correct python
# as neither CMake's FindPython nor our custom Python.cmake module
# will resolve the correct interpreter location.
if [[ "$RUNNER_OS" == "Windows" ]]; then
export PYTHON="$(native_path "$(which python)")"
echo "Python location: $PYTHON"
export PYTHON_COMMAND="$PYTHON"
else
export PYTHON_COMMAND="python3"
fi
export PYTHON_COMMAND_NATIVE="$(native_path "$PYTHON_COMMAND")"
./build.sh
# Each artifact is downloaded as a distinct .zip file. Multiple jobs
# (per the matrix above) writing the same filepath to the same
# artifact name will *overwrite* that file. Moreover, they can
# interfere with each other, causing the upload to fail.
# https://github.com/actions/upload-artifact#uploading-to-the-same-artifact
# Given the size of our installers, and the fact that we typically
# want to download just one rather than a single zip containing
# several, generate a distinct artifact name for each installer.
# If the matrix above can run multiple builds on the same
# platform, we must disambiguate on more than the platform name.
# e.g. if we were still running Windows 32-bit builds, we'd need to
# qualify the artifact with bit width.
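# e.g. a Windows build with ReleaseOS yields artifact prefix "WindowsOS",
# so the uploads below become WindowsOS-app, WindowsOS-symbols, etc.;
# a plain Release build yields "Windows", matching the Windows-app and
# Windows-symbols names the signing and symbol jobs expect.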
if [[ "$AUTOBUILD_CONFIGURATION" == "ReleaseOS" ]]
then cfg_suffix='OS'
else cfg_suffix=''
fi
echo "artifact=$RUNNER_OS$cfg_suffix" >> $GITHUB_OUTPUT
- name: Upload executable
if: steps.build.outputs.viewer_app
uses: actions/upload-artifact@v4
with:
name: "${{ steps.build.outputs.artifact }}-app"
path: |
${{ steps.build.outputs.viewer_app }}
# The other upload of nontrivial size is the symbol file. Use a distinct
# artifact for that too.
- name: Upload symbol file
if: steps.build.outputs.symbolfile
uses: actions/upload-artifact@v4
with:
name: "${{ steps.build.outputs.artifact }}-symbols"
path: ${{ steps.build.outputs.symbolfile }}
- name: Upload metadata
uses: actions/upload-artifact@v4
with:
name: "${{ steps.build.outputs.artifact }}-metadata"
# emitted by build.sh, possibly multiple lines
path: |
${{ steps.build.outputs.metadata }}
- name: Upload physics package
uses: actions/upload-artifact@v4
# should only be set for viewer-private
if: needs.setup.outputs.config == 'Release' && steps.build.outputs.physicstpv
with:
name: "${{ steps.build.outputs.artifact }}-physics"
# emitted by build.sh, zero or one lines
path: |
${{ steps.build.outputs.physicstpv }}
sign-and-package-windows:
env:
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
needs: build
runs-on: windows-latest
steps:
- name: Sign and package Windows viewer
if: env.AZURE_KEY_VAULT_URI && env.AZURE_CERT_NAME && env.AZURE_CLIENT_ID && env.AZURE_CLIENT_SECRET && env.AZURE_TENANT_ID
uses: secondlife/viewer-build-util/sign-pkg-windows@v2
with:
vault_uri: "${{ env.AZURE_KEY_VAULT_URI }}"
cert_name: "${{ env.AZURE_CERT_NAME }}"
client_id: "${{ env.AZURE_CLIENT_ID }}"
client_secret: "${{ env.AZURE_CLIENT_SECRET }}"
tenant_id: "${{ env.AZURE_TENANT_ID }}"
sign-and-package-mac:
env:
NOTARIZE_CREDS_MACOS: ${{ secrets.NOTARIZE_CREDS_MACOS }}
SIGNING_CERT_MACOS: ${{ secrets.SIGNING_CERT_MACOS }}
SIGNING_CERT_MACOS_IDENTITY: ${{ secrets.SIGNING_CERT_MACOS_IDENTITY }}
SIGNING_CERT_MACOS_PASSWORD: ${{ secrets.SIGNING_CERT_MACOS_PASSWORD }}
needs: build
runs-on: macos-latest
steps:
- name: Unpack Mac notarization credentials
if: env.NOTARIZE_CREDS_MACOS
id: note-creds
shell: bash
run: |
# In NOTARIZE_CREDS_MACOS we expect to find:
# USERNAME="..."
# PASSWORD="..."
# TEAM_ID="..."
eval "${{ env.NOTARIZE_CREDS_MACOS }}"
echo "::add-mask::$USERNAME"
echo "::add-mask::$PASSWORD"
echo "::add-mask::$TEAM_ID"
echo "note_user=$USERNAME" >> "$GITHUB_OUTPUT"
echo "note_pass=$PASSWORD" >> "$GITHUB_OUTPUT"
echo "note_team=$TEAM_ID" >> "$GITHUB_OUTPUT"
# If we didn't manage to retrieve all of these credentials, better to
# find out sooner than later: an empty value makes this test, and
# therefore this step, fail.
[[ -n "$USERNAME" && -n "$PASSWORD" && -n "$TEAM_ID" ]]
- name: Sign and package Mac viewer
if: env.SIGNING_CERT_MACOS && env.SIGNING_CERT_MACOS_IDENTITY && env.SIGNING_CERT_MACOS_PASSWORD && steps.note-creds.outputs.note_user && steps.note-creds.outputs.note_pass && steps.note-creds.outputs.note_team
uses: secondlife/viewer-build-util/sign-pkg-mac@v2
with:
channel: ${{ needs.build.outputs.viewer_channel }}
imagename: ${{ needs.build.outputs.imagename }}
cert_base64: ${{ env.SIGNING_CERT_MACOS }}
cert_name: ${{ env.SIGNING_CERT_MACOS_IDENTITY }}
cert_pass: ${{ env.SIGNING_CERT_MACOS_PASSWORD }}
note_user: ${{ steps.note-creds.outputs.note_user }}
note_pass: ${{ steps.note-creds.outputs.note_pass }}
note_team: ${{ steps.note-creds.outputs.note_team }}
post-windows-symbols:
env:
BUGSPLAT_USER: ${{ secrets.BUGSPLAT_USER }}
BUGSPLAT_PASS: ${{ secrets.BUGSPLAT_PASS }}
needs: build
if: needs.build.outputs.config == 'Release'
runs-on: ubuntu-latest
steps:
- name: Download viewer exe
uses: actions/download-artifact@v4
with:
name: Windows-app
path: _artifacts
- name: Download Windows Symbols
if: env.BUGSPLAT_USER && env.BUGSPLAT_PASS
uses: actions/download-artifact@v4
with:
name: Windows-symbols
- name: Extract viewer pdb
if: env.BUGSPLAT_USER && env.BUGSPLAT_PASS
shell: bash
run: |
tar -xJf "${{ needs.build.outputs.viewer_channel }}.sym.tar.xz" -C _artifacts
- name: Post Windows symbols
if: env.BUGSPLAT_USER && env.BUGSPLAT_PASS
uses: secondlife-3p/symbol-upload@v10
with:
username: ${{ env.BUGSPLAT_USER }}
password: ${{ env.BUGSPLAT_PASS }}
database: "SecondLife_Viewer_2018"
application: ${{ needs.build.outputs.viewer_channel }}
version: ${{ needs.build.outputs.viewer_version }}
directory: _artifacts
files: "**/{SecondLifeViewer.exe,llwebrtc.dll,*.pdb}"
post-mac-symbols:
env:
BUGSPLAT_USER: ${{ secrets.BUGSPLAT_USER }}
BUGSPLAT_PASS: ${{ secrets.BUGSPLAT_PASS }}
needs: build
if: needs.build.outputs.config == 'Release'
runs-on: ubuntu-latest
steps:
- name: Download Mac Symbols
if: env.BUGSPLAT_USER && env.BUGSPLAT_PASS
uses: actions/download-artifact@v4
with:
name: macOS-symbols
- name: Post Mac symbols
if: env.BUGSPLAT_USER && env.BUGSPLAT_PASS
uses: secondlife-3p/symbol-upload@v10
with:
username: ${{ env.BUGSPLAT_USER }}
password: ${{ env.BUGSPLAT_PASS }}
database: "SecondLife_Viewer_2018"
application: ${{ needs.build.outputs.viewer_channel }}
version: ${{ needs.build.outputs.viewer_version }} (${{ needs.build.outputs.viewer_version }})
directory: .
files: "**/*.xcarchive.zip"
release:
needs: [setup, build, sign-and-package-windows, sign-and-package-mac]
runs-on: ubuntu-latest
if: needs.setup.outputs.release_run
steps:
- uses: actions/download-artifact@v4
with:
pattern: "*-installer"
- uses: actions/download-artifact@v4
with:
pattern: "*-metadata"
- uses: actions/download-artifact@v4
with:
pattern: "Linux-app"
- name: Rename metadata
run: |
cp Windows-metadata/autobuild-package.xml Windows-autobuild-package.xml
cp Windows-metadata/newview/viewer_version.txt Windows-viewer_version.txt
cp macOS-metadata/autobuild-package.xml macOS-autobuild-package.xml
cp macOS-metadata/newview/viewer_version.txt macOS-viewer_version.txt
cp Linux-metadata/autobuild-package.xml Linux-autobuild-package.xml
cp Linux-metadata/newview/viewer_version.txt Linux-viewer_version.txt
# forked from softprops/action-gh-release
- name: Create GitHub release
id: release
uses: secondlife-3p/action-gh-release@v1
with:
# name the release page for the branch
name: "${{ needs.build.outputs.viewer_branch }}"
# SL-20546: want the channel and version to be visible on the
# release page
body: |
Build ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
${{ needs.build.outputs.viewer_channel }}
${{ needs.build.outputs.viewer_version }}
${{ needs.build.outputs.relnotes }}
prerelease: true
generate_release_notes: true
target_commitish: ${{ github.sha }}
previous_tag: release
append_body: true
fail_on_unmatched_files: true
files: |
macOS-installer/*.dmg
Windows-installer/*.exe
Linux-app/*.tar.gz
*-autobuild-package.xml
*-viewer_version.txt
- name: Post release URL
run: |
echo "::notice::Release ${{ steps.release.outputs.url }}"