diff --git a/.github/workflows/api-client/.gitignore b/.github/workflows/api-client/.gitignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/.github/workflows/api-client/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/.github/workflows/api-client/main.js b/.github/workflows/api-client/main.js new file mode 100644 index 0000000..93c15d5 --- /dev/null +++ b/.github/workflows/api-client/main.js @@ -0,0 +1,248 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// A script to communicate with the GitHub API to perform certain actions in +// the workflow. + +const fs = require('fs'); +const https = require('https'); +const path = require('path'); + +// octokit is the official API client of GitHub. +const { Octokit } = require('@octokit/core'); + +const repo = process.env['GITHUB_REPOSITORY']; + +const octokit = new Octokit({ + auth: process.env['GITHUB_TOKEN'], +}); + +const COMMAND_MAP = {}; + +const MAX_REDIRECTS = 3; + + +// Convert a camelCase name to kebab-case. +function camelCaseToKebabCase(name) { + // Split the camelCase name into parts with a zero-length lookahead regex on + // any capital letter. Something like "methodName" should be split into + // ["method", "Name"]. + const nameParts = name.split(/(?=[A-Z])/); + + // Convert those parts into a kebab-case name. + return nameParts.map(part => part.toLowerCase()).join('-'); +} + +// Register a method that the user can invoke on the command-line. 
We use +// (cheap) introspection to find the argument names, so that we can +// automatically document usage of each command without worrying about the docs +// getting out of sync with the code. +function registerCommand(method) { + const methodName = method.name; + const commandName = camelCaseToKebabCase(methodName); + + // Hack out the arguments from the stringified function. This is terrible + // and will not work in the general case of all JavaScript, but it does work + // here. (Don't be like me.) + const firstLine = method.toString().split('\n')[0]; + const argString = firstLine.split('(')[1].split(')')[0]; + const camelArgs = argString.replace(/\s+/g, '').split(','); + const args = camelArgs.map(camelCaseToKebabCase); + + COMMAND_MAP[commandName] = { + commandName, + method, + args, + }; +} + +// A helper function to make calls to the GitHub Repo API. +async function repoApiCall(method, apiPath, data, upload=false) { + const url = `${method} /repos/${repo}${apiPath}`; + + // Clone the "data" passed in. + const options = Object.assign({}, data); + + // If we're uploading, that goes to a different API endpoint. + if (upload) { + options.baseUrl = 'https://uploads.github.com'; + } + + const response = await octokit.request(url, options); + return response.data; +} + + +async function draftRelease(tagName) { + // Turns "refs/tags/foo" into "foo". 
+  tagName = tagName.replace('refs/tags/', '');
+
+  const response = await repoApiCall('POST', '/releases', {
+    tag_name: tagName,
+    name: tagName,
+    draft: true,
+  });
+
+  return response.id;
+}
+registerCommand(draftRelease);
+
+async function uploadAsset(releaseId, assetPath) {
+  const baseName = path.basename(assetPath);
+  const data = await fs.promises.readFile(assetPath);
+
+  const apiPath = `/releases/${releaseId}/assets?name=${baseName}`;
+  await repoApiCall('POST', apiPath, {
+    headers: {
+      'content-type': 'application/octet-stream',
+      'content-length': data.length,
+    },
+    data,
+  }, /* upload= */ true);
+}
+// Not registered as an independent command.
+
+async function uploadAllAssets(releaseId, folderPath) {
+  const folderContents = await fs.promises.readdir(folderPath);
+  for (const assetFilename of folderContents) {
+    const assetPath = path.join(folderPath, assetFilename);
+    await uploadAsset(releaseId, assetPath);
+  }
+}
+registerCommand(uploadAllAssets);
+
+// A helper function that will fetch via HTTPS and follow redirects.
+function fetchViaHttps(url, outputStream, redirectCount=0) {
+  if (redirectCount > MAX_REDIRECTS) {
+    return Promise.reject(new Error('Too many redirects!'));
+  }
+
+  return new Promise((resolve, reject) => {
+    const request = https.get(url, (response) => {
+      if (response.statusCode == 301 || response.statusCode == 302) {
+        // Handle HTTP redirects.
+        const newUrl = response.headers.location;
+
+        resolve(fetchViaHttps(newUrl, outputStream, redirectCount + 1));
+      } else if (response.statusCode == 200) {
+        response.pipe(outputStream);
+        outputStream.on('finish', resolve);
+      } else {
+        reject(new Error(`Bad HTTP status code: ${response.statusCode}`));
+      }
+    });
+    request.on('error', reject);
+  });
+}
+
+async function downloadAllAssets(releaseId, outputPath) {
+  // If the output path does not exist, create it.
+ try { + await fs.promises.stat(outputPath); + } catch (error) { + await fs.promises.mkdir(outputPath); + } + + const apiPath = `/releases/${releaseId}/assets`; + const assetList = await repoApiCall('GET', apiPath); + for (const asset of assetList) { + const url = asset.browser_download_url; + const assetPath = path.join(outputPath, asset.name); + const outputStream = fs.createWriteStream(assetPath); + + console.log(`Fetching ${url} to ${assetPath}`); + await fetchViaHttps(url, outputStream); + } +} +registerCommand(downloadAllAssets); + +async function publishRelease(releaseId) { + await repoApiCall('PATCH', `/releases/${releaseId}`, { draft: false }); +} +registerCommand(publishRelease); + +async function updateReleaseBody(releaseId, body) { + // NOTE: If you update the release body without specifying tag_name, it gets + // reset, resulting in a new tag being created with an auto-generated name + // like "untagged-SHA1". So we need to fetch the existing name before we + // update the body, and we need to specify it here. This is not mentioned in + // GitHub's docs, and may be a bug on their end. + const release = await getRelease(releaseId); + await repoApiCall('PATCH', `/releases/${releaseId}`, { + body, + tag_name: release.tag_name, + }); +} +registerCommand(updateReleaseBody); + +async function getRelease(releaseId) { + return await repoApiCall('GET', `/releases/${releaseId}`); +} +registerCommand(getRelease); + + +// We expect a command and arguments. 
+const commandName = process.argv[2];
+const args = process.argv.slice(3);
+const command = COMMAND_MAP[commandName];
+let okay = true;
+
+if (!commandName) {
+  console.error('No command selected!');
+  okay = false;
+} else if (!command) {
+  console.error(`Unknown command: ${commandName}`);
+  okay = false;
+} else if (args.length != command.args.length) {
+  console.error(`Wrong number of arguments for command: ${commandName}`);
+  okay = false;
+}
+
+// If something is wrong with the way the script was called, print usage
+// information. The list of commands and their arguments are gleaned from
+// COMMAND_MAP, which was populated by registerCommand() and introspection of
+// the command functions.
+if (!okay) {
+  console.error('');
+  console.error('Usage:');
+  const thisScript = path.basename(process.argv[1]);
+
+  for (const possibleCommand of Object.values(COMMAND_MAP)) {
+    console.error(
+        '  ',
+        thisScript,
+        possibleCommand.commandName,
+        ...possibleCommand.args.map(arg => `<${arg}>`));
+  }
+  process.exit(1);
+}
+
+// Run the command with the given arguments.
+(async () => {
+  let response;
+
+  try {
+    response = await command.method(...args);
+  } catch (error) {
+    console.error('Command failed!');
+    console.error('');
+    console.error(error);
+    process.exit(1);
+  }
+
+  // If there's a return value, print it.
+ if (response) { + console.log(response); + } +})(); diff --git a/.github/workflows/api-client/package-lock.json b/.github/workflows/api-client/package-lock.json new file mode 100644 index 0000000..4c919b8 --- /dev/null +++ b/.github/workflows/api-client/package-lock.json @@ -0,0 +1,122 @@ +{ + "requires": true, + "lockfileVersion": 1, + "dependencies": { + "@octokit/auth-token": { + "version": "2.4.5", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.4.5.tgz", + "integrity": "sha512-BpGYsPgJt05M7/L/5FoE1PiAbdxXFZkX/3kDYcsvd1v6UhlnE5e96dTDr0ezX/EFwciQxf3cNV0loipsURU+WA==", + "requires": { + "@octokit/types": "^6.0.3" + } + }, + "@octokit/core": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.5.1.tgz", + "integrity": "sha512-omncwpLVxMP+GLpLPgeGJBF6IWJFjXDS5flY5VbppePYX9XehevbDykRH9PdCdvqt9TS5AOTiDide7h0qrkHjw==", + "requires": { + "@octokit/auth-token": "^2.4.4", + "@octokit/graphql": "^4.5.8", + "@octokit/request": "^5.6.0", + "@octokit/request-error": "^2.0.5", + "@octokit/types": "^6.0.3", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + } + }, + "@octokit/endpoint": { + "version": "6.0.12", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz", + "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==", + "requires": { + "@octokit/types": "^6.0.3", + "is-plain-object": "^5.0.0", + "universal-user-agent": "^6.0.0" + } + }, + "@octokit/graphql": { + "version": "4.6.4", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.6.4.tgz", + "integrity": "sha512-SWTdXsVheRmlotWNjKzPOb6Js6tjSqA2a8z9+glDJng0Aqjzti8MEWOtuT8ZSu6wHnci7LZNuarE87+WJBG4vg==", + "requires": { + "@octokit/request": "^5.6.0", + "@octokit/types": "^6.0.3", + "universal-user-agent": "^6.0.0" + } + }, + "@octokit/openapi-types": { + "version": "9.6.0", + "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-9.6.0.tgz", + "integrity": "sha512-L+8x7DpcNtHkMbTxxCxg3cozvHUNP46rOIzFwoMs0piWwQzAGNXqlIQO2GLvnKTWLUh99DkY+UyHVrP4jXlowg==" + }, + "@octokit/request": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.0.tgz", + "integrity": "sha512-4cPp/N+NqmaGQwbh3vUsYqokQIzt7VjsgTYVXiwpUP2pxd5YiZB2XuTedbb0SPtv9XS7nzAKjAuQxmY8/aZkiA==", + "requires": { + "@octokit/endpoint": "^6.0.1", + "@octokit/request-error": "^2.1.0", + "@octokit/types": "^6.16.1", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.1", + "universal-user-agent": "^6.0.0" + } + }, + "@octokit/request-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz", + "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==", + "requires": { + "@octokit/types": "^6.0.3", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "@octokit/types": { + "version": "6.25.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.25.0.tgz", + "integrity": "sha512-bNvyQKfngvAd/08COlYIN54nRgxskmejgywodizQNyiKoXmWRAjKup2/LYwm+T9V0gsKH6tuld1gM0PzmOiB4Q==", + "requires": { + "@octokit/openapi-types": "^9.5.0" + } + }, + "before-after-hook": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", + "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ==" + }, + "deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": 
"sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" + }, + "node-fetch": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1" + } + }, + "universal-user-agent": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz", + "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==" + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + } + } +} diff --git a/.github/workflows/api-client/package.json b/.github/workflows/api-client/package.json new file mode 100644 index 0000000..94e3904 --- /dev/null +++ b/.github/workflows/api-client/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "@octokit/core": "^3.5.1" + } +} diff --git a/.github/workflows/get-version.sh b/.github/workflows/get-version.sh new file mode 100755 index 0000000..78bc6e1 --- /dev/null +++ b/.github/workflows/get-version.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Pull a tag or version number from versions.txt. + +dir=$(dirname "$0") +key="$1" + +cat "$dir"/versions.txt | grep "^$key:" | sed -e 's/.*: //' diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000..772c358 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,542 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A workflow to build and release fresh binaries. +name: Release + +# Runs when a new tag is created. Creates a release for that tag, then builds +# ffmpeg and ffprobe on all OS & CPU combinations, then attaches them to the +# release. +on: + push: + tags: + - "*" + +# NOTE: The versions of the software we build are stored in versions.txt. + +# By default, run all commands in a bash shell. On Windows, the default would +# otherwise be powershell. Each shell command should begin with "set -e" (to +# make any failed command fail the script immediately) and "set -x" (to log +# what commands are being run). +defaults: + run: + shell: bash + +jobs: + # On a single Linux host, draft a release. Later, different hosts will build + # for each OS/CPU in parallel, and then attach the resulting binaries to this + # draft. 
+ draft_release: + name: Draft release + runs-on: ubuntu-latest + outputs: + release_id: ${{ steps.draft_release.outputs.release_id }} + steps: + - name: Draft release + id: draft_release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + set -e + set -x + + # Check out this repo and install node deps so that we can run the + # API client. + repo_tag=$(echo "$GITHUB_REF" | sed -e 's@.*/@@') + git clone --depth 1 https://github.com/"$GITHUB_REPOSITORY" repo-src -b "$repo_tag" + (cd repo-src/.github/workflows/api-client && npm install) + + # Create a draft release associated with the tag that triggered this + # workflow. + tag="${{ github.ref }}" + release_id=$(node ./repo-src/.github/workflows/api-client/main.js draft-release "$tag") + echo "::set-output name=release_id::$release_id" + + # On several different hosts, build ffmpeg's dependencies, then ffmpeg itself. + # The deps are all built as static libraries. + build: + needs: draft_release + strategy: + matrix: + # TODO: Add Mac arm64? + # TODO: Add Windows arm64? + # These are the OS images that we will run. self-hosted-linux-arm64 is + # a self-hosted runner, as mid-2021, GitHub still does not offer arm64 + # VMs. + os: ["ubuntu-latest", "macos-latest", "windows-latest", "self-hosted-linux-arm64"] + + # Associate additional properties with each of these OS options. + # Commenting out an OS above is not enough to remove one. Its section + # here must also be commented out. 
+ include: + - os: ubuntu-latest + os_name: linux + target_arch: x64 + exe_ext: "" + - os: macos-latest + os_name: osx + target_arch: x64 + exe_ext: "" + - os: windows-latest + os_name: win + target_arch: x64 + exe_ext: ".exe" + - os: self-hosted-linux-arm64 + os_name: linux + target_arch: arm64 + exe_ext: "" + + name: Build ${{ matrix.os_name }} ${{ matrix.target_arch }} + runs-on: ${{ matrix.os }} + + steps: + - name: Install Linux packages + if: runner.os == 'Linux' + run: | + set -e + set -x + + # Install missing packages on Linux. + # TODO: Some of these are already on GitHub's VMs, but not our + # self-hosted runner. Try to make the self-hosted runner image more + # compatible with what GitHub offers by default. + sudo apt -y update + sudo apt -y upgrade + sudo apt -y install \ + cmake \ + mercurial \ + nasm \ + npm \ + pkg-config \ + yasm \ + libffmpeg-nvenc-dev \ + libvdpau-dev + + # Use sudo in install commands on Linux. + echo "SUDO=sudo" >> "$GITHUB_ENV" + + - name: Install macOS packages + if: runner.os == 'macOS' + run: | + set -e + set -x + + # Use homebrew to install missing packages on mac. + brew install \ + md5sha1sum \ + mercurial \ + nasm \ + yasm + + # Unlink pre-installed homebrew packages that conflict with our + # static library builds below. They are still installed, but will no + # longer be symlinked into default library paths, and the ffmpeg + # build will not pick up pre-installed shared libraries we don't want. + # Only our static versions will be used. + brew unlink \ + lame \ + opus \ + opusfile \ + xz + + # Use sudo in install commands on macOS. + echo "SUDO=sudo" >> "$GITHUB_ENV" + + - name: Add msys2 to the Windows path + if: runner.os == 'Windows' + run: | + # At this point, we're running Git Bash. After this step, we will be + # running msys bash, just as we would be when debugging via SSH with + # mxschmitt/action-tmate. 
+ echo "C:\\msys64\\usr\\bin" >> "$GITHUB_PATH" + echo "C:\\msys64\\mingw64\\bin" >> "$GITHUB_PATH" + + - name: Install Windows packages + if: runner.os == 'Windows' + run: | + set -e + set -x + + # Install msys packages we will need. + pacman -Sy --noconfirm \ + git \ + mercurial \ + nasm \ + yasm + + # Make sure that cmake generates makefiles and not ninja files. + echo "CMAKE_GENERATOR=MSYS Makefiles" >> "$GITHUB_ENV" + + # Make sure that pkg-config searches the path where we will install + # things. + echo "PKG_CONFIG_PATH=/usr/local/lib/pkgconfig" >> "$GITHUB_ENV" + + - name: Check out repo source + run: | + set -e + set -x + + # Check out this repo and install node deps so that we can run the + # API client. + repo_tag=$(echo "$GITHUB_REF" | sed -e 's@.*/@@') + git clone --depth 1 https://github.com/"$GITHUB_REPOSITORY" repo-src -b "$repo_tag" + (cd repo-src/.github/workflows/api-client && npm install) + + - name: Install libvpx + run: | + set -e + set -x + + tag=$(repo-src/.github/workflows/get-version.sh libvpx) + git clone --depth 1 https://chromium.googlesource.com/webm/libvpx -b "$tag" + cd libvpx + + # NOTE: disabling unit tests and examples significantly reduces build + # time (by 80% as tested on a Jetson Nano) + ./configure \ + --enable-vp8 \ + --enable-vp9 \ + --enable-runtime-cpu-detect \ + --disable-unit-tests \ + --disable-examples \ + --enable-static \ + --disable-shared + + make + $SUDO make install + + - name: Install aom + run: | + set -e + set -x + + tag=$(repo-src/.github/workflows/get-version.sh aom) + git clone --depth 1 https://aomedia.googlesource.com/aom/ -b "$tag" + + # AOM insists on being built out-of-tree. + mkdir aom_build + cd aom_build + + # NOTE: without CMAKE_INSTALL_PREFIX on Windows, files are installed + # to c:\Program Files. 
+ cmake ../aom \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DENABLE_DOCS=OFF \ + -DENABLE_EXAMPLES=OFF \ + -DENABLE_TESTS=OFF \ + -DENABLE_TESTDATA=OFF \ + -DENABLE_TOOLS=OFF \ + -DCONFIG_RUNTIME_CPU_DETECT=1 \ + -DCONFIG_SHARED=0 + + make + $SUDO make install + + # This adjustment to the aom linker flags is needed, at least on + # arm, to successfully link against it statically. (-lm missing) + $SUDO sed -e 's/-laom/-laom -lm/' -i.bk /usr/local/lib/pkgconfig/aom.pc + + - name: Install x264 + run: | + set -e + set -x + + tag=$(repo-src/.github/workflows/get-version.sh x264) + git clone --depth 1 https://code.videolan.org/videolan/x264.git -b "$tag" + cd x264 + + ./configure \ + --enable-static + + # Only build and install the static library. + make libx264.a + $SUDO make install-lib-static + + - name: Install x265 + run: | + set -e + set -x + + tag=$(repo-src/.github/workflows/get-version.sh x265) + hg clone http://hg.videolan.org/x265 -r "$tag" + cd x265/build + + # NOTE: without CMAKE_INSTALL_PREFIX on Windows, files are installed + # to c:\Program Files. + cmake ../source \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DENABLE_SHARED=OFF \ + -DENABLE_CLI=OFF + + make + $SUDO make install + + # This adjustment to the x265 linker flags is needed, at least on + # arm, to successfully link against it statically. (-lgcc_s not + # found (or needed), and -lpthread missing) + $SUDO sed -e 's/-lgcc_s -lgcc -lgcc_s -lgcc/-lpthread -lgcc/' -i.bk /usr/local/lib/pkgconfig/x265.pc + + - name: Install lame + run: | + set -e + set -x + + version=$(repo-src/.github/workflows/get-version.sh lame) + curl -L -o lame-"$version".tar.gz https://sourceforge.net/projects/lame/files/lame/"$version"/lame-"$version".tar.gz/download + tar xzf lame-"$version".tar.gz + cd lame-"$version" + + # Only build and install the library (--disable-front-end). The + # frontend doesn't build on Windows, and we don't need it anyway. 
+ # On Windows, somehow prefix defaults to / instead of /usr/local, but + # only on some projects. No idea why that is the default on Windows, + # but --prefix=/usr/local fixes it. + ./configure \ + --prefix=/usr/local \ + --disable-frontend \ + --enable-static \ + --disable-shared + + make + $SUDO make install + + - name: Install opus + run: | + set -e + set -x + + version=$(repo-src/.github/workflows/get-version.sh opus) + curl -LO https://archive.mozilla.org/pub/opus/opus-"$version".tar.gz + tar xzf opus-"$version".tar.gz + cd opus-"$version" + + # On Windows, we can't link later if we build with -D_FORTIFY_SOURCE + # now. But there is no configure option for this, so we edit the + # configure script instead. + sed -e 's/-D_FORTIFY_SOURCE=2//' -i.bk configure + + # On Windows, somehow prefix defaults to / instead of /usr/local, but + # only on some projects. No idea why that is the default on Windows, + # but --prefix=/usr/local fixes it. + # On Windows, we also need to disable-stack-protector. + ./configure \ + --prefix=/usr/local \ + --disable-extra-programs \ + --disable-stack-protector \ + --enable-static \ + --disable-shared + + make + $SUDO make install + + # The pkgconfig linker flags for static opus don't work when ffmpeg + # checks for opus in configure. Linking libm after libopus fixes it. + $SUDO sed -e 's/-lopus/-lopus -lm/' -i.bk /usr/local/lib/pkgconfig/opus.pc + + - name: Build ffmpeg and ffprobe + run: | + set -e + set -x + + tag=$(repo-src/.github/workflows/get-version.sh ffmpeg) + git clone --depth 1 https://git.ffmpeg.org/ffmpeg.git -b "$tag" + cd ffmpeg + + # Set some OS-specific environment variables and flags. + if [[ "${{ runner.os }}" == "Linux" ]]; then + export CFLAGS="-static" + export LDFLAGS="-static" + + # Enable platform-specific hardware acceleration. 
+ PLATFORM_CONFIGURE_FLAGS="--enable-nvenc --enable-vdpau" + elif [[ "${{ runner.os }}" == "macOS" ]]; then + export CFLAGS="-static" + # You can't do a _truly_ static build on macOS except the kernel. + # So don't set LDFLAGS. See https://stackoverflow.com/a/3801032 + + # Enable platform-specific hardware acceleration. + PLATFORM_CONFIGURE_FLAGS="--enable-videotoolbox" + + # Disable x86 ASM on macOS. It fails to build with an error about + # how macho64 format can't contain 32-bit assembly. I'm not sure + # how else to resolve this, and from my searches, it appears that + # others are not having this problem with ffmpeg. + # TODO: Try building from master branch to see if this has been + # resolved more recently than n4.4. + PLATFORM_CONFIGURE_FLAGS="$PLATFORM_CONFIGURE_FLAGS --disable-x86asm --disable-inline-asm" + elif [[ "${{ runner.os }}" == "Windows" ]]; then + export CFLAGS="-static" + # Surprisingly, /usr/local/lib is not in the mingw linker path by + # default. + export LDFLAGS="-static -L/usr/local/lib" + + # Convince ffmpeg that we want to build for mingw64 (native + # Windows), not msys (which involves some posix emulation). Since + # we're in an msys environment, ffmpeg reasonably assumes we're + # building for that environment if we don't specify this. + PLATFORM_CONFIGURE_FLAGS="--target-os=mingw64" + fi + + ./configure \ + --pkg-config-flags="--static" \ + --disable-ffplay \ + --enable-libvpx \ + --enable-libaom \ + --enable-libx264 \ + --enable-libx265 \ + --enable-libmp3lame \ + --enable-libopus \ + --enable-runtime-cpudetect \ + --enable-gpl \ + --enable-version3 \ + --enable-static \ + $PLATFORM_CONFIGURE_FLAGS + + make + # No "make install" for ffmpeg. + + - name: Check that executables are static + run: | + set -e + set -x + + cd ffmpeg + + if [[ "${{ runner.os }}" == "Linux" ]]; then + # If ldd succeeds, then these are dynamic executables, so we fail + # this step if ldd succeeds. 
+ ldd ffmpeg && exit 1 + ldd ffprobe && exit 1 + elif [[ "${{ runner.os }}" == "Windows" ]]; then + # These will still be dynamic executables, but they should not link + # against anything outside of /c/Windows. The grep command will + # succeed if it can find anything outside /c/Windows, and then we + # fail if that succeeds. + ldd ffmpeg.exe | grep -qvi /c/Windows/ && exit 1 + ldd ffprobe.exe | grep -qvi /c/Windows/ && exit 1 + elif [[ "${{ runner.os }}" == "macOS" ]]; then + # These will still be dynamic executables, but they should not link + # against anything outside of /usr/lib or /System/Library. The + # grep command will succeed if it can find anything outside + # these two folders, and then we fail if that succeeds. + otool -L ffmpeg | grep '\t' | grep -Evq '(/System/Library|/usr/lib)' && exit 1 + otool -L ffprobe | grep '\t' | grep -Evq '(/System/Library|/usr/lib)' && exit 1 + fi + + # After commands that we expect to fail (greps and ldd commands + # above), we still need a successful command at the end of the script + # to make this step of the workflow a success. + true + + - name: Prepare assets + run: | + set -e + set -x + + mkdir assets + SUFFIX="-${{ matrix.os_name }}-${{ matrix.target_arch }}${{ matrix.exe_ext}}" + cp ffmpeg/ffmpeg assets/ffmpeg"$SUFFIX" + cp ffmpeg/ffprobe assets/ffprobe"$SUFFIX" + + # Show sizes and MD5 sums that can be verified by users later if they + # want to check for authenticity. + cd assets + wc -c * + md5sum * + + - name: Attach assets to release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + set -e + set -x + + # Attach the build outputs to the draft release. Each machine will + # do this separately and in parallel. Later, another job will take + # over to collect them all and use their MD5 sums to create the + # release notes (the "body" of the release). 
+ release_id="${{ needs.draft_release.outputs.release_id }}" + node ./repo-src/.github/workflows/api-client/main.js \ + upload-all-assets "$release_id" assets/ + + # NOTE: Uncomment this step to debug failures via SSH. + #- name: Debug + # uses: mxschmitt/action-tmate@v3.6 + # with: + # limit-access-to-actor: true + # if: failure() + + publish_release: + name: Publish release + needs: [draft_release, build] + runs-on: ubuntu-latest + steps: + - name: Publish release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + set -e + set -x + + # Check out this repo and install node deps so that we can run the + # API client. + repo_tag=$(echo "$GITHUB_REF" | sed -e 's@.*/@@') + git clone --depth 1 https://github.com/"$GITHUB_REPOSITORY" repo-src -b "$repo_tag" + (cd repo-src/.github/workflows/api-client && npm install) + + # Compile the release notes (the "body" of the release) with the date + # and the versions of the software we built. + + # The format provided by "date -I" is "YYYY-MM-DD". + echo "Date:" >> body.txt + echo " - $(date -I)" >> body.txt + echo "" >> body.txt + + echo "$GITHUB_REPOSITORY version:" >> body.txt + echo " - $repo_tag" >> body.txt + echo "" >> body.txt + + echo "Software versions:" >> body.txt + cat repo-src/.github/workflows/versions.txt | \ + sed -e 's/^/ - /' >> body.txt + echo "" >> body.txt + + # Update the release notes with this preliminary version. This is + # what gets emailed out when we publish the release below. + release_id="${{ needs.draft_release.outputs.release_id }}" + node ./repo-src/.github/workflows/api-client/main.js \ + update-release-body "$release_id" "$(cat body.txt)" + + # Now we have to take the release out of draft mode. Until we do, we + # can't get download URLs for the assets. + node ./repo-src/.github/workflows/api-client/main.js \ + publish-release "$release_id" + + # The downloads are sometimes a bit flaky (responding with 404) if we + # don't put some delay between publication and download. 
This number + # is arbitrary, but experimentally, it seems to solve the issue. + sleep 30 + + # Next, download the assets. + node ./repo-src/.github/workflows/api-client/main.js \ + download-all-assets "$release_id" assets/ + + # Now add the MD5 sums to the release notes. + echo "MD5 sums:" >> body.txt + (cd assets; md5sum * | sed -e 's/^/ - /') >> body.txt + + # Now update the release notes one last time, with the MD5 sums + # appended. + node ./repo-src/.github/workflows/api-client/main.js \ + update-release-body "$release_id" "$(cat body.txt)" diff --git a/.github/workflows/versions.txt b/.github/workflows/versions.txt new file mode 100644 index 0000000..facc780 --- /dev/null +++ b/.github/workflows/versions.txt @@ -0,0 +1,8 @@ +ffmpeg: n4.4 +libvpx: v1.9.0 +aom: v3.1.2 +x264: stable +x265: stable +lame: 3.100 +opus: 1.3.1 +vorbis: 1.3.7 diff --git a/README.md b/README.md index 8b47ace..7c40159 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,59 @@ # static-ffmpeg-binaries -Static binaries of FFmpeg, for multiple OS & CPU combinations, built from source in a GitHub Actions workflow. + +Static binaries of FFmpeg, for multiple OS & CPU combinations, built from +source in a GitHub Actions workflow. + +To download binaries, visit the [releases page][releases]. + + +# License + +The GitHub Actions workflows and other scripts in this repo are covered by the +Apache license. +Please see the [workflow source][workflow], [API client source][api-client], +[version script source][version-script], and see [the Apache license][apache] +for license details. + +The resulting FFmpeg binaries are built using GPL libraries, and are therefore +published under the GPL license. +Please see the [releases page][releases] for binaries, and see [FFmpeg's GPL +license][gpl] for license details. + + +# How are they built? + +FFmpeg and its key dependencies are all built from source and linked statically. 
+Each run of the GitHub Actions workflow logs the MD5 sums of the binaries, and +it places the MD5 sums into the release notes. You can see how they were built, +and you can verify that they haven't been tampered with. The sums in the +workflow logs, release notes, and the binaries should all match. +You can read the details in the [workflow source][workflow]. + +No third-party GitHub Actions have been used in this workflow, to protect +against supply-chain attacks. + + +# Triggering a build + +Update the version numbers as needed in the [version file][version-file], then +create a tag on the new commit. Full builds will be triggered, and binaries +will be attached to a release on the new tag. + + +# Tag names + +Tag names should follow the form of `$FFMPEG_VERSION-$WORKFLOW_RELEASE_NUMBER`. +For example, the first time we release a build based on FFmpeg n4.4, the tag +should be "n4.4-1". If we need to update the dependencies, or change the +configuration, or make any other changes to the workflow that don't change the +FFmpeg version, the next release would be "n4.4-2". When FFmpeg n4.5 is +released upstream, we could update to that and then tag "n4.5-1". + + +[releases]: https://github.com/joeyparrish/static-ffmpeg-binaries/releases +[workflow]: https://github.com/joeyparrish/static-ffmpeg-binaries/blob/main/.github/workflows/release.yaml +[api-client]: https://github.com/joeyparrish/static-ffmpeg-binaries/blob/main/.github/workflows/api-client/main.js +[version-script]: https://github.com/joeyparrish/static-ffmpeg-binaries/blob/main/.github/workflows/get-version.sh +[version-file]: https://github.com/joeyparrish/static-ffmpeg-binaries/blob/main/.github/workflows/versions.txt +[apache]: https://github.com/joeyparrish/static-ffmpeg-binaries/blob/main/LICENSE +[gpl]: https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.GPLv3