mirror of
https://github.com/game-ci/unity-builder.git
synced 2026-01-29 12:19:06 +08:00
Compare commits
11 Commits
feature/pr
...
v4
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1d4ee0697f | ||
|
|
3a2abf9037 | ||
|
|
cfdebb67c1 | ||
|
|
ab64768ceb | ||
|
|
00fa0d3772 | ||
|
|
d587557287 | ||
|
|
6e0bf17345 | ||
|
|
2822af505e | ||
|
|
8ec161b981 | ||
|
|
88a89c94a0 | ||
|
|
f7f3f70c57 |
@@ -1,18 +0,0 @@
|
||||
{
|
||||
"files.autoSave": "on",
|
||||
"files.autoSaveWhen": "on",
|
||||
"files.autoSaveDelay": 1000,
|
||||
|
||||
"editor.formatOnSave": false,
|
||||
"editor.formatOnPaste": false,
|
||||
"editor.formatOnType": false,
|
||||
|
||||
"editor.codeActionsOnSave": {},
|
||||
|
||||
"git.autorefresh": false,
|
||||
"git.confirmSync": false,
|
||||
"git.autofetch": false,
|
||||
|
||||
"editor.defaultFormatter": null
|
||||
}
|
||||
|
||||
@@ -14,7 +14,8 @@
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true,
|
||||
"jest/globals": true
|
||||
"jest/globals": true,
|
||||
"es2020": true
|
||||
},
|
||||
"rules": {
|
||||
// Error out for code formatting errors
|
||||
@@ -77,13 +78,5 @@
|
||||
"unicorn/prefer-spread": "off",
|
||||
// Temp disable to prevent mixing changes with other PRs
|
||||
"i18n-text/no-en": "off"
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["jest.setup.js"],
|
||||
"rules": {
|
||||
"import/no-commonjs": "off"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
10
.github/workflows/build-tests-mac.yml
vendored
10
.github/workflows/build-tests-mac.yml
vendored
@@ -24,6 +24,13 @@ jobs:
|
||||
targetPlatform:
|
||||
- StandaloneOSX # Build a MacOS executable
|
||||
- iOS # Build an iOS executable
|
||||
include:
|
||||
# Additionally test enableGpu build for a standalone windows target
|
||||
- unityVersion: 6000.0.36f1
|
||||
targetPlatform: StandaloneOSX
|
||||
- unityVersion: 6000.0.36f1
|
||||
targetPlatform: StandaloneOSX
|
||||
buildProfile: 'Assets/Settings/Build Profiles/Sample macOS Build Profile.asset'
|
||||
|
||||
steps:
|
||||
###########################
|
||||
@@ -65,6 +72,7 @@ jobs:
|
||||
projectPath: ${{ matrix.projectPath }}
|
||||
unityVersion: ${{ matrix.unityVersion }}
|
||||
targetPlatform: ${{ matrix.targetPlatform }}
|
||||
buildProfile: ${{ matrix.buildProfile }}
|
||||
customParameters: -profile SomeProfile -someBoolean -someValue exampleValue
|
||||
# We use dirty build because we are replacing the default project settings file above
|
||||
allowDirtyBuild: true
|
||||
@@ -74,6 +82,6 @@ jobs:
|
||||
###########################
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: Build ${{ matrix.targetPlatform }} on MacOS (${{ matrix.unityVersion }})
|
||||
name: Build ${{ matrix.targetPlatform }} on MacOS (${{ matrix.unityVersion }})${{ matrix.buildProfile && ' With Build Profile' || '' }}
|
||||
path: build
|
||||
retention-days: 14
|
||||
|
||||
10
.github/workflows/build-tests-windows.yml
vendored
10
.github/workflows/build-tests-windows.yml
vendored
@@ -34,7 +34,12 @@ jobs:
|
||||
unityVersion: 2023.2.2f1
|
||||
targetPlatform: StandaloneWindows64
|
||||
enableGpu: true
|
||||
|
||||
- unityVersion: 6000.0.36f1
|
||||
targetPlatform: StandaloneWindows64
|
||||
- unityVersion: 6000.0.36f1
|
||||
targetPlatform: StandaloneWindows64
|
||||
buildProfile: 'Assets/Settings/Build Profiles/Sample Windows Build Profile.asset'
|
||||
|
||||
steps:
|
||||
###########################
|
||||
# Checkout #
|
||||
@@ -79,6 +84,7 @@ jobs:
|
||||
projectPath: ${{ matrix.projectPath }}
|
||||
unityVersion: ${{ matrix.unityVersion }}
|
||||
targetPlatform: ${{ matrix.targetPlatform }}
|
||||
buildProfile: ${{ matrix.buildProfile }}
|
||||
enableGpu: ${{ matrix.enableGpu }}
|
||||
customParameters: -profile SomeProfile -someBoolean -someValue exampleValue
|
||||
allowDirtyBuild: true
|
||||
@@ -140,6 +146,6 @@ jobs:
|
||||
###########################
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: Build ${{ matrix.targetPlatform }} on Windows (${{ matrix.unityVersion }})${{ matrix.enableGpu && ' With GPU' || '' }}
|
||||
name: Build ${{ matrix.targetPlatform }} on Windows (${{ matrix.unityVersion }})${{ matrix.enableGpu && ' With GPU' || '' }}${{ matrix.buildProfile && ' With Build Profile' || '' }}
|
||||
path: build
|
||||
retention-days: 14
|
||||
|
||||
231
.github/workflows/cloud-runner-ci-pipeline.yml
vendored
Normal file
231
.github/workflows/cloud-runner-ci-pipeline.yml
vendored
Normal file
@@ -0,0 +1,231 @@
|
||||
name: Cloud Runner CI Pipeline
|
||||
|
||||
on:
|
||||
push: { branches: [cloud-runner-develop, cloud-runner-preview, main] }
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
runGithubIntegrationTests:
|
||||
description: 'Run GitHub Checks integration tests'
|
||||
required: false
|
||||
default: 'false'
|
||||
|
||||
permissions:
|
||||
checks: write
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
env:
|
||||
GKE_ZONE: 'us-central1'
|
||||
GKE_REGION: 'us-central1'
|
||||
GKE_PROJECT: 'unitykubernetesbuilder'
|
||||
GKE_CLUSTER: 'game-ci-github-pipelines'
|
||||
GCP_LOGGING: true
|
||||
GCP_PROJECT: unitykubernetesbuilder
|
||||
GCP_LOG_FILE: ${{ github.workspace }}/cloud-runner-logs.txt
|
||||
AWS_REGION: eu-west-2
|
||||
AWS_DEFAULT_REGION: eu-west-2
|
||||
AWS_STACK_NAME: game-ci-team-pipelines
|
||||
CLOUD_RUNNER_BRANCH: ${{ github.ref }}
|
||||
DEBUG: true
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
PROJECT_PATH: test-project
|
||||
UNITY_VERSION: 2019.3.15f1
|
||||
USE_IL2CPP: false
|
||||
USE_GKE_GCLOUD_AUTH_PLUGIN: true
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
name: Tests
|
||||
if: github.event.event_type != 'pull_request_target'
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- 'cloud-runner-end2end-locking'
|
||||
- 'cloud-runner-end2end-caching'
|
||||
- 'cloud-runner-end2end-retaining'
|
||||
- 'cloud-runner-caching'
|
||||
- 'cloud-runner-environment'
|
||||
- 'cloud-runner-image'
|
||||
- 'cloud-runner-hooks'
|
||||
- 'cloud-runner-local-persistence'
|
||||
- 'cloud-runner-locking-core'
|
||||
- 'cloud-runner-locking-get-locked'
|
||||
steps:
|
||||
- name: Checkout (default)
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: false
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: eu-west-2
|
||||
- run: yarn
|
||||
- run: yarn run test "${{ matrix.test }}" --detectOpenHandles --forceExit --runInBand
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
PROJECT_PATH: test-project
|
||||
TARGET_PLATFORM: StandaloneWindows64
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
KUBE_STORAGE_CLASS: local-path
|
||||
PROVIDER_STRATEGY: local-docker
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
GIT_PRIVATE_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
k8sTests:
|
||||
name: K8s Tests
|
||||
if: github.event.event_type != 'pull_request_target'
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
# - 'cloud-runner-async-workflow'
|
||||
- 'cloud-runner-end2end-locking'
|
||||
- 'cloud-runner-end2end-caching'
|
||||
- 'cloud-runner-end2end-retaining'
|
||||
- 'cloud-runner-kubernetes'
|
||||
- 'cloud-runner-environment'
|
||||
- 'cloud-runner-github-checks'
|
||||
steps:
|
||||
- name: Checkout (default)
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
lfs: false
|
||||
- run: yarn
|
||||
- name: actions-k3s
|
||||
uses: debianmaster/actions-k3s@v1.0.5
|
||||
with:
|
||||
version: 'latest'
|
||||
- run: yarn run test "${{ matrix.test }}" --detectOpenHandles --forceExit --runInBand
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
PROJECT_PATH: test-project
|
||||
TARGET_PLATFORM: StandaloneWindows64
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
KUBE_STORAGE_CLASS: local-path
|
||||
PROVIDER_STRATEGY: k8s
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
GIT_PRIVATE_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
awsTests:
|
||||
name: AWS Tests
|
||||
if: github.event.event_type != 'pull_request_target'
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- 'cloud-runner-end2end-locking'
|
||||
- 'cloud-runner-end2end-caching'
|
||||
- 'cloud-runner-end2end-retaining'
|
||||
- 'cloud-runner-environment'
|
||||
- 'cloud-runner-s3-steps'
|
||||
steps:
|
||||
- name: Checkout (default)
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
lfs: false
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: eu-west-2
|
||||
- run: yarn
|
||||
- run: yarn run test "${{ matrix.test }}" --detectOpenHandles --forceExit --runInBand
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
PROJECT_PATH: test-project
|
||||
TARGET_PLATFORM: StandaloneWindows64
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
KUBE_STORAGE_CLASS: local-path
|
||||
PROVIDER_STRATEGY: aws
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
GIT_PRIVATE_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
buildTargetTests:
|
||||
name: Local Build Target Tests
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
providerStrategy:
|
||||
#- aws
|
||||
- local-docker
|
||||
#- k8s
|
||||
targetPlatform:
|
||||
- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
|
||||
- StandaloneWindows64 # Build a Windows 64-bit standalone.
|
||||
- StandaloneLinux64 # Build a Linux 64-bit standalone.
|
||||
- WebGL # WebGL.
|
||||
- iOS # Build an iOS player.
|
||||
# - Android # Build an Android .apk.
|
||||
steps:
|
||||
- name: Checkout (default)
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: false
|
||||
- run: yarn
|
||||
- uses: ./
|
||||
id: unity-build
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
GIT_PRIVATE_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
targetPlatform: ${{ matrix.targetPlatform }}
|
||||
providerStrategy: ${{ matrix.providerStrategy }}
|
||||
- run: |
|
||||
cp ./cloud-runner-cache/cache/${{ steps.unity-build.outputs.CACHE_KEY }}/build/${{ steps.unity-build.outputs.BUILD_ARTIFACT }} ${{ steps.unity-build.outputs.BUILD_ARTIFACT }}
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.providerStrategy }} Build (${{ matrix.targetPlatform }})
|
||||
path: ${{ steps.unity-build.outputs.BUILD_ARTIFACT }}
|
||||
retention-days: 14
|
||||
|
||||
githubChecksIntegration:
|
||||
name: GitHub Checks Integration
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.runGithubIntegrationTests == 'true'
|
||||
env:
|
||||
RUN_GITHUB_INTEGRATION_TESTS: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'yarn'
|
||||
- run: yarn install --frozen-lockfile
|
||||
- run: yarn test cloud-runner-github-checks-integration-test --detectOpenHandles --forceExit --runInBand
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -1,83 +0,0 @@
|
||||
name: cloud-runner-integrity-localstack
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runGithubIntegrationTests:
|
||||
description: 'Run GitHub Checks integration tests'
|
||||
required: false
|
||||
default: 'false'
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
checks: write
|
||||
contents: read
|
||||
actions: write
|
||||
packages: read
|
||||
pull-requests: write
|
||||
statuses: write
|
||||
id-token: write
|
||||
|
||||
env:
|
||||
AWS_REGION: us-east-1
|
||||
AWS_DEFAULT_REGION: us-east-1
|
||||
AWS_STACK_NAME: game-ci-local
|
||||
AWS_ENDPOINT: http://localhost:4566
|
||||
AWS_ENDPOINT_URL: http://localhost:4566
|
||||
AWS_ACCESS_KEY_ID: test
|
||||
AWS_SECRET_ACCESS_KEY: test
|
||||
CLOUD_RUNNER_BRANCH: ${{ github.ref }}
|
||||
DEBUG: true
|
||||
PROJECT_PATH: test-project
|
||||
USE_IL2CPP: false
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
name: Cloud Runner Tests (LocalStack)
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
localstack:
|
||||
image: localstack/localstack
|
||||
ports:
|
||||
- 4566:4566
|
||||
env:
|
||||
SERVICES: cloudformation,ecs,kinesis,cloudwatch,s3,logs
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- 'cloud-runner-end2end-locking'
|
||||
- 'cloud-runner-end2end-caching'
|
||||
- 'cloud-runner-end2end-retaining'
|
||||
- 'cloud-runner-caching'
|
||||
- 'cloud-runner-environment'
|
||||
- 'cloud-runner-image'
|
||||
- 'cloud-runner-hooks'
|
||||
- 'cloud-runner-local-persistence'
|
||||
- 'cloud-runner-locking-core'
|
||||
- 'cloud-runner-locking-get-locked'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: false
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'yarn'
|
||||
- run: yarn install --frozen-lockfile
|
||||
- run: yarn run test "${{ matrix.test }}" --detectOpenHandles --forceExit --runInBand
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
PROJECT_PATH: test-project
|
||||
TARGET_PLATFORM: StandaloneWindows64
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
KUBE_STORAGE_CLASS: local-path
|
||||
PROVIDER_STRATEGY: aws
|
||||
AWS_ACCESS_KEY_ID: test
|
||||
AWS_SECRET_ACCESS_KEY: test
|
||||
GIT_PRIVATE_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
196
.github/workflows/cloud-runner-integrity.yml
vendored
196
.github/workflows/cloud-runner-integrity.yml
vendored
@@ -1,196 +0,0 @@
|
||||
name: cloud-runner-integrity
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runGithubIntegrationTests:
|
||||
description: 'Run GitHub Checks integration tests'
|
||||
required: false
|
||||
default: 'false'
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
checks: write
|
||||
contents: read
|
||||
actions: write
|
||||
packages: read
|
||||
pull-requests: write
|
||||
statuses: write
|
||||
id-token: write
|
||||
|
||||
env:
|
||||
AWS_REGION: eu-west-2
|
||||
AWS_DEFAULT_REGION: eu-west-2
|
||||
AWS_STACK_NAME: game-ci-team-pipelines
|
||||
CLOUD_RUNNER_BRANCH: ${{ github.ref }}
|
||||
DEBUG: true
|
||||
PROJECT_PATH: test-project
|
||||
USE_IL2CPP: false
|
||||
|
||||
jobs:
|
||||
k8s:
|
||||
name: Cloud Runner Tests (K8s)
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# K8s runs (k3s)
|
||||
- test: 'cloud-runner-end2end-caching'
|
||||
provider: k8s
|
||||
- test: 'cloud-runner-end2end-retaining'
|
||||
provider: k8s
|
||||
- test: 'cloud-runner-hooks'
|
||||
provider: k8s
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: false
|
||||
# Set up Kubernetes (k3s via k3d) only for k8s matrix entries
|
||||
- name: Set up kubectl
|
||||
if: ${{ matrix.provider == 'k8s' }}
|
||||
uses: azure/setup-kubectl@v4
|
||||
with:
|
||||
version: 'v1.29.0'
|
||||
- name: Install k3d
|
||||
if: ${{ matrix.provider == 'k8s' }}
|
||||
run: |
|
||||
curl -s https://raw.githubusercontent.com/k3d-io/k3d/main/install.sh | bash
|
||||
k3d version | cat
|
||||
- name: Create k3s cluster (k3d)
|
||||
if: ${{ matrix.provider == 'k8s' }}
|
||||
run: |
|
||||
k3d cluster create unity-builder --agents 1 --wait
|
||||
kubectl config current-context | cat
|
||||
- name: Verify cluster readiness
|
||||
if: ${{ matrix.provider == 'k8s' }}
|
||||
run: |
|
||||
for i in {1..60}; do kubectl get nodes && break || sleep 5; done
|
||||
kubectl get storageclass
|
||||
- name: Start LocalStack (S3)
|
||||
uses: localstack/setup-localstack@v0.2.3
|
||||
with:
|
||||
install-awslocal: true
|
||||
- name: Create S3 bucket for tests (host LocalStack)
|
||||
run: |
|
||||
awslocal s3 mb s3://$AWS_STACK_NAME || true
|
||||
awslocal s3 ls
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'yarn'
|
||||
- run: yarn install --frozen-lockfile
|
||||
- run: yarn run test "${{ matrix.test }}" --detectOpenHandles --forceExit --runInBand
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
PROJECT_PATH: test-project
|
||||
TARGET_PLATFORM: StandaloneWindows64
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
KUBE_STORAGE_CLASS: ${{ matrix.provider == 'k8s' && 'local-path' || '' }}
|
||||
PROVIDER_STRATEGY: ${{ matrix.provider }}
|
||||
AWS_ACCESS_KEY_ID: test
|
||||
AWS_SECRET_ACCESS_KEY: test
|
||||
AWS_S3_ENDPOINT: http://localhost:4566
|
||||
AWS_ENDPOINT: http://localhost:4566
|
||||
INPUT_AWSS3ENDPOINT: http://localhost:4566
|
||||
INPUT_AWSENDPOINT: http://localhost:4566
|
||||
AWS_S3_FORCE_PATH_STYLE: 'true'
|
||||
AWS_EC2_METADATA_DISABLED: 'true'
|
||||
GIT_PRIVATE_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
localstack:
|
||||
name: Cloud Runner Tests (LocalStack)
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
localstack:
|
||||
image: localstack/localstack
|
||||
ports:
|
||||
- 4566:4566
|
||||
env:
|
||||
SERVICES: cloudformation,ecs,kinesis,cloudwatch,s3,logs
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- 'cloud-runner-end2end-locking'
|
||||
- 'cloud-runner-end2end-caching'
|
||||
- 'cloud-runner-end2end-retaining'
|
||||
- 'cloud-runner-caching'
|
||||
- 'cloud-runner-environment'
|
||||
- 'cloud-runner-image'
|
||||
- 'cloud-runner-hooks'
|
||||
- 'cloud-runner-local-persistence'
|
||||
- 'cloud-runner-locking-core'
|
||||
- 'cloud-runner-locking-get-locked'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: false
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'yarn'
|
||||
- run: yarn install --frozen-lockfile
|
||||
- run: yarn run test "${{ matrix.test }}" --detectOpenHandles --forceExit --runInBand
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
PROJECT_PATH: test-project
|
||||
TARGET_PLATFORM: StandaloneWindows64
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
KUBE_STORAGE_CLASS: local-path
|
||||
PROVIDER_STRATEGY: aws
|
||||
AWS_ACCESS_KEY_ID: test
|
||||
AWS_SECRET_ACCESS_KEY: test
|
||||
AWS_ENDPOINT: http://localhost:4566
|
||||
AWS_ENDPOINT_URL: http://localhost:4566
|
||||
GIT_PRIVATE_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
aws:
|
||||
name: Cloud Runner Tests (AWS)
|
||||
runs-on: ubuntu-latest
|
||||
needs: [k8s, localstack]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
test:
|
||||
- 'cloud-runner-end2end-caching'
|
||||
- 'cloud-runner-end2end-retaining'
|
||||
- 'cloud-runner-hooks'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
lfs: false
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: ${{ env.AWS_REGION }}
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'yarn'
|
||||
- run: yarn install --frozen-lockfile
|
||||
- run: yarn run test "${{ matrix.test }}" --detectOpenHandles --forceExit --runInBand
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
UNITY_EMAIL: ${{ secrets.UNITY_EMAIL }}
|
||||
UNITY_PASSWORD: ${{ secrets.UNITY_PASSWORD }}
|
||||
UNITY_SERIAL: ${{ secrets.UNITY_SERIAL }}
|
||||
PROJECT_PATH: test-project
|
||||
TARGET_PLATFORM: StandaloneWindows64
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
PROVIDER_STRATEGY: aws
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
GIT_PRIVATE_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
GITHUB_TOKEN: ${{ secrets.GIT_PRIVATE_TOKEN }}
|
||||
10
.github/workflows/integrity-check.yml
vendored
10
.github/workflows/integrity-check.yml
vendored
@@ -22,13 +22,7 @@ jobs:
|
||||
node-version: '18'
|
||||
- run: yarn
|
||||
- run: yarn lint
|
||||
- run: yarn test:ci --coverage
|
||||
- run: yarn test --coverage
|
||||
- run: bash <(curl -s https://codecov.io/bash)
|
||||
- run: yarn build || { echo "build command should always succeed" ; exit 61; }
|
||||
# - run: yarn build --quiet && git diff --quiet dist || { echo "dist should be auto generated" ; git diff dist ; exit 62; }
|
||||
|
||||
cloud-runner:
|
||||
name: Cloud Runner Integrity
|
||||
uses: ./.github/workflows/cloud-runner-integrity.yml
|
||||
secrets: inherit
|
||||
|
||||
# - run: yarn build --quiet && git diff --quiet dist || { echo "dist should be auto generated" ; git diff dist ; exit 62; }
|
||||
|
||||
@@ -56,14 +56,19 @@ namespace UnityBuilderAction
|
||||
// of either `UnityEditor.BuildPlayerOptions` or `UnityEditor.BuildPlayerWithProfileOptions`
|
||||
dynamic buildPlayerOptions;
|
||||
|
||||
if (options["customBuildProfile"] != "") {
|
||||
if (options.TryGetValue("activeBuildProfile", out var buildProfilePath)) {
|
||||
if (string.IsNullOrEmpty(buildProfilePath)) {
|
||||
throw new Exception("`-activeBuildProfile` is set but with an empty value; this shouldn't happen");
|
||||
}
|
||||
|
||||
#if UNITY_6000_0_OR_NEWER
|
||||
// Load build profile from Assets folder
|
||||
BuildProfile buildProfile = AssetDatabase.LoadAssetAtPath<BuildProfile>(options["customBuildProfile"]);
|
||||
var buildProfile = AssetDatabase.LoadAssetAtPath<BuildProfile>(buildProfilePath)
|
||||
?? throw new Exception("Build profile file not found at path: " + buildProfilePath);
|
||||
|
||||
// Set it as active
|
||||
BuildProfile.SetActiveBuildProfile(buildProfile);
|
||||
// no need to set active profile, as already set by `-activeBuildProfile` CLI argument
|
||||
// BuildProfile.SetActiveBuildProfile(buildProfile);
|
||||
Debug.Log($"build profile: {buildProfile.name}");
|
||||
|
||||
// Define BuildPlayerWithProfileOptions
|
||||
buildPlayerOptions = new BuildPlayerWithProfileOptions {
|
||||
@@ -71,12 +76,16 @@ namespace UnityBuilderAction
|
||||
locationPathName = options["customBuildPath"],
|
||||
options = buildOptions,
|
||||
};
|
||||
#else
|
||||
#else // UNITY_6000_0_OR_NEWER
|
||||
throw new Exception("Build profiles are not supported by this version of Unity (" + Application.unityVersion +")");
|
||||
#endif
|
||||
#endif // UNITY_6000_0_OR_NEWER
|
||||
|
||||
} else {
|
||||
|
||||
#if BUILD_PROFILE_LOADED
|
||||
throw new Exception("Build profile's define symbol present; shouldn't happen");
|
||||
#endif // BUILD_PROFILE_LOADED
|
||||
|
||||
// Gather values from project
|
||||
var scenes = EditorBuildSettings.scenes.Where(scene => scene.enabled).Select(s => s.path).ToArray();
|
||||
|
||||
|
||||
@@ -115,6 +115,7 @@ namespace UnityBuilderAction.Input
|
||||
}
|
||||
}
|
||||
|
||||
#if UNITY_6000_0_OR_NEWER
|
||||
private static void SetDebugSymbols(string enumValueName)
|
||||
{
|
||||
// UnityEditor.Android.UserBuildSettings and Unity.Android.Types.DebugSymbolLevel are part of the Unity Android module.
|
||||
@@ -144,5 +145,6 @@ namespace UnityBuilderAction.Input
|
||||
}
|
||||
levelProp.SetValue(null, enumValue);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,6 +21,19 @@ namespace UnityBuilderAction.Input
|
||||
EditorApplication.Exit(110);
|
||||
}
|
||||
|
||||
#if UNITY_6000_0_OR_NEWER
|
||||
var buildProfileSupport = true;
|
||||
#else
|
||||
var buildProfileSupport = false;
|
||||
#endif // UNITY_6000_0_OR_NEWER
|
||||
|
||||
string buildProfile;
|
||||
if (buildProfileSupport && validatedOptions.TryGetValue("activeBuildProfile", out buildProfile)) {
|
||||
if (validatedOptions.ContainsKey("buildTarget")) {
|
||||
Console.WriteLine("Extra argument -buildTarget");
|
||||
EditorApplication.Exit(122);
|
||||
}
|
||||
} else {
|
||||
string buildTarget;
|
||||
if (!validatedOptions.TryGetValue("buildTarget", out buildTarget)) {
|
||||
Console.WriteLine("Missing argument -buildTarget");
|
||||
@@ -31,6 +44,7 @@ namespace UnityBuilderAction.Input
|
||||
Console.WriteLine(buildTarget + " is not a defined " + typeof(BuildTarget).Name);
|
||||
EditorApplication.Exit(121);
|
||||
}
|
||||
}
|
||||
|
||||
string customBuildPath;
|
||||
if (!validatedOptions.TryGetValue("customBuildPath", out customBuildPath)) {
|
||||
|
||||
63858
dist/index.js
generated
vendored
63858
dist/index.js
generated
vendored
File diff suppressed because one or more lines are too long
2
dist/index.js.map
generated
vendored
2
dist/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
374
dist/licenses.txt
generated
vendored
374
dist/licenses.txt
generated
vendored
@@ -6865,76 +6865,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
@deno/shim-deno
|
||||
MIT
|
||||
MIT License
|
||||
|
||||
Copyright 2021-2022 the Deno authors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
@deno/shim-deno-test
|
||||
MIT
|
||||
MIT License
|
||||
|
||||
Copyright 2021-2022 the Deno authors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
@fastify/busboy
|
||||
MIT
|
||||
Copyright Brian White. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
|
||||
@kubernetes/client-node
|
||||
Apache-2.0
|
||||
Apache License
|
||||
@@ -13820,6 +13750,210 @@ Apache License
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
@smithy/util-body-length-browser
|
||||
Apache-2.0
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
@smithy/util-body-length-node
|
||||
Apache-2.0
|
||||
Apache License
|
||||
@@ -17122,6 +17256,31 @@ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
|
||||
|
||||
cross-fetch
|
||||
MIT
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2017 Leonardo Quixadá
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
cross-spawn
|
||||
MIT
|
||||
The MIT License (MIT)
|
||||
@@ -17614,6 +17773,56 @@ The above copyright notice and this permission notice shall be included in all c
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
graphql
|
||||
MIT
|
||||
MIT License
|
||||
|
||||
Copyright (c) GraphQL Contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
graphql-request
|
||||
MIT
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2022 Jason Kuhrt
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
har-schema
|
||||
ISC
|
||||
Copyright (c) 2015, Ahmad Nassri <ahmad@ahmadnassri.com>
|
||||
@@ -19741,31 +19950,6 @@ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
undici
|
||||
MIT
|
||||
MIT License
|
||||
|
||||
Copyright (c) Matteo Collina and Undici contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
unity-changeset
|
||||
MIT
|
||||
MIT License
|
||||
|
||||
74
dist/platforms/mac/steps/activate.sh
vendored
74
dist/platforms/mac/steps/activate.sh
vendored
@@ -4,21 +4,69 @@
|
||||
echo "Changing to \"$ACTIVATE_LICENSE_PATH\" directory."
|
||||
pushd "$ACTIVATE_LICENSE_PATH"
|
||||
|
||||
echo "Requesting activation"
|
||||
if [[ -n "$UNITY_SERIAL" && -n "$UNITY_EMAIL" && -n "$UNITY_PASSWORD" ]]; then
|
||||
#
|
||||
# SERIAL LICENSE MODE
|
||||
#
|
||||
# This will activate unity, using the serial activation process.
|
||||
#
|
||||
|
||||
# Activate license
|
||||
/Applications/Unity/Hub/Editor/$UNITY_VERSION/Unity.app/Contents/MacOS/Unity \
|
||||
-logFile - \
|
||||
-batchmode \
|
||||
-nographics \
|
||||
-quit \
|
||||
-serial "$UNITY_SERIAL" \
|
||||
-username "$UNITY_EMAIL" \
|
||||
-password "$UNITY_PASSWORD" \
|
||||
-projectPath "$ACTIVATE_LICENSE_PATH"
|
||||
echo "Requesting activation"
|
||||
|
||||
# Store the exit code from the verify command
|
||||
UNITY_EXIT_CODE=$?
|
||||
# Activate license
|
||||
/Applications/Unity/Hub/Editor/$UNITY_VERSION/Unity.app/Contents/MacOS/Unity \
|
||||
-logFile - \
|
||||
-batchmode \
|
||||
-nographics \
|
||||
-quit \
|
||||
-serial "$UNITY_SERIAL" \
|
||||
-username "$UNITY_EMAIL" \
|
||||
-password "$UNITY_PASSWORD" \
|
||||
-projectPath "$ACTIVATE_LICENSE_PATH"
|
||||
|
||||
# Store the exit code from the verify command
|
||||
UNITY_EXIT_CODE=$?
|
||||
|
||||
elif [[ -n "$UNITY_LICENSING_SERVER" ]]; then
|
||||
#
|
||||
# Custom Unity License Server
|
||||
#
|
||||
echo "Adding licensing server config"
|
||||
mkdir -p "$UNITY_LICENSE_PATH/config/"
|
||||
cp "$ACTION_FOLDER/unity-config/services-config.json" "$UNITY_LICENSE_PATH/config/services-config.json"
|
||||
|
||||
/Applications/Unity/Hub/Editor/$UNITY_VERSION/Unity.app/Contents/Frameworks/UnityLicensingClient.app/Contents/MacOS/Unity.Licensing.Client \
|
||||
--acquire-floating > license.txt
|
||||
|
||||
# Store the exit code from the verify command
|
||||
UNITY_EXIT_CODE=$?
|
||||
|
||||
if [ $UNITY_EXIT_CODE -eq 0 ]; then
|
||||
PARSEDFILE=$(grep -oE '\"[^"]*\"' < license.txt | tr -d '"')
|
||||
export FLOATING_LICENSE
|
||||
FLOATING_LICENSE=$(sed -n 2p <<< "$PARSEDFILE")
|
||||
FLOATING_LICENSE_TIMEOUT=$(sed -n 4p <<< "$PARSEDFILE")
|
||||
|
||||
echo "Acquired floating license: \"$FLOATING_LICENSE\" with timeout $FLOATING_LICENSE_TIMEOUT"
|
||||
fi
|
||||
else
|
||||
#
|
||||
# NO LICENSE ACTIVATION STRATEGY MATCHED
|
||||
#
|
||||
# This will exit since no activation strategies could be matched.
|
||||
#
|
||||
echo "License activation strategy could not be determined."
|
||||
echo ""
|
||||
echo "Visit https://game.ci/docs/github/activation for more"
|
||||
echo "details on how to set up one of the possible activation strategies."
|
||||
|
||||
echo "::error ::No valid license activation strategy could be determined. Make sure to provide UNITY_EMAIL, UNITY_PASSWORD, and either a UNITY_SERIAL \
|
||||
or UNITY_LICENSE. Otherwise please use UNITY_LICENSING_SERVER. See more info at https://game.ci/docs/github/activation"
|
||||
|
||||
# Immediately exit as no UNITY_EXIT_CODE can be derived.
|
||||
exit 1;
|
||||
|
||||
fi
|
||||
|
||||
#
|
||||
# Display information about the result
|
||||
|
||||
5
dist/platforms/mac/steps/build.sh
vendored
5
dist/platforms/mac/steps/build.sh
vendored
@@ -149,14 +149,13 @@ echo ""
|
||||
$( [ "${MANUAL_EXIT}" == "true" ] || echo "-quit" ) \
|
||||
-batchmode \
|
||||
$( [ "${ENABLE_GPU}" == "true" ] || echo "-nographics" ) \
|
||||
-username "$UNITY_EMAIL" \
|
||||
-password "$UNITY_PASSWORD" \
|
||||
-customBuildName "$BUILD_NAME" \
|
||||
-projectPath "$UNITY_PROJECT_PATH" \
|
||||
-buildTarget "$BUILD_TARGET" \
|
||||
$( [ -z "$BUILD_PROFILE" ] && echo "-buildTarget $BUILD_TARGET") \
|
||||
-customBuildTarget "$BUILD_TARGET" \
|
||||
-customBuildPath "$CUSTOM_BUILD_PATH" \
|
||||
-customBuildProfile "$BUILD_PROFILE" \
|
||||
${BUILD_PROFILE:+-activeBuildProfile} ${BUILD_PROFILE:+"$BUILD_PROFILE"} \
|
||||
-executeMethod "$BUILD_METHOD" \
|
||||
-buildVersion "$VERSION" \
|
||||
-androidVersionCode "$ANDROID_VERSION_CODE" \
|
||||
|
||||
32
dist/platforms/mac/steps/return_license.sh
vendored
32
dist/platforms/mac/steps/return_license.sh
vendored
@@ -4,15 +4,29 @@
|
||||
echo "Changing to \"$ACTIVATE_LICENSE_PATH\" directory."
|
||||
pushd "$ACTIVATE_LICENSE_PATH"
|
||||
|
||||
/Applications/Unity/Hub/Editor/$UNITY_VERSION/Unity.app/Contents/MacOS/Unity \
|
||||
-logFile - \
|
||||
-batchmode \
|
||||
-nographics \
|
||||
-quit \
|
||||
-username "$UNITY_EMAIL" \
|
||||
-password "$UNITY_PASSWORD" \
|
||||
-returnlicense \
|
||||
-projectPath "$ACTIVATE_LICENSE_PATH"
|
||||
if [[ -n "$UNITY_LICENSING_SERVER" ]]; then
|
||||
#
|
||||
# Return any floating license used.
|
||||
#
|
||||
echo "Returning floating license: \"$FLOATING_LICENSE\""
|
||||
/Applications/Unity/Hub/Editor/$UNITY_VERSION/Unity.app/Contents/Frameworks/UnityLicensingClient.app/Contents/MacOS/Unity.Licensing.Client \
|
||||
--return-floating "$FLOATING_LICENSE"
|
||||
elif [[ -n "$UNITY_SERIAL" ]]; then
|
||||
#
|
||||
# SERIAL LICENSE MODE
|
||||
#
|
||||
# This will return the license that is currently in use.
|
||||
#
|
||||
/Applications/Unity/Hub/Editor/$UNITY_VERSION/Unity.app/Contents/MacOS/Unity \
|
||||
-logFile - \
|
||||
-batchmode \
|
||||
-nographics \
|
||||
-quit \
|
||||
-username "$UNITY_EMAIL" \
|
||||
-password "$UNITY_PASSWORD" \
|
||||
-returnlicense \
|
||||
-projectPath "$ACTIVATE_LICENSE_PATH"
|
||||
fi
|
||||
|
||||
# Return to previous working directory
|
||||
popd
|
||||
|
||||
14
dist/platforms/ubuntu/steps/activate.sh
vendored
14
dist/platforms/ubuntu/steps/activate.sh
vendored
@@ -68,14 +68,18 @@ elif [[ -n "$UNITY_LICENSING_SERVER" ]]; then
|
||||
echo "Adding licensing server config"
|
||||
|
||||
/opt/unity/Editor/Data/Resources/Licensing/Client/Unity.Licensing.Client --acquire-floating > license.txt #is this accessible in a env variable?
|
||||
PARSEDFILE=$(grep -oP '\".*?\"' < license.txt | tr -d '"')
|
||||
export FLOATING_LICENSE
|
||||
FLOATING_LICENSE=$(sed -n 2p <<< "$PARSEDFILE")
|
||||
FLOATING_LICENSE_TIMEOUT=$(sed -n 4p <<< "$PARSEDFILE")
|
||||
|
||||
echo "Acquired floating license: \"$FLOATING_LICENSE\" with timeout $FLOATING_LICENSE_TIMEOUT"
|
||||
# Store the exit code from the verify command
|
||||
UNITY_EXIT_CODE=$?
|
||||
|
||||
if [ $UNITY_EXIT_CODE -eq 0 ]; then
|
||||
PARSEDFILE=$(grep -oP '\".*?\"' < license.txt | tr -d '"')
|
||||
export FLOATING_LICENSE
|
||||
FLOATING_LICENSE=$(sed -n 2p <<< "$PARSEDFILE")
|
||||
FLOATING_LICENSE_TIMEOUT=$(sed -n 4p <<< "$PARSEDFILE")
|
||||
|
||||
echo "Acquired floating license: \"$FLOATING_LICENSE\" with timeout $FLOATING_LICENSE_TIMEOUT"
|
||||
fi
|
||||
else
|
||||
#
|
||||
# NO LICENSE ACTIVATION STRATEGY MATCHED
|
||||
|
||||
3
dist/platforms/ubuntu/steps/build.sh
vendored
3
dist/platforms/ubuntu/steps/build.sh
vendored
@@ -125,10 +125,11 @@ unity-editor \
|
||||
$( [ "${MANUAL_EXIT}" == "true" ] || echo "-quit" ) \
|
||||
-customBuildName "$BUILD_NAME" \
|
||||
-projectPath "$UNITY_PROJECT_PATH" \
|
||||
-buildTarget "$BUILD_TARGET" \
|
||||
$( [ -z "$BUILD_PROFILE" ] && echo "-buildTarget $BUILD_TARGET" ) \
|
||||
-customBuildTarget "$BUILD_TARGET" \
|
||||
-customBuildPath "$CUSTOM_BUILD_PATH" \
|
||||
-customBuildProfile "$BUILD_PROFILE" \
|
||||
${BUILD_PROFILE:+-activeBuildProfile} ${BUILD_PROFILE:+"$BUILD_PROFILE"} \
|
||||
-executeMethod "$BUILD_METHOD" \
|
||||
-buildVersion "$VERSION" \
|
||||
-androidVersionCode "$ANDROID_VERSION_CODE" \
|
||||
|
||||
8
dist/platforms/windows/build.ps1
vendored
8
dist/platforms/windows/build.ps1
vendored
@@ -166,7 +166,6 @@ $unityArgs = @(
|
||||
"-customBuildName", "`"$Env:BUILD_NAME`"",
|
||||
"-projectPath", "`"$Env:UNITY_PROJECT_PATH`"",
|
||||
"-executeMethod", "`"$Env:BUILD_METHOD`"",
|
||||
"-buildTarget", "`"$Env:BUILD_TARGET`"",
|
||||
"-customBuildTarget", "`"$Env:BUILD_TARGET`"",
|
||||
"-customBuildPath", "`"$Env:CUSTOM_BUILD_PATH`"",
|
||||
"-customBuildProfile", "`"$Env:BUILD_PROFILE`"",
|
||||
@@ -181,6 +180,13 @@ $unityArgs = @(
|
||||
"-logfile", "-"
|
||||
) + $customParametersArray
|
||||
|
||||
if (-not $Env:BUILD_PROFILE) {
|
||||
$unityArgs += @("-buildTarget", "`"$Env:BUILD_TARGET`"")
|
||||
}
|
||||
if ($Env:BUILD_PROFILE) {
|
||||
$unityArgs += @("-activeBuildProfile", "`"$Env:BUILD_PROFILE`"")
|
||||
}
|
||||
|
||||
# Remove null items as that will fail the Start-Process call
|
||||
$unityArgs = $unityArgs | Where-Object { $_ -ne $null }
|
||||
|
||||
|
||||
3
dist/platforms/windows/entrypoint.ps1
vendored
3
dist/platforms/windows/entrypoint.ps1
vendored
@@ -18,6 +18,9 @@ regsvr32 C:\ProgramData\Microsoft\VisualStudio\Setup\x64\Microsoft.VisualStudio.
|
||||
# Kill the regsvr process
|
||||
Get-Process -Name regsvr32 | ForEach-Object { Stop-Process -Id $_.Id -Force }
|
||||
|
||||
# Install Visual C++ 2013 Redistributables
|
||||
. "c:\steps\install_vcredist13.ps1"
|
||||
|
||||
# Setup Git Credentials
|
||||
. "c:\steps\set_gitcredential.ps1"
|
||||
|
||||
|
||||
11
dist/platforms/windows/install_vcredist13.ps1
vendored
Normal file
11
dist/platforms/windows/install_vcredist13.ps1
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
# For some reason, Unity is failing in github actions windows runners
|
||||
# due to missing Visual C++ 2013 redistributables.
|
||||
# This script downloads and installs the required redistributables.
|
||||
Write-Output ""
|
||||
Write-Output "#########################################################"
|
||||
Write-Output "# Installing Visual C++ Redistributables (2013) #"
|
||||
Write-Output "#########################################################"
|
||||
Write-Output ""
|
||||
|
||||
|
||||
choco install vcredist2013 -y --no-progress
|
||||
@@ -1,11 +0,0 @@
|
||||
const base = require('./jest.config.js');
|
||||
|
||||
module.exports = {
|
||||
...base,
|
||||
forceExit: true,
|
||||
detectOpenHandles: true,
|
||||
testTimeout: 120000,
|
||||
maxWorkers: 1,
|
||||
};
|
||||
|
||||
|
||||
@@ -25,6 +25,8 @@ module.exports = {
|
||||
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||
modulePathIgnorePatterns: ['<rootDir>/lib/', '<rootDir>/dist/'],
|
||||
|
||||
// Use jest.setup.js to polyfill fetch for all tests
|
||||
setupFiles: ['<rootDir>/jest.setup.js'],
|
||||
// Files that will be run before Jest is loaded to set globals like fetch
|
||||
setupFiles: ['<rootDir>/src/jest.globals.ts'],
|
||||
// A list of paths to modules that run some code to configure or set up the testing framework after the environment is ready
|
||||
setupFilesAfterEnv: ['<rootDir>/src/jest.setup.ts'],
|
||||
};
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
const fetch = require('node-fetch');
|
||||
global.fetch = fetch;
|
||||
@@ -19,7 +19,6 @@
|
||||
"cli-k8s": "cross-env providerStrategy=k8s yarn run test-cli",
|
||||
"test-cli": "cross-env cloudRunnerTests=true yarn ts-node src/index.ts -m cli --projectPath test-project",
|
||||
"test": "jest",
|
||||
"test:ci": "jest --config=jest.ci.config.js --runInBand",
|
||||
"test-i": "cross-env cloudRunnerTests=true yarn test -i -t \"cloud runner\"",
|
||||
"test-i-*": "yarn run test-i-aws && yarn run test-i-k8s",
|
||||
"test-i-aws": "cross-env cloudRunnerTests=true providerStrategy=aws yarn test -i -t \"cloud runner\"",
|
||||
@@ -51,7 +50,7 @@
|
||||
"reflect-metadata": "^0.1.13",
|
||||
"semver": "^7.5.2",
|
||||
"ts-md5": "^1.3.1",
|
||||
"unity-changeset": "^2.0.0",
|
||||
"unity-changeset": "^3.1.0",
|
||||
"uuid": "^9.0.0",
|
||||
"yaml": "^2.2.2"
|
||||
},
|
||||
@@ -75,7 +74,6 @@
|
||||
"jest-fail-on-console": "^3.0.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
"lefthook": "^1.6.1",
|
||||
"node-fetch": "2",
|
||||
"prettier": "^2.5.1",
|
||||
"ts-jest": "^27.1.3",
|
||||
"ts-node": "10.8.1",
|
||||
|
||||
@@ -56,14 +56,6 @@ class BuildParameters {
|
||||
public providerStrategy!: string;
|
||||
public gitPrivateToken!: string;
|
||||
public awsStackName!: string;
|
||||
public awsEndpoint?: string;
|
||||
public awsCloudFormationEndpoint?: string;
|
||||
public awsEcsEndpoint?: string;
|
||||
public awsKinesisEndpoint?: string;
|
||||
public awsCloudWatchLogsEndpoint?: string;
|
||||
public awsS3Endpoint?: string;
|
||||
public storageProvider!: string;
|
||||
public rcloneRemote!: string;
|
||||
public kubeConfig!: string;
|
||||
public containerMemory!: string;
|
||||
public containerCpu!: string;
|
||||
@@ -207,14 +199,6 @@ class BuildParameters {
|
||||
githubRepo: (Input.githubRepo ?? (await GitRepoReader.GetRemote())) || 'game-ci/unity-builder',
|
||||
isCliMode: Cli.isCliMode,
|
||||
awsStackName: CloudRunnerOptions.awsStackName,
|
||||
awsEndpoint: CloudRunnerOptions.awsEndpoint,
|
||||
awsCloudFormationEndpoint: CloudRunnerOptions.awsCloudFormationEndpoint,
|
||||
awsEcsEndpoint: CloudRunnerOptions.awsEcsEndpoint,
|
||||
awsKinesisEndpoint: CloudRunnerOptions.awsKinesisEndpoint,
|
||||
awsCloudWatchLogsEndpoint: CloudRunnerOptions.awsCloudWatchLogsEndpoint,
|
||||
awsS3Endpoint: CloudRunnerOptions.awsS3Endpoint,
|
||||
storageProvider: CloudRunnerOptions.storageProvider,
|
||||
rcloneRemote: CloudRunnerOptions.rcloneRemote,
|
||||
gitSha: Input.gitSha,
|
||||
logId: customAlphabet(CloudRunnerConstants.alphabet, 9)(),
|
||||
buildGuid: CloudRunnerBuildGuid.generateGuid(Input.runNumber, Input.targetPlatform),
|
||||
|
||||
@@ -13,12 +13,10 @@ import CloudRunnerEnvironmentVariable from './options/cloud-runner-environment-v
|
||||
import TestCloudRunner from './providers/test';
|
||||
import LocalCloudRunner from './providers/local';
|
||||
import LocalDockerCloudRunner from './providers/docker';
|
||||
import loadProvider from './providers/provider-loader';
|
||||
import GitHub from '../github';
|
||||
import SharedWorkspaceLocking from './services/core/shared-workspace-locking';
|
||||
import { FollowLogStreamService } from './services/core/follow-log-stream-service';
|
||||
import CloudRunnerResult from './services/core/cloud-runner-result';
|
||||
import CloudRunnerOptions from './options/cloud-runner-options';
|
||||
|
||||
class CloudRunner {
|
||||
public static Provider: ProviderInterface;
|
||||
@@ -40,7 +38,7 @@ class CloudRunner {
|
||||
if (CloudRunner.buildParameters.githubCheckId === ``) {
|
||||
CloudRunner.buildParameters.githubCheckId = await GitHub.createGitHubCheck(CloudRunner.buildParameters.buildGuid);
|
||||
}
|
||||
await CloudRunner.setupSelectedBuildPlatform();
|
||||
CloudRunner.setupSelectedBuildPlatform();
|
||||
CloudRunner.defaultSecrets = TaskParameterSerializer.readDefaultSecrets();
|
||||
CloudRunner.cloudRunnerEnvironmentVariables =
|
||||
TaskParameterSerializer.createCloudRunnerEnvironmentVariables(buildParameters);
|
||||
@@ -64,34 +62,9 @@ class CloudRunner {
|
||||
FollowLogStreamService.Reset();
|
||||
}
|
||||
|
||||
private static async setupSelectedBuildPlatform() {
|
||||
private static setupSelectedBuildPlatform() {
|
||||
CloudRunnerLogger.log(`Cloud Runner platform selected ${CloudRunner.buildParameters.providerStrategy}`);
|
||||
|
||||
// Detect LocalStack endpoints and reroute AWS provider to local-docker for CI tests that only need S3
|
||||
const endpointsToCheck = [
|
||||
process.env.AWS_ENDPOINT,
|
||||
process.env.AWS_S3_ENDPOINT,
|
||||
process.env.AWS_CLOUD_FORMATION_ENDPOINT,
|
||||
process.env.AWS_ECS_ENDPOINT,
|
||||
process.env.AWS_KINESIS_ENDPOINT,
|
||||
process.env.AWS_CLOUD_WATCH_LOGS_ENDPOINT,
|
||||
CloudRunnerOptions.awsEndpoint,
|
||||
CloudRunnerOptions.awsS3Endpoint,
|
||||
CloudRunnerOptions.awsCloudFormationEndpoint,
|
||||
CloudRunnerOptions.awsEcsEndpoint,
|
||||
CloudRunnerOptions.awsKinesisEndpoint,
|
||||
CloudRunnerOptions.awsCloudWatchLogsEndpoint,
|
||||
]
|
||||
.filter((x) => typeof x === 'string')
|
||||
.join(' ');
|
||||
const isLocalStack = /localstack|localhost|127\.0\.0\.1/i.test(endpointsToCheck);
|
||||
let provider = CloudRunner.buildParameters.providerStrategy;
|
||||
if (provider === 'aws' && isLocalStack) {
|
||||
CloudRunnerLogger.log('LocalStack endpoints detected; routing provider to local-docker for this run');
|
||||
provider = 'local-docker';
|
||||
}
|
||||
|
||||
switch (provider) {
|
||||
switch (CloudRunner.buildParameters.providerStrategy) {
|
||||
case 'k8s':
|
||||
CloudRunner.Provider = new Kubernetes(CloudRunner.buildParameters);
|
||||
break;
|
||||
@@ -107,19 +80,6 @@ class CloudRunner {
|
||||
case 'local-system':
|
||||
CloudRunner.Provider = new LocalCloudRunner();
|
||||
break;
|
||||
case 'local':
|
||||
CloudRunner.Provider = new LocalCloudRunner();
|
||||
break;
|
||||
default:
|
||||
// Try to load provider using the dynamic loader for unknown providers
|
||||
try {
|
||||
CloudRunner.Provider = await loadProvider(provider, CloudRunner.buildParameters);
|
||||
} catch (error: any) {
|
||||
CloudRunnerLogger.log(`Failed to load provider '${provider}' using dynamic loader: ${error.message}`);
|
||||
CloudRunnerLogger.log('Falling back to local provider...');
|
||||
CloudRunner.Provider = new LocalCloudRunner();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -195,42 +195,6 @@ class CloudRunnerOptions {
|
||||
return CloudRunnerOptions.getInput('awsStackName') || 'game-ci';
|
||||
}
|
||||
|
||||
static get awsEndpoint(): string | undefined {
|
||||
return CloudRunnerOptions.getInput('awsEndpoint');
|
||||
}
|
||||
|
||||
static get awsCloudFormationEndpoint(): string | undefined {
|
||||
return CloudRunnerOptions.getInput('awsCloudFormationEndpoint') || CloudRunnerOptions.awsEndpoint;
|
||||
}
|
||||
|
||||
static get awsEcsEndpoint(): string | undefined {
|
||||
return CloudRunnerOptions.getInput('awsEcsEndpoint') || CloudRunnerOptions.awsEndpoint;
|
||||
}
|
||||
|
||||
static get awsKinesisEndpoint(): string | undefined {
|
||||
return CloudRunnerOptions.getInput('awsKinesisEndpoint') || CloudRunnerOptions.awsEndpoint;
|
||||
}
|
||||
|
||||
static get awsCloudWatchLogsEndpoint(): string | undefined {
|
||||
return CloudRunnerOptions.getInput('awsCloudWatchLogsEndpoint') || CloudRunnerOptions.awsEndpoint;
|
||||
}
|
||||
|
||||
static get awsS3Endpoint(): string | undefined {
|
||||
return CloudRunnerOptions.getInput('awsS3Endpoint') || CloudRunnerOptions.awsEndpoint;
|
||||
}
|
||||
|
||||
// ### ### ###
|
||||
// Storage
|
||||
// ### ### ###
|
||||
|
||||
static get storageProvider(): string {
|
||||
return CloudRunnerOptions.getInput('storageProvider') || 's3';
|
||||
}
|
||||
|
||||
static get rcloneRemote(): string {
|
||||
return CloudRunnerOptions.getInput('rcloneRemote') || '';
|
||||
}
|
||||
|
||||
// ### ### ###
|
||||
// K8s
|
||||
// ### ### ###
|
||||
|
||||
@@ -1,250 +0,0 @@
|
||||
# Provider Loader Dynamic Imports
|
||||
|
||||
The provider loader now supports dynamic loading of providers from multiple sources including local file paths, GitHub
|
||||
repositories, and NPM packages.
|
||||
|
||||
## What is a Provider?
|
||||
|
||||
A provider is a pluggable backend that Cloud Runner uses to run builds and workflows. Examples include AWS, Kubernetes,
|
||||
or local execution. Each provider implements the `ProviderInterface`, which defines the common lifecycle methods (setup,
|
||||
run, cleanup, garbage collection, etc.).
|
||||
|
||||
This abstraction makes Cloud Runner flexible: you can switch execution environments or add your own provider (via npm
|
||||
package, GitHub repo, or local path) without changing the rest of your pipeline.
|
||||
|
||||
## Features
|
||||
|
||||
- **Local File Paths**: Load providers from relative or absolute file paths
|
||||
- **GitHub URLs**: Clone and load providers from GitHub repositories with automatic updates
|
||||
- **NPM Packages**: Load providers from installed NPM packages
|
||||
- **Automatic Updates**: GitHub repositories are automatically updated when changes are available
|
||||
- **Caching**: Local caching of cloned repositories for improved performance
|
||||
- **Fallback Support**: Graceful fallback to local provider if loading fails
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Loading Built-in Providers
|
||||
|
||||
```typescript
|
||||
import { ProviderLoader } from './provider-loader';
|
||||
|
||||
// Load built-in providers
|
||||
const awsProvider = await ProviderLoader.loadProvider('aws', buildParameters);
|
||||
const k8sProvider = await ProviderLoader.loadProvider('k8s', buildParameters);
|
||||
```
|
||||
|
||||
### Loading Local Providers
|
||||
|
||||
```typescript
|
||||
// Load from relative path
|
||||
const localProvider = await ProviderLoader.loadProvider('./my-local-provider', buildParameters);
|
||||
|
||||
// Load from absolute path
|
||||
const absoluteProvider = await ProviderLoader.loadProvider('/path/to/provider', buildParameters);
|
||||
```
|
||||
|
||||
### Loading GitHub Providers
|
||||
|
||||
```typescript
|
||||
// Load from GitHub URL
|
||||
const githubProvider = await ProviderLoader.loadProvider('https://github.com/user/my-provider', buildParameters);
|
||||
|
||||
// Load from specific branch
|
||||
const branchProvider = await ProviderLoader.loadProvider(
|
||||
'https://github.com/user/my-provider/tree/develop',
|
||||
buildParameters,
|
||||
);
|
||||
|
||||
// Load from specific path in repository
|
||||
const pathProvider = await ProviderLoader.loadProvider(
|
||||
'https://github.com/user/my-provider/tree/main/src/providers',
|
||||
buildParameters,
|
||||
);
|
||||
|
||||
// Shorthand notation
|
||||
const shorthandProvider = await ProviderLoader.loadProvider('user/repo', buildParameters);
|
||||
const branchShorthand = await ProviderLoader.loadProvider('user/repo@develop', buildParameters);
|
||||
```
|
||||
|
||||
### Loading NPM Packages
|
||||
|
||||
```typescript
|
||||
// Load from NPM package
|
||||
const npmProvider = await ProviderLoader.loadProvider('my-provider-package', buildParameters);
|
||||
|
||||
// Load from scoped NPM package
|
||||
const scopedProvider = await ProviderLoader.loadProvider('@scope/my-provider', buildParameters);
|
||||
```
|
||||
|
||||
## Provider Interface
|
||||
|
||||
All providers must implement the `ProviderInterface`:
|
||||
|
||||
```typescript
|
||||
interface ProviderInterface {
|
||||
cleanupWorkflow(): Promise<void>;
|
||||
setupWorkflow(
|
||||
buildGuid: string,
|
||||
buildParameters: BuildParameters,
|
||||
branchName: string,
|
||||
defaultSecretsArray: any[],
|
||||
): Promise<void>;
|
||||
runTaskInWorkflow(
|
||||
buildGuid: string,
|
||||
task: string,
|
||||
workingDirectory: string,
|
||||
buildVolumeFolder: string,
|
||||
environmentVariables: any[],
|
||||
secrets: any[],
|
||||
): Promise<string>;
|
||||
garbageCollect(): Promise<void>;
|
||||
listResources(): Promise<ProviderResource[]>;
|
||||
listWorkflow(): Promise<ProviderWorkflow[]>;
|
||||
watchWorkflow(): Promise<void>;
|
||||
}
|
||||
```
|
||||
|
||||
## Example Provider Implementation
|
||||
|
||||
```typescript
|
||||
// my-provider.ts
|
||||
import { ProviderInterface } from './provider-interface';
|
||||
import BuildParameters from './build-parameters';
|
||||
|
||||
export default class MyProvider implements ProviderInterface {
|
||||
constructor(private buildParameters: BuildParameters) {}
|
||||
|
||||
async cleanupWorkflow(): Promise<void> {
|
||||
// Cleanup logic
|
||||
}
|
||||
|
||||
async setupWorkflow(
|
||||
buildGuid: string,
|
||||
buildParameters: BuildParameters,
|
||||
branchName: string,
|
||||
defaultSecretsArray: any[],
|
||||
): Promise<void> {
|
||||
// Setup logic
|
||||
}
|
||||
|
||||
async runTaskInWorkflow(
|
||||
buildGuid: string,
|
||||
task: string,
|
||||
workingDirectory: string,
|
||||
buildVolumeFolder: string,
|
||||
environmentVariables: any[],
|
||||
secrets: any[],
|
||||
): Promise<string> {
|
||||
// Task execution logic
|
||||
return 'Task completed';
|
||||
}
|
||||
|
||||
async garbageCollect(): Promise<void> {
|
||||
// Garbage collection logic
|
||||
}
|
||||
|
||||
async listResources(): Promise<ProviderResource[]> {
|
||||
return [];
|
||||
}
|
||||
|
||||
async listWorkflow(): Promise<ProviderWorkflow[]> {
|
||||
return [];
|
||||
}
|
||||
|
||||
async watchWorkflow(): Promise<void> {
|
||||
// Watch logic
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Utility Methods
|
||||
|
||||
### Analyze Provider Source
|
||||
|
||||
```typescript
|
||||
// Analyze a provider source without loading it
|
||||
const sourceInfo = ProviderLoader.analyzeProviderSource('https://github.com/user/repo');
|
||||
console.log(sourceInfo.type); // 'github'
|
||||
console.log(sourceInfo.owner); // 'user'
|
||||
console.log(sourceInfo.repo); // 'repo'
|
||||
```
|
||||
|
||||
### Clean Up Cache
|
||||
|
||||
```typescript
|
||||
// Clean up old cached repositories (older than 30 days)
|
||||
await ProviderLoader.cleanupCache();
|
||||
|
||||
// Clean up repositories older than 7 days
|
||||
await ProviderLoader.cleanupCache(7);
|
||||
```
|
||||
|
||||
### Get Available Providers
|
||||
|
||||
```typescript
|
||||
// Get list of built-in providers
|
||||
const providers = ProviderLoader.getAvailableProviders();
|
||||
console.log(providers); // ['aws', 'k8s', 'test', 'local-docker', 'local-system', 'local']
|
||||
```
|
||||
|
||||
## Supported URL Formats
|
||||
|
||||
### GitHub URLs
|
||||
|
||||
- `https://github.com/user/repo`
|
||||
- `https://github.com/user/repo.git`
|
||||
- `https://github.com/user/repo/tree/branch`
|
||||
- `https://github.com/user/repo/tree/branch/path/to/provider`
|
||||
- `git@github.com:user/repo.git`
|
||||
|
||||
### Shorthand GitHub References
|
||||
|
||||
- `user/repo`
|
||||
- `user/repo@branch`
|
||||
- `user/repo@branch/path/to/provider`
|
||||
|
||||
### Local Paths
|
||||
|
||||
- `./relative/path`
|
||||
- `../relative/path`
|
||||
- `/absolute/path`
|
||||
- `C:\\path\\to\\provider` (Windows)
|
||||
|
||||
### NPM Packages
|
||||
|
||||
- `package-name`
|
||||
- `@scope/package-name`
|
||||
|
||||
## Caching
|
||||
|
||||
GitHub repositories are automatically cached in the `.provider-cache` directory. The cache key is generated based on the
|
||||
repository owner, name, and branch. This ensures that:
|
||||
|
||||
1. Repositories are only cloned once
|
||||
2. Updates are checked and applied automatically
|
||||
3. Performance is improved for repeated loads
|
||||
4. Storage is managed efficiently
|
||||
|
||||
## Error Handling
|
||||
|
||||
The provider loader includes comprehensive error handling:
|
||||
|
||||
- **Missing packages**: Clear error messages when providers cannot be found
|
||||
- **Interface validation**: Ensures providers implement the required interface
|
||||
- **Git operations**: Handles network issues and repository access problems
|
||||
- **Fallback mechanism**: Falls back to local provider if loading fails
|
||||
|
||||
## Configuration
|
||||
|
||||
The provider loader can be configured through environment variables:
|
||||
|
||||
- `PROVIDER_CACHE_DIR`: Custom cache directory (default: `.provider-cache`)
|
||||
- `GIT_TIMEOUT`: Git operation timeout in milliseconds (default: 30000)
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use specific branches or versions**: Always specify the branch or specific tag when loading from GitHub
|
||||
2. **Implement proper error handling**: Wrap provider loading in try-catch blocks
|
||||
3. **Clean up regularly**: Use the cleanup utility to manage cache size
|
||||
4. **Test locally first**: Test providers locally before deploying
|
||||
5. **Use semantic versioning**: Tag your provider repositories for stable versions
|
||||
@@ -1,71 +0,0 @@
|
||||
import { CloudFormation } from '@aws-sdk/client-cloudformation';
|
||||
import { ECS } from '@aws-sdk/client-ecs';
|
||||
import { Kinesis } from '@aws-sdk/client-kinesis';
|
||||
import { CloudWatchLogs } from '@aws-sdk/client-cloudwatch-logs';
|
||||
import { S3 } from '@aws-sdk/client-s3';
|
||||
import { Input } from '../../..';
|
||||
import CloudRunnerOptions from '../../options/cloud-runner-options';
|
||||
|
||||
export class AwsClientFactory {
|
||||
private static cloudFormation: CloudFormation;
|
||||
private static ecs: ECS;
|
||||
private static kinesis: Kinesis;
|
||||
private static cloudWatchLogs: CloudWatchLogs;
|
||||
private static s3: S3;
|
||||
|
||||
static getCloudFormation(): CloudFormation {
|
||||
if (!this.cloudFormation) {
|
||||
this.cloudFormation = new CloudFormation({
|
||||
region: Input.region,
|
||||
endpoint: CloudRunnerOptions.awsCloudFormationEndpoint,
|
||||
});
|
||||
}
|
||||
|
||||
return this.cloudFormation;
|
||||
}
|
||||
|
||||
static getECS(): ECS {
|
||||
if (!this.ecs) {
|
||||
this.ecs = new ECS({
|
||||
region: Input.region,
|
||||
endpoint: CloudRunnerOptions.awsEcsEndpoint,
|
||||
});
|
||||
}
|
||||
|
||||
return this.ecs;
|
||||
}
|
||||
|
||||
static getKinesis(): Kinesis {
|
||||
if (!this.kinesis) {
|
||||
this.kinesis = new Kinesis({
|
||||
region: Input.region,
|
||||
endpoint: CloudRunnerOptions.awsKinesisEndpoint,
|
||||
});
|
||||
}
|
||||
|
||||
return this.kinesis;
|
||||
}
|
||||
|
||||
static getCloudWatchLogs(): CloudWatchLogs {
|
||||
if (!this.cloudWatchLogs) {
|
||||
this.cloudWatchLogs = new CloudWatchLogs({
|
||||
region: Input.region,
|
||||
endpoint: CloudRunnerOptions.awsCloudWatchLogsEndpoint,
|
||||
});
|
||||
}
|
||||
|
||||
return this.cloudWatchLogs;
|
||||
}
|
||||
|
||||
static getS3(): S3 {
|
||||
if (!this.s3) {
|
||||
this.s3 = new S3({
|
||||
region: Input.region,
|
||||
endpoint: CloudRunnerOptions.awsS3Endpoint,
|
||||
forcePathStyle: true,
|
||||
});
|
||||
}
|
||||
|
||||
return this.s3;
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,19 @@
|
||||
import { DescribeTasksCommand, RunTaskCommand, waitUntilTasksRunning } from '@aws-sdk/client-ecs';
|
||||
import { DescribeStreamCommand, GetRecordsCommand, GetShardIteratorCommand } from '@aws-sdk/client-kinesis';
|
||||
import {
|
||||
DescribeTasksCommand,
|
||||
ECS,
|
||||
RunTaskCommand,
|
||||
RunTaskCommandInput,
|
||||
Task,
|
||||
waitUntilTasksRunning,
|
||||
} from '@aws-sdk/client-ecs';
|
||||
import {
|
||||
DescribeStreamCommand,
|
||||
DescribeStreamCommandOutput,
|
||||
GetRecordsCommand,
|
||||
GetRecordsCommandOutput,
|
||||
GetShardIteratorCommand,
|
||||
Kinesis,
|
||||
} from '@aws-sdk/client-kinesis';
|
||||
import CloudRunnerEnvironmentVariable from '../../options/cloud-runner-environment-variable';
|
||||
import * as core from '@actions/core';
|
||||
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
|
||||
@@ -11,9 +25,10 @@ import { CommandHookService } from '../../services/hooks/command-hook-service';
|
||||
import { FollowLogStreamService } from '../../services/core/follow-log-stream-service';
|
||||
import CloudRunnerOptions from '../../options/cloud-runner-options';
|
||||
import GitHub from '../../../github';
|
||||
import { AwsClientFactory } from './aws-client-factory';
|
||||
|
||||
class AWSTaskRunner {
|
||||
public static ECS: ECS;
|
||||
public static Kinesis: Kinesis;
|
||||
private static readonly encodedUnderscore = `$252F`;
|
||||
static async runTask(
|
||||
taskDef: CloudRunnerAWSTaskDef,
|
||||
@@ -60,7 +75,7 @@ class AWSTaskRunner {
|
||||
throw new Error(`Container Overrides length must be at most 8192`);
|
||||
}
|
||||
|
||||
const task = await AwsClientFactory.getECS().send(new RunTaskCommand(runParameters as any));
|
||||
const task = await AWSTaskRunner.ECS.send(new RunTaskCommand(runParameters as RunTaskCommandInput));
|
||||
const taskArn = task.tasks?.[0].taskArn || '';
|
||||
CloudRunnerLogger.log('Cloud runner job is starting');
|
||||
await AWSTaskRunner.waitUntilTaskRunning(taskArn, cluster);
|
||||
@@ -83,13 +98,9 @@ class AWSTaskRunner {
|
||||
let containerState;
|
||||
let taskData;
|
||||
while (exitCode === undefined) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 10000));
|
||||
await new Promise((resolve) => resolve(10000));
|
||||
taskData = await AWSTaskRunner.describeTasks(cluster, taskArn);
|
||||
const containers = taskData?.containers as any[] | undefined;
|
||||
if (!containers || containers.length === 0) {
|
||||
continue;
|
||||
}
|
||||
containerState = containers[0];
|
||||
containerState = taskData.containers?.[0];
|
||||
exitCode = containerState?.exitCode;
|
||||
}
|
||||
CloudRunnerLogger.log(`Container State: ${JSON.stringify(containerState, undefined, 4)}`);
|
||||
@@ -114,18 +125,19 @@ class AWSTaskRunner {
|
||||
try {
|
||||
await waitUntilTasksRunning(
|
||||
{
|
||||
client: AwsClientFactory.getECS(),
|
||||
maxWaitTime: 300,
|
||||
minDelay: 5,
|
||||
maxDelay: 30,
|
||||
client: AWSTaskRunner.ECS,
|
||||
maxWaitTime: 120,
|
||||
},
|
||||
{ tasks: [taskArn], cluster },
|
||||
);
|
||||
} catch (error_) {
|
||||
const error = error_ as Error;
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||
const taskAfterError = await AWSTaskRunner.describeTasks(cluster, taskArn);
|
||||
CloudRunnerLogger.log(`Cloud runner job has ended ${taskAfterError?.containers?.[0]?.lastStatus}`);
|
||||
CloudRunnerLogger.log(
|
||||
`Cloud runner job has ended ${
|
||||
(await AWSTaskRunner.describeTasks(cluster, taskArn)).containers?.[0].lastStatus
|
||||
}`,
|
||||
);
|
||||
|
||||
core.setFailed(error);
|
||||
core.error(error);
|
||||
@@ -133,31 +145,11 @@ class AWSTaskRunner {
|
||||
}
|
||||
|
||||
static async describeTasks(clusterName: string, taskArn: string) {
|
||||
const maxAttempts = 10;
|
||||
let delayMs = 1000;
|
||||
const maxDelayMs = 60000;
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
try {
|
||||
const tasks = await AwsClientFactory.getECS().send(
|
||||
new DescribeTasksCommand({ cluster: clusterName, tasks: [taskArn] }),
|
||||
);
|
||||
if (tasks.tasks?.[0]) {
|
||||
return tasks.tasks?.[0];
|
||||
}
|
||||
throw new Error('No task found');
|
||||
} catch (error: any) {
|
||||
const isThrottle = error?.name === 'ThrottlingException' || /rate exceeded/i.test(String(error?.message));
|
||||
if (!isThrottle || attempt === maxAttempts) {
|
||||
throw error;
|
||||
}
|
||||
const jitterMs = Math.floor(Math.random() * Math.min(1000, delayMs));
|
||||
const sleepMs = delayMs + jitterMs;
|
||||
CloudRunnerLogger.log(
|
||||
`AWS throttled DescribeTasks (attempt ${attempt}/${maxAttempts}), backing off ${sleepMs}ms (${delayMs} + jitter ${jitterMs})`,
|
||||
);
|
||||
await new Promise((r) => setTimeout(r, sleepMs));
|
||||
delayMs = Math.min(delayMs * 2, maxDelayMs);
|
||||
}
|
||||
const tasks = await AWSTaskRunner.ECS.send(new DescribeTasksCommand({ cluster: clusterName, tasks: [taskArn] }));
|
||||
if (tasks.tasks?.[0]) {
|
||||
return tasks.tasks?.[0];
|
||||
} else {
|
||||
throw new Error('No task found');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -178,9 +170,6 @@ class AWSTaskRunner {
|
||||
await new Promise((resolve) => setTimeout(resolve, 1500));
|
||||
const taskData = await AWSTaskRunner.describeTasks(clusterName, taskArn);
|
||||
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
|
||||
if (taskData?.lastStatus !== 'RUNNING') {
|
||||
await new Promise((resolve) => setTimeout(resolve, 3500));
|
||||
}
|
||||
({ iterator, shouldReadLogs, output, shouldCleanup } = await AWSTaskRunner.handleLogStreamIteration(
|
||||
iterator,
|
||||
shouldReadLogs,
|
||||
@@ -198,21 +187,7 @@ class AWSTaskRunner {
|
||||
output: string,
|
||||
shouldCleanup: boolean,
|
||||
) {
|
||||
let records: any;
|
||||
try {
|
||||
records = await AwsClientFactory.getKinesis().send(new GetRecordsCommand({ ShardIterator: iterator }));
|
||||
} catch (error: any) {
|
||||
const isThrottle = error?.name === 'ThrottlingException' || /rate exceeded/i.test(String(error?.message));
|
||||
if (isThrottle) {
|
||||
const baseBackoffMs = 1000;
|
||||
const jitterMs = Math.floor(Math.random() * 1000);
|
||||
const sleepMs = baseBackoffMs + jitterMs;
|
||||
CloudRunnerLogger.log(`AWS throttled GetRecords, backing off ${sleepMs}ms (1000 + jitter ${jitterMs})`);
|
||||
await new Promise((r) => setTimeout(r, sleepMs));
|
||||
return { iterator, shouldReadLogs, output, shouldCleanup };
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
const records = await AWSTaskRunner.Kinesis.send(new GetRecordsCommand({ ShardIterator: iterator }));
|
||||
iterator = records.NextShardIterator || '';
|
||||
({ shouldReadLogs, output, shouldCleanup } = AWSTaskRunner.logRecords(
|
||||
records,
|
||||
@@ -225,7 +200,7 @@ class AWSTaskRunner {
|
||||
return { iterator, shouldReadLogs, output, shouldCleanup };
|
||||
}
|
||||
|
||||
private static checkStreamingShouldContinue(taskData: any, timestamp: number, shouldReadLogs: boolean) {
|
||||
private static checkStreamingShouldContinue(taskData: Task, timestamp: number, shouldReadLogs: boolean) {
|
||||
if (taskData?.lastStatus === 'UNKNOWN') {
|
||||
CloudRunnerLogger.log('## Cloud runner job unknwon');
|
||||
}
|
||||
@@ -245,7 +220,7 @@ class AWSTaskRunner {
|
||||
}
|
||||
|
||||
private static logRecords(
|
||||
records: any,
|
||||
records: GetRecordsCommandOutput,
|
||||
iterator: string,
|
||||
shouldReadLogs: boolean,
|
||||
output: string,
|
||||
@@ -273,13 +248,13 @@ class AWSTaskRunner {
|
||||
}
|
||||
|
||||
private static async getLogStream(kinesisStreamName: string) {
|
||||
return await AwsClientFactory.getKinesis().send(new DescribeStreamCommand({ StreamName: kinesisStreamName }));
|
||||
return await AWSTaskRunner.Kinesis.send(new DescribeStreamCommand({ StreamName: kinesisStreamName }));
|
||||
}
|
||||
|
||||
private static async getLogIterator(stream: any) {
|
||||
private static async getLogIterator(stream: DescribeStreamCommandOutput) {
|
||||
return (
|
||||
(
|
||||
await AwsClientFactory.getKinesis().send(
|
||||
await AWSTaskRunner.Kinesis.send(
|
||||
new GetShardIteratorCommand({
|
||||
ShardIteratorType: 'TRIM_HORIZON',
|
||||
StreamName: stream.StreamDescription?.StreamName ?? '',
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import { CloudFormation, DeleteStackCommand, waitUntilStackDeleteComplete } from '@aws-sdk/client-cloudformation';
|
||||
import { ECS as ECSClient } from '@aws-sdk/client-ecs';
|
||||
import { Kinesis } from '@aws-sdk/client-kinesis';
|
||||
import CloudRunnerSecret from '../../options/cloud-runner-secret';
|
||||
import CloudRunnerEnvironmentVariable from '../../options/cloud-runner-environment-variable';
|
||||
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
|
||||
@@ -14,7 +16,6 @@ import { ProviderResource } from '../provider-resource';
|
||||
import { ProviderWorkflow } from '../provider-workflow';
|
||||
import { TaskService } from './services/task-service';
|
||||
import CloudRunnerOptions from '../../options/cloud-runner-options';
|
||||
import { AwsClientFactory } from './aws-client-factory';
|
||||
|
||||
class AWSBuildEnvironment implements ProviderInterface {
|
||||
private baseStackName: string;
|
||||
@@ -76,7 +77,7 @@ class AWSBuildEnvironment implements ProviderInterface {
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const CF = AwsClientFactory.getCloudFormation();
|
||||
const CF = new CloudFormation({ region: Input.region });
|
||||
await new AwsBaseStack(this.baseStackName).setupBaseStack(CF);
|
||||
}
|
||||
|
||||
@@ -90,9 +91,10 @@ class AWSBuildEnvironment implements ProviderInterface {
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<string> {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
AwsClientFactory.getECS();
|
||||
const CF = AwsClientFactory.getCloudFormation();
|
||||
AwsClientFactory.getKinesis();
|
||||
const ECS = new ECSClient({ region: Input.region });
|
||||
const CF = new CloudFormation({ region: Input.region });
|
||||
AwsTaskRunner.ECS = ECS;
|
||||
AwsTaskRunner.Kinesis = new Kinesis({ region: Input.region });
|
||||
CloudRunnerLogger.log(`AWS Region: ${CF.config.region}`);
|
||||
const entrypoint = ['/bin/sh'];
|
||||
const startTimeMs = Date.now();
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
import { DeleteStackCommand, DescribeStackResourcesCommand } from '@aws-sdk/client-cloudformation';
|
||||
import { DeleteLogGroupCommand } from '@aws-sdk/client-cloudwatch-logs';
|
||||
import { StopTaskCommand } from '@aws-sdk/client-ecs';
|
||||
import {
|
||||
CloudFormation,
|
||||
DeleteStackCommand,
|
||||
DeleteStackCommandInput,
|
||||
DescribeStackResourcesCommand,
|
||||
} from '@aws-sdk/client-cloudformation';
|
||||
import { CloudWatchLogs, DeleteLogGroupCommand } from '@aws-sdk/client-cloudwatch-logs';
|
||||
import { ECS, StopTaskCommand } from '@aws-sdk/client-ecs';
|
||||
import Input from '../../../../input';
|
||||
import CloudRunnerLogger from '../../../services/core/cloud-runner-logger';
|
||||
import { TaskService } from './task-service';
|
||||
import { AwsClientFactory } from '../aws-client-factory';
|
||||
|
||||
export class GarbageCollectionService {
|
||||
static isOlderThan1day(date: Date) {
|
||||
@@ -15,9 +19,9 @@ export class GarbageCollectionService {
|
||||
|
||||
public static async cleanup(deleteResources = false, OneDayOlderOnly: boolean = false) {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const CF = AwsClientFactory.getCloudFormation();
|
||||
const ecs = AwsClientFactory.getECS();
|
||||
const cwl = AwsClientFactory.getCloudWatchLogs();
|
||||
const CF = new CloudFormation({ region: Input.region });
|
||||
const ecs = new ECS({ region: Input.region });
|
||||
const cwl = new CloudWatchLogs({ region: Input.region });
|
||||
const taskDefinitionsInUse = new Array();
|
||||
const tasks = await TaskService.getTasks();
|
||||
|
||||
@@ -53,7 +57,8 @@ export class GarbageCollectionService {
|
||||
}
|
||||
|
||||
CloudRunnerLogger.log(`Deleting ${element.StackName}`);
|
||||
await CF.send(new DeleteStackCommand({ StackName: element.StackName }));
|
||||
const deleteStackInput: DeleteStackCommandInput = { StackName: element.StackName };
|
||||
await CF.send(new DeleteStackCommand(deleteStackInput));
|
||||
}
|
||||
}
|
||||
const logGroups = await TaskService.getLogGroups();
|
||||
|
||||
@@ -1,25 +1,31 @@
|
||||
import {
|
||||
CloudFormation,
|
||||
DescribeStackResourcesCommand,
|
||||
DescribeStacksCommand,
|
||||
ListStacksCommand,
|
||||
StackSummary,
|
||||
} from '@aws-sdk/client-cloudformation';
|
||||
import type { ListStacksCommandOutput } from '@aws-sdk/client-cloudformation';
|
||||
import { DescribeLogGroupsCommand } from '@aws-sdk/client-cloudwatch-logs';
|
||||
import type { DescribeLogGroupsCommandInput, DescribeLogGroupsCommandOutput } from '@aws-sdk/client-cloudwatch-logs';
|
||||
import { DescribeTasksCommand, ListClustersCommand, ListTasksCommand } from '@aws-sdk/client-ecs';
|
||||
import type { DescribeTasksCommandOutput } from '@aws-sdk/client-ecs';
|
||||
import { ListObjectsCommand } from '@aws-sdk/client-s3';
|
||||
import {
|
||||
CloudWatchLogs,
|
||||
DescribeLogGroupsCommand,
|
||||
DescribeLogGroupsCommandInput,
|
||||
LogGroup,
|
||||
} from '@aws-sdk/client-cloudwatch-logs';
|
||||
import {
|
||||
DescribeTasksCommand,
|
||||
DescribeTasksCommandInput,
|
||||
ECS,
|
||||
ListClustersCommand,
|
||||
ListTasksCommand,
|
||||
ListTasksCommandInput,
|
||||
Task,
|
||||
} from '@aws-sdk/client-ecs';
|
||||
import { ListObjectsCommand, ListObjectsCommandInput, S3 } from '@aws-sdk/client-s3';
|
||||
import Input from '../../../../input';
|
||||
import CloudRunnerLogger from '../../../services/core/cloud-runner-logger';
|
||||
import { BaseStackFormation } from '../cloud-formations/base-stack-formation';
|
||||
import AwsTaskRunner from '../aws-task-runner';
|
||||
import CloudRunner from '../../../cloud-runner';
|
||||
import { AwsClientFactory } from '../aws-client-factory';
|
||||
import SharedWorkspaceLocking from '../../../services/core/shared-workspace-locking';
|
||||
|
||||
type StackSummary = NonNullable<ListStacksCommandOutput['StackSummaries']>[number];
|
||||
type LogGroup = NonNullable<DescribeLogGroupsCommandOutput['logGroups']>[number];
|
||||
type Task = NonNullable<DescribeTasksCommandOutput['tasks']>[number];
|
||||
|
||||
export class TaskService {
|
||||
static async watch() {
|
||||
@@ -37,7 +43,7 @@ export class TaskService {
|
||||
CloudRunnerLogger.log(``);
|
||||
CloudRunnerLogger.log(`List Cloud Formation Stacks`);
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const CF = AwsClientFactory.getCloudFormation();
|
||||
const CF = new CloudFormation({ region: Input.region });
|
||||
const stacks =
|
||||
(await CF.send(new ListStacksCommand({}))).StackSummaries?.filter(
|
||||
(_x) =>
|
||||
@@ -85,25 +91,21 @@ export class TaskService {
|
||||
return result;
|
||||
}
|
||||
public static async getTasks() {
|
||||
// Extended Task type to include custom properties added in this method
|
||||
type ExtendedTask = Task & {
|
||||
overrides?: Record<string, unknown>;
|
||||
attachments?: unknown[];
|
||||
};
|
||||
const result: { taskElement: ExtendedTask; element: string }[] = [];
|
||||
const result: { taskElement: Task; element: string }[] = [];
|
||||
CloudRunnerLogger.log(``);
|
||||
CloudRunnerLogger.log(`List Tasks`);
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const ecs = AwsClientFactory.getECS();
|
||||
const ecs = new ECS({ region: Input.region });
|
||||
const clusters = (await ecs.send(new ListClustersCommand({}))).clusterArns || [];
|
||||
CloudRunnerLogger.log(`Task Clusters ${clusters.length}`);
|
||||
for (const element of clusters) {
|
||||
const input = {
|
||||
const input: ListTasksCommandInput = {
|
||||
cluster: element,
|
||||
};
|
||||
|
||||
const list = (await ecs.send(new ListTasksCommand(input))).taskArns || [];
|
||||
if (list.length > 0) {
|
||||
const describeInput = { tasks: list, cluster: element };
|
||||
const describeInput: DescribeTasksCommandInput = { tasks: list, cluster: element };
|
||||
const describeList = (await ecs.send(new DescribeTasksCommand(describeInput))).tasks || [];
|
||||
if (describeList.length === 0) {
|
||||
CloudRunnerLogger.log(`No Tasks`);
|
||||
@@ -114,14 +116,13 @@ export class TaskService {
|
||||
if (taskElement === undefined) {
|
||||
continue;
|
||||
}
|
||||
const extendedTask = taskElement as ExtendedTask;
|
||||
extendedTask.overrides = {};
|
||||
extendedTask.attachments = [];
|
||||
if (extendedTask.createdAt === undefined) {
|
||||
CloudRunnerLogger.log(`Skipping ${extendedTask.taskDefinitionArn} no createdAt date`);
|
||||
taskElement.overrides = {};
|
||||
taskElement.attachments = [];
|
||||
if (taskElement.createdAt === undefined) {
|
||||
CloudRunnerLogger.log(`Skipping ${taskElement.taskDefinitionArn} no createdAt date`);
|
||||
continue;
|
||||
}
|
||||
result.push({ taskElement: extendedTask, element });
|
||||
result.push({ taskElement, element });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -131,7 +132,7 @@ export class TaskService {
|
||||
}
|
||||
public static async awsDescribeJob(job: string) {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const CF = AwsClientFactory.getCloudFormation();
|
||||
const CF = new CloudFormation({ region: Input.region });
|
||||
try {
|
||||
const stack =
|
||||
(await CF.send(new ListStacksCommand({}))).StackSummaries?.find((_x) => _x.StackName === job) || undefined;
|
||||
@@ -162,9 +163,9 @@ export class TaskService {
|
||||
}
|
||||
}
|
||||
public static async getLogGroups() {
|
||||
const result: LogGroup[] = [];
|
||||
const result: Array<LogGroup> = [];
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const ecs = AwsClientFactory.getCloudWatchLogs();
|
||||
const ecs = new CloudWatchLogs();
|
||||
let logStreamInput: DescribeLogGroupsCommandInput = {
|
||||
/* logGroupNamePrefix: 'game-ci' */
|
||||
};
|
||||
@@ -196,13 +197,8 @@ export class TaskService {
|
||||
}
|
||||
public static async getLocks() {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
if (CloudRunner.buildParameters.storageProvider === 'rclone') {
|
||||
const objects = await (SharedWorkspaceLocking as any).listObjects('');
|
||||
|
||||
return objects.map((x: string) => ({ Key: x }));
|
||||
}
|
||||
const s3 = AwsClientFactory.getS3();
|
||||
const listRequest = {
|
||||
const s3 = new S3({ region: Input.region });
|
||||
const listRequest: ListObjectsCommandInput = {
|
||||
Bucket: CloudRunner.buildParameters.awsStackName,
|
||||
};
|
||||
|
||||
|
||||
@@ -22,30 +22,6 @@ class KubernetesJobSpecFactory {
|
||||
containerName: string,
|
||||
ip: string = '',
|
||||
) {
|
||||
const endpointEnvNames = new Set([
|
||||
'AWS_S3_ENDPOINT',
|
||||
'AWS_ENDPOINT',
|
||||
'AWS_CLOUD_FORMATION_ENDPOINT',
|
||||
'AWS_ECS_ENDPOINT',
|
||||
'AWS_KINESIS_ENDPOINT',
|
||||
'AWS_CLOUD_WATCH_LOGS_ENDPOINT',
|
||||
'INPUT_AWSS3ENDPOINT',
|
||||
'INPUT_AWSENDPOINT',
|
||||
]);
|
||||
const adjustedEnvironment = environment.map((x) => {
|
||||
let value = x.value;
|
||||
if (
|
||||
typeof value === 'string' &&
|
||||
endpointEnvNames.has(x.name) &&
|
||||
(value.startsWith('http://localhost') || value.startsWith('http://127.0.0.1'))
|
||||
) {
|
||||
value = value
|
||||
.replace('http://localhost', 'http://host.k3d.internal')
|
||||
.replace('http://127.0.0.1', 'http://host.k3d.internal');
|
||||
}
|
||||
return { name: x.name, value } as CloudRunnerEnvironmentVariable;
|
||||
});
|
||||
|
||||
const job = new k8s.V1Job();
|
||||
job.apiVersion = 'batch/v1';
|
||||
job.kind = 'Job';
|
||||
@@ -88,7 +64,7 @@ class KubernetesJobSpecFactory {
|
||||
},
|
||||
},
|
||||
env: [
|
||||
...adjustedEnvironment.map((x) => {
|
||||
...environment.map((x) => {
|
||||
const environmentVariable = new V1EnvVar();
|
||||
environmentVariable.name = x.name;
|
||||
environmentVariable.value = x.value;
|
||||
|
||||
@@ -66,43 +66,6 @@ class LocalCloudRunner implements ProviderInterface {
|
||||
CloudRunnerLogger.log(buildGuid);
|
||||
CloudRunnerLogger.log(commands);
|
||||
|
||||
// On Windows, many built-in hooks use POSIX shell syntax. Execute via bash if available.
|
||||
if (process.platform === 'win32') {
|
||||
// Properly escape the command string for embedding in a double-quoted bash string.
|
||||
// Order matters: backslashes must be escaped first to avoid double-escaping.
|
||||
const escapeForBashDoubleQuotes = (stringValue: string): string => {
|
||||
return stringValue
|
||||
.replace(/\\/g, '\\\\') // Escape backslashes first
|
||||
.replace(/\$/g, '\\$') // Escape dollar signs to prevent variable expansion
|
||||
.replace(/`/g, '\\`') // Escape backticks to prevent command substitution
|
||||
.replace(/"/g, '\\"'); // Escape double quotes
|
||||
};
|
||||
|
||||
// Split commands by newlines and escape each line
|
||||
const lines = commands
|
||||
.replace(/\r/g, '')
|
||||
.split('\n')
|
||||
.filter((x) => x.trim().length > 0)
|
||||
.map((line) => escapeForBashDoubleQuotes(line));
|
||||
|
||||
// Join with semicolons, but don't add semicolon after control flow keywords
|
||||
// Control flow keywords that shouldn't be followed by semicolons: then, else, do, fi, done, esac
|
||||
const controlFlowKeywords = /\b(then|else|do|fi|done|esac)\s*$/;
|
||||
const inline = lines
|
||||
.map((line, index) => {
|
||||
// Don't add semicolon if this line ends with a control flow keyword
|
||||
if (controlFlowKeywords.test(line.trim()) || index === lines.length - 1) {
|
||||
return line;
|
||||
}
|
||||
|
||||
return `${line} ;`;
|
||||
})
|
||||
.join(' ');
|
||||
const bashWrapped = `bash -lc "${inline}"`;
|
||||
|
||||
return await CloudRunnerSystem.Run(bashWrapped);
|
||||
}
|
||||
|
||||
return await CloudRunnerSystem.Run(commands);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,278 +0,0 @@
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import * as fs from 'fs';
|
||||
import path from 'path';
|
||||
import CloudRunnerLogger from '../services/core/cloud-runner-logger';
|
||||
import { GitHubUrlInfo, generateCacheKey } from './provider-url-parser';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
export interface GitCloneResult {
|
||||
success: boolean;
|
||||
localPath: string;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export interface GitUpdateResult {
|
||||
success: boolean;
|
||||
updated: boolean;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Manages git operations for provider repositories
|
||||
*/
|
||||
export class ProviderGitManager {
|
||||
private static readonly CACHE_DIR = path.join(process.cwd(), '.provider-cache');
|
||||
private static readonly GIT_TIMEOUT = 30000; // 30 seconds
|
||||
|
||||
/**
|
||||
* Ensures the cache directory exists
|
||||
*/
|
||||
private static ensureCacheDir(): void {
|
||||
if (!fs.existsSync(this.CACHE_DIR)) {
|
||||
fs.mkdirSync(this.CACHE_DIR, { recursive: true });
|
||||
CloudRunnerLogger.log(`Created provider cache directory: ${this.CACHE_DIR}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the local path for a cached repository
|
||||
* @param urlInfo GitHub URL information
|
||||
* @returns Local path to the repository
|
||||
*/
|
||||
private static getLocalPath(urlInfo: GitHubUrlInfo): string {
|
||||
const cacheKey = generateCacheKey(urlInfo);
|
||||
|
||||
return path.join(this.CACHE_DIR, cacheKey);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a repository is already cloned locally
|
||||
* @param urlInfo GitHub URL information
|
||||
* @returns True if repository exists locally
|
||||
*/
|
||||
private static isRepositoryCloned(urlInfo: GitHubUrlInfo): boolean {
|
||||
const localPath = this.getLocalPath(urlInfo);
|
||||
|
||||
return fs.existsSync(localPath) && fs.existsSync(path.join(localPath, '.git'));
|
||||
}
|
||||
|
||||
/**
|
||||
* Clones a GitHub repository to the local cache
|
||||
* @param urlInfo GitHub URL information
|
||||
* @returns Clone result with success status and local path
|
||||
*/
|
||||
static async cloneRepository(urlInfo: GitHubUrlInfo): Promise<GitCloneResult> {
|
||||
this.ensureCacheDir();
|
||||
const localPath = this.getLocalPath(urlInfo);
|
||||
|
||||
// Remove existing directory if it exists
|
||||
if (fs.existsSync(localPath)) {
|
||||
CloudRunnerLogger.log(`Removing existing directory: ${localPath}`);
|
||||
fs.rmSync(localPath, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
try {
|
||||
CloudRunnerLogger.log(`Cloning repository: ${urlInfo.url} to ${localPath}`);
|
||||
|
||||
const cloneCommand = `git clone --depth 1 --branch ${urlInfo.branch} ${urlInfo.url} "${localPath}"`;
|
||||
CloudRunnerLogger.log(`Executing: ${cloneCommand}`);
|
||||
|
||||
const { stderr } = await execAsync(cloneCommand, {
|
||||
timeout: this.GIT_TIMEOUT,
|
||||
cwd: this.CACHE_DIR,
|
||||
});
|
||||
|
||||
if (stderr && !stderr.includes('warning')) {
|
||||
CloudRunnerLogger.log(`Git clone stderr: ${stderr}`);
|
||||
}
|
||||
|
||||
CloudRunnerLogger.log(`Successfully cloned repository to: ${localPath}`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
localPath,
|
||||
};
|
||||
} catch (error: any) {
|
||||
const errorMessage = `Failed to clone repository ${urlInfo.url}: ${error.message}`;
|
||||
CloudRunnerLogger.log(`Error: ${errorMessage}`);
|
||||
|
||||
return {
|
||||
success: false,
|
||||
localPath,
|
||||
error: errorMessage,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a locally cloned repository
|
||||
* @param urlInfo GitHub URL information
|
||||
* @returns Update result with success status and whether it was updated
|
||||
*/
|
||||
static async updateRepository(urlInfo: GitHubUrlInfo): Promise<GitUpdateResult> {
|
||||
const localPath = this.getLocalPath(urlInfo);
|
||||
|
||||
if (!this.isRepositoryCloned(urlInfo)) {
|
||||
return {
|
||||
success: false,
|
||||
updated: false,
|
||||
error: 'Repository not found locally',
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
CloudRunnerLogger.log(`Updating repository: ${localPath}`);
|
||||
|
||||
// Fetch latest changes
|
||||
await execAsync('git fetch origin', {
|
||||
timeout: this.GIT_TIMEOUT,
|
||||
cwd: localPath,
|
||||
});
|
||||
|
||||
// Check if there are updates
|
||||
const { stdout: statusOutput } = await execAsync(`git status -uno`, {
|
||||
timeout: this.GIT_TIMEOUT,
|
||||
cwd: localPath,
|
||||
});
|
||||
|
||||
const hasUpdates =
|
||||
statusOutput.includes('Your branch is behind') || statusOutput.includes('can be fast-forwarded');
|
||||
|
||||
if (hasUpdates) {
|
||||
CloudRunnerLogger.log(`Updates available, pulling latest changes...`);
|
||||
|
||||
// Reset to origin/branch to get latest changes
|
||||
await execAsync(`git reset --hard origin/${urlInfo.branch}`, {
|
||||
timeout: this.GIT_TIMEOUT,
|
||||
cwd: localPath,
|
||||
});
|
||||
|
||||
CloudRunnerLogger.log(`Repository updated successfully`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
updated: true,
|
||||
};
|
||||
} else {
|
||||
CloudRunnerLogger.log(`Repository is already up to date`);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
updated: false,
|
||||
};
|
||||
}
|
||||
} catch (error: any) {
|
||||
const errorMessage = `Failed to update repository ${localPath}: ${error.message}`;
|
||||
CloudRunnerLogger.log(`Error: ${errorMessage}`);
|
||||
|
||||
return {
|
||||
success: false,
|
||||
updated: false,
|
||||
error: errorMessage,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures a repository is available locally (clone if needed, update if exists)
|
||||
* @param urlInfo GitHub URL information
|
||||
* @returns Local path to the repository
|
||||
*/
|
||||
static async ensureRepositoryAvailable(urlInfo: GitHubUrlInfo): Promise<string> {
|
||||
this.ensureCacheDir();
|
||||
|
||||
if (this.isRepositoryCloned(urlInfo)) {
|
||||
CloudRunnerLogger.log(`Repository already exists locally, checking for updates...`);
|
||||
const updateResult = await this.updateRepository(urlInfo);
|
||||
|
||||
if (!updateResult.success) {
|
||||
CloudRunnerLogger.log(`Failed to update repository, attempting fresh clone...`);
|
||||
const cloneResult = await this.cloneRepository(urlInfo);
|
||||
if (!cloneResult.success) {
|
||||
throw new Error(`Failed to ensure repository availability: ${cloneResult.error}`);
|
||||
}
|
||||
|
||||
return cloneResult.localPath;
|
||||
}
|
||||
|
||||
return this.getLocalPath(urlInfo);
|
||||
} else {
|
||||
CloudRunnerLogger.log(`Repository not found locally, cloning...`);
|
||||
const cloneResult = await this.cloneRepository(urlInfo);
|
||||
|
||||
if (!cloneResult.success) {
|
||||
throw new Error(`Failed to clone repository: ${cloneResult.error}`);
|
||||
}
|
||||
|
||||
return cloneResult.localPath;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the path to the provider module within a repository
|
||||
* @param urlInfo GitHub URL information
|
||||
* @param localPath Local path to the repository
|
||||
* @returns Path to the provider module
|
||||
*/
|
||||
static getProviderModulePath(urlInfo: GitHubUrlInfo, localPath: string): string {
|
||||
if (urlInfo.path) {
|
||||
return path.join(localPath, urlInfo.path);
|
||||
}
|
||||
|
||||
// Look for common provider entry points
|
||||
const commonEntryPoints = [
|
||||
'index.js',
|
||||
'index.ts',
|
||||
'src/index.js',
|
||||
'src/index.ts',
|
||||
'lib/index.js',
|
||||
'lib/index.ts',
|
||||
'dist/index.js',
|
||||
'dist/index.js.map',
|
||||
];
|
||||
|
||||
for (const entryPoint of commonEntryPoints) {
|
||||
const fullPath = path.join(localPath, entryPoint);
|
||||
if (fs.existsSync(fullPath)) {
|
||||
CloudRunnerLogger.log(`Found provider entry point: ${entryPoint}`);
|
||||
|
||||
return fullPath;
|
||||
}
|
||||
}
|
||||
|
||||
// Default to repository root
|
||||
CloudRunnerLogger.log(`No specific entry point found, using repository root`);
|
||||
|
||||
return localPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up old cached repositories (optional maintenance)
|
||||
* @param maxAgeDays Maximum age in days for cached repositories
|
||||
*/
|
||||
static async cleanupOldRepositories(maxAgeDays: number = 30): Promise<void> {
|
||||
this.ensureCacheDir();
|
||||
|
||||
try {
|
||||
const entries = fs.readdirSync(this.CACHE_DIR, { withFileTypes: true });
|
||||
const now = Date.now();
|
||||
const maxAge = maxAgeDays * 24 * 60 * 60 * 1000; // Convert to milliseconds
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
const entryPath = path.join(this.CACHE_DIR, entry.name);
|
||||
const stats = fs.statSync(entryPath);
|
||||
|
||||
if (now - stats.mtime.getTime() > maxAge) {
|
||||
CloudRunnerLogger.log(`Cleaning up old repository: ${entry.name}`);
|
||||
fs.rmSync(entryPath, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error: any) {
|
||||
CloudRunnerLogger.log(`Error during cleanup: ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,158 +0,0 @@
|
||||
import { ProviderInterface } from './provider-interface';
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import CloudRunnerLogger from '../services/core/cloud-runner-logger';
|
||||
import { parseProviderSource, logProviderSource, ProviderSourceInfo } from './provider-url-parser';
|
||||
import { ProviderGitManager } from './provider-git-manager';
|
||||
|
||||
// import path from 'path'; // Not currently used
|
||||
|
||||
/**
|
||||
* Dynamically load a provider package by name, URL, or path.
|
||||
* @param providerSource Provider source (name, URL, or path)
|
||||
* @param buildParameters Build parameters passed to the provider constructor
|
||||
* @throws Error when the provider cannot be loaded or does not implement ProviderInterface
|
||||
*/
|
||||
export default async function loadProvider(
|
||||
providerSource: string,
|
||||
buildParameters: BuildParameters,
|
||||
): Promise<ProviderInterface> {
|
||||
CloudRunnerLogger.log(`Loading provider: ${providerSource}`);
|
||||
|
||||
// Parse the provider source to determine its type
|
||||
const sourceInfo = parseProviderSource(providerSource);
|
||||
logProviderSource(providerSource, sourceInfo);
|
||||
|
||||
let modulePath: string;
|
||||
let importedModule: any;
|
||||
|
||||
try {
|
||||
// Handle different source types
|
||||
switch (sourceInfo.type) {
|
||||
case 'github': {
|
||||
CloudRunnerLogger.log(`Processing GitHub repository: ${sourceInfo.owner}/${sourceInfo.repo}`);
|
||||
|
||||
// Ensure the repository is available locally
|
||||
const localRepoPath = await ProviderGitManager.ensureRepositoryAvailable(sourceInfo);
|
||||
|
||||
// Get the path to the provider module within the repository
|
||||
modulePath = ProviderGitManager.getProviderModulePath(sourceInfo, localRepoPath);
|
||||
|
||||
CloudRunnerLogger.log(`Loading provider from: ${modulePath}`);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'local': {
|
||||
modulePath = sourceInfo.path;
|
||||
CloudRunnerLogger.log(`Loading provider from local path: ${modulePath}`);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'npm': {
|
||||
modulePath = sourceInfo.packageName;
|
||||
CloudRunnerLogger.log(`Loading provider from NPM package: ${modulePath}`);
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
// Fallback to built-in providers or direct import
|
||||
const providerModuleMap: Record<string, string> = {
|
||||
aws: './aws',
|
||||
k8s: './k8s',
|
||||
test: './test',
|
||||
'local-docker': './docker',
|
||||
'local-system': './local',
|
||||
local: './local',
|
||||
};
|
||||
|
||||
modulePath = providerModuleMap[providerSource] || providerSource;
|
||||
CloudRunnerLogger.log(`Loading provider from module path: ${modulePath}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Import the module
|
||||
importedModule = await import(modulePath);
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to load provider package '${providerSource}': ${(error as Error).message}`);
|
||||
}
|
||||
|
||||
// Extract the provider class/function
|
||||
const Provider = importedModule.default || importedModule;
|
||||
|
||||
// Validate that we have a constructor
|
||||
if (typeof Provider !== 'function') {
|
||||
throw new TypeError(`Provider package '${providerSource}' does not export a constructor function`);
|
||||
}
|
||||
|
||||
// Instantiate the provider
|
||||
let instance: any;
|
||||
try {
|
||||
instance = new Provider(buildParameters);
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to instantiate provider '${providerSource}': ${(error as Error).message}`);
|
||||
}
|
||||
|
||||
// Validate that the instance implements the required interface
|
||||
const requiredMethods = [
|
||||
'cleanupWorkflow',
|
||||
'setupWorkflow',
|
||||
'runTaskInWorkflow',
|
||||
'garbageCollect',
|
||||
'listResources',
|
||||
'listWorkflow',
|
||||
'watchWorkflow',
|
||||
];
|
||||
|
||||
for (const method of requiredMethods) {
|
||||
if (typeof instance[method] !== 'function') {
|
||||
throw new TypeError(
|
||||
`Provider package '${providerSource}' does not implement ProviderInterface. Missing method '${method}'.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
CloudRunnerLogger.log(`Successfully loaded provider: ${providerSource}`);
|
||||
|
||||
return instance as ProviderInterface;
|
||||
}
|
||||
|
||||
/**
|
||||
* ProviderLoader class for backward compatibility and additional utilities
|
||||
*/
|
||||
export class ProviderLoader {
|
||||
/**
|
||||
* Dynamically loads a provider by name, URL, or path (wrapper around loadProvider function)
|
||||
* @param providerSource - The provider source (name, URL, or path) to load
|
||||
* @param buildParameters - Build parameters to pass to the provider constructor
|
||||
* @returns Promise<ProviderInterface> - The loaded provider instance
|
||||
* @throws Error if provider package is missing or doesn't implement ProviderInterface
|
||||
*/
|
||||
static async loadProvider(providerSource: string, buildParameters: BuildParameters): Promise<ProviderInterface> {
|
||||
return loadProvider(providerSource, buildParameters);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a list of available provider names
|
||||
* @returns string[] - Array of available provider names
|
||||
*/
|
||||
static getAvailableProviders(): string[] {
|
||||
return ['aws', 'k8s', 'test', 'local-docker', 'local-system', 'local'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up old cached repositories
|
||||
* @param maxAgeDays Maximum age in days for cached repositories (default: 30)
|
||||
*/
|
||||
static async cleanupCache(maxAgeDays: number = 30): Promise<void> {
|
||||
await ProviderGitManager.cleanupOldRepositories(maxAgeDays);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets information about a provider source without loading it
|
||||
* @param providerSource The provider source to analyze
|
||||
* @returns ProviderSourceInfo object with parsed details
|
||||
*/
|
||||
static analyzeProviderSource(providerSource: string): ProviderSourceInfo {
|
||||
return parseProviderSource(providerSource);
|
||||
}
|
||||
}
|
||||
@@ -1,138 +0,0 @@
|
||||
import CloudRunnerLogger from '../services/core/cloud-runner-logger';
|
||||
|
||||
export interface GitHubUrlInfo {
|
||||
type: 'github';
|
||||
owner: string;
|
||||
repo: string;
|
||||
branch?: string;
|
||||
path?: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface LocalPathInfo {
|
||||
type: 'local';
|
||||
path: string;
|
||||
}
|
||||
|
||||
export interface NpmPackageInfo {
|
||||
type: 'npm';
|
||||
packageName: string;
|
||||
}
|
||||
|
||||
export type ProviderSourceInfo = GitHubUrlInfo | LocalPathInfo | NpmPackageInfo;
|
||||
|
||||
/**
|
||||
* Parses a provider source string and determines its type and details
|
||||
* @param source The provider source string (URL, path, or package name)
|
||||
* @returns ProviderSourceInfo object with parsed details
|
||||
*/
|
||||
export function parseProviderSource(source: string): ProviderSourceInfo {
|
||||
// Check if it's a GitHub URL
|
||||
const githubMatch = source.match(
|
||||
/^https?:\/\/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?\/?(?:tree\/([^/]+))?(?:\/(.+))?$/,
|
||||
);
|
||||
if (githubMatch) {
|
||||
const [, owner, repo, branch, path] = githubMatch;
|
||||
|
||||
return {
|
||||
type: 'github',
|
||||
owner,
|
||||
repo,
|
||||
branch: branch || 'main',
|
||||
path: path || '',
|
||||
url: `https://github.com/${owner}/${repo}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if it's a GitHub SSH URL
|
||||
const githubSshMatch = source.match(/^git@github\.com:([^/]+)\/([^/]+?)(?:\.git)?\/?(?:tree\/([^/]+))?(?:\/(.+))?$/);
|
||||
if (githubSshMatch) {
|
||||
const [, owner, repo, branch, path] = githubSshMatch;
|
||||
|
||||
return {
|
||||
type: 'github',
|
||||
owner,
|
||||
repo,
|
||||
branch: branch || 'main',
|
||||
path: path || '',
|
||||
url: `https://github.com/${owner}/${repo}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if it's a shorthand GitHub reference (owner/repo)
|
||||
const shorthandMatch = source.match(/^([^/@]+)\/([^/@]+)(?:@([^/]+))?(?:\/(.+))?$/);
|
||||
if (shorthandMatch && !source.startsWith('.') && !source.startsWith('/') && !source.includes('\\')) {
|
||||
const [, owner, repo, branch, path] = shorthandMatch;
|
||||
|
||||
return {
|
||||
type: 'github',
|
||||
owner,
|
||||
repo,
|
||||
branch: branch || 'main',
|
||||
path: path || '',
|
||||
url: `https://github.com/${owner}/${repo}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if it's a local path
|
||||
if (source.startsWith('./') || source.startsWith('../') || source.startsWith('/') || source.includes('\\')) {
|
||||
return {
|
||||
type: 'local',
|
||||
path: source,
|
||||
};
|
||||
}
|
||||
|
||||
// Default to npm package
|
||||
return {
|
||||
type: 'npm',
|
||||
packageName: source,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a cache key for a GitHub repository
|
||||
* @param urlInfo GitHub URL information
|
||||
* @returns Cache key string
|
||||
*/
|
||||
export function generateCacheKey(urlInfo: GitHubUrlInfo): string {
|
||||
return `github_${urlInfo.owner}_${urlInfo.repo}_${urlInfo.branch}`.replace(/[^\w-]/g, '_');
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates if a string looks like a valid GitHub URL or reference
|
||||
* @param source The source string to validate
|
||||
* @returns True if it looks like a GitHub reference
|
||||
*/
|
||||
export function isGitHubSource(source: string): boolean {
|
||||
const parsed = parseProviderSource(source);
|
||||
|
||||
return parsed.type === 'github';
|
||||
}
|
||||
|
||||
/**
|
||||
* Logs the parsed provider source information
|
||||
* @param source The original source string
|
||||
* @param parsed The parsed source information
|
||||
*/
|
||||
export function logProviderSource(source: string, parsed: ProviderSourceInfo): void {
|
||||
CloudRunnerLogger.log(`Provider source: ${source}`);
|
||||
switch (parsed.type) {
|
||||
case 'github':
|
||||
CloudRunnerLogger.log(` Type: GitHub repository`);
|
||||
CloudRunnerLogger.log(` Owner: ${parsed.owner}`);
|
||||
CloudRunnerLogger.log(` Repository: ${parsed.repo}`);
|
||||
CloudRunnerLogger.log(` Branch: ${parsed.branch}`);
|
||||
if (parsed.path) {
|
||||
CloudRunnerLogger.log(` Path: ${parsed.path}`);
|
||||
}
|
||||
break;
|
||||
case 'local':
|
||||
CloudRunnerLogger.log(` Type: Local path`);
|
||||
CloudRunnerLogger.log(` Path: ${parsed.path}`);
|
||||
break;
|
||||
case 'npm':
|
||||
CloudRunnerLogger.log(` Type: NPM package`);
|
||||
CloudRunnerLogger.log(` Package: ${parsed.packageName}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -63,61 +63,23 @@ export class RemoteClient {
|
||||
@CliFunction(`remote-cli-post-build`, `runs a cloud runner build`)
|
||||
public static async remoteClientPostBuild(): Promise<string> {
|
||||
RemoteClientLogger.log(`Running POST build tasks`);
|
||||
// Ensure cache key is present in logs for assertions
|
||||
RemoteClientLogger.log(`CACHE_KEY=${CloudRunner.buildParameters.cacheKey}`);
|
||||
CloudRunnerLogger.log(`${CloudRunner.buildParameters.cacheKey}`);
|
||||
|
||||
// Guard: only push Library cache if the folder exists and has contents
|
||||
try {
|
||||
const libraryFolderHost = CloudRunnerFolders.libraryFolderAbsolute;
|
||||
if (fs.existsSync(libraryFolderHost)) {
|
||||
const libraryEntries = await fs.promises.readdir(libraryFolderHost).catch(() => [] as string[]);
|
||||
if (libraryEntries.length > 0) {
|
||||
await Caching.PushToCache(
|
||||
CloudRunnerFolders.ToLinuxFolder(`${CloudRunnerFolders.cacheFolderForCacheKeyFull}/Library`),
|
||||
CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.libraryFolderAbsolute),
|
||||
`lib-${CloudRunner.buildParameters.buildGuid}`,
|
||||
);
|
||||
} else {
|
||||
RemoteClientLogger.log(`Skipping Library cache push (folder is empty)`);
|
||||
}
|
||||
} else {
|
||||
RemoteClientLogger.log(`Skipping Library cache push (folder missing)`);
|
||||
}
|
||||
} catch (error: any) {
|
||||
RemoteClientLogger.logWarning(`Library cache push skipped with error: ${error.message}`);
|
||||
}
|
||||
await Caching.PushToCache(
|
||||
CloudRunnerFolders.ToLinuxFolder(`${CloudRunnerFolders.cacheFolderForCacheKeyFull}/Library`),
|
||||
CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.libraryFolderAbsolute),
|
||||
`lib-${CloudRunner.buildParameters.buildGuid}`,
|
||||
);
|
||||
|
||||
// Guard: only push Build cache if the folder exists and has contents
|
||||
try {
|
||||
const buildFolderHost = CloudRunnerFolders.projectBuildFolderAbsolute;
|
||||
if (fs.existsSync(buildFolderHost)) {
|
||||
const buildEntries = await fs.promises.readdir(buildFolderHost).catch(() => [] as string[]);
|
||||
if (buildEntries.length > 0) {
|
||||
await Caching.PushToCache(
|
||||
CloudRunnerFolders.ToLinuxFolder(`${CloudRunnerFolders.cacheFolderForCacheKeyFull}/build`),
|
||||
CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectBuildFolderAbsolute),
|
||||
`build-${CloudRunner.buildParameters.buildGuid}`,
|
||||
);
|
||||
} else {
|
||||
RemoteClientLogger.log(`Skipping Build cache push (folder is empty)`);
|
||||
}
|
||||
} else {
|
||||
RemoteClientLogger.log(`Skipping Build cache push (folder missing)`);
|
||||
}
|
||||
} catch (error: any) {
|
||||
RemoteClientLogger.logWarning(`Build cache push skipped with error: ${error.message}`);
|
||||
}
|
||||
await Caching.PushToCache(
|
||||
CloudRunnerFolders.ToLinuxFolder(`${CloudRunnerFolders.cacheFolderForCacheKeyFull}/build`),
|
||||
CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectBuildFolderAbsolute),
|
||||
`build-${CloudRunner.buildParameters.buildGuid}`,
|
||||
);
|
||||
|
||||
if (!BuildParameters.shouldUseRetainedWorkspaceMode(CloudRunner.buildParameters)) {
|
||||
const uniqueJobFolderLinux = CloudRunnerFolders.ToLinuxFolder(
|
||||
CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute,
|
||||
await CloudRunnerSystem.Run(
|
||||
`rm -r ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute)}`,
|
||||
);
|
||||
if (fs.existsSync(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute) || fs.existsSync(uniqueJobFolderLinux)) {
|
||||
await CloudRunnerSystem.Run(`rm -r ${uniqueJobFolderLinux} || true`);
|
||||
} else {
|
||||
RemoteClientLogger.log(`Skipping cleanup; unique job folder missing`);
|
||||
}
|
||||
}
|
||||
|
||||
await RemoteClient.runCustomHookFiles(`after-build`);
|
||||
@@ -125,9 +87,6 @@ export class RemoteClient {
|
||||
// WIP - need to give the pod permissions to create config map
|
||||
await RemoteClientLogger.handleLogManagementPostJob();
|
||||
|
||||
// Ensure success marker is present in logs for tests
|
||||
CloudRunnerLogger.log(`Activation successful`);
|
||||
|
||||
return new Promise((result) => result(``));
|
||||
}
|
||||
static async runCustomHookFiles(hookLifecycle: string) {
|
||||
@@ -234,43 +193,10 @@ export class RemoteClient {
|
||||
await CloudRunnerSystem.Run(`git lfs install`);
|
||||
assert(fs.existsSync(`.git`), 'git folder exists');
|
||||
RemoteClientLogger.log(`${CloudRunner.buildParameters.branch}`);
|
||||
// Ensure refs exist (tags and PR refs)
|
||||
await CloudRunnerSystem.Run(`git fetch --all --tags || true`);
|
||||
if ((CloudRunner.buildParameters.branch || '').startsWith('pull/')) {
|
||||
await CloudRunnerSystem.Run(`git fetch origin +refs/pull/*:refs/remotes/origin/pull/* || true`);
|
||||
}
|
||||
const targetSha = CloudRunner.buildParameters.gitSha;
|
||||
const targetBranch = CloudRunner.buildParameters.branch;
|
||||
if (targetSha) {
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`git checkout ${targetSha}`);
|
||||
} catch (_error) {
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`git fetch origin ${targetSha} || true`);
|
||||
await CloudRunnerSystem.Run(`git checkout ${targetSha}`);
|
||||
} catch (_error2) {
|
||||
RemoteClientLogger.logWarning(`Falling back to branch checkout; SHA not found: ${targetSha}`);
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`git checkout ${targetBranch}`);
|
||||
} catch (_error3) {
|
||||
if ((targetBranch || '').startsWith('pull/')) {
|
||||
await CloudRunnerSystem.Run(`git checkout origin/${targetBranch}`);
|
||||
} else {
|
||||
throw _error2;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (CloudRunner.buildParameters.gitSha !== undefined) {
|
||||
await CloudRunnerSystem.Run(`git checkout ${CloudRunner.buildParameters.gitSha}`);
|
||||
} else {
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`git checkout ${targetBranch}`);
|
||||
} catch (_error) {
|
||||
if ((targetBranch || '').startsWith('pull/')) {
|
||||
await CloudRunnerSystem.Run(`git checkout origin/${targetBranch}`);
|
||||
} else {
|
||||
throw _error;
|
||||
}
|
||||
}
|
||||
await CloudRunnerSystem.Run(`git checkout ${CloudRunner.buildParameters.branch}`);
|
||||
RemoteClientLogger.log(`buildParameter Git Sha is empty`);
|
||||
}
|
||||
|
||||
@@ -295,76 +221,16 @@ export class RemoteClient {
|
||||
process.chdir(CloudRunnerFolders.repoPathAbsolute);
|
||||
await CloudRunnerSystem.Run(`git config --global filter.lfs.smudge "git-lfs smudge -- %f"`);
|
||||
await CloudRunnerSystem.Run(`git config --global filter.lfs.process "git-lfs filter-process"`);
|
||||
if (CloudRunner.buildParameters.skipLfs) {
|
||||
RemoteClientLogger.log(`Skipping LFS pull (skipLfs=true)`);
|
||||
|
||||
return;
|
||||
if (!CloudRunner.buildParameters.skipLfs) {
|
||||
await CloudRunnerSystem.Run(`git lfs pull`);
|
||||
RemoteClientLogger.log(`pulled latest LFS files`);
|
||||
assert(fs.existsSync(CloudRunnerFolders.lfsFolderAbsolute));
|
||||
}
|
||||
|
||||
// Best effort: try plain pull first (works for public repos or pre-configured auth)
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`git lfs pull`, true);
|
||||
await CloudRunnerSystem.Run(`git lfs checkout || true`, true);
|
||||
RemoteClientLogger.log(`Pulled LFS files without explicit token configuration`);
|
||||
|
||||
return;
|
||||
} catch (_error) {
|
||||
/* no-op: best-effort git lfs pull without tokens may fail */
|
||||
void 0;
|
||||
}
|
||||
|
||||
// Try with GIT_PRIVATE_TOKEN
|
||||
try {
|
||||
const gitPrivateToken = process.env.GIT_PRIVATE_TOKEN;
|
||||
if (gitPrivateToken) {
|
||||
RemoteClientLogger.log(`Attempting to pull LFS files with GIT_PRIVATE_TOKEN...`);
|
||||
await CloudRunnerSystem.Run(`git config --global --unset-all url."https://github.com/".insteadOf || true`);
|
||||
await CloudRunnerSystem.Run(`git config --global --unset-all url."ssh://git@github.com/".insteadOf || true`);
|
||||
await CloudRunnerSystem.Run(`git config --global --unset-all url."git@github.com".insteadOf || true`);
|
||||
await CloudRunnerSystem.Run(
|
||||
`git config --global url."https://${gitPrivateToken}@github.com/".insteadOf "https://github.com/"`,
|
||||
);
|
||||
await CloudRunnerSystem.Run(`git lfs pull`, true);
|
||||
await CloudRunnerSystem.Run(`git lfs checkout || true`, true);
|
||||
RemoteClientLogger.log(`Successfully pulled LFS files with GIT_PRIVATE_TOKEN`);
|
||||
|
||||
return;
|
||||
}
|
||||
} catch (error: any) {
|
||||
RemoteClientLogger.logCliError(`Failed with GIT_PRIVATE_TOKEN: ${error.message}`);
|
||||
}
|
||||
|
||||
// Try with GITHUB_TOKEN
|
||||
try {
|
||||
const githubToken = process.env.GITHUB_TOKEN;
|
||||
if (githubToken) {
|
||||
RemoteClientLogger.log(`Attempting to pull LFS files with GITHUB_TOKEN fallback...`);
|
||||
await CloudRunnerSystem.Run(`git config --global --unset-all url."https://github.com/".insteadOf || true`);
|
||||
await CloudRunnerSystem.Run(`git config --global --unset-all url."ssh://git@github.com/".insteadOf || true`);
|
||||
await CloudRunnerSystem.Run(`git config --global --unset-all url."git@github.com".insteadOf || true`);
|
||||
await CloudRunnerSystem.Run(
|
||||
`git config --global url."https://${githubToken}@github.com/".insteadOf "https://github.com/"`,
|
||||
);
|
||||
await CloudRunnerSystem.Run(`git lfs pull`, true);
|
||||
await CloudRunnerSystem.Run(`git lfs checkout || true`, true);
|
||||
RemoteClientLogger.log(`Successfully pulled LFS files with GITHUB_TOKEN`);
|
||||
|
||||
return;
|
||||
}
|
||||
} catch (error: any) {
|
||||
RemoteClientLogger.logCliError(`Failed with GITHUB_TOKEN: ${error.message}`);
|
||||
}
|
||||
|
||||
// If we get here, all strategies failed; continue without failing the build
|
||||
RemoteClientLogger.logWarning(`Proceeding without LFS files (no tokens or pull failed)`);
|
||||
}
|
||||
static async handleRetainedWorkspace() {
|
||||
RemoteClientLogger.log(
|
||||
`Retained Workspace: ${BuildParameters.shouldUseRetainedWorkspaceMode(CloudRunner.buildParameters)}`,
|
||||
);
|
||||
|
||||
// Log cache key explicitly to aid debugging and assertions
|
||||
CloudRunnerLogger.log(`Cache Key: ${CloudRunner.buildParameters.cacheKey}`);
|
||||
if (
|
||||
BuildParameters.shouldUseRetainedWorkspaceMode(CloudRunner.buildParameters) &&
|
||||
fs.existsSync(CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute)) &&
|
||||
@@ -372,29 +238,10 @@ export class RemoteClient {
|
||||
) {
|
||||
CloudRunnerLogger.log(`Retained Workspace Already Exists!`);
|
||||
process.chdir(CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute));
|
||||
await CloudRunnerSystem.Run(`git fetch --all --tags || true`);
|
||||
if ((CloudRunner.buildParameters.branch || '').startsWith('pull/')) {
|
||||
await CloudRunnerSystem.Run(`git fetch origin +refs/pull/*:refs/remotes/origin/pull/* || true`);
|
||||
}
|
||||
await CloudRunnerSystem.Run(`git fetch`);
|
||||
await CloudRunnerSystem.Run(`git lfs pull`);
|
||||
await CloudRunnerSystem.Run(`git lfs checkout || true`);
|
||||
const sha = CloudRunner.buildParameters.gitSha;
|
||||
const branch = CloudRunner.buildParameters.branch;
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`git reset --hard "${sha}"`);
|
||||
await CloudRunnerSystem.Run(`git checkout ${sha}`);
|
||||
} catch (_error) {
|
||||
RemoteClientLogger.logWarning(`Retained workspace: SHA not found, falling back to branch ${branch}`);
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`git checkout ${branch}`);
|
||||
} catch (_error2) {
|
||||
if ((branch || '').startsWith('pull/')) {
|
||||
await CloudRunnerSystem.Run(`git checkout origin/${branch}`);
|
||||
} else {
|
||||
throw _error2;
|
||||
}
|
||||
}
|
||||
}
|
||||
await CloudRunnerSystem.Run(`git reset --hard "${CloudRunner.buildParameters.gitSha}"`);
|
||||
await CloudRunnerSystem.Run(`git checkout ${CloudRunner.buildParameters.gitSha}`);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -6,11 +6,6 @@ import CloudRunnerOptions from '../options/cloud-runner-options';
|
||||
|
||||
export class RemoteClientLogger {
|
||||
private static get LogFilePath() {
|
||||
// Use a cross-platform temporary directory for local development
|
||||
if (process.platform === 'win32') {
|
||||
return path.join(process.cwd(), 'temp', 'job-log.txt');
|
||||
}
|
||||
|
||||
return path.join(`/home`, `job-log.txt`);
|
||||
}
|
||||
|
||||
@@ -34,12 +29,6 @@ export class RemoteClientLogger {
|
||||
|
||||
public static appendToFile(message: string) {
|
||||
if (CloudRunner.isCloudRunnerEnvironment) {
|
||||
// Ensure the directory exists before writing
|
||||
const logDirectory = path.dirname(RemoteClientLogger.LogFilePath);
|
||||
if (!fs.existsSync(logDirectory)) {
|
||||
fs.mkdirSync(logDirectory, { recursive: true });
|
||||
}
|
||||
|
||||
fs.appendFileSync(RemoteClientLogger.LogFilePath, `${message}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,9 +47,9 @@ export class FollowLogStreamService {
|
||||
} else if (message.toLowerCase().includes('cannot be found')) {
|
||||
FollowLogStreamService.errors += `\n${message}`;
|
||||
}
|
||||
|
||||
// Always append log lines to output so tests can assert on BuildResults
|
||||
output += `${message}\n`;
|
||||
if (CloudRunner.buildParameters.cloudRunnerDebug) {
|
||||
output += `${message}\n`;
|
||||
}
|
||||
CloudRunnerLogger.log(`[${CloudRunnerStatics.logPrefix}] ${message}`);
|
||||
|
||||
return { shouldReadLogs, shouldCleanup, output };
|
||||
|
||||
@@ -1,107 +1,23 @@
|
||||
import { CloudRunnerSystem } from './cloud-runner-system';
|
||||
import fs from 'node:fs';
|
||||
import CloudRunnerLogger from './cloud-runner-logger';
|
||||
import BuildParameters from '../../../build-parameters';
|
||||
import CloudRunner from '../../cloud-runner';
|
||||
import Input from '../../../input';
|
||||
import {
|
||||
CreateBucketCommand,
|
||||
DeleteObjectCommand,
|
||||
HeadBucketCommand,
|
||||
ListObjectsV2Command,
|
||||
PutObjectCommand,
|
||||
S3,
|
||||
} from '@aws-sdk/client-s3';
|
||||
import { AwsClientFactory } from '../../providers/aws/aws-client-factory';
|
||||
import { promisify } from 'node:util';
|
||||
import { exec as execCb } from 'node:child_process';
|
||||
const exec = promisify(execCb);
|
||||
export class SharedWorkspaceLocking {
|
||||
private static _s3: S3;
|
||||
private static get s3(): S3 {
|
||||
if (!SharedWorkspaceLocking._s3) {
|
||||
// Use factory so LocalStack endpoint/path-style settings are honored
|
||||
SharedWorkspaceLocking._s3 = AwsClientFactory.getS3();
|
||||
}
|
||||
return SharedWorkspaceLocking._s3;
|
||||
}
|
||||
private static get useRclone() {
|
||||
return CloudRunner.buildParameters.storageProvider === 'rclone';
|
||||
}
|
||||
private static async rclone(command: string): Promise<string> {
|
||||
const { stdout } = await exec(`rclone ${command}`);
|
||||
return stdout.toString();
|
||||
}
|
||||
private static get bucket() {
|
||||
return SharedWorkspaceLocking.useRclone
|
||||
? CloudRunner.buildParameters.rcloneRemote
|
||||
: CloudRunner.buildParameters.awsStackName;
|
||||
}
|
||||
public static get workspaceBucketRoot() {
|
||||
return SharedWorkspaceLocking.useRclone
|
||||
? `${SharedWorkspaceLocking.bucket}/`
|
||||
: `s3://${SharedWorkspaceLocking.bucket}/`;
|
||||
return `s3://${CloudRunner.buildParameters.awsStackName}/`;
|
||||
}
|
||||
public static get workspaceRoot() {
|
||||
return `${SharedWorkspaceLocking.workspaceBucketRoot}locks/`;
|
||||
}
|
||||
private static get workspacePrefix() {
|
||||
return `locks/`;
|
||||
}
|
||||
private static async ensureBucketExists(): Promise<void> {
|
||||
const bucket = SharedWorkspaceLocking.bucket;
|
||||
if (SharedWorkspaceLocking.useRclone) {
|
||||
try {
|
||||
await SharedWorkspaceLocking.rclone(`lsf ${bucket}`);
|
||||
} catch {
|
||||
await SharedWorkspaceLocking.rclone(`mkdir ${bucket}`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await SharedWorkspaceLocking.s3.send(new HeadBucketCommand({ Bucket: bucket }));
|
||||
} catch {
|
||||
const region = Input.region || process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || 'us-east-1';
|
||||
const createParams: any = { Bucket: bucket };
|
||||
if (region && region !== 'us-east-1') {
|
||||
createParams.CreateBucketConfiguration = { LocationConstraint: region };
|
||||
}
|
||||
await SharedWorkspaceLocking.s3.send(new CreateBucketCommand(createParams));
|
||||
}
|
||||
}
|
||||
private static async listObjects(prefix: string, bucket = SharedWorkspaceLocking.bucket): Promise<string[]> {
|
||||
await SharedWorkspaceLocking.ensureBucketExists();
|
||||
if (prefix !== '' && !prefix.endsWith('/')) {
|
||||
prefix += '/';
|
||||
}
|
||||
if (SharedWorkspaceLocking.useRclone) {
|
||||
const path = `${bucket}/${prefix}`;
|
||||
try {
|
||||
const output = await SharedWorkspaceLocking.rclone(`lsjson ${path}`);
|
||||
const json = JSON.parse(output) as { Name: string; IsDir: boolean }[];
|
||||
return json.map((e) => (e.IsDir ? `${e.Name}/` : e.Name));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
const result = await SharedWorkspaceLocking.s3.send(
|
||||
new ListObjectsV2Command({ Bucket: bucket, Prefix: prefix, Delimiter: '/' }),
|
||||
);
|
||||
const entries: string[] = [];
|
||||
for (const p of result.CommonPrefixes || []) {
|
||||
if (p.Prefix) entries.push(p.Prefix.slice(prefix.length));
|
||||
}
|
||||
for (const c of result.Contents || []) {
|
||||
if (c.Key && c.Key !== prefix) entries.push(c.Key.slice(prefix.length));
|
||||
}
|
||||
return entries;
|
||||
}
|
||||
public static async GetAllWorkspaces(buildParametersContext: BuildParameters): Promise<string[]> {
|
||||
if (!(await SharedWorkspaceLocking.DoesCacheKeyTopLevelExist(buildParametersContext))) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return (
|
||||
await SharedWorkspaceLocking.listObjects(
|
||||
`${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/`,
|
||||
await SharedWorkspaceLocking.ReadLines(
|
||||
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/`,
|
||||
)
|
||||
)
|
||||
.map((x) => x.replace(`/`, ``))
|
||||
@@ -110,11 +26,13 @@ export class SharedWorkspaceLocking {
|
||||
}
|
||||
public static async DoesCacheKeyTopLevelExist(buildParametersContext: BuildParameters) {
|
||||
try {
|
||||
const rootLines = await SharedWorkspaceLocking.listObjects('');
|
||||
const rootLines = await SharedWorkspaceLocking.ReadLines(
|
||||
`aws s3 ls ${SharedWorkspaceLocking.workspaceBucketRoot}`,
|
||||
);
|
||||
const lockFolderExists = rootLines.map((x) => x.replace(`/`, ``)).includes(`locks`);
|
||||
|
||||
if (lockFolderExists) {
|
||||
const lines = await SharedWorkspaceLocking.listObjects(SharedWorkspaceLocking.workspacePrefix);
|
||||
const lines = await SharedWorkspaceLocking.ReadLines(`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}`);
|
||||
|
||||
return lines.map((x) => x.replace(`/`, ``)).includes(buildParametersContext.cacheKey);
|
||||
} else {
|
||||
@@ -137,8 +55,8 @@ export class SharedWorkspaceLocking {
|
||||
}
|
||||
|
||||
return (
|
||||
await SharedWorkspaceLocking.listObjects(
|
||||
`${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/`,
|
||||
await SharedWorkspaceLocking.ReadLines(
|
||||
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/`,
|
||||
)
|
||||
)
|
||||
.map((x) => x.replace(`/`, ``))
|
||||
@@ -264,8 +182,8 @@ export class SharedWorkspaceLocking {
|
||||
}
|
||||
|
||||
return (
|
||||
await SharedWorkspaceLocking.listObjects(
|
||||
`${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/`,
|
||||
await SharedWorkspaceLocking.ReadLines(
|
||||
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/`,
|
||||
)
|
||||
)
|
||||
.map((x) => x.replace(`/`, ``))
|
||||
@@ -277,8 +195,8 @@ export class SharedWorkspaceLocking {
|
||||
if (!(await SharedWorkspaceLocking.DoesWorkspaceExist(workspace, buildParametersContext))) {
|
||||
throw new Error(`workspace doesn't exist ${workspace}`);
|
||||
}
|
||||
const files = await SharedWorkspaceLocking.listObjects(
|
||||
`${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/`,
|
||||
const files = await SharedWorkspaceLocking.ReadLines(
|
||||
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/`,
|
||||
);
|
||||
|
||||
const lockFilesExist =
|
||||
@@ -294,15 +212,14 @@ export class SharedWorkspaceLocking {
|
||||
throw new Error(`${workspace} already exists`);
|
||||
}
|
||||
const timestamp = Date.now();
|
||||
const key = `${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/${timestamp}_${workspace}_workspace`;
|
||||
await SharedWorkspaceLocking.ensureBucketExists();
|
||||
if (SharedWorkspaceLocking.useRclone) {
|
||||
await SharedWorkspaceLocking.rclone(`touch ${SharedWorkspaceLocking.bucket}/${key}`);
|
||||
} else {
|
||||
await SharedWorkspaceLocking.s3.send(
|
||||
new PutObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key, Body: new Uint8Array(0) }),
|
||||
);
|
||||
}
|
||||
const file = `${timestamp}_${workspace}_workspace`;
|
||||
fs.writeFileSync(file, '');
|
||||
await CloudRunnerSystem.Run(
|
||||
`aws s3 cp ./${file} ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${file}`,
|
||||
false,
|
||||
true,
|
||||
);
|
||||
fs.rmSync(file);
|
||||
|
||||
const workspaces = await SharedWorkspaceLocking.GetAllWorkspaces(buildParametersContext);
|
||||
|
||||
@@ -324,30 +241,25 @@ export class SharedWorkspaceLocking {
|
||||
): Promise<boolean> {
|
||||
const existingWorkspace = workspace.endsWith(`_workspace`);
|
||||
const ending = existingWorkspace ? workspace : `${workspace}_workspace`;
|
||||
const key = `${SharedWorkspaceLocking.workspacePrefix}${
|
||||
buildParametersContext.cacheKey
|
||||
}/${Date.now()}_${runId}_${ending}_lock`;
|
||||
await SharedWorkspaceLocking.ensureBucketExists();
|
||||
if (SharedWorkspaceLocking.useRclone) {
|
||||
await SharedWorkspaceLocking.rclone(`touch ${SharedWorkspaceLocking.bucket}/${key}`);
|
||||
} else {
|
||||
await SharedWorkspaceLocking.s3.send(
|
||||
new PutObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key, Body: new Uint8Array(0) }),
|
||||
);
|
||||
}
|
||||
const file = `${Date.now()}_${runId}_${ending}_lock`;
|
||||
fs.writeFileSync(file, '');
|
||||
await CloudRunnerSystem.Run(
|
||||
`aws s3 cp ./${file} ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${file}`,
|
||||
false,
|
||||
true,
|
||||
);
|
||||
fs.rmSync(file);
|
||||
|
||||
const hasLock = await SharedWorkspaceLocking.HasWorkspaceLock(workspace, runId, buildParametersContext);
|
||||
|
||||
if (hasLock) {
|
||||
CloudRunner.lockedWorkspace = workspace;
|
||||
} else {
|
||||
if (SharedWorkspaceLocking.useRclone) {
|
||||
await SharedWorkspaceLocking.rclone(`delete ${SharedWorkspaceLocking.bucket}/${key}`);
|
||||
} else {
|
||||
await SharedWorkspaceLocking.s3.send(
|
||||
new DeleteObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: key }),
|
||||
);
|
||||
}
|
||||
await CloudRunnerSystem.Run(
|
||||
`aws s3 rm ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${file}`,
|
||||
false,
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
return hasLock;
|
||||
@@ -358,50 +270,30 @@ export class SharedWorkspaceLocking {
|
||||
runId: string,
|
||||
buildParametersContext: BuildParameters,
|
||||
): Promise<boolean> {
|
||||
await SharedWorkspaceLocking.ensureBucketExists();
|
||||
const files = await SharedWorkspaceLocking.GetAllLocksForWorkspace(workspace, buildParametersContext);
|
||||
const file = files.find((x) => x.includes(workspace) && x.endsWith(`_lock`) && x.includes(runId));
|
||||
CloudRunnerLogger.log(`All Locks ${files} ${workspace} ${runId}`);
|
||||
CloudRunnerLogger.log(`Deleting lock ${workspace}/${file}`);
|
||||
CloudRunnerLogger.log(`rm ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${file}`);
|
||||
if (file) {
|
||||
if (SharedWorkspaceLocking.useRclone) {
|
||||
await SharedWorkspaceLocking.rclone(
|
||||
`delete ${SharedWorkspaceLocking.bucket}/${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/${file}`,
|
||||
);
|
||||
} else {
|
||||
await SharedWorkspaceLocking.s3.send(
|
||||
new DeleteObjectCommand({
|
||||
Bucket: SharedWorkspaceLocking.bucket,
|
||||
Key: `${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/${file}`,
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
await CloudRunnerSystem.Run(
|
||||
`aws s3 rm ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${file}`,
|
||||
false,
|
||||
true,
|
||||
);
|
||||
|
||||
return !(await SharedWorkspaceLocking.HasWorkspaceLock(workspace, runId, buildParametersContext));
|
||||
}
|
||||
|
||||
public static async CleanupWorkspace(workspace: string, buildParametersContext: BuildParameters) {
|
||||
const prefix = `${SharedWorkspaceLocking.workspacePrefix}${buildParametersContext.cacheKey}/`;
|
||||
const files = await SharedWorkspaceLocking.listObjects(prefix);
|
||||
for (const file of files.filter((x) => x.includes(`_${workspace}_`))) {
|
||||
if (SharedWorkspaceLocking.useRclone) {
|
||||
await SharedWorkspaceLocking.rclone(`delete ${SharedWorkspaceLocking.bucket}/${prefix}${file}`);
|
||||
} else {
|
||||
await SharedWorkspaceLocking.s3.send(
|
||||
new DeleteObjectCommand({ Bucket: SharedWorkspaceLocking.bucket, Key: `${prefix}${file}` }),
|
||||
);
|
||||
}
|
||||
}
|
||||
await CloudRunnerSystem.Run(
|
||||
`aws s3 rm ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey} --exclude "*" --include "*_${workspace}_*"`,
|
||||
false,
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
public static async ReadLines(command: string): Promise<string[]> {
|
||||
const path = command.replace('aws s3 ls', '').replace('rclone lsf', '').trim();
|
||||
const withoutScheme = path.replace('s3://', '');
|
||||
const [bucket, ...rest] = withoutScheme.split('/');
|
||||
const prefix = rest.join('/');
|
||||
return SharedWorkspaceLocking.listObjects(prefix, bucket);
|
||||
return CloudRunnerSystem.RunAndReadLines(command);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -33,8 +33,6 @@ export class TaskParameterSerializer {
|
||||
...TaskParameterSerializer.serializeInput(),
|
||||
...TaskParameterSerializer.serializeCloudRunnerOptions(),
|
||||
...CommandHookService.getSecrets(CommandHookService.getHooks(buildParameters.commandHooks)),
|
||||
// Include AWS environment variables for LocalStack compatibility
|
||||
...TaskParameterSerializer.serializeAwsEnvironmentVariables(),
|
||||
]
|
||||
.filter(
|
||||
(x) =>
|
||||
@@ -93,28 +91,6 @@ export class TaskParameterSerializer {
|
||||
return TaskParameterSerializer.serializeFromType(CloudRunnerOptions);
|
||||
}
|
||||
|
||||
private static serializeAwsEnvironmentVariables() {
|
||||
const awsEnvVars = [
|
||||
'AWS_ACCESS_KEY_ID',
|
||||
'AWS_SECRET_ACCESS_KEY',
|
||||
'AWS_DEFAULT_REGION',
|
||||
'AWS_REGION',
|
||||
'AWS_S3_ENDPOINT',
|
||||
'AWS_ENDPOINT',
|
||||
'AWS_CLOUD_FORMATION_ENDPOINT',
|
||||
'AWS_ECS_ENDPOINT',
|
||||
'AWS_KINESIS_ENDPOINT',
|
||||
'AWS_CLOUD_WATCH_LOGS_ENDPOINT',
|
||||
];
|
||||
|
||||
return awsEnvVars
|
||||
.filter((key) => process.env[key] !== undefined)
|
||||
.map((key) => ({
|
||||
name: key,
|
||||
value: process.env[key] || '',
|
||||
}));
|
||||
}
|
||||
|
||||
public static ToEnvVarFormat(input: string): string {
|
||||
return CloudRunnerOptions.ToEnvVarFormat(input);
|
||||
}
|
||||
|
||||
@@ -37,23 +37,17 @@ export class ContainerHookService {
|
||||
image: amazon/aws-cli
|
||||
hook: after
|
||||
commands: |
|
||||
if command -v aws > /dev/null 2>&1; then
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default || true
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default || true
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default || true
|
||||
ENDPOINT_ARGS=""
|
||||
if [ -n "$AWS_S3_ENDPOINT" ]; then ENDPOINT_ARGS="--endpoint-url $AWS_S3_ENDPOINT"; fi
|
||||
aws $ENDPOINT_ARGS s3 cp /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||
aws s3 cp /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} s3://${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} || true
|
||||
rm /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
|
||||
}
|
||||
rm /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} || true
|
||||
else
|
||||
echo "AWS CLI not available, skipping aws-s3-upload-build"
|
||||
fi
|
||||
}
|
||||
secrets:
|
||||
- name: awsAccessKeyId
|
||||
value: ${process.env.AWS_ACCESS_KEY_ID || ``}
|
||||
@@ -61,36 +55,27 @@ export class ContainerHookService {
|
||||
value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
|
||||
- name: awsDefaultRegion
|
||||
value: ${process.env.AWS_REGION || ``}
|
||||
- name: AWS_S3_ENDPOINT
|
||||
value: ${CloudRunnerOptions.awsS3Endpoint || process.env.AWS_S3_ENDPOINT || ``}
|
||||
- name: aws-s3-pull-build
|
||||
image: amazon/aws-cli
|
||||
commands: |
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||
aws s3 ls ${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/ || true
|
||||
aws s3 ls ${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/build || true
|
||||
mkdir -p /data/cache/$CACHE_KEY/build/
|
||||
if command -v aws > /dev/null 2>&1; then
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default || true
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default || true
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default || true
|
||||
ENDPOINT_ARGS=""
|
||||
if [ -n "$AWS_S3_ENDPOINT" ]; then ENDPOINT_ARGS="--endpoint-url $AWS_S3_ENDPOINT"; fi
|
||||
aws $ENDPOINT_ARGS s3 ls ${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/ || true
|
||||
aws $ENDPOINT_ARGS s3 ls ${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/build || true
|
||||
aws s3 cp s3://${
|
||||
CloudRunner.buildParameters.awsStackName
|
||||
}/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
|
||||
aws s3 cp s3://${
|
||||
CloudRunner.buildParameters.awsStackName
|
||||
}/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} /data/cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} || true
|
||||
else
|
||||
echo "AWS CLI not available, skipping aws-s3-pull-build"
|
||||
fi
|
||||
}
|
||||
secrets:
|
||||
- name: AWS_ACCESS_KEY_ID
|
||||
- name: AWS_SECRET_ACCESS_KEY
|
||||
- name: AWS_DEFAULT_REGION
|
||||
- name: BUILD_GUID_TARGET
|
||||
- name: AWS_S3_ENDPOINT
|
||||
- name: steam-deploy-client
|
||||
image: steamcmd/steamcmd
|
||||
commands: |
|
||||
@@ -131,23 +116,17 @@ export class ContainerHookService {
|
||||
image: amazon/aws-cli
|
||||
hook: after
|
||||
commands: |
|
||||
if command -v aws > /dev/null 2>&1; then
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default || true
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default || true
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default || true
|
||||
ENDPOINT_ARGS=""
|
||||
if [ -n "$AWS_S3_ENDPOINT" ]; then ENDPOINT_ARGS="--endpoint-url $AWS_S3_ENDPOINT"; fi
|
||||
aws $ENDPOINT_ARGS s3 cp --recursive /data/cache/$CACHE_KEY/lfs s3://${
|
||||
CloudRunner.buildParameters.awsStackName
|
||||
}/cloud-runner-cache/$CACHE_KEY/lfs || true
|
||||
rm -r /data/cache/$CACHE_KEY/lfs || true
|
||||
aws $ENDPOINT_ARGS s3 cp --recursive /data/cache/$CACHE_KEY/Library s3://${
|
||||
CloudRunner.buildParameters.awsStackName
|
||||
}/cloud-runner-cache/$CACHE_KEY/Library || true
|
||||
rm -r /data/cache/$CACHE_KEY/Library || true
|
||||
else
|
||||
echo "AWS CLI not available, skipping aws-s3-upload-cache"
|
||||
fi
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||
aws s3 cp --recursive /data/cache/$CACHE_KEY/lfs s3://${
|
||||
CloudRunner.buildParameters.awsStackName
|
||||
}/cloud-runner-cache/$CACHE_KEY/lfs
|
||||
rm -r /data/cache/$CACHE_KEY/lfs
|
||||
aws s3 cp --recursive /data/cache/$CACHE_KEY/Library s3://${
|
||||
CloudRunner.buildParameters.awsStackName
|
||||
}/cloud-runner-cache/$CACHE_KEY/Library
|
||||
rm -r /data/cache/$CACHE_KEY/Library
|
||||
secrets:
|
||||
- name: AWS_ACCESS_KEY_ID
|
||||
value: ${process.env.AWS_ACCESS_KEY_ID || ``}
|
||||
@@ -155,142 +134,49 @@ export class ContainerHookService {
|
||||
value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
|
||||
- name: AWS_DEFAULT_REGION
|
||||
value: ${process.env.AWS_REGION || ``}
|
||||
- name: AWS_S3_ENDPOINT
|
||||
value: ${CloudRunnerOptions.awsS3Endpoint || process.env.AWS_S3_ENDPOINT || ``}
|
||||
- name: aws-s3-pull-cache
|
||||
image: amazon/aws-cli
|
||||
hook: before
|
||||
commands: |
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||
mkdir -p /data/cache/$CACHE_KEY/Library/
|
||||
mkdir -p /data/cache/$CACHE_KEY/lfs/
|
||||
if command -v aws > /dev/null 2>&1; then
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default || true
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default || true
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default || true
|
||||
ENDPOINT_ARGS=""
|
||||
if [ -n "$AWS_S3_ENDPOINT" ]; then ENDPOINT_ARGS="--endpoint-url $AWS_S3_ENDPOINT"; fi
|
||||
aws $ENDPOINT_ARGS s3 ls ${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/ || true
|
||||
aws $ENDPOINT_ARGS s3 ls ${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/ || true
|
||||
BUCKET1="${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/Library/"
|
||||
aws $ENDPOINT_ARGS s3 ls $BUCKET1 || true
|
||||
OBJECT1="$(aws $ENDPOINT_ARGS s3 ls $BUCKET1 | sort | tail -n 1 | awk '{print $4}' || '')"
|
||||
aws $ENDPOINT_ARGS s3 cp s3://$BUCKET1$OBJECT1 /data/cache/$CACHE_KEY/Library/ || true
|
||||
BUCKET2="${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/lfs/"
|
||||
aws $ENDPOINT_ARGS s3 ls $BUCKET2 || true
|
||||
OBJECT2="$(aws $ENDPOINT_ARGS s3 ls $BUCKET2 | sort | tail -n 1 | awk '{print $4}' || '')"
|
||||
aws $ENDPOINT_ARGS s3 cp s3://$BUCKET2$OBJECT2 /data/cache/$CACHE_KEY/lfs/ || true
|
||||
else
|
||||
echo "AWS CLI not available, skipping aws-s3-pull-cache"
|
||||
fi
|
||||
- name: rclone-upload-build
|
||||
image: rclone/rclone
|
||||
hook: after
|
||||
commands: |
|
||||
if command -v rclone > /dev/null 2>&1; then
|
||||
rclone copy /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} ${CloudRunner.buildParameters.rcloneRemote}/cloud-runner-cache/$CACHE_KEY/build/ || true
|
||||
rm /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} || true
|
||||
else
|
||||
echo "rclone not available, skipping rclone-upload-build"
|
||||
fi
|
||||
aws s3 ls ${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/ || true
|
||||
aws s3 ls ${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/ || true
|
||||
BUCKET1="${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/Library/"
|
||||
aws s3 ls $BUCKET1 || true
|
||||
OBJECT1="$(aws s3 ls $BUCKET1 | sort | tail -n 1 | awk '{print $4}' || '')"
|
||||
aws s3 cp s3://$BUCKET1$OBJECT1 /data/cache/$CACHE_KEY/Library/ || true
|
||||
BUCKET2="${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/$CACHE_KEY/lfs/"
|
||||
aws s3 ls $BUCKET2 || true
|
||||
OBJECT2="$(aws s3 ls $BUCKET2 | sort | tail -n 1 | awk '{print $4}' || '')"
|
||||
aws s3 cp s3://$BUCKET2$OBJECT2 /data/cache/$CACHE_KEY/lfs/ || true
|
||||
secrets:
|
||||
- name: RCLONE_REMOTE
|
||||
value: ${CloudRunner.buildParameters.rcloneRemote || ``}
|
||||
- name: rclone-pull-build
|
||||
image: rclone/rclone
|
||||
commands: |
|
||||
mkdir -p /data/cache/$CACHE_KEY/build/
|
||||
if command -v rclone > /dev/null 2>&1; then
|
||||
rclone copy ${
|
||||
CloudRunner.buildParameters.rcloneRemote
|
||||
}/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} /data/cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
|
||||
CloudRunner.buildParameters.useCompressionStrategy ? '.lz4' : ''
|
||||
} || true
|
||||
else
|
||||
echo "rclone not available, skipping rclone-pull-build"
|
||||
fi
|
||||
secrets:
|
||||
- name: BUILD_GUID_TARGET
|
||||
- name: RCLONE_REMOTE
|
||||
value: ${CloudRunner.buildParameters.rcloneRemote || ``}
|
||||
- name: rclone-upload-cache
|
||||
image: rclone/rclone
|
||||
hook: after
|
||||
commands: |
|
||||
if command -v rclone > /dev/null 2>&1; then
|
||||
rclone copy /data/cache/$CACHE_KEY/lfs ${
|
||||
CloudRunner.buildParameters.rcloneRemote
|
||||
}/cloud-runner-cache/$CACHE_KEY/lfs || true
|
||||
rm -r /data/cache/$CACHE_KEY/lfs || true
|
||||
rclone copy /data/cache/$CACHE_KEY/Library ${
|
||||
CloudRunner.buildParameters.rcloneRemote
|
||||
}/cloud-runner-cache/$CACHE_KEY/Library || true
|
||||
rm -r /data/cache/$CACHE_KEY/Library || true
|
||||
else
|
||||
echo "rclone not available, skipping rclone-upload-cache"
|
||||
fi
|
||||
secrets:
|
||||
- name: RCLONE_REMOTE
|
||||
value: ${CloudRunner.buildParameters.rcloneRemote || ``}
|
||||
- name: rclone-pull-cache
|
||||
image: rclone/rclone
|
||||
hook: before
|
||||
commands: |
|
||||
mkdir -p /data/cache/$CACHE_KEY/Library/
|
||||
mkdir -p /data/cache/$CACHE_KEY/lfs/
|
||||
if command -v rclone > /dev/null 2>&1; then
|
||||
rclone copy ${
|
||||
CloudRunner.buildParameters.rcloneRemote
|
||||
}/cloud-runner-cache/$CACHE_KEY/Library /data/cache/$CACHE_KEY/Library/ || true
|
||||
rclone copy ${
|
||||
CloudRunner.buildParameters.rcloneRemote
|
||||
}/cloud-runner-cache/$CACHE_KEY/lfs /data/cache/$CACHE_KEY/lfs/ || true
|
||||
else
|
||||
echo "rclone not available, skipping rclone-pull-cache"
|
||||
fi
|
||||
secrets:
|
||||
- name: RCLONE_REMOTE
|
||||
value: ${CloudRunner.buildParameters.rcloneRemote || ``}
|
||||
- name: AWS_ACCESS_KEY_ID
|
||||
value: ${process.env.AWS_ACCESS_KEY_ID || ``}
|
||||
- name: AWS_SECRET_ACCESS_KEY
|
||||
value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
|
||||
- name: AWS_DEFAULT_REGION
|
||||
value: ${process.env.AWS_REGION || ``}
|
||||
- name: debug-cache
|
||||
image: ubuntu
|
||||
hook: after
|
||||
commands: |
|
||||
apt-get update > /dev/null || true
|
||||
${CloudRunnerOptions.cloudRunnerDebug ? `apt-get install -y tree > /dev/null || true` : `#`}
|
||||
${CloudRunnerOptions.cloudRunnerDebug ? `tree -L 3 /data/cache || true` : `#`}
|
||||
apt-get update > /dev/null
|
||||
${CloudRunnerOptions.cloudRunnerDebug ? `apt-get install -y tree > /dev/null` : `#`}
|
||||
${CloudRunnerOptions.cloudRunnerDebug ? `tree -L 3 /data/cache` : `#`}
|
||||
secrets:
|
||||
- name: awsAccessKeyId
|
||||
value: ${process.env.AWS_ACCESS_KEY_ID || ``}
|
||||
- name: awsSecretAccessKey
|
||||
value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
|
||||
- name: awsDefaultRegion
|
||||
value: ${process.env.AWS_REGION || ``}
|
||||
- name: AWS_S3_ENDPOINT
|
||||
value: ${CloudRunnerOptions.awsS3Endpoint || process.env.AWS_S3_ENDPOINT || ``}`,
|
||||
value: ${process.env.AWS_REGION || ``}`,
|
||||
).filter((x) => CloudRunnerOptions.containerHookFiles.includes(x.name) && x.hook === hookLifecycle);
|
||||
|
||||
// In local provider mode (non-container) or when AWS credentials are not present, skip AWS S3 hooks
|
||||
const provider = CloudRunner.buildParameters?.providerStrategy;
|
||||
const isContainerized = provider === 'aws' || provider === 'k8s' || provider === 'local-docker';
|
||||
const hasAwsCreds =
|
||||
(process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY) ||
|
||||
(process.env.awsAccessKeyId && process.env.awsSecretAccessKey);
|
||||
|
||||
// Always include AWS hooks on the AWS provider (task role provides creds),
|
||||
// otherwise require explicit creds for other containerized providers.
|
||||
const shouldIncludeAwsHooks =
|
||||
isContainerized && !CloudRunner.buildParameters?.skipCache && (provider === 'aws' || Boolean(hasAwsCreds));
|
||||
const filteredBuiltIns = shouldIncludeAwsHooks
|
||||
? builtInContainerHooks
|
||||
: builtInContainerHooks.filter((x) => x.image !== 'amazon/aws-cli');
|
||||
|
||||
if (filteredBuiltIns.length > 0) {
|
||||
results.push(...filteredBuiltIns);
|
||||
if (builtInContainerHooks.length > 0) {
|
||||
results.push(...builtInContainerHooks);
|
||||
}
|
||||
|
||||
return results;
|
||||
|
||||
@@ -43,7 +43,6 @@ describe('Cloud Runner Sync Environments', () => {
|
||||
- name: '${testSecretName}'
|
||||
value: '${testSecretValue}'
|
||||
`,
|
||||
cloudRunnerDebug: true,
|
||||
});
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
if (baseImage.toString().includes('undefined')) {
|
||||
|
||||
@@ -17,6 +17,7 @@ describe('Cloud Runner Github Checks', () => {
|
||||
status: 200,
|
||||
data: {},
|
||||
});
|
||||
// eslint-disable-next-line unicorn/no-useless-undefined
|
||||
jest.spyOn(GitHub as any, 'runUpdateAsyncChecksWorkflow').mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
|
||||
@@ -94,7 +94,6 @@ commands: echo "test"`;
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
containerHookFiles: `my-test-step-pre-build,my-test-step-post-build`,
|
||||
commandHookFiles: `my-test-hook-pre-build,my-test-hook-post-build`,
|
||||
cloudRunnerDebug: true,
|
||||
};
|
||||
const buildParameter2 = await CreateParameters(overrides);
|
||||
const baseImage2 = new ImageTag(buildParameter2);
|
||||
@@ -109,9 +108,7 @@ commands: echo "test"`;
|
||||
const buildContainsPreBuildStepMessage = results2.includes('before-build step test!');
|
||||
const buildContainsPostBuildStepMessage = results2.includes('after-build step test!');
|
||||
|
||||
if (CloudRunnerOptions.providerStrategy !== 'local') {
|
||||
expect(buildContainsBuildSucceeded).toBeTruthy();
|
||||
}
|
||||
expect(buildContainsBuildSucceeded).toBeTruthy();
|
||||
expect(buildContainsPreBuildHookRunMessage).toBeTruthy();
|
||||
expect(buildContainsPostBuildHookRunMessage).toBeTruthy();
|
||||
expect(buildContainsPreBuildStepMessage).toBeTruthy();
|
||||
|
||||
@@ -1,87 +0,0 @@
|
||||
import CloudRunner from '../cloud-runner';
|
||||
import { BuildParameters, ImageTag } from '../..';
|
||||
import UnityVersioning from '../../unity-versioning';
|
||||
import { Cli } from '../../cli/cli';
|
||||
import CloudRunnerLogger from '../services/core/cloud-runner-logger';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import setups from './cloud-runner-suite.test';
|
||||
import { CloudRunnerSystem } from '../services/core/cloud-runner-system';
|
||||
import { OptionValues } from 'commander';
|
||||
|
||||
async function CreateParameters(overrides: OptionValues | undefined) {
|
||||
if (overrides) {
|
||||
Cli.options = overrides;
|
||||
}
|
||||
|
||||
return await BuildParameters.create();
|
||||
}
|
||||
|
||||
describe('Cloud Runner pre-built rclone steps', () => {
|
||||
it('Responds', () => {});
|
||||
it('Simple test to check if file is loaded', () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
setups();
|
||||
|
||||
(() => {
|
||||
// Determine environment capability to run rclone operations
|
||||
const isCI = process.env.GITHUB_ACTIONS === 'true';
|
||||
const isWindows = process.platform === 'win32';
|
||||
let rcloneAvailable = false;
|
||||
let bashAvailable = !isWindows; // assume available on non-Windows
|
||||
if (!isCI) {
|
||||
try {
|
||||
const { execSync } = require('child_process');
|
||||
execSync('rclone version', { stdio: 'ignore' });
|
||||
rcloneAvailable = true;
|
||||
} catch {
|
||||
rcloneAvailable = false;
|
||||
}
|
||||
if (isWindows) {
|
||||
try {
|
||||
const { execSync } = require('child_process');
|
||||
execSync('bash --version', { stdio: 'ignore' });
|
||||
bashAvailable = true;
|
||||
} catch {
|
||||
bashAvailable = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const hasRcloneRemote = Boolean(process.env.RCLONE_REMOTE || process.env.rcloneRemote);
|
||||
const shouldRunRclone = (isCI && hasRcloneRemote) || (rcloneAvailable && (!isWindows || bashAvailable));
|
||||
|
||||
if (shouldRunRclone) {
|
||||
it('Run build and prebuilt rclone cache pull, cache push and upload build', async () => {
|
||||
const remote = process.env.RCLONE_REMOTE || process.env.rcloneRemote || 'local:./temp/rclone-remote';
|
||||
const overrides = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
containerHookFiles: `rclone-pull-cache,rclone-upload-cache,rclone-upload-build`,
|
||||
storageProvider: 'rclone',
|
||||
rcloneRemote: remote,
|
||||
cloudRunnerDebug: true,
|
||||
} as unknown as OptionValues;
|
||||
|
||||
const buildParams = await CreateParameters(overrides);
|
||||
const baseImage = new ImageTag(buildParams);
|
||||
const results = await CloudRunner.run(buildParams, baseImage.toString());
|
||||
CloudRunnerLogger.log(`rclone run succeeded`);
|
||||
expect(results.BuildSucceeded).toBe(true);
|
||||
|
||||
// List remote root to validate the remote is accessible (best-effort)
|
||||
try {
|
||||
const lines = await CloudRunnerSystem.RunAndReadLines(`rclone lsf ${remote}`);
|
||||
CloudRunnerLogger.log(lines.join(','));
|
||||
} catch {}
|
||||
}, 1_000_000_000);
|
||||
} else {
|
||||
it.skip('Run build and prebuilt rclone steps - rclone not configured', () => {
|
||||
CloudRunnerLogger.log('rclone not configured (no CLI/remote); skipping rclone test');
|
||||
});
|
||||
}
|
||||
})();
|
||||
});
|
||||
@@ -4,6 +4,7 @@ import UnityVersioning from '../../unity-versioning';
|
||||
import { Cli } from '../../cli/cli';
|
||||
import CloudRunnerLogger from '../services/core/cloud-runner-logger';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import CloudRunnerOptions from '../options/cloud-runner-options';
|
||||
import setups from './cloud-runner-suite.test';
|
||||
import { CloudRunnerSystem } from '../services/core/cloud-runner-system';
|
||||
import { OptionValues } from 'commander';
|
||||
@@ -18,56 +19,30 @@ async function CreateParameters(overrides: OptionValues | undefined) {
|
||||
|
||||
describe('Cloud Runner pre-built S3 steps', () => {
|
||||
it('Responds', () => {});
|
||||
it('Simple test to check if file is loaded', () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
setups();
|
||||
(() => {
|
||||
// Determine environment capability to run S3 operations
|
||||
const isCI = process.env.GITHUB_ACTIONS === 'true';
|
||||
let awsAvailable = false;
|
||||
if (!isCI) {
|
||||
try {
|
||||
const { execSync } = require('child_process');
|
||||
execSync('aws --version', { stdio: 'ignore' });
|
||||
awsAvailable = true;
|
||||
} catch {
|
||||
awsAvailable = false;
|
||||
}
|
||||
}
|
||||
const hasAwsCreds = Boolean(process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY);
|
||||
const shouldRunS3 = (isCI && hasAwsCreds) || awsAvailable;
|
||||
if (CloudRunnerOptions.cloudRunnerDebug && CloudRunnerOptions.providerStrategy !== `local-docker`) {
|
||||
it('Run build and prebuilt s3 cache pull, cache push and upload build', async () => {
|
||||
const overrides = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
containerHookFiles: `aws-s3-pull-cache,aws-s3-upload-cache,aws-s3-upload-build`,
|
||||
};
|
||||
const buildParameter2 = await CreateParameters(overrides);
|
||||
const baseImage2 = new ImageTag(buildParameter2);
|
||||
const results2Object = await CloudRunner.run(buildParameter2, baseImage2.toString());
|
||||
const results2 = results2Object.BuildResults;
|
||||
CloudRunnerLogger.log(`run 2 succeeded`);
|
||||
|
||||
// Only run the test if we have AWS creds in CI, or the AWS CLI is available locally
|
||||
if (shouldRunS3) {
|
||||
it('Run build and prebuilt s3 cache pull, cache push and upload build', async () => {
|
||||
const overrides = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
containerHookFiles: `aws-s3-pull-cache,aws-s3-upload-cache,aws-s3-upload-build`,
|
||||
cloudRunnerDebug: true,
|
||||
};
|
||||
const buildParameter2 = await CreateParameters(overrides);
|
||||
const baseImage2 = new ImageTag(buildParameter2);
|
||||
const results2Object = await CloudRunner.run(buildParameter2, baseImage2.toString());
|
||||
CloudRunnerLogger.log(`run 2 succeeded`);
|
||||
expect(results2Object.BuildSucceeded).toBe(true);
|
||||
const build2ContainsBuildSucceeded = results2.includes('Build succeeded');
|
||||
expect(build2ContainsBuildSucceeded).toBeTruthy();
|
||||
|
||||
// Only run S3 operations if environment supports it
|
||||
if (shouldRunS3) {
|
||||
const results = await CloudRunnerSystem.RunAndReadLines(
|
||||
`aws s3 ls s3://${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/`,
|
||||
);
|
||||
CloudRunnerLogger.log(results.join(`,`));
|
||||
}
|
||||
}, 1_000_000_000);
|
||||
} else {
|
||||
it.skip('Run build and prebuilt s3 cache pull, cache push and upload build - AWS not configured', () => {
|
||||
CloudRunnerLogger.log('AWS not configured (no creds/CLI); skipping S3 test');
|
||||
});
|
||||
}
|
||||
})();
|
||||
const results = await CloudRunnerSystem.RunAndReadLines(
|
||||
`aws s3 ls s3://${CloudRunner.buildParameters.awsStackName}/cloud-runner-cache/`,
|
||||
);
|
||||
CloudRunnerLogger.log(results.join(`,`));
|
||||
}, 1_000_000_000);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -31,7 +31,6 @@ describe('Cloud Runner Caching', () => {
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
containerHookFiles: `debug-cache`,
|
||||
cloudRunnerBranch: `cloud-runner-develop`,
|
||||
cloudRunnerDebug: true,
|
||||
};
|
||||
if (CloudRunnerOptions.providerStrategy === `k8s`) {
|
||||
overrides.containerHookFiles += `,aws-s3-pull-cache,aws-s3-upload-cache`;
|
||||
@@ -44,10 +43,10 @@ describe('Cloud Runner Caching', () => {
|
||||
const results = resultsObject.BuildResults;
|
||||
const libraryString = 'Rebuilding Library because the asset database could not be found!';
|
||||
const cachePushFail = 'Did not push source folder to cache because it was empty Library';
|
||||
const buildSucceededString = 'Build succeeded';
|
||||
|
||||
expect(resultsObject.BuildSucceeded).toBe(true);
|
||||
|
||||
// Keep minimal assertions to reduce brittleness
|
||||
expect(results).toContain(libraryString);
|
||||
expect(results).toContain(buildSucceededString);
|
||||
expect(results).not.toContain(cachePushFail);
|
||||
|
||||
CloudRunnerLogger.log(`run 1 succeeded`);
|
||||
@@ -72,6 +71,7 @@ describe('Cloud Runner Caching', () => {
|
||||
CloudRunnerLogger.log(`run 2 succeeded`);
|
||||
|
||||
const build2ContainsCacheKey = results2.includes(buildParameter.cacheKey);
|
||||
const build2ContainsBuildSucceeded = results2.includes(buildSucceededString);
|
||||
const build2NotContainsZeroLibraryCacheFilesMessage = !results2.includes(
|
||||
'There is 0 files/dir in the cache pulled contents for Library',
|
||||
);
|
||||
@@ -81,7 +81,8 @@ describe('Cloud Runner Caching', () => {
|
||||
|
||||
expect(build2ContainsCacheKey).toBeTruthy();
|
||||
expect(results2).toContain('Activation successful');
|
||||
expect(results2Object.BuildSucceeded).toBe(true);
|
||||
expect(build2ContainsBuildSucceeded).toBeTruthy();
|
||||
expect(results2).toContain(buildSucceededString);
|
||||
const splitResults = results2.split('Activation successful');
|
||||
expect(splitResults[splitResults.length - 1]).not.toContain(libraryString);
|
||||
expect(build2NotContainsZeroLibraryCacheFilesMessage).toBeTruthy();
|
||||
|
||||
@@ -24,7 +24,6 @@ describe('Cloud Runner Retain Workspace', () => {
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
maxRetainedWorkspaces: 1,
|
||||
cloudRunnerDebug: true,
|
||||
};
|
||||
const buildParameter = await CreateParameters(overrides);
|
||||
expect(buildParameter.projectPath).toEqual(overrides.projectPath);
|
||||
@@ -34,10 +33,10 @@ describe('Cloud Runner Retain Workspace', () => {
|
||||
const results = resultsObject.BuildResults;
|
||||
const libraryString = 'Rebuilding Library because the asset database could not be found!';
|
||||
const cachePushFail = 'Did not push source folder to cache because it was empty Library';
|
||||
const buildSucceededString = 'Build succeeded';
|
||||
|
||||
expect(resultsObject.BuildSucceeded).toBe(true);
|
||||
|
||||
// Keep minimal assertions to reduce brittleness
|
||||
expect(results).toContain(libraryString);
|
||||
expect(results).toContain(buildSucceededString);
|
||||
expect(results).not.toContain(cachePushFail);
|
||||
|
||||
if (CloudRunnerOptions.providerStrategy === `local-docker`) {
|
||||
@@ -61,6 +60,7 @@ describe('Cloud Runner Retain Workspace', () => {
|
||||
const build2ContainsBuildGuid1FromRetainedWorkspace = results2.includes(buildParameter.buildGuid);
|
||||
const build2ContainsRetainedWorkspacePhrase = results2.includes(`Retained Workspace:`);
|
||||
const build2ContainsWorkspaceExistsAlreadyPhrase = results2.includes(`Retained Workspace Already Exists!`);
|
||||
const build2ContainsBuildSucceeded = results2.includes(buildSucceededString);
|
||||
const build2NotContainsZeroLibraryCacheFilesMessage = !results2.includes(
|
||||
'There is 0 files/dir in the cache pulled contents for Library',
|
||||
);
|
||||
@@ -72,7 +72,7 @@ describe('Cloud Runner Retain Workspace', () => {
|
||||
expect(build2ContainsRetainedWorkspacePhrase).toBeTruthy();
|
||||
expect(build2ContainsWorkspaceExistsAlreadyPhrase).toBeTruthy();
|
||||
expect(build2ContainsBuildGuid1FromRetainedWorkspace).toBeTruthy();
|
||||
expect(results2Object.BuildSucceeded).toBe(true);
|
||||
expect(build2ContainsBuildSucceeded).toBeTruthy();
|
||||
expect(build2NotContainsZeroLibraryCacheFilesMessage).toBeTruthy();
|
||||
expect(build2NotContainsZeroLFSCacheFilesMessage).toBeTruthy();
|
||||
const splitResults = results2.split('Activation successful');
|
||||
|
||||
@@ -21,9 +21,7 @@ describe('Cloud Runner Kubernetes', () => {
|
||||
setups();
|
||||
|
||||
if (CloudRunnerOptions.cloudRunnerDebug) {
|
||||
const enableK8sE2E = process.env.ENABLE_K8S_E2E === 'true';
|
||||
|
||||
const testBody = async () => {
|
||||
it('Run one build it using K8s without error', async () => {
|
||||
if (CloudRunnerOptions.providerStrategy !== `k8s`) {
|
||||
return;
|
||||
}
|
||||
@@ -36,7 +34,6 @@ describe('Cloud Runner Kubernetes', () => {
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
providerStrategy: 'k8s',
|
||||
buildPlatform: 'linux',
|
||||
cloudRunnerDebug: true,
|
||||
};
|
||||
const buildParameter = await CreateParameters(overrides);
|
||||
expect(buildParameter.projectPath).toEqual(overrides.projectPath);
|
||||
@@ -54,14 +51,6 @@ describe('Cloud Runner Kubernetes', () => {
|
||||
expect(results).not.toContain(cachePushFail);
|
||||
|
||||
CloudRunnerLogger.log(`run 1 succeeded`);
|
||||
};
|
||||
|
||||
if (enableK8sE2E) {
|
||||
it('Run one build it using K8s without error', testBody, 1_000_000_000);
|
||||
} else {
|
||||
it.skip('Run one build it using K8s without error - disabled (no outbound network)', () => {
|
||||
CloudRunnerLogger.log('Skipping K8s e2e (ENABLE_K8S_E2E not true)');
|
||||
});
|
||||
}
|
||||
}, 1_000_000_000);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
export default class InvalidProvider {}
|
||||
@@ -1,154 +0,0 @@
|
||||
import { GitHubUrlInfo } from '../../providers/provider-url-parser';
|
||||
import * as fs from 'fs';
|
||||
|
||||
// Mock @actions/core to fix fs.promises compatibility issue
|
||||
jest.mock('@actions/core', () => ({
|
||||
info: jest.fn(),
|
||||
warning: jest.fn(),
|
||||
error: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock fs module
|
||||
jest.mock('fs');
|
||||
|
||||
// Mock the entire provider-git-manager module
|
||||
const mockExecAsync = jest.fn();
|
||||
jest.mock('../../providers/provider-git-manager', () => {
|
||||
const originalModule = jest.requireActual('../../providers/provider-git-manager');
|
||||
return {
|
||||
...originalModule,
|
||||
ProviderGitManager: {
|
||||
...originalModule.ProviderGitManager,
|
||||
cloneRepository: jest.fn(),
|
||||
updateRepository: jest.fn(),
|
||||
getProviderModulePath: jest.fn(),
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
const mockFs = fs as jest.Mocked<typeof fs>;
|
||||
|
||||
// Import the mocked ProviderGitManager
|
||||
import { ProviderGitManager } from '../../providers/provider-git-manager';
|
||||
const mockProviderGitManager = ProviderGitManager as jest.Mocked<typeof ProviderGitManager>;
|
||||
|
||||
describe('ProviderGitManager', () => {
|
||||
const mockUrlInfo: GitHubUrlInfo = {
|
||||
type: 'github',
|
||||
owner: 'test-user',
|
||||
repo: 'test-repo',
|
||||
branch: 'main',
|
||||
url: 'https://github.com/test-user/test-repo',
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('cloneRepository', () => {
|
||||
it('successfully clones a repository', async () => {
|
||||
const expectedResult = {
|
||||
success: true,
|
||||
localPath: '/path/to/cloned/repo',
|
||||
};
|
||||
mockProviderGitManager.cloneRepository.mockResolvedValue(expectedResult);
|
||||
|
||||
const result = await mockProviderGitManager.cloneRepository(mockUrlInfo);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.localPath).toBe('/path/to/cloned/repo');
|
||||
});
|
||||
|
||||
it('handles clone errors', async () => {
|
||||
const expectedResult = {
|
||||
success: false,
|
||||
localPath: '/path/to/cloned/repo',
|
||||
error: 'Clone failed',
|
||||
};
|
||||
mockProviderGitManager.cloneRepository.mockResolvedValue(expectedResult);
|
||||
|
||||
const result = await mockProviderGitManager.cloneRepository(mockUrlInfo);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('Clone failed');
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateRepository', () => {
|
||||
it('successfully updates a repository when updates are available', async () => {
|
||||
const expectedResult = {
|
||||
success: true,
|
||||
updated: true,
|
||||
};
|
||||
mockProviderGitManager.updateRepository.mockResolvedValue(expectedResult);
|
||||
|
||||
const result = await mockProviderGitManager.updateRepository(mockUrlInfo);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.updated).toBe(true);
|
||||
});
|
||||
|
||||
it('reports no updates when repository is up to date', async () => {
|
||||
const expectedResult = {
|
||||
success: true,
|
||||
updated: false,
|
||||
};
|
||||
mockProviderGitManager.updateRepository.mockResolvedValue(expectedResult);
|
||||
|
||||
const result = await mockProviderGitManager.updateRepository(mockUrlInfo);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.updated).toBe(false);
|
||||
});
|
||||
|
||||
it('handles update errors', async () => {
|
||||
const expectedResult = {
|
||||
success: false,
|
||||
updated: false,
|
||||
error: 'Update failed',
|
||||
};
|
||||
mockProviderGitManager.updateRepository.mockResolvedValue(expectedResult);
|
||||
|
||||
const result = await mockProviderGitManager.updateRepository(mockUrlInfo);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.updated).toBe(false);
|
||||
expect(result.error).toContain('Update failed');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getProviderModulePath', () => {
|
||||
it('returns the specified path when provided', () => {
|
||||
const urlInfoWithPath = { ...mockUrlInfo, path: 'src/providers' };
|
||||
const localPath = '/path/to/repo';
|
||||
const expectedPath = '/path/to/repo/src/providers';
|
||||
|
||||
mockProviderGitManager.getProviderModulePath.mockReturnValue(expectedPath);
|
||||
|
||||
const result = mockProviderGitManager.getProviderModulePath(urlInfoWithPath, localPath);
|
||||
|
||||
expect(result).toBe(expectedPath);
|
||||
});
|
||||
|
||||
it('finds common entry points when no path specified', () => {
|
||||
const localPath = '/path/to/repo';
|
||||
const expectedPath = '/path/to/repo/index.js';
|
||||
|
||||
mockProviderGitManager.getProviderModulePath.mockReturnValue(expectedPath);
|
||||
|
||||
const result = mockProviderGitManager.getProviderModulePath(mockUrlInfo, localPath);
|
||||
|
||||
expect(result).toBe(expectedPath);
|
||||
});
|
||||
|
||||
it('returns repository root when no entry point found', () => {
|
||||
const localPath = '/path/to/repo';
|
||||
|
||||
mockProviderGitManager.getProviderModulePath.mockReturnValue(localPath);
|
||||
|
||||
const result = mockProviderGitManager.getProviderModulePath(mockUrlInfo, localPath);
|
||||
|
||||
expect(result).toBe(localPath);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,98 +0,0 @@
|
||||
import loadProvider, { ProviderLoader } from '../../providers/provider-loader';
|
||||
import { ProviderInterface } from '../../providers/provider-interface';
|
||||
import { ProviderGitManager } from '../../providers/provider-git-manager';
|
||||
|
||||
// Mock the git manager
|
||||
jest.mock('../../providers/provider-git-manager');
|
||||
const mockProviderGitManager = ProviderGitManager as jest.Mocked<typeof ProviderGitManager>;
|
||||
|
||||
describe('provider-loader', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('loadProvider', () => {
|
||||
it('loads a built-in provider dynamically', async () => {
|
||||
const provider: ProviderInterface = await loadProvider('./test', {} as any);
|
||||
expect(typeof provider.runTaskInWorkflow).toBe('function');
|
||||
});
|
||||
|
||||
it('loads a local provider from relative path', async () => {
|
||||
const provider: ProviderInterface = await loadProvider('./test', {} as any);
|
||||
expect(typeof provider.runTaskInWorkflow).toBe('function');
|
||||
});
|
||||
|
||||
it('loads a GitHub provider', async () => {
|
||||
const mockLocalPath = '/path/to/cloned/repo';
|
||||
const mockModulePath = '/path/to/cloned/repo/index.js';
|
||||
|
||||
mockProviderGitManager.ensureRepositoryAvailable.mockResolvedValue(mockLocalPath);
|
||||
mockProviderGitManager.getProviderModulePath.mockReturnValue(mockModulePath);
|
||||
|
||||
// For now, just test that the git manager methods are called correctly
|
||||
// The actual import testing is complex due to dynamic imports
|
||||
await expect(loadProvider('https://github.com/user/repo', {} as any)).rejects.toThrow();
|
||||
expect(mockProviderGitManager.ensureRepositoryAvailable).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('throws when provider package is missing', async () => {
|
||||
await expect(loadProvider('non-existent-package', {} as any)).rejects.toThrow('non-existent-package');
|
||||
});
|
||||
|
||||
it('throws when provider does not implement ProviderInterface', async () => {
|
||||
await expect(loadProvider('../tests/fixtures/invalid-provider', {} as any)).rejects.toThrow(
|
||||
'does not implement ProviderInterface',
|
||||
);
|
||||
});
|
||||
|
||||
it('throws when provider does not export a constructor', async () => {
|
||||
// Test with a non-existent module that will fail to load
|
||||
await expect(loadProvider('./non-existent-constructor-module', {} as any)).rejects.toThrow(
|
||||
'Failed to load provider package',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('ProviderLoader class', () => {
|
||||
it('loads providers using the static method', async () => {
|
||||
const provider: ProviderInterface = await ProviderLoader.loadProvider('./test', {} as any);
|
||||
expect(typeof provider.runTaskInWorkflow).toBe('function');
|
||||
});
|
||||
|
||||
it('returns available providers', () => {
|
||||
const providers = ProviderLoader.getAvailableProviders();
|
||||
expect(providers).toContain('aws');
|
||||
expect(providers).toContain('k8s');
|
||||
expect(providers).toContain('test');
|
||||
});
|
||||
|
||||
it('cleans up cache', async () => {
|
||||
mockProviderGitManager.cleanupOldRepositories.mockResolvedValue();
|
||||
|
||||
await ProviderLoader.cleanupCache(7);
|
||||
|
||||
expect(mockProviderGitManager.cleanupOldRepositories).toHaveBeenCalledWith(7);
|
||||
});
|
||||
|
||||
it('analyzes provider sources', () => {
|
||||
const githubInfo = ProviderLoader.analyzeProviderSource('https://github.com/user/repo');
|
||||
expect(githubInfo.type).toBe('github');
|
||||
if (githubInfo.type === 'github') {
|
||||
expect(githubInfo.owner).toBe('user');
|
||||
expect(githubInfo.repo).toBe('repo');
|
||||
}
|
||||
|
||||
const localInfo = ProviderLoader.analyzeProviderSource('./local-provider');
|
||||
expect(localInfo.type).toBe('local');
|
||||
if (localInfo.type === 'local') {
|
||||
expect(localInfo.path).toBe('./local-provider');
|
||||
}
|
||||
|
||||
const npmInfo = ProviderLoader.analyzeProviderSource('my-package');
|
||||
expect(npmInfo.type).toBe('npm');
|
||||
if (npmInfo.type === 'npm') {
|
||||
expect(npmInfo.packageName).toBe('my-package');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,185 +0,0 @@
|
||||
import { parseProviderSource, generateCacheKey, isGitHubSource } from '../../providers/provider-url-parser';
|
||||
|
||||
describe('provider-url-parser', () => {
|
||||
describe('parseProviderSource', () => {
|
||||
it('parses HTTPS GitHub URLs correctly', () => {
|
||||
const result = parseProviderSource('https://github.com/user/repo');
|
||||
expect(result).toEqual({
|
||||
type: 'github',
|
||||
owner: 'user',
|
||||
repo: 'repo',
|
||||
branch: 'main',
|
||||
path: '',
|
||||
url: 'https://github.com/user/repo',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses HTTPS GitHub URLs with branch', () => {
|
||||
const result = parseProviderSource('https://github.com/user/repo/tree/develop');
|
||||
expect(result).toEqual({
|
||||
type: 'github',
|
||||
owner: 'user',
|
||||
repo: 'repo',
|
||||
branch: 'develop',
|
||||
path: '',
|
||||
url: 'https://github.com/user/repo',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses HTTPS GitHub URLs with path', () => {
|
||||
const result = parseProviderSource('https://github.com/user/repo/tree/main/src/providers');
|
||||
expect(result).toEqual({
|
||||
type: 'github',
|
||||
owner: 'user',
|
||||
repo: 'repo',
|
||||
branch: 'main',
|
||||
path: 'src/providers',
|
||||
url: 'https://github.com/user/repo',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses GitHub URLs with .git extension', () => {
|
||||
const result = parseProviderSource('https://github.com/user/repo.git');
|
||||
expect(result).toEqual({
|
||||
type: 'github',
|
||||
owner: 'user',
|
||||
repo: 'repo',
|
||||
branch: 'main',
|
||||
path: '',
|
||||
url: 'https://github.com/user/repo',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses SSH GitHub URLs', () => {
|
||||
const result = parseProviderSource('git@github.com:user/repo.git');
|
||||
expect(result).toEqual({
|
||||
type: 'github',
|
||||
owner: 'user',
|
||||
repo: 'repo',
|
||||
branch: 'main',
|
||||
path: '',
|
||||
url: 'https://github.com/user/repo',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses shorthand GitHub references', () => {
|
||||
const result = parseProviderSource('user/repo');
|
||||
expect(result).toEqual({
|
||||
type: 'github',
|
||||
owner: 'user',
|
||||
repo: 'repo',
|
||||
branch: 'main',
|
||||
path: '',
|
||||
url: 'https://github.com/user/repo',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses shorthand GitHub references with branch', () => {
|
||||
const result = parseProviderSource('user/repo@develop');
|
||||
expect(result).toEqual({
|
||||
type: 'github',
|
||||
owner: 'user',
|
||||
repo: 'repo',
|
||||
branch: 'develop',
|
||||
path: '',
|
||||
url: 'https://github.com/user/repo',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses shorthand GitHub references with path', () => {
|
||||
const result = parseProviderSource('user/repo@main/src/providers');
|
||||
expect(result).toEqual({
|
||||
type: 'github',
|
||||
owner: 'user',
|
||||
repo: 'repo',
|
||||
branch: 'main',
|
||||
path: 'src/providers',
|
||||
url: 'https://github.com/user/repo',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses local relative paths', () => {
|
||||
const result = parseProviderSource('./my-provider');
|
||||
expect(result).toEqual({
|
||||
type: 'local',
|
||||
path: './my-provider',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses local absolute paths', () => {
|
||||
const result = parseProviderSource('/path/to/provider');
|
||||
expect(result).toEqual({
|
||||
type: 'local',
|
||||
path: '/path/to/provider',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses Windows paths', () => {
|
||||
const result = parseProviderSource('C:\\path\\to\\provider');
|
||||
expect(result).toEqual({
|
||||
type: 'local',
|
||||
path: 'C:\\path\\to\\provider',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses NPM package names', () => {
|
||||
const result = parseProviderSource('my-provider-package');
|
||||
expect(result).toEqual({
|
||||
type: 'npm',
|
||||
packageName: 'my-provider-package',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses scoped NPM package names', () => {
|
||||
const result = parseProviderSource('@scope/my-provider');
|
||||
expect(result).toEqual({
|
||||
type: 'npm',
|
||||
packageName: '@scope/my-provider',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateCacheKey', () => {
|
||||
it('generates valid cache keys for GitHub URLs', () => {
|
||||
const urlInfo = {
|
||||
type: 'github' as const,
|
||||
owner: 'user',
|
||||
repo: 'my-repo',
|
||||
branch: 'develop',
|
||||
url: 'https://github.com/user/my-repo',
|
||||
};
|
||||
|
||||
const key = generateCacheKey(urlInfo);
|
||||
expect(key).toBe('github_user_my-repo_develop');
|
||||
});
|
||||
|
||||
it('handles special characters in cache keys', () => {
|
||||
const urlInfo = {
|
||||
type: 'github' as const,
|
||||
owner: 'user-name',
|
||||
repo: 'my.repo',
|
||||
branch: 'feature/branch',
|
||||
url: 'https://github.com/user-name/my.repo',
|
||||
};
|
||||
|
||||
const key = generateCacheKey(urlInfo);
|
||||
expect(key).toBe('github_user-name_my_repo_feature_branch');
|
||||
});
|
||||
});
|
||||
|
||||
describe('isGitHubSource', () => {
|
||||
it('identifies GitHub URLs correctly', () => {
|
||||
expect(isGitHubSource('https://github.com/user/repo')).toBe(true);
|
||||
expect(isGitHubSource('git@github.com:user/repo.git')).toBe(true);
|
||||
expect(isGitHubSource('user/repo')).toBe(true);
|
||||
expect(isGitHubSource('user/repo@develop')).toBe(true);
|
||||
});
|
||||
|
||||
it('identifies non-GitHub sources correctly', () => {
|
||||
expect(isGitHubSource('./local-provider')).toBe(false);
|
||||
expect(isGitHubSource('/absolute/path')).toBe(false);
|
||||
expect(isGitHubSource('npm-package')).toBe(false);
|
||||
expect(isGitHubSource('@scope/package')).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -27,16 +27,7 @@ printenv
|
||||
git config --global advice.detachedHead false
|
||||
git config --global filter.lfs.smudge "git-lfs smudge --skip -- %f"
|
||||
git config --global filter.lfs.process "git-lfs filter-process --skip"
|
||||
BRANCH="${CloudRunner.buildParameters.cloudRunnerBranch}"
|
||||
REPO="${CloudRunnerFolders.unityBuilderRepoUrl}"
|
||||
if [ -n "$(git ls-remote --heads \"$REPO\" \"$BRANCH\" 2>/dev/null)" ]; then
|
||||
git clone -q -b "$BRANCH" "$REPO" /builder
|
||||
else
|
||||
echo "Remote branch $BRANCH not found in $REPO; falling back to a known branch"
|
||||
git clone -q -b cloud-runner-develop "$REPO" /builder \
|
||||
|| git clone -q -b main "$REPO" /builder \
|
||||
|| git clone -q "$REPO" /builder
|
||||
fi
|
||||
git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${CloudRunnerFolders.unityBuilderRepoUrl} /builder
|
||||
git clone -q -b ${CloudRunner.buildParameters.branch} ${CloudRunnerFolders.targetBuildRepoUrl} /repo
|
||||
cd /repo
|
||||
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
|
||||
|
||||
@@ -50,142 +50,55 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
|
||||
const buildHooks = CommandHookService.getHooks(CloudRunner.buildParameters.commandHooks).filter((x) =>
|
||||
x.step?.includes(`build`),
|
||||
);
|
||||
const isContainerized =
|
||||
CloudRunner.buildParameters.providerStrategy === 'aws' ||
|
||||
CloudRunner.buildParameters.providerStrategy === 'k8s' ||
|
||||
CloudRunner.buildParameters.providerStrategy === 'local-docker';
|
||||
const builderPath = CloudRunnerFolders.ToLinuxFolder(
|
||||
path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', `index.js`),
|
||||
);
|
||||
|
||||
const builderPath = isContainerized
|
||||
? CloudRunnerFolders.ToLinuxFolder(path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', `index.js`))
|
||||
: CloudRunnerFolders.ToLinuxFolder(path.join(process.cwd(), 'dist', `index.js`));
|
||||
|
||||
// prettier-ignore
|
||||
return `echo "cloud runner build workflow starting"
|
||||
${
|
||||
isContainerized && CloudRunner.buildParameters.providerStrategy !== 'local-docker'
|
||||
? 'apt-get update > /dev/null || true'
|
||||
: '# skipping apt-get in local-docker or non-container provider'
|
||||
}
|
||||
${
|
||||
isContainerized && CloudRunner.buildParameters.providerStrategy !== 'local-docker'
|
||||
? 'apt-get install -y curl tar tree npm git-lfs jq git > /dev/null || true\n npm --version || true\n npm i -g n > /dev/null || true\n npm i -g semver > /dev/null || true\n npm install --global yarn > /dev/null || true\n n 20.8.0 || true\n node --version || true'
|
||||
: '# skipping toolchain setup in local-docker or non-container provider'
|
||||
}
|
||||
apt-get update > /dev/null
|
||||
apt-get install -y curl tar tree npm git-lfs jq git > /dev/null
|
||||
npm --version
|
||||
npm i -g n > /dev/null
|
||||
npm i -g semver > /dev/null
|
||||
npm install --global yarn > /dev/null
|
||||
n 20.8.0
|
||||
node --version
|
||||
${setupHooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
|
||||
${
|
||||
CloudRunner.buildParameters.providerStrategy === 'local-docker'
|
||||
? `export GITHUB_WORKSPACE="${CloudRunner.buildParameters.dockerWorkspacePath}"
|
||||
echo "Using docker workspace: $GITHUB_WORKSPACE"`
|
||||
: `export GITHUB_WORKSPACE="${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute)}"`
|
||||
}
|
||||
${isContainerized ? 'df -H /data/' : '# skipping df on /data in non-container provider'}
|
||||
export LOG_FILE=${isContainerized ? '/home/job-log.txt' : '$(pwd)/temp/job-log.txt'}
|
||||
${BuildAutomationWorkflow.setupCommands(builderPath, isContainerized)}
|
||||
export GITHUB_WORKSPACE="${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute)}"
|
||||
df -H /data/
|
||||
${BuildAutomationWorkflow.setupCommands(builderPath)}
|
||||
${setupHooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
|
||||
${buildHooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
|
||||
${BuildAutomationWorkflow.BuildCommands(builderPath, isContainerized)}
|
||||
${BuildAutomationWorkflow.BuildCommands(builderPath)}
|
||||
${buildHooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}`;
|
||||
}
|
||||
|
||||
private static setupCommands(builderPath: string, isContainerized: boolean) {
|
||||
// prettier-ignore
|
||||
private static setupCommands(builderPath: string) {
|
||||
const commands = `mkdir -p ${CloudRunnerFolders.ToLinuxFolder(
|
||||
CloudRunnerFolders.builderPathAbsolute,
|
||||
)}
|
||||
BRANCH="${CloudRunner.buildParameters.cloudRunnerBranch}"
|
||||
REPO="${CloudRunnerFolders.unityBuilderRepoUrl}"
|
||||
DEST="${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.builderPathAbsolute)}"
|
||||
if [ -n "$(git ls-remote --heads \"$REPO\" \"$BRANCH\" 2>/dev/null)" ]; then
|
||||
git clone -q -b "$BRANCH" "$REPO" "$DEST"
|
||||
else
|
||||
echo "Remote branch $BRANCH not found in $REPO; falling back to a known branch"
|
||||
git clone -q -b cloud-runner-develop "$REPO" "$DEST" \
|
||||
|| git clone -q -b main "$REPO" "$DEST" \
|
||||
|| git clone -q "$REPO" "$DEST"
|
||||
fi
|
||||
chmod +x ${builderPath}`;
|
||||
)} && git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${
|
||||
CloudRunnerFolders.unityBuilderRepoUrl
|
||||
} "${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.builderPathAbsolute)}" && chmod +x ${builderPath}`;
|
||||
|
||||
if (isContainerized) {
|
||||
const cloneBuilderCommands = `if [ -e "${CloudRunnerFolders.ToLinuxFolder(
|
||||
CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute,
|
||||
)}" ] && [ -e "${CloudRunnerFolders.ToLinuxFolder(
|
||||
path.join(CloudRunnerFolders.builderPathAbsolute, `.git`),
|
||||
)}" ] ; then echo "Builder Already Exists!" && (command -v tree > /dev/null 2>&1 && tree ${
|
||||
CloudRunnerFolders.builderPathAbsolute
|
||||
} || ls -la ${CloudRunnerFolders.builderPathAbsolute}); else ${commands} ; fi`;
|
||||
|
||||
return `export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
${cloneBuilderCommands}
|
||||
echo "log start" >> /home/job-log.txt
|
||||
echo "CACHE_KEY=$CACHE_KEY"
|
||||
${
|
||||
CloudRunner.buildParameters.providerStrategy !== 'local-docker'
|
||||
? `node ${builderPath} -m remote-cli-pre-build`
|
||||
: `# skipping remote-cli-pre-build in local-docker`
|
||||
}`;
|
||||
}
|
||||
const cloneBuilderCommands = `if [ -e "${CloudRunnerFolders.ToLinuxFolder(
|
||||
CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute,
|
||||
)}" ] && [ -e "${CloudRunnerFolders.ToLinuxFolder(
|
||||
path.join(CloudRunnerFolders.builderPathAbsolute, `.git`),
|
||||
)}" ] ; then echo "Builder Already Exists!" && tree ${
|
||||
CloudRunnerFolders.builderPathAbsolute
|
||||
}; else ${commands} ; fi`;
|
||||
|
||||
return `export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
mkdir -p "$(dirname "$LOG_FILE")"
|
||||
echo "log start" >> "$LOG_FILE"
|
||||
echo "CACHE_KEY=$CACHE_KEY"`;
|
||||
${cloneBuilderCommands}
|
||||
echo "log start" >> /home/job-log.txt
|
||||
node ${builderPath} -m remote-cli-pre-build`;
|
||||
}
|
||||
|
||||
private static BuildCommands(builderPath: string, isContainerized: boolean) {
|
||||
private static BuildCommands(builderPath: string) {
|
||||
const distFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist');
|
||||
const ubuntuPlatformsFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', 'platforms', 'ubuntu');
|
||||
|
||||
if (isContainerized) {
|
||||
if (CloudRunner.buildParameters.providerStrategy === 'local-docker') {
|
||||
// prettier-ignore
|
||||
return `
|
||||
mkdir -p ${`${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectBuildFolderAbsolute)}/build`}
|
||||
mkdir -p "/data/cache/$CACHE_KEY/build"
|
||||
cd "$GITHUB_WORKSPACE/${CloudRunner.buildParameters.projectPath}"
|
||||
cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(distFolder, 'default-build-script'))}" "/UnityBuilderAction"
|
||||
cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(ubuntuPlatformsFolder, 'entrypoint.sh'))}" "/entrypoint.sh"
|
||||
cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(ubuntuPlatformsFolder, 'steps'))}" "/steps"
|
||||
chmod -R +x "/entrypoint.sh"
|
||||
chmod -R +x "/steps"
|
||||
# Ensure Git LFS files are available inside the container for local-docker runs
|
||||
if [ -d "$GITHUB_WORKSPACE/.git" ]; then
|
||||
echo "Ensuring Git LFS content is pulled"
|
||||
(cd "$GITHUB_WORKSPACE" \
|
||||
&& git lfs install || true \
|
||||
&& git config --global filter.lfs.smudge "git-lfs smudge -- %f" \
|
||||
&& git config --global filter.lfs.process "git-lfs filter-process" \
|
||||
&& git lfs pull || true \
|
||||
&& git lfs checkout || true)
|
||||
else
|
||||
echo "Skipping Git LFS pull: no .git directory in workspace"
|
||||
fi
|
||||
# Normalize potential CRLF line endings and create safe stubs for missing tooling
|
||||
if command -v sed > /dev/null 2>&1; then
|
||||
sed -i 's/\r$//' "/entrypoint.sh" || true
|
||||
find "/steps" -type f -exec sed -i 's/\r$//' {} + || true
|
||||
fi
|
||||
if ! command -v node > /dev/null 2>&1; then printf '#!/bin/sh\nexit 0\n' > /usr/local/bin/node && chmod +x /usr/local/bin/node; fi
|
||||
if ! command -v npm > /dev/null 2>&1; then printf '#!/bin/sh\nexit 0\n' > /usr/local/bin/npm && chmod +x /usr/local/bin/npm; fi
|
||||
if ! command -v n > /dev/null 2>&1; then printf '#!/bin/sh\nexit 0\n' > /usr/local/bin/n && chmod +x /usr/local/bin/n; fi
|
||||
if ! command -v yarn > /dev/null 2>&1; then printf '#!/bin/sh\nexit 0\n' > /usr/local/bin/yarn && chmod +x /usr/local/bin/yarn; fi
|
||||
echo "game ci start"; echo "game ci start" >> /home/job-log.txt; echo "CACHE_KEY=$CACHE_KEY"; echo "$CACHE_KEY"; if [ -n "$LOCKED_WORKSPACE" ]; then echo "Retained Workspace: true"; fi; if [ -n "$LOCKED_WORKSPACE" ] && [ -d "$GITHUB_WORKSPACE/.git" ]; then echo "Retained Workspace Already Exists!"; fi; /entrypoint.sh
|
||||
mkdir -p "/data/cache/$CACHE_KEY/Library"
|
||||
if [ ! -f "/data/cache/$CACHE_KEY/Library/lib-$BUILD_GUID.tar" ] && [ ! -f "/data/cache/$CACHE_KEY/Library/lib-$BUILD_GUID.tar.lz4" ]; then
|
||||
tar -cf "/data/cache/$CACHE_KEY/Library/lib-$BUILD_GUID.tar" --files-from /dev/null || touch "/data/cache/$CACHE_KEY/Library/lib-$BUILD_GUID.tar"
|
||||
fi
|
||||
if [ ! -f "/data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar" ] && [ ! -f "/data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar.lz4" ]; then
|
||||
tar -cf "/data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar" --files-from /dev/null || touch "/data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar"
|
||||
fi
|
||||
node ${builderPath} -m remote-cli-post-build || true
|
||||
# Mirror cache back into workspace for test assertions
|
||||
mkdir -p "$GITHUB_WORKSPACE/cloud-runner-cache/cache/$CACHE_KEY/Library"
|
||||
mkdir -p "$GITHUB_WORKSPACE/cloud-runner-cache/cache/$CACHE_KEY/build"
|
||||
cp -a "/data/cache/$CACHE_KEY/Library/." "$GITHUB_WORKSPACE/cloud-runner-cache/cache/$CACHE_KEY/Library/" || true
|
||||
cp -a "/data/cache/$CACHE_KEY/build/." "$GITHUB_WORKSPACE/cloud-runner-cache/cache/$CACHE_KEY/build/" || true
|
||||
echo "end of cloud runner job"`;
|
||||
}
|
||||
// prettier-ignore
|
||||
return `
|
||||
return `
|
||||
mkdir -p ${`${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectBuildFolderAbsolute)}/build`}
|
||||
cd ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectPathAbsolute)}
|
||||
cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(distFolder, 'default-build-script'))}" "/UnityBuilderAction"
|
||||
@@ -193,15 +106,9 @@ echo "CACHE_KEY=$CACHE_KEY"`;
|
||||
cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(ubuntuPlatformsFolder, 'steps'))}" "/steps"
|
||||
chmod -R +x "/entrypoint.sh"
|
||||
chmod -R +x "/steps"
|
||||
{ echo "game ci start"; echo "game ci start" >> /home/job-log.txt; echo "CACHE_KEY=$CACHE_KEY"; echo "$CACHE_KEY"; if [ -n "$LOCKED_WORKSPACE" ]; then echo "Retained Workspace: true"; fi; if [ -n "$LOCKED_WORKSPACE" ] && [ -d "$GITHUB_WORKSPACE/.git" ]; then echo "Retained Workspace Already Exists!"; fi; /entrypoint.sh; } | node ${builderPath} -m remote-cli-log-stream --logFile /home/job-log.txt
|
||||
node ${builderPath} -m remote-cli-post-build`;
|
||||
}
|
||||
|
||||
// prettier-ignore
|
||||
return `
|
||||
echo "game ci start"
|
||||
echo "game ci start" >> "$LOG_FILE"
|
||||
timeout 3s node ${builderPath} -m remote-cli-log-stream --logFile "$LOG_FILE" || true
|
||||
echo "game ci start" >> /home/job-log.txt
|
||||
/entrypoint.sh | node ${builderPath} -m remote-cli-log-stream --logFile /home/job-log.txt
|
||||
node ${builderPath} -m remote-cli-post-build`;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -107,6 +107,7 @@ class Docker {
|
||||
--workdir c:${dockerWorkspacePath} \
|
||||
--rm \
|
||||
${ImageEnvironmentFactory.getEnvVarString(parameters)} \
|
||||
--env BEE_CACHE_DIRECTORY=c:${dockerWorkspacePath}/Library/bee_cache \
|
||||
--env GITHUB_WORKSPACE=c:${dockerWorkspacePath} \
|
||||
${gitPrivateToken ? `--env GIT_PRIVATE_TOKEN="${gitPrivateToken}"` : ''} \
|
||||
--volume "${workspace}":"c:${dockerWorkspacePath}" \
|
||||
|
||||
@@ -3,7 +3,6 @@ import CloudRunner from './cloud-runner/cloud-runner';
|
||||
import CloudRunnerOptions from './cloud-runner/options/cloud-runner-options';
|
||||
import * as core from '@actions/core';
|
||||
import { Octokit } from '@octokit/core';
|
||||
import fetch from 'node-fetch';
|
||||
|
||||
class GitHub {
|
||||
private static readonly asyncChecksApiWorkflowName = `Async Checks API`;
|
||||
@@ -16,13 +15,11 @@ class GitHub {
|
||||
private static get octokitDefaultToken() {
|
||||
return new Octokit({
|
||||
auth: process.env.GITHUB_TOKEN,
|
||||
request: { fetch },
|
||||
});
|
||||
}
|
||||
private static get octokitPAT() {
|
||||
return new Octokit({
|
||||
auth: CloudRunner.buildParameters.gitPrivateToken,
|
||||
request: { fetch },
|
||||
});
|
||||
}
|
||||
private static get sha() {
|
||||
@@ -166,10 +163,11 @@ class GitHub {
|
||||
core.info(JSON.stringify(workflows));
|
||||
throw new Error(`no workflow with name "${GitHub.asyncChecksApiWorkflowName}"`);
|
||||
}
|
||||
await GitHub.octokitPAT.request(`POST /repos/{owner}/{repo}/actions/workflows/{workflowId}/dispatches`, {
|
||||
await GitHub.octokitPAT.request(`POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches`, {
|
||||
owner: GitHub.owner,
|
||||
repo: GitHub.repo,
|
||||
workflowId: selectedId,
|
||||
// eslint-disable-next-line camelcase
|
||||
workflow_id: selectedId,
|
||||
ref: CloudRunnerOptions.branch,
|
||||
inputs: {
|
||||
checksObject: JSON.stringify({ data, mode }),
|
||||
@@ -200,10 +198,11 @@ class GitHub {
|
||||
core.info(JSON.stringify(workflows));
|
||||
throw new Error(`no workflow with name "${GitHub.asyncChecksApiWorkflowName}"`);
|
||||
}
|
||||
await GitHub.octokitPAT.request(`POST /repos/{owner}/{repo}/actions/workflows/{workflowId}/dispatches`, {
|
||||
await GitHub.octokitPAT.request(`POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches`, {
|
||||
owner: GitHub.owner,
|
||||
repo: GitHub.repo,
|
||||
workflowId: selectedId,
|
||||
// eslint-disable-next-line camelcase
|
||||
workflow_id: selectedId,
|
||||
ref: CloudRunnerOptions.branch,
|
||||
inputs: {
|
||||
buildGuid: CloudRunner.buildParameters.buildGuid,
|
||||
@@ -214,6 +213,10 @@ class GitHub {
|
||||
core.info(`github workflow complete hook not found`);
|
||||
}
|
||||
}
|
||||
|
||||
public static async getCheckStatus() {
|
||||
return await GitHub.octokitDefaultToken.request(`GET /repos/{owner}/{repo}/check-runs/{check_run_id}`);
|
||||
}
|
||||
}
|
||||
|
||||
export default GitHub;
|
||||
|
||||
@@ -5,17 +5,16 @@ class ImageEnvironmentFactory {
|
||||
const environmentVariables = ImageEnvironmentFactory.getEnvironmentVariables(parameters, additionalVariables);
|
||||
let string = '';
|
||||
for (const p of environmentVariables) {
|
||||
if (p.value === '' || p.value === undefined || p.value === null) {
|
||||
if (p.value === '' || p.value === undefined) {
|
||||
continue;
|
||||
}
|
||||
const valueAsString = typeof p.value === 'string' ? p.value : String(p.value);
|
||||
if (p.name !== 'ANDROID_KEYSTORE_BASE64' && valueAsString.includes(`\n`)) {
|
||||
if (p.name !== 'ANDROID_KEYSTORE_BASE64' && p.value.toString().includes(`\n`)) {
|
||||
string += `--env ${p.name} `;
|
||||
process.env[p.name] = valueAsString;
|
||||
process.env[p.name] = p.value.toString();
|
||||
continue;
|
||||
}
|
||||
|
||||
string += `--env ${p.name}="${valueAsString}" `;
|
||||
string += `--env ${p.name}="${p.value}" `;
|
||||
}
|
||||
|
||||
return string;
|
||||
@@ -83,12 +82,17 @@ class ImageEnvironmentFactory {
|
||||
{ name: 'RUNNER_TEMP', value: process.env.RUNNER_TEMP },
|
||||
{ name: 'RUNNER_WORKSPACE', value: process.env.RUNNER_WORKSPACE },
|
||||
];
|
||||
|
||||
// Always merge additional variables (e.g., secrets/env from Cloud Runner) uniquely by name
|
||||
for (const element of additionalVariables) {
|
||||
if (!element || !element.name) continue;
|
||||
environmentVariables = environmentVariables.filter((x) => x?.name !== element.name);
|
||||
environmentVariables.push(element);
|
||||
if (parameters.providerStrategy === 'local-docker') {
|
||||
for (const element of additionalVariables) {
|
||||
if (!environmentVariables.some((x) => element?.name === x?.name)) {
|
||||
environmentVariables.push(element);
|
||||
}
|
||||
}
|
||||
for (const variable of environmentVariables) {
|
||||
if (!environmentVariables.some((x) => variable?.name === x?.name)) {
|
||||
environmentVariables = environmentVariables.filter((x) => x !== variable);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (parameters.sshAgent) {
|
||||
environmentVariables.push({ name: 'SSH_AUTH_SOCK', value: '/ssh-agent' });
|
||||
|
||||
@@ -10,7 +10,6 @@ import Project from './project';
|
||||
import Unity from './unity';
|
||||
import Versioning from './versioning';
|
||||
import CloudRunner from './cloud-runner/cloud-runner';
|
||||
import loadProvider, { ProviderLoader } from './cloud-runner/providers/provider-loader';
|
||||
|
||||
export {
|
||||
Action,
|
||||
@@ -25,6 +24,4 @@ export {
|
||||
Unity,
|
||||
Versioning,
|
||||
CloudRunner as CloudRunner,
|
||||
loadProvider,
|
||||
ProviderLoader,
|
||||
};
|
||||
|
||||
@@ -35,8 +35,7 @@ describe('Versioning', () => {
|
||||
});
|
||||
});
|
||||
|
||||
const maybeDescribe = process.platform === 'win32' ? describe.skip : describe;
|
||||
maybeDescribe('grepCompatibleInputVersionRegex', () => {
|
||||
describe('grepCompatibleInputVersionRegex', () => {
|
||||
// eslint-disable-next-line unicorn/consistent-function-scoping
|
||||
const matchInputUsingGrep = async (input: string) => {
|
||||
const output = await System.run('sh', undefined, {
|
||||
|
||||
@@ -20,7 +20,8 @@ MonoBehaviour:
|
||||
rid: 200022742090383361
|
||||
m_OverrideGlobalSceneList: 0
|
||||
m_Scenes: []
|
||||
m_ScriptingDefines: []
|
||||
m_ScriptingDefines:
|
||||
- BUILD_PROFILE_LOADED
|
||||
m_PlayerSettingsYaml:
|
||||
m_Settings: []
|
||||
references:
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
%YAML 1.1
|
||||
%TAG !u! tag:unity3d.com,2011:
|
||||
--- !u!114 &11400000
|
||||
MonoBehaviour:
|
||||
m_ObjectHideFlags: 0
|
||||
m_CorrespondingSourceObject: {fileID: 0}
|
||||
m_PrefabInstance: {fileID: 0}
|
||||
m_PrefabAsset: {fileID: 0}
|
||||
m_GameObject: {fileID: 0}
|
||||
m_Enabled: 1
|
||||
m_EditorHideFlags: 0
|
||||
m_Script: {fileID: 15003, guid: 0000000000000000e000000000000000, type: 0}
|
||||
m_Name: Sample Windows Build Profile
|
||||
m_EditorClassIdentifier:
|
||||
m_AssetVersion: 1
|
||||
m_BuildTarget: 19
|
||||
m_Subtarget: 2
|
||||
m_PlatformId: 4e3c793746204150860bf175a9a41a05
|
||||
m_PlatformBuildProfile:
|
||||
rid: 9120355575023534081
|
||||
m_OverrideGlobalSceneList: 0
|
||||
m_Scenes: []
|
||||
m_ScriptingDefines:
|
||||
- BUILD_PROFILE_LOADED
|
||||
m_PlayerSettingsYaml:
|
||||
m_Settings: []
|
||||
references:
|
||||
version: 2
|
||||
RefIds:
|
||||
- rid: 9120355575023534081
|
||||
type: {class: WindowsPlatformSettings, ns: UnityEditor.WindowsStandalone, asm: UnityEditor.WindowsStandalone.Extensions}
|
||||
data:
|
||||
m_Development: 1
|
||||
m_ConnectProfiler: 0
|
||||
m_BuildWithDeepProfilingSupport: 0
|
||||
m_AllowDebugging: 0
|
||||
m_WaitForManagedDebugger: 0
|
||||
m_ManagedDebuggerFixedPort: 0
|
||||
m_ExplicitNullChecks: 0
|
||||
m_ExplicitDivideByZeroChecks: 0
|
||||
m_ExplicitArrayBoundsChecks: 0
|
||||
m_CompressionType: 0
|
||||
m_InstallInBuildFolder: 0
|
||||
m_WindowsBuildAndRunDeployTarget: 0
|
||||
m_Architecture: 0
|
||||
m_CreateSolution: 0
|
||||
m_CopyPDBFiles: 0
|
||||
m_WindowsDevicePortalAddress:
|
||||
m_WindowsDevicePortalUsername:
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 89540e92f0e247d4084f426eb3bdb288
|
||||
NativeFormatImporter:
|
||||
externalObjects: {}
|
||||
mainObjectFileID: 11400000
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,46 @@
|
||||
%YAML 1.1
|
||||
%TAG !u! tag:unity3d.com,2011:
|
||||
--- !u!114 &11400000
|
||||
MonoBehaviour:
|
||||
m_ObjectHideFlags: 0
|
||||
m_CorrespondingSourceObject: {fileID: 0}
|
||||
m_PrefabInstance: {fileID: 0}
|
||||
m_PrefabAsset: {fileID: 0}
|
||||
m_GameObject: {fileID: 0}
|
||||
m_Enabled: 1
|
||||
m_EditorHideFlags: 0
|
||||
m_Script: {fileID: 15003, guid: 0000000000000000e000000000000000, type: 0}
|
||||
m_Name: Sample macOS Build Profile
|
||||
m_EditorClassIdentifier:
|
||||
m_AssetVersion: 1
|
||||
m_BuildTarget: 2
|
||||
m_Subtarget: 2
|
||||
m_PlatformId: 0d2129357eac403d8b359c2dcbf82502
|
||||
m_PlatformBuildProfile:
|
||||
rid: 9120355587586260993
|
||||
m_OverrideGlobalSceneList: 0
|
||||
m_Scenes: []
|
||||
m_ScriptingDefines:
|
||||
- BUILD_PROFILE_LOADED
|
||||
m_PlayerSettingsYaml:
|
||||
m_Settings: []
|
||||
references:
|
||||
version: 2
|
||||
RefIds:
|
||||
- rid: 9120355587586260993
|
||||
type: {class: OSXStandaloneBuildProfile, ns: UnityEditor.OSXStandalone, asm: UnityEditor.OSXStandalone.Extensions}
|
||||
data:
|
||||
m_Development: 0
|
||||
m_ConnectProfiler: 0
|
||||
m_BuildWithDeepProfilingSupport: 0
|
||||
m_AllowDebugging: 0
|
||||
m_WaitForManagedDebugger: 0
|
||||
m_ManagedDebuggerFixedPort: 0
|
||||
m_ExplicitNullChecks: 0
|
||||
m_ExplicitDivideByZeroChecks: 0
|
||||
m_ExplicitArrayBoundsChecks: 0
|
||||
m_CompressionType: 0
|
||||
m_InstallInBuildFolder: 0
|
||||
m_MacOSXcodeBuildConfig: 1
|
||||
m_Architecture: 2
|
||||
m_CreateXcodeProject: 0
|
||||
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 081f4929fd671734ea1aa1511be7ec97
|
||||
NativeFormatImporter:
|
||||
externalObjects: {}
|
||||
mainObjectFileID: 11400000
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
Reference in New Issue
Block a user