mirror of https://github.com/game-ci/unity-builder.git (synced 2026-02-02 22:59:06 +08:00)

Compare commits: v2.1.2 ... davidmfino

19 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 1e53060e37 |  |
|  | 8c9dcf076d |  |
|  | f35829a9d4 |  |
|  | fb5b03c3b8 |  |
|  | 584c0366c6 |  |
|  | 28147e5e1b |  |
|  | 00c5685d03 |  |
|  | e334dc785a |  |
|  | a45155c578 |  |
|  | f68f14ff9f |  |
|  | 9329b7369c |  |
|  | 5bd589e19f |  |
|  | f03bee03f6 |  |
|  | 2a32a9d870 |  |
|  | d6ac850da1 |  |
|  | f300dd27bb |  |
|  | 4cca069ebb |  |
|  | 96555a0945 |  |
|  | 4cb3e593f5 |  |
35  .github/workflows/build-tests.yml  (vendored)

@@ -7,7 +7,30 @@ on:
       - '.github/**'

 env:
-  UNITY_LICENSE: "<?xml version=\"1.0\" encoding=\"UTF-8\"?><root>\n <License id=\"Terms\">\n <MachineBindings>\n <Binding Key=\"1\" Value=\"576562626572264761624c65526f7578\"/>\n <Binding Key=\"2\" Value=\"576562626572264761624c65526f7578\"/>\n </MachineBindings>\n <MachineID Value=\"D7nTUnjNAmtsUMcnoyrqkgIbYdM=\"/>\n <SerialHash Value=\"2033b8ac3e6faa3742ca9f0bfae44d18f2a96b80\"/>\n <Features>\n <Feature Value=\"33\"/>\n <Feature Value=\"1\"/>\n <Feature Value=\"12\"/>\n <Feature Value=\"2\"/>\n <Feature Value=\"24\"/>\n <Feature Value=\"3\"/>\n <Feature Value=\"36\"/>\n <Feature Value=\"17\"/>\n <Feature Value=\"19\"/>\n <Feature Value=\"62\"/>\n </Features>\n <DeveloperData Value=\"AQAAAEY0LUJHUlgtWEQ0RS1aQ1dWLUM1SlctR0RIQg==\"/>\n <SerialMasked Value=\"F4-BGRX-XD4E-ZCWV-C5JW-XXXX\"/>\n <StartDate Value=\"2021-02-08T00:00:00\"/>\n <UpdateDate Value=\"2021-02-09T00:34:57\"/>\n <InitialActivationDate Value=\"2021-02-08T00:34:56\"/>\n <LicenseVersion Value=\"6.x\"/>\n <ClientProvidedVersion Value=\"2018.4.30f1\"/>\n <AlwaysOnline Value=\"false\"/>\n <Entitlements>\n <Entitlement Ns=\"unity_editor\" Tag=\"UnityPersonal\" Type=\"EDITOR\" ValidTo=\"9999-12-31T00:00:00\"/>\n <Entitlement Ns=\"unity_editor\" Tag=\"DarkSkin\" Type=\"EDITOR_FEATURE\" ValidTo=\"9999-12-31T00:00:00\"/>\n </Entitlements>\n </License>\n<Signature xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><SignedInfo><CanonicalizationMethod Algorithm=\"http://www.w3.org/TR/2001/REC-xml-c14n-20010315#WithComments\"/><SignatureMethod Algorithm=\"http://www.w3.org/2000/09/xmldsig#rsa-sha1\"/><Reference URI=\"#Terms\"><Transforms><Transform Algorithm=\"http://www.w3.org/2000/09/xmldsig#enveloped-signature\"/></Transforms><DigestMethod Algorithm=\"http://www.w3.org/2000/09/xmldsig#sha1\"/><DigestValue>m0Db8UK+ktnOLJBtHybkfetpcKo=</DigestValue></Reference></SignedInfo><SignatureValue>o/pUbSQAukz7+ZYAWhnA0AJbIlyyCPL7bKVEM2lVqbrXt7cyey+umkCXamuOgsWPVUKBMkXtMH8L\n5etLmD0getWIhTGhzOnDCk+gtIPfL4jMo9tkEuOCROQAXCci23VFscKcrkB+3X6h4wEOtA2APhOY\nB+wvC794o8/82ffjP79aVAi57rp3Wmzx+9pe9yMwoJuljAy2sc2tIMgdQGWVmOGBpQm3JqsidyzI\nJWG2kjnc7pDXK9pwYzXoKiqUqqrut90d+kQqRyv7MSZXR50HFqD/LI69h68b7P8Bjo3bPXOhNXGR\n9YCoemH6EkfCJxp2gIjzjWW+l2Hj2EsFQi8YXw==</SignatureValue></Signature></root>"
+  UNITY_LICENSE:
+    "<?xml version=\"1.0\" encoding=\"UTF-8\"?><root>\n <License
+    id=\"Terms\">\n <MachineBindings>\n <Binding Key=\"1\"
+    Value=\"576562626572264761624c65526f7578\"/>\n <Binding Key=\"2\"
+    Value=\"576562626572264761624c65526f7578\"/>\n </MachineBindings>\n <MachineID
+    Value=\"D7nTUnjNAmtsUMcnoyrqkgIbYdM=\"/>\n <SerialHash
+    Value=\"2033b8ac3e6faa3742ca9f0bfae44d18f2a96b80\"/>\n <Features>\n <Feature
+    Value=\"33\"/>\n <Feature Value=\"1\"/>\n <Feature Value=\"12\"/>\n <Feature
+    Value=\"2\"/>\n <Feature Value=\"24\"/>\n <Feature Value=\"3\"/>\n <Feature
+    Value=\"36\"/>\n <Feature Value=\"17\"/>\n <Feature Value=\"19\"/>\n <Feature
+    Value=\"62\"/>\n </Features>\n <DeveloperData
+    Value=\"AQAAAEY0LUJHUlgtWEQ0RS1aQ1dWLUM1SlctR0RIQg==\"/>\n <SerialMasked
+    Value=\"F4-BGRX-XD4E-ZCWV-C5JW-XXXX\"/>\n <StartDate Value=\"2021-02-08T00:00:00\"/>\n <UpdateDate
+    Value=\"2021-02-09T00:34:57\"/>\n <InitialActivationDate
+    Value=\"2021-02-08T00:34:56\"/>\n <LicenseVersion Value=\"6.x\"/>\n <ClientProvidedVersion
+    Value=\"2018.4.30f1\"/>\n <AlwaysOnline Value=\"false\"/>\n <Entitlements>\n <Entitlement
+    Ns=\"unity_editor\" Tag=\"UnityPersonal\" Type=\"EDITOR\"
+    ValidTo=\"9999-12-31T00:00:00\"/>\n <Entitlement Ns=\"unity_editor\" Tag=\"DarkSkin\"
+    Type=\"EDITOR_FEATURE\" ValidTo=\"9999-12-31T00:00:00\"/>\n </Entitlements>\n </License>\n<Signature
+    xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><SignedInfo><CanonicalizationMethod
+    Algorithm=\"http://www.w3.org/TR/2001/REC-xml-c14n-20010315#WithComments\"/><SignatureMethod
+    Algorithm=\"http://www.w3.org/2000/09/xmldsig#rsa-sha1\"/><Reference URI=\"#Terms\"><Transforms><Transform
+    Algorithm=\"http://www.w3.org/2000/09/xmldsig#enveloped-signature\"/></Transforms><DigestMethod
+    Algorithm=\"http://www.w3.org/2000/09/xmldsig#sha1\"/><DigestValue>m0Db8UK+ktnOLJBtHybkfetpcKo=</DigestValue></Reference></SignedInfo><SignatureValue>o/pUbSQAukz7+ZYAWhnA0AJbIlyyCPL7bKVEM2lVqbrXt7cyey+umkCXamuOgsWPVUKBMkXtMH8L\n5etLmD0getWIhTGhzOnDCk+gtIPfL4jMo9tkEuOCROQAXCci23VFscKcrkB+3X6h4wEOtA2APhOY\nB+wvC794o8/82ffjP79aVAi57rp3Wmzx+9pe9yMwoJuljAy2sc2tIMgdQGWVmOGBpQm3JqsidyzI\nJWG2kjnc7pDXK9pwYzXoKiqUqqrut90d+kQqRyv7MSZXR50HFqD/LI69h68b7P8Bjo3bPXOhNXGR\n9YCoemH6EkfCJxp2gIjzjWW+l2Hj2EsFQi8YXw==</SignatureValue></Signature></root>"

 jobs:
   buildForAllPlatformsUbuntu:

@@ -16,6 +39,9 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
+        cloudRunnerCluster:
+          # - local-docker
+          - local
         projectPath:
           - test-project
         unityVersion:

@@ -38,14 +64,14 @@ jobs:
       ###########################
       #         Checkout        #
       ###########################
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           lfs: true

       ###########################
       #           Cache         #
       ###########################
-      - uses: actions/cache@v2
+      - uses: actions/cache@v3
         with:
           path: ${{ matrix.projectPath }}/Library
           key: Library-${{ matrix.projectPath }}-ubuntu-${{ matrix.targetPlatform }}

@@ -62,11 +88,12 @@ jobs:
           unityVersion: ${{ matrix.unityVersion }}
           targetPlatform: ${{ matrix.targetPlatform }}
           customParameters: -profile SomeProfile -someBoolean -someValue exampleValue
+          cloudRunnerCluster: ${{ matrix.cloudRunnerCluster }}

       ###########################
       #          Upload         #
       ###########################
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         with:
           name: Build Ubuntu (${{ matrix.unityVersion }})
           path: build

8  .github/workflows/cleanup.yml  (vendored)

@@ -15,13 +15,13 @@ jobs:
   cleanupCloudRunner:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
        if: github.event.event_type != 'pull_request_target'
        with:
          lfs: true
-      - uses: actions/setup-node@v2
+      - uses: actions/setup-node@v3
        with:
-          node-version: 12.x
+          node-version: 16.x
      - run: yarn
      - run: yarn run cli --help
        env:

@@ -29,7 +29,7 @@ jobs:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           AWS_DEFAULT_REGION: eu-west-2
-      - run: yarn run cli -m aws-list-all
+      - run: yarn run cli -m list-resources
        env:
          AWS_REGION: eu-west-2
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}

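For orientation, the cleanup step above now invokes the renamed CLI mode `list-resources` (previously `aws-list-all`). A minimal sketch of running the same listing from a scheduled workflow, reusing the secret names shown in this diff; the cron expression and workflow name are placeholders, not part of the change:

```yaml
# Hypothetical scheduled variant of the resource-listing step from cleanup.yml.
# Secret names (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY) follow the workflow above.
name: List cloud-runner resources (sketch)
on:
  schedule:
    - cron: '0 3 * * *' # placeholder schedule

jobs:
  listResources:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 16.x
      - run: yarn
      # Same renamed CLI mode used by cleanup.yml in this compare.
      - run: yarn run cli -m list-resources
        env:
          AWS_REGION: eu-west-2
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
```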
58  .github/workflows/cloud-runner-async-checks.yml  (vendored, new file)

@@ -0,0 +1,58 @@
+name: Async Checks API
+
+on:
+  workflow_dispatch:
+    inputs:
+      checksObject:
+        description: ''
+        required: false
+        default: ''
+
+permissions:
+  checks: write
+
+env:
+  GKE_ZONE: 'us-central1'
+  GKE_REGION: 'us-central1'
+  GKE_PROJECT: 'unitykubernetesbuilder'
+  GKE_CLUSTER: 'game-ci-github-pipelines'
+  GCP_LOGGING: true
+  GCP_PROJECT: unitykubernetesbuilder
+  GCP_LOG_FILE: ${{ github.workspace }}/cloud-runner-logs.txt
+  AWS_REGION: eu-west-2
+  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+  AWS_DEFAULT_REGION: eu-west-2
+  AWS_BASE_STACK_NAME: game-ci-github-pipelines
+  CLOUD_RUNNER_BRANCH: ${{ github.ref }}
+  CLOUD_RUNNER_DEBUG: true
+  CLOUD_RUNNER_DEBUG_TREE: true
+  DEBUG: true
+  UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
+  PROJECT_PATH: test-project
+  UNITY_VERSION: 2019.3.15f1
+  USE_IL2CPP: false
+
+jobs:
+  asyncChecks:
+    name: Async Checks
+    if: github.event.event_type != 'pull_request_target'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout (default)
+        uses: actions/checkout@v3
+        with:
+          lfs: false
+      - run: yarn
+      - run: yarn run cli -m checks-update
+        timeout-minutes: 180
+        env:
+          UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
+          PROJECT_PATH: test-project
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          TARGET_PLATFORM: StandaloneWindows64
+          cloudRunnerTests: true
+          versioning: None
+          CLOUD_RUNNER_CLUSTER: local-docker
+          AWS_BASE_STACK_NAME: game-ci-github-pipelines
+          CHECKS_UPDATE: ${{ github.event.inputs.checksObject }}

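The new Async Checks API workflow is trigger-only: it runs on `workflow_dispatch` and forwards the `checksObject` input into `CHECKS_UPDATE`. A minimal sketch of dispatching it from another workflow, assuming the GitHub CLI preinstalled on GitHub-hosted runners and a token with permission to trigger workflows; the payload shown is a placeholder, not a format defined by this diff:

```yaml
# Hypothetical dispatcher for the "Async Checks API" workflow added in this compare.
# Assumes `gh` is available on the runner and GH_TOKEN is allowed to run workflows (actions: write).
name: Dispatch async checks (sketch)
on: workflow_dispatch

jobs:
  dispatch:
    runs-on: ubuntu-latest
    steps:
      - run: |
          # checksObject is an opaque string input; this JSON value is only a placeholder.
          gh workflow run "Async Checks API" --field checksObject='{"example":"placeholder"}'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_REPO: ${{ github.repository }}
```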
241  .github/workflows/cloud-runner-pipeline.yml  (vendored)

@@ -1,11 +1,13 @@
-name: Cloud Runner
+name: Cloud Runner CI Pipeline

 on:
-  push: { branches: [cloud-runner-develop, main] }
-  # push: { branches: [main] }
-  # pull_request:
-  #   paths-ignore:
-  #     - '.github/**'
+  push: { branches: [cloud-runner-develop, cloud-runner-preview, main] }
+  workflow_dispatch:
+
+permissions:
+  checks: write
+  contents: read
+  actions: write

 env:
   GKE_ZONE: 'us-central1'

@@ -19,45 +21,44 @@ env:
   AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
   AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
   AWS_DEFAULT_REGION: eu-west-2
-  AWS_BASE_STACK_NAME: game-ci-github-pipelines
+  AWS_BASE_STACK_NAME: game-ci-team-pipelines
   CLOUD_RUNNER_BRANCH: ${{ github.ref }}
-  CLOUD_RUNNER_TESTS: true
+  CLOUD_RUNNER_DEBUG: true
+  CLOUD_RUNNER_DEBUG_TREE: true
   DEBUG: true
   UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
+  PROJECT_PATH: test-project
+  UNITY_VERSION: 2019.3.15f1
+  USE_IL2CPP: false
+  USE_GKE_GCLOUD_AUTH_PLUGIN: true

 jobs:
-  awsBuild:
-    name: AWS Fargate Build
-    if: github.event.pull_request.draft == false
+  integrationTests:
+    name: Integration Tests
+    if: github.event.event_type != 'pull_request_target'
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
-        projectPath:
-          - test-project
-        unityVersion:
-          # - 2019.2.11f1
-          - 2019.3.15f1
-        targetPlatform:
-          #- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
-          - StandaloneWindows64 # Build a Windows 64-bit standalone.
-          - StandaloneLinux64 # Build a Linux 64-bit standalone.
-          - WebGL # WebGL.
-          #- iOS # Build an iOS player.
-          #- Android # Build an Android .apk.
-          # - StandaloneWindows # Build a Windows standalone.
-          # - WSAPlayer # Build an Windows Store Apps player.
-          # - PS4 # Build a PS4 Standalone.
-          # - XboxOne # Build a Xbox One Standalone.
-          # - tvOS # Build to Apple's tvOS platform.
-          # - Switch # Build a Nintendo Switch player
-        # steps
+        cloudRunnerCluster:
+          - aws
+          - local-docker
+          - k8s
     steps:
       - name: Checkout (default)
-        uses: actions/checkout@v2
-        if: github.event.event_type != 'pull_request_target'
+        uses: actions/checkout@v3
         with:
-          lfs: true
+          lfs: false
+      - uses: google-github-actions/auth@v1
+        with:
+          credentials_json: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
+      - name: 'Set up Cloud SDK'
+        uses: 'google-github-actions/setup-gcloud@v1'
+      - name: Get GKE cluster credentials
+        run: |
+          export USE_GKE_GCLOUD_AUTH_PLUGIN=True
+          gcloud components install gke-gcloud-auth-plugin
+          gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
       - name: Configure AWS Credentials
         uses: aws-actions/configure-aws-credentials@v1
         with:

@@ -65,154 +66,68 @@ jobs:
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: eu-west-2
       - run: yarn
-      - run: yarn run cli --help
-      - run: yarn run test "caching"
-      - run: yarn run test-i-aws
+      - run: yarn run test "cloud-runner-async-workflow" --detectOpenHandles --forceExit --runInBand
+        if: matrix.CloudRunnerCluster != 'local-docker'
+        timeout-minutes: 180
         env:
           UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
-          PROJECT_PATH: ${{ matrix.projectPath }}
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          TARGET_PLATFORM: ${{ matrix.targetPlatform }}
+          PROJECT_PATH: test-project
+          GIT_PRIVATE_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          TARGET_PLATFORM: StandaloneWindows64
           cloudRunnerTests: true
           versioning: None
-      - uses: ./
-        id: aws-fargate-unity-build
-        timeout-minutes: 25
-        with:
-          cloudRunnerCluster: aws
+          CLOUD_RUNNER_CLUSTER: ${{ matrix.cloudRunnerCluster }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - run: yarn run test-i --detectOpenHandles --forceExit --runInBand
+        if: matrix.CloudRunnerCluster == 'local-docker'
+        timeout-minutes: 180
+        env:
+          UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
+          PROJECT_PATH: test-project
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          TARGET_PLATFORM: StandaloneWindows64
+          cloudRunnerTests: true
           versioning: None
-          projectPath: ${{ matrix.projectPath }}
-          unityVersion: ${{ matrix.unityVersion }}
-          targetPlatform: ${{ matrix.targetPlatform }}
-          githubToken: ${{ secrets.GITHUB_TOKEN }}
-          postBuildSteps: |
-            - name: upload
-              image: amazon/aws-cli
-              commands: |
-                aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
-                aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
-                aws configure set region $AWS_DEFAULT_REGION --profile default
-                aws s3 ls
-                aws s3 ls game-ci-test-storage
-                ls /data/cache/$CACHE_KEY
-                ls /data/cache/$CACHE_KEY/build
-                aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.tar
-              secrets:
-                - name: awsAccessKeyId
-                  value: ${{ secrets.AWS_ACCESS_KEY_ID }}
-                - name: awsSecretAccessKey
-                  value: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-                - name: awsDefaultRegion
-                  value: eu-west-2
-      - run: |
-          aws s3 cp s3://game-ci-test-storage/${{ steps.aws-fargate-unity-build.outputs.CACHE_KEY }}/build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar
-          ls
-      - run: yarn run cli -m aws-garbage-collect
-      ###########################
-      #          Upload         #
-      ###########################
-      # download from cloud storage
-      - uses: actions/upload-artifact@v2
-        with:
-          name: AWS Build (${{ matrix.targetPlatform }})
-          path: build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar
-          retention-days: 14
-  k8sBuilds:
-    name: K8s (GKE Autopilot) build for ${{ matrix.targetPlatform }} on version ${{ matrix.unityVersion }}
+          CLOUD_RUNNER_CLUSTER: ${{ matrix.cloudRunnerCluster }}
+  localBuildTests:
+    name: Local Build Target Tests
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
-        unityVersion:
-          # - 2019.2.11f1
-          - 2019.3.15f1
+        cloudRunnerCluster:
+          #- aws
+          - local-docker
+          #- k8s
         targetPlatform:
-          # - StandaloneWindows64
-          - StandaloneLinux64
+          - StandaloneOSX # Build a macOS standalone (Intel 64-bit).
+          - StandaloneWindows64 # Build a Windows 64-bit standalone.
+          - StandaloneLinux64 # Build a Linux 64-bit standalone.
+          - WebGL # WebGL.
+          - iOS # Build an iOS player.
+          - Android # Build an Android .apk.
     steps:
-      ###########################
-      #         Checkout        #
-      ###########################
-      - uses: actions/checkout@v2
-        if: github.event.event_type != 'pull_request_target'
+      - name: Checkout (default)
+        uses: actions/checkout@v3
         with:
-          lfs: true
-
-      ###########################
-      #           Setup         #
-      ###########################
-      - uses: google-github-actions/setup-gcloud@v0
-        with:
-          version: '288.0.0'
-          service_account_email: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_EMAIL }}
-          service_account_key: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
-      - name: Get GKE cluster credentials
-        run: gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
-
-      ###########################
-      # Cloud Runner Test Suite #
-      ###########################
-      - uses: actions/setup-node@v2
-        with:
-          node-version: 12.x
+          lfs: false
       - run: yarn
-      - run: yarn run cli --help
-      - run: yarn run test "caching"
-      - name: Cloud Runner Test Suite
-        run: yarn run test-i-k8s --detectOpenHandles --forceExit
-        env:
-          UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
-          PROJECT_PATH: ${{ matrix.projectPath }}
-          TARGET_PLATFORM: ${{ matrix.targetPlatform }}
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          KUBE_CONFIG: ${{ steps.read-base64.outputs.base64 }}
-          unityVersion: ${{ matrix.unityVersion }}
-          cloudRunnerTests: true
-          versioning: None
-
-      ###########################
-      # Cloud Runner Build Test #
-      ###########################
-      - name: Cloud Runner Build Test
-        uses: ./
-        id: k8s-unity-build
-        timeout-minutes: 30
-        with:
-          cloudRunnerCluster: k8s
-          UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
-          targetPlatform: ${{ matrix.targetPlatform }}
-          kubeConfig: ${{ steps.read-base64.outputs.base64 }}
-          githubToken: ${{ secrets.GITHUB_TOKEN }}
-          projectPath: test-project
-          unityVersion: ${{ matrix.unityVersion }}
-          versioning: None
-          postBuildSteps: |
-            - name: upload
-              image: amazon/aws-cli
-              commands: |
-                aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
-                aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
-                aws configure set region $AWS_DEFAULT_REGION --profile default
-                aws s3 ls
-                aws s3 ls game-ci-test-storage
-                ls /data/cache/$CACHE_KEY
-                aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.tar
-              secrets:
-                - name: awsAccessKeyId
-                  value: ${{ secrets.AWS_ACCESS_KEY_ID }}
-                - name: awsSecretAccessKey
-                  value: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-                - name: awsDefaultRegion
-                  value: eu-west-2
+      - uses: ./
+        id: unity-build
+        timeout-minutes: 90
+        env:
+          UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
+        with:
+          cloudRunnerTests: true
+          versioning: None
+          projectPath: test-project
+          gitPrivateToken: ${{ secrets.GITHUB_TOKEN }}
+          targetPlatform: ${{ matrix.targetPlatform }}
+          cloudRunnerCluster: ${{ matrix.cloudRunnerCluster }}
       - run: |
-          aws s3 cp s3://game-ci-test-storage/${{ steps.k8s-unity-build.outputs.CACHE_KEY }}/build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar
-          ls
-      ###########################
-      #          Upload         #
-      ###########################
-      # download from cloud storage
-      - uses: actions/upload-artifact@v2
+          cp ./cloud-runner-cache/cache/${{ steps.unity-build.outputs.CACHE_KEY }}/build/${{ steps.unity-build.outputs.BUILD_ARTIFACT }} ${{ steps.unity-build.outputs.BUILD_ARTIFACT }}
+      - uses: actions/upload-artifact@v3
         with:
-          name: K8s Build (${{ matrix.targetPlatform }})
-          path: build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar
+          name: ${{ matrix.cloudRunnerCluster }} Build (${{ matrix.targetPlatform }})
+          path: ${{ steps.unity-build.outputs.BUILD_ARTIFACT }}
           retention-days: 14

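The upload step above names each artifact "<cloudRunnerCluster> Build (<targetPlatform>)" and stores the file exposed through the build step's `BUILD_ARTIFACT` output. A minimal sketch of a follow-up job that could be appended under `jobs:` in this same workflow to pull one of those artifacts back down; the job name is a placeholder and the artifact name assumes the `local-docker` / `StandaloneWindows64` matrix leg:

```yaml
  # Hypothetical follow-up job; the artifact name must match the upload step's pattern
  # "<cloudRunnerCluster> Build (<targetPlatform>)" for the matrix leg you want.
  downloadExample:
    needs: localBuildTests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/download-artifact@v3
        with:
          name: local-docker Build (StandaloneWindows64) # assumed matrix leg
          path: downloaded
      - run: ls -R downloaded
```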
6  .github/workflows/mac-build-tests.yml  (vendored)

@@ -26,14 +26,14 @@ jobs:
       ###########################
       #         Checkout        #
       ###########################
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           lfs: true

       ###########################
       #           Cache         #
       ###########################
-      - uses: actions/cache@v2
+      - uses: actions/cache@v3
         with:
           path: ${{ matrix.projectPath }}/Library
           key: Library-${{ matrix.projectPath }}-macos-${{ matrix.targetPlatform }}

@@ -67,7 +67,7 @@ jobs:
       ###########################
       #          Upload         #
       ###########################
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         with:
           name: Build MacOS (${{ matrix.unityVersion }})
           path: build

6  .github/workflows/windows-build-tests.yml  (vendored)

@@ -29,14 +29,14 @@ jobs:
       ###########################
       #         Checkout        #
       ###########################
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           lfs: true

       ###########################
       #           Cache         #
       ###########################
-      - uses: actions/cache@v2
+      - uses: actions/cache@v3
         with:
           path: ${{ matrix.projectPath }}/Library
           key: Library-${{ matrix.projectPath }}-windows-${{ matrix.targetPlatform }}

@@ -70,7 +70,7 @@ jobs:
       ###########################
       #          Upload         #
       ###########################
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         with:
           name: Build Windows (${{ matrix.unityVersion }})
           path: build

1  .vscode/settings.json  (vendored)

@@ -1,4 +1,5 @@
 {
+  "files.eol": "\n",
   "god.tsconfig": "./tsconfig.json",
   "yaml.customTags": [
     "!And",

91  action.yml

@@ -9,7 +9,8 @@ inputs:
   unityVersion:
     required: false
     default: 'auto'
-    description: 'Version of unity to use for building the project. Use "auto" to get from your ProjectSettings/ProjectVersion.txt'
+    description:
+      'Version of unity to use for building the project. Use "auto" to get from your ProjectSettings/ProjectVersion.txt'
   customImage:
     required: false
     default: ''

@@ -81,96 +82,118 @@ inputs:
   gitPrivateToken:
     required: false
     default: ''
-    description: 'Github private token to pull from github'
+    description: '[CloudRunner] Github private token to pull from github'
+  githubOwner:
+    required: false
+    default: ''
+    description: '[CloudRunner] GitHub owner name or organization/team name'
   chownFilesTo:
     required: false
     default: ''
-    description: 'User and optionally group (user or user:group or uid:gid) to give ownership of the resulting build artifacts'
+    description:
+      'User and optionally group (user or user:group or uid:gid) to give ownership of the resulting build artifacts'
   allowDirtyBuild:
     required: false
     default: ''
-    description: 'Allows the branch of the build to be dirty, and still generate the build.'
+    description: '[CloudRunner] Allows the branch of the build to be dirty, and still generate the build.'
   postBuildSteps:
     required: false
     default: ''
-    description: 'run a post build job in yaml format with the keys image, secrets (name, value object array), command string'
+    description:
+      '[CloudRunner] run a post build job in yaml format with the keys image, secrets (name, value object array),
+      command string'
   preBuildSteps:
     required: false
     default: ''
-    description: 'Run a pre build job after the repository setup but before the build job (in yaml format with the keys image, secrets (name, value object array), command line string)'
+    description:
+      '[CloudRunner] Run a pre build job after the repository setup but before the build job (in yaml format with the
+      keys image, secrets (name, value object array), command line string)'
+  customStepFiles:
+    required: false
+    default: ''
+    description:
+      '[CloudRunner] Specify the names (by file name) of custom steps to run before or after cloud runner jobs, must
+      match a yaml step file inside your repo in the folder .game-ci/steps/'
+  customHookFiles:
+    required: false
+    default: ''
+    description:
+      '[CloudRunner] Specify the names (by file name) of custom hooks to run before or after cloud runner jobs, must
+      match a yaml step file inside your repo in the folder .game-ci/hooks/'
   customJobHooks:
     required: false
     default: ''
-    description: 'Specify custom commands and trigger hooks (injects commands into jobs)'
+    description: '[CloudRunner] Specify custom commands and trigger hooks (injects commands into jobs)'
   customJob:
     required: false
     default: ''
-    description: 'Run a custom job instead of the standard build automation for cloud runner (in yaml format with the keys image, secrets (name, value object array), command line string)'
+    description:
+      '[CloudRunner] Run a custom job instead of the standard build automation for cloud runner (in yaml format with the
+      keys image, secrets (name, value object array), command line string)'
   awsBaseStackName:
     default: 'game-ci'
     required: false
-    description: 'The Cloud Formation stack name that must be setup before using this option.'
+    description: '[CloudRunner] The Cloud Formation stack name that must be setup before using this option.'
   cloudRunnerCluster:
     default: 'local'
     required: false
-    description: 'Either local, k8s or aws can be used to run builds on a remote cluster. Additional parameters must be configured.'
+    description:
+      '[CloudRunner] Either local, k8s or aws can be used to run builds on a remote cluster. Additional parameters must
+      be configured.'
   cloudRunnerCpu:
     default: ''
     required: false
-    description: 'Amount of CPU time to assign the remote build container'
+    description: '[CloudRunner] Amount of CPU time to assign the remote build container'
   cloudRunnerMemory:
     default: ''
     required: false
-    description: 'Amount of memory to assign the remote build container'
-  cachePushOverrideCommand:
-    default: ''
-    required: false
-    description: 'A command run every time a file is pushed to cache, formatted with input file path and remote cache path'
-  cachePullOverrideCommand:
-    default: ''
-    required: false
-    description: 'A command run every time before a file is being pulled from cache, formatted with request cache file and destination path'
+    description: '[CloudRunner] Amount of memory to assign the remote build container'
   readInputFromOverrideList:
     default: ''
     required: false
-    description: 'Comma separated list of input value names to read from "input override command"'
+    description: '[CloudRunner] Comma separated list of input value names to read from "input override command"'
   readInputOverrideCommand:
     default: ''
     required: false
-    description: 'Extend game ci by specifying a command to execute to pull input from external source e.g cloud provider secret managers'
+    description:
+      '[CloudRunner] Extend game ci by specifying a command to execute to pull input from external source e.g cloud
+      provider secret managers'
   kubeConfig:
     default: ''
     required: false
-    description: 'Supply a base64 encoded kubernetes config to run builds on kubernetes and stream logs until completion.'
+    description:
+      '[CloudRunner] Supply a base64 encoded kubernetes config to run builds on kubernetes and stream logs until
+      completion.'
   kubeVolume:
     default: ''
     required: false
-    description: 'Supply a Persistent Volume Claim name to use for the Unity build.'
+    description: '[CloudRunner] Supply a Persistent Volume Claim name to use for the Unity build.'
   kubeStorageClass:
     default: ''
     required: false
-    description: 'Kubernetes storage class to use for cloud runner jobs, leave empty to install rook cluster.'
+    description:
+      '[CloudRunner] Kubernetes storage class to use for cloud runner jobs, leave empty to install rook cluster.'
   kubeVolumeSize:
     default: '5Gi'
     required: false
-    description: 'Amount of disc space to assign the Kubernetes Persistent Volume'
+    description: '[CloudRunner] Amount of disc space to assign the Kubernetes Persistent Volume'
   cacheKey:
     default: ''
     required: false
-    description: 'Cache key to indicate bucket for cache'
+    description: '[CloudRunner] Cache key to indicate bucket for cache'
-  checkDependencyHealthOverride:
-    default: ''
-    required: false
-    description: 'Use to specify a way to check depdency services health to enable resilient self-starting jobs'
-  startDependenciesOverride:
-    default: ''
-    required: false
-    description: 'Use to specify a way to start depdency services health to enable resilient self-starting jobs'
+  watchToEnd:
+    default: 'true'
+    required: false
+    description:
+      '[CloudRunner] Whether or not to watch the build to the end. Can be used for especially long running jobs e.g
+      imports or self-hosted ephemeral runners.'
 outputs:
   volume:
     description: 'The Persistent Volume (PV) where the build artifacts have been stored by Kubernetes'
   buildVersion:
     description: 'The generated version used for the Unity build'
+  androidVersionCode:
+    description: 'The generated versionCode used for the Android Unity build'
 branding:
   icon: 'box'
   color: 'gray-dark'

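For context, here is a minimal workflow sketch of how the inputs and output touched by this `action.yml` revision might be passed to the action. The action ref `game-ci/unity-builder@v2`, the owner name, and the custom step file name are illustrative assumptions, not values taken from this compare:

```yaml
# Hypothetical usage sketch for inputs added or relabelled in this action.yml revision.
name: Cloud Runner usage sketch
on: workflow_dispatch

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: game-ci/unity-builder@v2 # assumed ref; pin to the release you actually use
        id: build
        env:
          UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
        with:
          projectPath: test-project
          targetPlatform: Android
          versioning: None
          cloudRunnerCluster: aws            # local, k8s or aws
          awsBaseStackName: game-ci          # CloudFormation base stack name
          githubOwner: my-org                # new input; placeholder owner/organization
          customStepFiles: my-step           # new input; would match .game-ci/steps/my-step.yaml (placeholder)
          watchToEnd: 'true'                 # new input: keep watching the remote build until it finishes
      # androidVersionCode is a new output; it is only meaningful for Android targets.
      - run: echo "versionCode=${{ steps.build.outputs.androidVersionCode }}"
```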
5084  dist/index.js  (generated, vendored)
File diff suppressed because it is too large.

2  dist/index.js.map  (generated, vendored)
File diff suppressed because one or more lines are too long.

410  dist/licenses.txt  (generated, vendored)

@@ -264,6 +264,156 @@ Apache-2.0
 limitations under the License.


+@octokit/auth-token
+MIT
+The MIT License
+
+Copyright (c) 2019 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/core
+MIT
+The MIT License
+
+Copyright (c) 2019 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/endpoint
+MIT
+The MIT License
+
+Copyright (c) 2018 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/graphql
+MIT
+The MIT License
+
+Copyright (c) 2018 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/request
+MIT
+The MIT License
+
+Copyright (c) 2018 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+@octokit/request-error
+MIT
+The MIT License
+
+Copyright (c) 2019 Octokit contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
 @panva/asn1.js
 MIT
 The MIT License (MIT)

@@ -876,6 +1026,211 @@ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.


+before-after-hook
+Apache-2.0
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction,
+and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by
+the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all
+other entities that control, are controlled by, or are under common
+control with that entity. For the purposes of this definition,
+"control" means (i) the power, direct or indirect, to cause the
+direction or management of such entity, whether by contract or
+otherwise, or (ii) ownership of fifty percent (50%) or more of the
+outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity
+exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications,
+including but not limited to software source code, documentation
+source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical
+transformation or translation of a Source form, including but
+not limited to compiled object code, generated documentation,
+and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or
+Object form, made available under the License, as indicated by a
+copyright notice that is included in or attached to the work
+(an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object
+form, that is based on (or derived from) the Work and for which the
+editorial revisions, annotations, elaborations, or other modifications
+represent, as a whole, an original work of authorship. For the purposes
+of this License, Derivative Works shall not include works that remain
+separable from, or merely link (or bind by name) to the interfaces of,
+the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including
+the original version of the Work and any modifications or additions
+to that Work or Derivative Works thereof, that is intentionally
+submitted to Licensor for inclusion in the Work by the copyright owner
+or by an individual or Legal Entity authorized to submit on behalf of
+the copyright owner. For the purposes of this definition, "submitted"
+means any form of electronic, verbal, or written communication sent
+to the Licensor or its representatives, including but not limited to
+communication on electronic mailing lists, source code control systems,
+and issue tracking systems that are managed by, or on behalf of, the
+Licensor for the purpose of discussing and improving the Work, but
+excluding communication that is conspicuously marked or otherwise
+designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity
+on behalf of whom a Contribution has been received by Licensor and
+subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+copyright license to reproduce, prepare Derivative Works of,
+publicly display, publicly perform, sublicense, and distribute the
+Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+(except as stated in this section) patent license to make, have made,
+use, offer to sell, sell, import, and otherwise transfer the Work,
+where such license applies only to those patent claims licensable
+by such Contributor that are necessarily infringed by their
+Contribution(s) alone or by combination of their Contribution(s)
+with the Work to which such Contribution(s) was submitted. If You
+institute patent litigation against any entity (including a
+cross-claim or counterclaim in a lawsuit) alleging that the Work
+or a Contribution incorporated within the Work constitutes direct
+or contributory patent infringement, then any patent licenses
+granted to You under this License for that Work shall terminate
+as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+Work or Derivative Works thereof in any medium, with or without
+modifications, and in Source or Object form, provided that You
+meet the following conditions:
+
+(a) You must give any other recipients of the Work or
+Derivative Works a copy of this License; and
+
+(b) You must cause any modified files to carry prominent notices
+stating that You changed the files; and
+
+(c) You must retain, in the Source form of any Derivative Works
+that You distribute, all copyright, patent, trademark, and
+attribution notices from the Source form of the Work,
+excluding those notices that do not pertain to any part of
+the Derivative Works; and
+
+(d) If the Work includes a "NOTICE" text file as part of its
+distribution, then any Derivative Works that You distribute must
+include a readable copy of the attribution notices contained
+within such NOTICE file, excluding those notices that do not
+pertain to any part of the Derivative Works, in at least one
+of the following places: within a NOTICE text file distributed
+as part of the Derivative Works; within the Source form or
+documentation, if provided along with the Derivative Works; or,
+within a display generated by the Derivative Works, if and
+wherever such third-party notices normally appear. The contents
+of the NOTICE file are for informational purposes only and
+do not modify the License. You may add Your own attribution
+notices within Derivative Works that You distribute, alongside
+or as an addendum to the NOTICE text from the Work, provided
+that such additional attribution notices cannot be construed
+as modifying the License.
+
+You may add Your own copyright statement to Your modifications and
+may provide additional or different license terms and conditions
+for use, reproduction, or distribution of Your modifications, or
+for any such Derivative Works as a whole, provided Your use,
+reproduction, and distribution of the Work otherwise complies with
+the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+any Contribution intentionally submitted for inclusion in the Work
+by You to the Licensor shall be under the terms and conditions of
+this License, without any additional terms or conditions.
+Notwithstanding the above, nothing herein shall supersede or modify
+the terms of any separate license agreement you may have executed
+with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+names, trademarks, service marks, or product names of the Licensor,
+except as required for reasonable and customary use in describing the
+origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+agreed to in writing, Licensor provides the Work (and each
+Contributor provides its Contributions) on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+implied, including, without limitation, any warranties or conditions
+of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+PARTICULAR PURPOSE. You are solely responsible for determining the
+appropriateness of using or redistributing the Work and assume any
+risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+whether in tort (including negligence), contract, or otherwise,
+unless required by applicable law (such as deliberate and grossly
+negligent acts) or agreed to in writing, shall any Contributor be
+liable to You for damages, including any direct, indirect, special,
+incidental, or consequential damages of any character arising as a
+result of this License or out of the use or inability to use the
+Work (including but not limited to damages for loss of goodwill,
+work stoppage, computer failure or malfunction, or any and all
+other commercial damages or losses), even if such Contributor
+has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+the Work or Derivative Works thereof, You may choose to offer,
+and charge a fee for, acceptance of support, warranty, indemnity,
+or other liability obligations and/or rights consistent with this
+License. However, in accepting such obligations, You may act only
+on Your own behalf and on Your sole responsibility, not on behalf
+of any other Contributor, and only if You agree to indemnify,
+defend, and hold each Contributor harmless for any liability
+incurred by, or claims asserted against, such Contributor by reason
+of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+To apply the Apache License to your work, attach the following
+boilerplate notice, with the fields enclosed by brackets "{}"
+replaced with your own identifying information. (Don't include
+the brackets!) The text should be enclosed in the appropriate
+comment syntax for the file format. We also recommend that a
+file or class name and description of purpose be included on the
+same "printed page" as the copyright notice for easier
+identification within third-party archives.
+
+Copyright 2018 Gregor Martynus and other contributors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
 brace-expansion
 MIT
 MIT License

@@ -1359,6 +1714,25 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.


+deprecation
+ISC
+The ISC License
+
+Copyright (c) Gregor Martynus and contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
 domexception
 MIT
 MIT License

@@ -2126,6 +2500,31 @@ PERFORMANCE OF THIS SOFTWARE.


+is-plain-object
+MIT
+The MIT License (MIT)
+
+Copyright (c) 2014-2017, Jon Schlinkert.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
is-potential-custom-element-name
|
is-potential-custom-element-name
|
||||||
MIT
|
MIT
|
||||||
Copyright Mathias Bynens <https://mathiasbynens.be/>
|
Copyright Mathias Bynens <https://mathiasbynens.be/>
|
||||||
@@ -4151,6 +4550,17 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|||||||
SOFTWARE.
|
SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
universal-user-agent
|
||||||
|
ISC
|
||||||
|
# [ISC License](https://spdx.org/licenses/ISC)
|
||||||
|
|
||||||
|
Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m)
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
universalify
|
universalify
|
||||||
MIT
|
MIT
|
||||||
(The MIT License)
|
(The MIT License)
|
||||||
|
dist/platforms/mac/steps/build.sh (vendored, 6 changed lines)
@@ -76,8 +76,10 @@ fi
 
 if [[ "$BUILD_TARGET" == "Android" && -n "$ANDROID_SDK_MANAGER_PARAMETERS" ]]; then
   echo "Updating Android SDK with parameters: $ANDROID_SDK_MANAGER_PARAMETERS"
-  export JAVA_HOME="$(awk -F'=' '/JAVA_HOME=/{print $2}' /usr/bin/unity-editor.d/*)"
-  "$(awk -F'=' '/ANDROID_HOME=/{print $2}' /usr/bin/unity-editor.d/*)/tools/bin/sdkmanager" "$ANDROID_SDK_MANAGER_PARAMETERS"
+  ANDROID_INSTALL_LOCATION="/Applications/Unity/Hub/Editor/$UNITY_VERSION/PlaybackEngines/AndroidPlayer"
+  export JAVA_HOME="$ANDROID_INSTALL_LOCATION/OpenJDK"
+  export ANDROID_HOME="$ANDROID_INSTALL_LOCATION/SDK"
+  yes | "$ANDROID_HOME/tools/bin/sdkmanager" "$ANDROID_SDK_MANAGER_PARAMETERS"
   echo "Updated Android SDK."
 else
   echo "Not updating Android SDK."
dist/platforms/ubuntu/steps/activate.sh (vendored, 15 changed lines)
@@ -74,6 +74,21 @@ elif [[ -n "$UNITY_SERIAL" && -n "$UNITY_EMAIL" && -n "$UNITY_PASSWORD" ]]; then
   # Store the exit code from the verify command
   UNITY_EXIT_CODE=$?
 
+elif [[ -n "$UNITY_LICENSING_SERVER" ]]; then
+  #
+  # Custom Unity License Server
+  #
+  echo "Adding licensing server config"
+
+  /opt/unity/Editor/Data/Resources/Licensing/Client/Unity.Licensing.Client --acquire-floating > license.txt #is this accessible in a env variable?
+  PARSEDFILE=$(grep -oP '\".*?\"' < license.txt | tr -d '"')
+  export FLOATING_LICENSE
+  FLOATING_LICENSE=$(sed -n 2p <<< "$PARSEDFILE")
+  FLOATING_LICENSE_TIMEOUT=$(sed -n 4p <<< "$PARSEDFILE")
+
+  echo "Acquired floating license: \"$FLOATING_LICENSE\" with timeout $FLOATING_LICENSE_TIMEOUT"
+  # Store the exit code from the verify command
+  UNITY_EXIT_CODE=$?
 else
   #
   # NO LICENSE ACTIVATION STRATEGY MATCHED
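
Note: the floating-license branch above shells out to Unity.Licensing.Client and then recovers the lease id and timeout from the quoted tokens in its output (the second and fourth quoted values). A rough TypeScript sketch of that same parse, under the assumption that the client's output format matches what the grep/sed pipeline expects:

// Illustrative only: mirrors the grep -oP '".*?"' / sed -n 2p / sed -n 4p pipeline above.
function parseFloatingLicense(clientOutput: string): { license: string; timeoutSeconds: string } {
  // Collect every double-quoted token, then strip the quotes (same as grep + tr -d '"').
  const quoted = (clientOutput.match(/"(.*?)"/g) ?? []).map((token) => token.replace(/"/g, ''));

  // The shell script assumes token 2 is the lease id and token 4 is the timeout.
  return { license: quoted[1], timeoutSeconds: quoted[3] };
}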

dist/platforms/ubuntu/steps/build.sh (vendored, 10 changed lines)
@@ -63,17 +63,9 @@ else
 fi
 
 #
-# Prepare Android keystore and SDK, if needed
+# Prepare Android SDK, if needed
 #
 
-if [[ "$BUILD_TARGET" == "Android" && -n "$ANDROID_KEYSTORE_NAME" && -n "$ANDROID_KEYSTORE_BASE64" ]]; then
-  echo "Creating Android keystore."
-  echo "$ANDROID_KEYSTORE_BASE64" | base64 --decode > "$UNITY_PROJECT_PATH/$ANDROID_KEYSTORE_NAME"
-  echo "Created Android keystore."
-else
-  echo "Not creating Android keystore."
-fi
-
 if [[ "$BUILD_TARGET" == "Android" && -n "$ANDROID_SDK_MANAGER_PARAMETERS" ]]; then
   echo "Updating Android SDK with parameters: $ANDROID_SDK_MANAGER_PARAMETERS"
   export JAVA_HOME="$(awk -F'=' '/JAVA_HOME=/{print $2}' /usr/bin/unity-editor.d/*)"
@@ -4,7 +4,14 @@
 echo "Changing to \"$ACTIVATE_LICENSE_PATH\" directory."
 pushd "$ACTIVATE_LICENSE_PATH"
 
-if [[ -n "$UNITY_SERIAL" ]]; then
+if [[ -n "$UNITY_LICENSING_SERVER" ]]; then #
+  #
+  # Return any floating license used.
+  #
+  echo "Returning floating license: \"$FLOATING_LICENSE\""
+  /opt/unity/Editor/Data/Resources/Licensing/Client/Unity.Licensing.Client --return-floating "$FLOATING_LICENSE"
+
+elif [[ -n "$UNITY_SERIAL" ]]; then
   #
   # PROFESSIONAL (SERIAL) LICENSE MODE
   #
dist/platforms/windows/build.ps1 (vendored, 6 changed lines)
@@ -109,6 +109,10 @@ Write-Output "# Building project #"
 Write-Output "###########################"
 Write-Output ""
 
+# If $Env:CUSTOM_PARAMETERS contains spaces and is passed directly on the command line to Unity, powershell will wrap it
+# in double quotes. To avoid this, parse $Env:CUSTOM_PARAMETERS into an array, while respecting any quotations within the string.
+$_, $customParametersArray = Invoke-Expression('Write-Output -- "" ' + $Env:CUSTOM_PARAMETERS)
+
 & "C:\Program Files\Unity\Hub\Editor\$Env:UNITY_VERSION\Editor\Unity.exe" -quit -batchmode -nographics `
   -projectPath $Env:UNITY_PROJECT_PATH `
   -executeMethod $Env:BUILD_METHOD `
@@ -122,7 +126,7 @@ Write-Output ""
   -androidKeyaliasName $Env:ANDROID_KEYALIAS_NAME `
   -androidKeyaliasPass $Env:ANDROID_KEYALIAS_PASS `
   -androidTargetSdkVersion $Env:ANDROID_TARGET_SDK_VERSION `
-  $Env:CUSTOM_PARAMETERS `
+  $customParametersArray `
   -logfile | Out-Host
 
 # Catch exit code
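
Note: the comment in the hunk above describes the PowerShell quirk being worked around: passing $Env:CUSTOM_PARAMETERS as one string makes PowerShell re-quote the whole value, so the script instead lets the shell tokenize it once (the leading empty string produced by Write-Output is discarded into $_). A small TypeScript sketch of the equivalent quote-aware splitting, purely to illustrate the intended result; the helper name is hypothetical:

// Hypothetical helper: splits a flag string into tokens while keeping double-quoted segments intact.
function splitCustomParameters(parameters: string): string[] {
  const tokens = parameters.match(/"[^"]*"|\S+/g) ?? [];
  return tokens.map((token) => token.replace(/^"|"$/g, ''));
}

// splitCustomParameters('-profile "Some Profile" -someBoolean')
// -> ['-profile', 'Some Profile', '-someBoolean']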

dist/unity-config/services-config.json.template (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
+{
+  "licensingServiceBaseUrl": "%URL%",
+  "enableEntitlementLicensing": true,
+  "enableFloatingApi": true,
+  "clientConnectTimeoutSec": 5,
+  "clientHandshakeTimeoutSec": 10
+}
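
Note: the template above is a Unity licensing-client services configuration with %URL% left as a placeholder. A minimal sketch of how that placeholder could be filled in at activation time, assuming the server address arrives via the unityLicensingServer input / UNITY_LICENSING_SERVER variable; the function below is illustrative and not part of the diff:

import { readFileSync, writeFileSync } from 'fs';

// Illustrative: substitute the %URL% placeholder with the configured licensing server address.
function writeServicesConfig(templatePath: string, outputPath: string, licensingServerUrl: string): void {
  const template = readFileSync(templatePath, 'utf8');
  writeFileSync(outputPath, template.replace('%URL%', licensingServerUrl));
}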

game-ci/hooks/my-test-hook-post-build.yaml (new file, 3 lines)
@@ -0,0 +1,3 @@
+hook: after-build
+commands: |
+  echo "after-build hook test!"

game-ci/hooks/my-test-hook-pre-build.yaml (new file, 3 lines)
@@ -0,0 +1,3 @@
+hook: before-build
+commands: |
+  echo "before-build hook test!!"

game-ci/steps/my-test-step-post-build.yaml (new file, 3 lines)
@@ -0,0 +1,3 @@
+hook: after
+commands: |
+  echo "after-build step test!"

game-ci/steps/my-test-step-pre-build.yaml (new file, 3 lines)
@@ -0,0 +1,3 @@
+hook: before
+commands: |
+  echo "before-build step test!"
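
Note: these four fixtures exercise the custom hook/step mechanism: each file names a lifecycle point (before/after the build) and a block of shell commands to run there. A rough sketch of the shape such a file parses into, using the yaml package for illustration; the actual reader used by the cloud runner is not shown in this diff, so the types here are assumptions:

import { readFileSync } from 'fs';
import { parse } from 'yaml';

// Assumed shape of a hook file; only the two keys shown in the fixtures above.
interface CustomHookFile {
  hook: string; // e.g. 'before-build' or 'after-build'
  commands: string; // multi-line shell snippet to execute at that point
}

const hook = parse(readFileSync('game-ci/hooks/my-test-hook-pre-build.yaml', 'utf8')) as CustomHookFile;
console.log(`would run at "${hook.hook}":`, hook.commands);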

@@ -18,6 +18,7 @@ module.exports = {
   transform: {
     '^.+\\.ts$': 'ts-jest',
   },
+  autoRun: false,
 
   // Indicates whether each individual test should be reported during the run
   verbose: true,

package.json (10 changed lines)
@@ -12,15 +12,15 @@
     "lint": "prettier --check \"src/**/*.{js,ts}\" && eslint src/**/*.ts",
     "format": "prettier --write \"src/**/*.{js,ts}\"",
     "cli": "yarn ts-node src/index.ts -m cli",
-    "gcp-secrets-tests": "cross-env cloudRunnerCluster=aws cloudRunnerTests=true readInputOverrideCommand=\"gcloud secrets versions access 1 --secret=\"{0}\"\" populateOverride=true readInputFromOverrideList=UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD yarn test -i -t \"cloud runner\"",
-    "gcp-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"gcloud secrets versions access 1 --secret=\"{0}\"\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
-    "aws-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"aws secretsmanager get-secret-value --secret-id {0}\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
+    "gcp-secrets-tests": "cross-env cloudRunnerCluster=aws cloudRunnerTests=true readInputOverrideCommand=\"gcp-secret-manager\" populateOverride=true readInputFromOverrideList=UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD yarn test -i -t \"cloud runner\"",
+    "gcp-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"gcp-secret-manager\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
+    "aws-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"aws-secret-manager\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
     "cli-aws": "cross-env cloudRunnerCluster=aws yarn run test-cli",
     "cli-k8s": "cross-env cloudRunnerCluster=k8s yarn run test-cli",
     "test-cli": "cross-env cloudRunnerTests=true yarn ts-node src/index.ts -m cli --projectPath test-project",
     "test": "jest",
-    "test-i": "yarn run test-i-aws && yarn run test-i-k8s",
-    "test-i-f": "yarn run test-i-aws && yarn run test-i-k8s && yarn run cli-k8s && yarn run cli-aws",
+    "test-i": "cross-env cloudRunnerTests=true yarn test -i -t \"cloud runner\"",
+    "test-i-*": "yarn run test-i-aws && yarn run test-i-k8s",
     "test-i-aws": "cross-env cloudRunnerTests=true cloudRunnerCluster=aws yarn test -i -t \"cloud runner\"",
     "test-i-k8s": "cross-env cloudRunnerTests=true cloudRunnerCluster=k8s yarn test -i -t \"cloud runner\""
   },

@@ -32,6 +32,7 @@ async function runMain() {
 
     // Set output
     await Output.setBuildVersion(buildParameters.buildVersion);
+    await Output.setAndroidVersionCode(buildParameters.androidVersionCode);
   } catch (error) {
     core.setFailed((error as Error).message);
   }
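
Note: runMain now publishes the resolved android version code as a second action output next to the build version. A minimal sketch of what such an output setter can look like, assuming it wraps @actions/core; the real Output class is not shown in this diff:

import * as core from '@actions/core';

// Illustrative sketch of an output setter; the name mirrors the call added above.
class Output {
  static async setAndroidVersionCode(androidVersionCode: number | string): Promise<void> {
    core.setOutput('androidVersionCode', androidVersionCode);
  }
}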

@@ -5,21 +5,17 @@ import BuildParameters from './build-parameters';
 import Input from './input';
 import Platform from './platform';
 
-// Todo - Don't use process.env directly, that's what the input model class is for.
 const testLicense =
   '<?xml version="1.0" encoding="UTF-8"?><root>\n <License id="Terms">\n <MachineBindings>\n <Binding Key="1" Value="576562626572264761624c65526f7578"/>\n <Binding Key="2" Value="576562626572264761624c65526f7578"/>\n </MachineBindings>\n <MachineID Value="D7nTUnjNAmtsUMcnoyrqkgIbYdM="/>\n <SerialHash Value="2033b8ac3e6faa3742ca9f0bfae44d18f2a96b80"/>\n <Features>\n <Feature Value="33"/>\n <Feature Value="1"/>\n <Feature Value="12"/>\n <Feature Value="2"/>\n <Feature Value="24"/>\n <Feature Value="3"/>\n <Feature Value="36"/>\n <Feature Value="17"/>\n <Feature Value="19"/>\n <Feature Value="62"/>\n </Features>\n <DeveloperData Value="AQAAAEY0LUJHUlgtWEQ0RS1aQ1dWLUM1SlctR0RIQg=="/>\n <SerialMasked Value="F4-BGRX-XD4E-ZCWV-C5JW-XXXX"/>\n <StartDate Value="2021-02-08T00:00:00"/>\n <UpdateDate Value="2021-02-09T00:34:57"/>\n <InitialActivationDate Value="2021-02-08T00:34:56"/>\n <LicenseVersion Value="6.x"/>\n <ClientProvidedVersion Value="2018.4.30f1"/>\n <AlwaysOnline Value="false"/>\n <Entitlements>\n <Entitlement Ns="unity_editor" Tag="UnityPersonal" Type="EDITOR" ValidTo="9999-12-31T00:00:00"/>\n <Entitlement Ns="unity_editor" Tag="DarkSkin" Type="EDITOR_FEATURE" ValidTo="9999-12-31T00:00:00"/>\n </Entitlements>\n </License>\n<Signature xmlns="http://www.w3.org/2000/09/xmldsig#"><SignedInfo><CanonicalizationMethod Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315#WithComments"/><SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"/><Reference URI="#Terms"><Transforms><Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/></Transforms><DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"/><DigestValue>m0Db8UK+ktnOLJBtHybkfetpcKo=</DigestValue></Reference></SignedInfo><SignatureValue>o/pUbSQAukz7+ZYAWhnA0AJbIlyyCPL7bKVEM2lVqbrXt7cyey+umkCXamuOgsWPVUKBMkXtMH8L\n5etLmD0getWIhTGhzOnDCk+gtIPfL4jMo9tkEuOCROQAXCci23VFscKcrkB+3X6h4wEOtA2APhOY\nB+wvC794o8/82ffjP79aVAi57rp3Wmzx+9pe9yMwoJuljAy2sc2tIMgdQGWVmOGBpQm3JqsidyzI\nJWG2kjnc7pDXK9pwYzXoKiqUqqrut90d+kQqRyv7MSZXR50HFqD/LI69h68b7P8Bjo3bPXOhNXGR\n9YCoemH6EkfCJxp2gIjzjWW+l2Hj2EsFQi8YXw==</SignatureValue></Signature></root>';
-process.env.UNITY_LICENSE = testLicense;
-
-const determineVersion = jest.spyOn(Versioning, 'determineBuildVersion').mockImplementation(async () => '1.3.37');
-const determineUnityVersion = jest
-  .spyOn(UnityVersioning, 'determineUnityVersion')
-  .mockImplementation(() => '2019.2.11f1');
-const determineSdkManagerParameters = jest
-  .spyOn(AndroidVersioning, 'determineSdkManagerParameters')
-  .mockImplementation(() => 'platforms;android-30');
 
 afterEach(() => {
   jest.clearAllMocks();
+  jest.restoreAllMocks();
+});
+
+beforeEach(() => {
+  jest.spyOn(Versioning, 'determineBuildVersion').mockImplementation(async () => '1.3.37');
+  process.env.UNITY_LICENSE = testLicense; // Todo - Don't use process.env directly, that's what the input model class is for.
 });
 
 describe('BuildParameters', () => {
@@ -29,48 +25,54 @@ describe('BuildParameters', () => {
   });
 
   it('determines the version only once', async () => {
+    jest.spyOn(Versioning, 'determineBuildVersion').mockImplementation(async () => '1.3.37');
     await BuildParameters.create();
-    expect(determineVersion).toHaveBeenCalledTimes(1);
+    await expect(Versioning.determineBuildVersion).toHaveBeenCalledTimes(1);
   });
 
   it('determines the unity version only once', async () => {
+    jest.spyOn(UnityVersioning, 'determineUnityVersion').mockImplementation(() => '2019.2.11f1');
     await BuildParameters.create();
-    expect(determineUnityVersion).toHaveBeenCalledTimes(1);
+    await expect(UnityVersioning.determineUnityVersion).toHaveBeenCalledTimes(1);
   });
 
   it('returns the android version code with provided input', async () => {
     const mockValue = '42';
     jest.spyOn(Input, 'androidVersionCode', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(
+      expect.objectContaining({ androidVersionCode: mockValue }),
+    );
   });
 
   it('returns the android version code from version by default', async () => {
     const mockValue = '';
     jest.spyOn(Input, 'androidVersionCode', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: 1003037 }));
+    await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidVersionCode: 1003037 }));
   });
 
   it('determines the android sdk manager parameters only once', async () => {
+    jest.spyOn(AndroidVersioning, 'determineSdkManagerParameters').mockImplementation(() => 'platforms;android-30');
     await BuildParameters.create();
-    expect(determineSdkManagerParameters).toHaveBeenCalledTimes(1);
+    await expect(AndroidVersioning.determineSdkManagerParameters).toHaveBeenCalledTimes(1);
   });
 
   it('returns the targetPlatform', async () => {
     const mockValue = 'somePlatform';
     jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ targetPlatform: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ targetPlatform: mockValue }));
   });
 
   it('returns the project path', async () => {
     const mockValue = 'path/to/project';
+    jest.spyOn(UnityVersioning, 'determineUnityVersion').mockImplementation(() => '2019.2.11f1');
     jest.spyOn(Input, 'projectPath', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ projectPath: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ projectPath: mockValue }));
   });
 
   it('returns the build name', async () => {
     const mockValue = 'someBuildName';
     jest.spyOn(Input, 'buildName', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildName: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildName: mockValue }));
   });
 
   it('returns the build path', async () => {
@@ -79,13 +81,18 @@ describe('BuildParameters', () => {
     const expectedBuildPath = `${mockPath}/${mockPlatform}`;
     jest.spyOn(Input, 'buildsPath', 'get').mockReturnValue(mockPath);
     jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(mockPlatform);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildPath: expectedBuildPath }));
+    await expect(BuildParameters.create()).resolves.toEqual(
+      expect.objectContaining({ buildPath: expectedBuildPath }),
+    );
   });
 
   it('returns the build file', async () => {
     const mockValue = 'someBuildName';
+    const mockPlatform = 'somePlatform';
+
     jest.spyOn(Input, 'buildName', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildFile: mockValue }));
+    jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(mockPlatform);
+    await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildFile: mockValue }));
   });
 
   test.each([Platform.types.StandaloneWindows, Platform.types.StandaloneWindows64])(
@@ -93,7 +100,7 @@ describe('BuildParameters', () => {
     async (targetPlatform) => {
       jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
       jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
-      expect(BuildParameters.create()).resolves.toEqual(
+      await expect(BuildParameters.create()).resolves.toEqual(
         expect.objectContaining({ buildFile: `${targetPlatform}.exe` }),
       );
     },
@@ -103,7 +110,7 @@ describe('BuildParameters', () => {
       jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
       jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
       jest.spyOn(Input, 'androidAppBundle', 'get').mockReturnValue(false);
-      expect(BuildParameters.create()).resolves.toEqual(
+      await expect(BuildParameters.create()).resolves.toEqual(
        expect.objectContaining({ buildFile: `${targetPlatform}.apk` }),
      );
    });
@@ -112,7 +119,7 @@ describe('BuildParameters', () => {
       jest.spyOn(Input, 'targetPlatform', 'get').mockReturnValue(targetPlatform);
       jest.spyOn(Input, 'buildName', 'get').mockReturnValue(targetPlatform);
       jest.spyOn(Input, 'androidAppBundle', 'get').mockReturnValue(true);
-      expect(BuildParameters.create()).resolves.toEqual(
+      await expect(BuildParameters.create()).resolves.toEqual(
        expect.objectContaining({ buildFile: `${targetPlatform}.aab` }),
      );
    });
@@ -120,51 +127,82 @@ describe('BuildParameters', () => {
   it('returns the build method', async () => {
     const mockValue = 'Namespace.ClassName.BuildMethod';
     jest.spyOn(Input, 'buildMethod', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildMethod: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ buildMethod: mockValue }));
   });
 
   it('returns the android keystore name', async () => {
     const mockValue = 'keystore.keystore';
     jest.spyOn(Input, 'androidKeystoreName', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystoreName: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(
+      expect.objectContaining({ androidKeystoreName: mockValue }),
+    );
   });
 
   it('returns the android keystore base64-encoded content', async () => {
     const mockValue = 'secret';
     jest.spyOn(Input, 'androidKeystoreBase64', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystoreBase64: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(
+      expect.objectContaining({ androidKeystoreBase64: mockValue }),
+    );
   });
 
   it('returns the android keystore pass', async () => {
     const mockValue = 'secret';
     jest.spyOn(Input, 'androidKeystorePass', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeystorePass: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(
+      expect.objectContaining({ androidKeystorePass: mockValue }),
+    );
   });
 
   it('returns the android keyalias name', async () => {
     const mockValue = 'secret';
     jest.spyOn(Input, 'androidKeyaliasName', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeyaliasName: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(
+      expect.objectContaining({ androidKeyaliasName: mockValue }),
+    );
   });
 
   it('returns the android keyalias pass', async () => {
     const mockValue = 'secret';
     jest.spyOn(Input, 'androidKeyaliasPass', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ androidKeyaliasPass: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(
+      expect.objectContaining({ androidKeyaliasPass: mockValue }),
+    );
   });
 
   it('returns the android target sdk version', async () => {
     const mockValue = 'AndroidApiLevelAuto';
     jest.spyOn(Input, 'androidTargetSdkVersion', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(
+    await expect(BuildParameters.create()).resolves.toEqual(
       expect.objectContaining({ androidTargetSdkVersion: mockValue }),
     );
   });
 
+  it('returns the unity licensing server address', async () => {
+    const mockValue = 'http://example.com';
+    jest.spyOn(Input, 'unityLicensingServer', 'get').mockReturnValue(mockValue);
+    await expect(BuildParameters.create()).resolves.toEqual(
+      expect.objectContaining({ unityLicensingServer: mockValue }),
+    );
+  });
+
+  it('throws error when no unity license provider provided', async () => {
+    delete process.env.UNITY_LICENSE; // Need to delete this as it is set for every test currently
+    await expect(BuildParameters.create()).rejects.toThrowError();
+  });
+
+  it('return serial when no license server is provided', async () => {
+    const mockValue = '123';
+    delete process.env.UNITY_LICENSE; // Need to delete this as it is set for every test currently
+    process.env.UNITY_SERIAL = mockValue;
+    await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ unitySerial: mockValue }));
+    delete process.env.UNITY_SERIAL;
+  });
+
   it('returns the custom parameters', async () => {
     const mockValue = '-profile SomeProfile -someBoolean -someValue exampleValue';
     jest.spyOn(Input, 'customParameters', 'get').mockReturnValue(mockValue);
-    expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ customParameters: mockValue }));
+    await expect(BuildParameters.create()).resolves.toEqual(expect.objectContaining({ customParameters: mockValue }));
   });
  });
});
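
Note: a recurring change in the test diff above is adding await in front of expect(...).resolves / .rejects. Those matchers return promises, and Jest only turns a failed assertion into a failed test if that promise is awaited (or returned) before the test function finishes. A minimal illustration:

// Minimal illustration of why the `await` matters in the assertions above.
it('awaits async matchers', async () => {
  await expect(Promise.resolve(42)).resolves.toBe(42); // awaited: a mismatch fails this test
  // expect(Promise.resolve(42)).resolves.toBe(43);    // not awaited: the failure may be reported too late or swallowed
});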

@@ -9,11 +9,14 @@ import Versioning from './versioning';
 import { GitRepoReader } from './input-readers/git-repo';
 import { GithubCliReader } from './input-readers/github-cli';
 import { Cli } from './cli/cli';
+import GitHub from './github';
+import CloudRunnerOptions from './cloud-runner/cloud-runner-options';
 
 class BuildParameters {
   public editorVersion!: string;
   public customImage!: string;
   public unitySerial!: string;
+  public unityLicensingServer!: string;
   public runnerTempPath: string | undefined;
   public targetPlatform!: string;
   public projectPath!: string;
@@ -44,12 +47,8 @@ class BuildParameters {
   public kubeStorageClass!: string;
   public chownFilesTo!: string;
   public customJobHooks!: string;
-  public cachePushOverrideCommand!: string;
-  public cachePullOverrideCommand!: string;
   public readInputFromOverrideList!: string;
   public readInputOverrideCommand!: string;
-  public checkDependencyHealthOverride!: string;
-  public startDependenciesOverride!: string;
   public cacheKey!: string;
 
   public postBuildSteps!: string;
@@ -62,9 +61,16 @@ class BuildParameters {
   public logId!: string;
   public buildGuid!: string;
   public cloudRunnerBranch!: string;
-  public cloudRunnerIntegrationTests!: boolean;
+  public cloudRunnerDebug!: boolean;
   public cloudRunnerBuilderPlatform!: string | undefined;
   public isCliMode!: boolean;
+  public retainWorkspace!: boolean;
+  public maxRetainedWorkspaces!: number;
+  public useSharedLargePackages!: boolean;
+  public useLz4Compression!: boolean;
+  public garbageCollectionMaxAge!: number;
+  public constantGarbageCollection!: boolean;
+  public githubChecks!: boolean;
 
   static async create(): Promise<BuildParameters> {
     const buildFile = this.parseBuildFile(Input.buildName, Input.targetPlatform, Input.androidAppBundle);
@@ -76,24 +82,26 @@ class BuildParameters {
     // Todo - Don't use process.env directly, that's what the input model class is for.
     // ---
     let unitySerial = '';
-    if (!process.env.UNITY_SERIAL && Input.githubInputEnabled) {
-      // No serial was present, so it is a personal license that we need to convert
-      if (!process.env.UNITY_LICENSE) {
-        throw new Error(`Missing Unity License File and no Serial was found. If this
-          is a personal license, make sure to follow the activation
-          steps and set the UNITY_LICENSE GitHub secret or enter a Unity
-          serial number inside the UNITY_SERIAL GitHub secret.`);
-      }
-      unitySerial = this.getSerialFromLicenseFile(process.env.UNITY_LICENSE);
-    } else {
-      unitySerial = process.env.UNITY_SERIAL!;
+    if (Input.unityLicensingServer === '') {
+      if (!process.env.UNITY_SERIAL && GitHub.githubInputEnabled) {
+        // No serial was present, so it is a personal license that we need to convert
+        if (!process.env.UNITY_LICENSE) {
+          throw new Error(`Missing Unity License File and no Serial was found. If this
+            is a personal license, make sure to follow the activation
+            steps and set the UNITY_LICENSE GitHub secret or enter a Unity
+            serial number inside the UNITY_SERIAL GitHub secret.`);
+        }
+        unitySerial = this.getSerialFromLicenseFile(process.env.UNITY_LICENSE);
+      } else {
+        unitySerial = process.env.UNITY_SERIAL!;
+      }
     }
 
     return {
       editorVersion,
       customImage: Input.customImage,
       unitySerial,
+      unityLicensingServer: Input.unityLicensingServer,
       runnerTempPath: process.env.RUNNER_TEMP,
       targetPlatform: Input.targetPlatform,
       projectPath: Input.projectPath,
@@ -114,36 +122,39 @@ class BuildParameters {
       sshAgent: Input.sshAgent,
       gitPrivateToken: Input.gitPrivateToken || (await GithubCliReader.GetGitHubAuthToken()),
       chownFilesTo: Input.chownFilesTo,
-      cloudRunnerCluster: Input.cloudRunnerCluster,
-      cloudRunnerBuilderPlatform: Input.cloudRunnerBuilderPlatform,
-      awsBaseStackName: Input.awsBaseStackName,
-      kubeConfig: Input.kubeConfig,
-      cloudRunnerMemory: Input.cloudRunnerMemory,
-      cloudRunnerCpu: Input.cloudRunnerCpu,
-      kubeVolumeSize: Input.kubeVolumeSize,
-      kubeVolume: Input.kubeVolume,
-      postBuildSteps: Input.postBuildSteps,
-      preBuildSteps: Input.preBuildSteps,
-      customJob: Input.customJob,
+      cloudRunnerCluster: CloudRunnerOptions.cloudRunnerCluster,
+      cloudRunnerBuilderPlatform: CloudRunnerOptions.cloudRunnerBuilderPlatform,
+      awsBaseStackName: CloudRunnerOptions.awsBaseStackName,
+      kubeConfig: CloudRunnerOptions.kubeConfig,
+      cloudRunnerMemory: CloudRunnerOptions.cloudRunnerMemory,
+      cloudRunnerCpu: CloudRunnerOptions.cloudRunnerCpu,
+      kubeVolumeSize: CloudRunnerOptions.kubeVolumeSize,
+      kubeVolume: CloudRunnerOptions.kubeVolume,
+      postBuildSteps: CloudRunnerOptions.postBuildSteps,
+      preBuildSteps: CloudRunnerOptions.preBuildSteps,
+      customJob: CloudRunnerOptions.customJob,
       runNumber: Input.runNumber,
       branch: Input.branch.replace('/head', '') || (await GitRepoReader.GetBranch()),
-      cloudRunnerBranch: Input.cloudRunnerBranch.split('/').reverse()[0],
-      cloudRunnerIntegrationTests: Input.cloudRunnerTests,
+      cloudRunnerBranch: CloudRunnerOptions.cloudRunnerBranch.split('/').reverse()[0],
+      cloudRunnerDebug: CloudRunnerOptions.cloudRunnerDebug,
       githubRepo: Input.githubRepo || (await GitRepoReader.GetRemote()) || 'game-ci/unity-builder',
       isCliMode: Cli.isCliMode,
-      awsStackName: Input.awsBaseStackName,
+      awsStackName: CloudRunnerOptions.awsBaseStackName,
       gitSha: Input.gitSha,
       logId: customAlphabet(CloudRunnerConstants.alphabet, 9)(),
       buildGuid: CloudRunnerBuildGuid.generateGuid(Input.runNumber, Input.targetPlatform),
-      customJobHooks: Input.customJobHooks(),
-      cachePullOverrideCommand: Input.cachePullOverrideCommand(),
-      cachePushOverrideCommand: Input.cachePushOverrideCommand(),
-      readInputOverrideCommand: Input.readInputOverrideCommand(),
-      readInputFromOverrideList: Input.readInputFromOverrideList(),
-      kubeStorageClass: Input.kubeStorageClass,
-      checkDependencyHealthOverride: Input.checkDependencyHealthOverride,
-      startDependenciesOverride: Input.startDependenciesOverride,
-      cacheKey: Input.cacheKey,
+      customJobHooks: CloudRunnerOptions.customJobHooks(),
+      readInputOverrideCommand: CloudRunnerOptions.readInputOverrideCommand(),
+      readInputFromOverrideList: CloudRunnerOptions.readInputFromOverrideList(),
+      kubeStorageClass: CloudRunnerOptions.kubeStorageClass,
+      cacheKey: CloudRunnerOptions.cacheKey,
+      retainWorkspace: CloudRunnerOptions.retainWorkspaces,
+      useSharedLargePackages: CloudRunnerOptions.useSharedLargePackages,
+      useLz4Compression: CloudRunnerOptions.useLz4Compression,
+      maxRetainedWorkspaces: CloudRunnerOptions.maxRetainedWorkspaces,
+      constantGarbageCollection: CloudRunnerOptions.constantGarbageCollection,
+      garbageCollectionMaxAge: CloudRunnerOptions.garbageCollectionMaxAge,
+      githubChecks: CloudRunnerOptions.githubChecks,
     };
   }
 
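
Note: the reworked create() above encodes a license-resolution order: a configured licensing server short-circuits serial handling, an explicit UNITY_SERIAL wins next, and otherwise the serial is recovered from a personal UNITY_LICENSE file (or the build fails early). A condensed, illustrative sketch of that decision, with the license-file helper passed in rather than taken from the class:

// Condensed, illustrative version of the activation strategy in create(); not the class method itself.
function resolveUnitySerial(
  licensingServer: string,
  env: { UNITY_SERIAL?: string; UNITY_LICENSE?: string },
  serialFromLicenseFile: (license: string) => string,
): string {
  if (licensingServer !== '') return ''; // floating license: acquired by the activation script instead
  if (env.UNITY_SERIAL) return env.UNITY_SERIAL;
  if (env.UNITY_LICENSE) return serialFromLicenseFile(env.UNITY_LICENSE);
  throw new Error('Missing Unity License File and no Serial was found.');
}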

@@ -5,10 +5,14 @@ import { ActionYamlReader } from '../input-readers/action-yaml';
 import CloudRunnerLogger from '../cloud-runner/services/cloud-runner-logger';
 import CloudRunnerQueryOverride from '../cloud-runner/services/cloud-runner-query-override';
 import { CliFunction, CliFunctionsRepository } from './cli-functions-repository';
-import { AwsCliCommands } from '../cloud-runner/providers/aws/commands/aws-cli-commands';
 import { Caching } from '../cloud-runner/remote-client/caching';
 import { LfsHashing } from '../cloud-runner/services/lfs-hashing';
 import { RemoteClient } from '../cloud-runner/remote-client';
+import CloudRunnerOptionsReader from '../cloud-runner/services/cloud-runner-options-reader';
+import GitHub from '../github';
+import { TaskParameterSerializer } from '../cloud-runner/services/task-parameter-serializer';
+import { CloudRunnerFolders } from '../cloud-runner/services/cloud-runner-folders';
+import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
 
 export class Cli {
   public static options;
@@ -27,13 +31,13 @@ export class Cli {
   }
 
   public static InitCliMode() {
-    CliFunctionsRepository.PushCliFunctionSource(AwsCliCommands);
+    CliFunctionsRepository.PushCliFunctionSource(RemoteClient);
     CliFunctionsRepository.PushCliFunctionSource(Caching);
     CliFunctionsRepository.PushCliFunctionSource(LfsHashing);
-    CliFunctionsRepository.PushCliFunctionSource(RemoteClient);
     const program = new Command();
     program.version('0.0.1');
-    const properties = Object.getOwnPropertyNames(Input);
+    const properties = CloudRunnerOptionsReader.GetProperties();
     const actionYamlReader: ActionYamlReader = new ActionYamlReader();
     for (const element of properties) {
       program.option(`--${element} <${element}>`, actionYamlReader.GetActionYamlValue(element));
@@ -48,6 +52,7 @@ export class Cli {
     program.option('--cachePushFrom <cachePushFrom>', 'cache push from source folder');
     program.option('--cachePushTo <cachePushTo>', 'cache push to caching folder');
     program.option('--artifactName <artifactName>', 'caching artifact name');
+    program.option('--select <select>', 'select a particular resource');
     program.parse(process.argv);
     Cli.options = program.opts();
 
@@ -55,23 +60,32 @@ export class Cli {
   }
 
   static async RunCli(): Promise<void> {
-    Input.githubInputEnabled = false;
+    GitHub.githubInputEnabled = false;
     if (Cli.options['populateOverride'] === `true`) {
       await CloudRunnerQueryOverride.PopulateQueryOverrideInput();
     }
-    Cli.logInput();
+    if (Cli.options['logInput']) {
+      Cli.logInput();
+    }
     const results = CliFunctionsRepository.GetCliFunctions(Cli.options.mode);
     CloudRunnerLogger.log(`Entrypoint: ${results.key}`);
     Cli.options.versioning = 'None';
 
-    return await results.target[results.propertyKey]();
+    const buildParameter = TaskParameterSerializer.readBuildParameterFromEnvironment();
+    CloudRunnerLogger.log(`Build Params:
+      ${JSON.stringify(buildParameter, undefined, 4)}
+    `);
+    CloudRunner.buildParameters = buildParameter;
+    CloudRunner.lockedWorkspace = process.env.LOCKED_WORKSPACE;
+
+    return await results.target[results.propertyKey](Cli.options);
   }
 
   @CliFunction(`print-input`, `prints all input`)
   private static logInput() {
     core.info(`\n`);
     core.info(`INPUT:`);
-    const properties = Object.getOwnPropertyNames(Input);
+    const properties = CloudRunnerOptionsReader.GetProperties();
     for (const element of properties) {
       if (
         Input[element] !== undefined &&
@@ -87,11 +101,110 @@ export class Cli {
     core.info(`\n`);
   }
 
-  @CliFunction(`cli`, `runs a cloud runner build`)
+  @CliFunction(`cli-build`, `runs a cloud runner build`)
   public static async CLIBuild(): Promise<string> {
     const buildParameter = await BuildParameters.create();
     const baseImage = new ImageTag(buildParameter);
 
     return await CloudRunner.run(buildParameter, baseImage.toString());
   }
 
+  @CliFunction(`async-workflow`, `runs a cloud runner build`)
+  public static async asyncronousWorkflow(): Promise<string> {
+    const buildParameter = await BuildParameters.create();
+    const baseImage = new ImageTag(buildParameter);
+
+    return await CloudRunner.run(buildParameter, baseImage.toString());
+  }
+
+  @CliFunction(`checks-update`, `runs a cloud runner build`)
+  public static async checksUpdate() {
+    const input = JSON.parse(process.env.CHECKS_UPDATE || ``);
+    core.info(`Checks Update ${process.env.CHECKS_UPDATE}`);
+    if (input.mode === `create`) {
+      throw new Error(`Not supported: only use update`);
+    } else if (input.mode === `update`) {
+      await GitHub.updateGitHubCheckRequest(input.data);
+    }
+  }
+
+  @CliFunction(`garbage-collect`, `runs garbage collection`)
+  public static async GarbageCollect(): Promise<string> {
+    const buildParameter = await BuildParameters.create();
+
+    await CloudRunner.setup(buildParameter);
+
+    return await CloudRunner.Provider.garbageCollect(``, false, 0, false, false);
+  }
+
+  @CliFunction(`list-resources`, `lists active resources`)
+  public static async ListResources(): Promise<string[]> {
+    const buildParameter = await BuildParameters.create();
+
+    await CloudRunner.setup(buildParameter);
+    const result = await CloudRunner.Provider.listResources();
+    CloudRunnerLogger.log(JSON.stringify(result, undefined, 4));
+
+    return result.map((x) => x.Name);
+  }
+
+  @CliFunction(`list-worfklow`, `lists running workflows`)
+  public static async ListWorfklow(): Promise<string[]> {
+    const buildParameter = await BuildParameters.create();
+
+    await CloudRunner.setup(buildParameter);
+
+    return (await CloudRunner.Provider.listWorkflow()).map((x) => x.Name);
+  }
+
+  @CliFunction(`watch`, `follows logs of a running workflow`)
+  public static async Watch(): Promise<string> {
+    const buildParameter = await BuildParameters.create();
+
+    await CloudRunner.setup(buildParameter);
+
+    return await CloudRunner.Provider.watchWorkflow();
+  }
+
+  @CliFunction(`remote-cli-post-build`, `runs a cloud runner build`)
+  public static async PostCLIBuild(): Promise<string> {
+    core.info(`Running POST build tasks`);
+
+    await Caching.PushToCache(
+      CloudRunnerFolders.ToLinuxFolder(`${CloudRunnerFolders.cacheFolderForCacheKeyFull}/Library`),
+      CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.libraryFolderAbsolute),
+      `lib-${CloudRunner.buildParameters.buildGuid}`,
+    );
+
+    await Caching.PushToCache(
+      CloudRunnerFolders.ToLinuxFolder(`${CloudRunnerFolders.cacheFolderForCacheKeyFull}/build`),
+      CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectBuildFolderAbsolute),
+      `build-${CloudRunner.buildParameters.buildGuid}`,
+    );
+
+    if (!CloudRunner.buildParameters.retainWorkspace) {
+      await CloudRunnerSystem.Run(
+        `rm -r ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute)}`,
+      );
+    }
+
+    await RemoteClient.runCustomHookFiles(`after-build`);
+
+    const parameters = await BuildParameters.create();
+    CloudRunner.setup(parameters);
+    if (parameters.constantGarbageCollection) {
+      await CloudRunnerSystem.Run(
+        `find /${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.buildVolumeFolder)}/ -name '*.*' -mmin +${
+          parameters.garbageCollectionMaxAge * 60
+        } -delete`,
+      );
+      await CloudRunnerSystem.Run(
+        `find ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.cacheFolderForAllFull)} -name '*.*' -mmin +${
+          parameters.garbageCollectionMaxAge * 60
+        } -delete`,
+      );
+    }
+
+    return new Promise((result) => result(``));
+  }
 }
|||||||
290  src/model/cloud-runner/cloud-runner-options.ts  (new file)
@@ -0,0 +1,290 @@
import { Cli } from '../cli/cli';
import CloudRunnerQueryOverride from './services/cloud-runner-query-override';
import GitHub from '../github';
const core = require('@actions/core');

class CloudRunnerOptions {
  // ### ### ###
  // Input Handling
  // ### ### ###
  public static getInput(query) {
    if (GitHub.githubInputEnabled) {
      const coreInput = core.getInput(query);
      if (coreInput && coreInput !== '') {
        return coreInput;
      }
    }
    const alternativeQuery = CloudRunnerOptions.ToEnvVarFormat(query);

    // Query input sources
    if (Cli.query(query, alternativeQuery)) {
      return Cli.query(query, alternativeQuery);
    }

    if (CloudRunnerQueryOverride.query(query, alternativeQuery)) {
      return CloudRunnerQueryOverride.query(query, alternativeQuery);
    }

    if (process.env[query] !== undefined) {
      return process.env[query];
    }

    if (alternativeQuery !== query && process.env[alternativeQuery] !== undefined) {
      return process.env[alternativeQuery];
    }

    return;
  }

  public static ToEnvVarFormat(input: string) {
    if (input.toUpperCase() === input) {
      return input;
    }

    return input
      .replace(/([A-Z])/g, ' $1')
      .trim()
      .toUpperCase()
      .replace(/ /g, '_');
  }

  // ### ### ###
  // Provider parameters
  // ### ### ###

  static get region(): string {
    return CloudRunnerOptions.getInput('region') || 'eu-west-2';
  }

  // ### ### ###
  // GitHub parameters
  // ### ### ###
  static get githubChecks(): boolean {
    return CloudRunnerOptions.getInput('githubChecks') || false;
  }

  static get githubOwner() {
    return CloudRunnerOptions.getInput('githubOwner') || CloudRunnerOptions.githubRepo.split(`/`)[0] || false;
  }

  static get githubRepoName() {
    return CloudRunnerOptions.getInput('githubRepoName') || CloudRunnerOptions.githubRepo.split(`/`)[1] || false;
  }

  // ### ### ###
  // Git syncronization parameters
  // ### ### ###

  static get githubRepo() {
    return CloudRunnerOptions.getInput('GITHUB_REPOSITORY') || CloudRunnerOptions.getInput('GITHUB_REPO') || undefined;
  }
  static get branch() {
    if (CloudRunnerOptions.getInput(`GITHUB_REF`)) {
      return CloudRunnerOptions.getInput(`GITHUB_REF`).replace('refs/', '').replace(`head/`, '').replace(`heads/`, '');
    } else if (CloudRunnerOptions.getInput('branch')) {
      return CloudRunnerOptions.getInput('branch');
    } else {
      return '';
    }
  }

  static get gitSha() {
    if (CloudRunnerOptions.getInput(`GITHUB_SHA`)) {
      return CloudRunnerOptions.getInput(`GITHUB_SHA`);
    } else if (CloudRunnerOptions.getInput(`GitSHA`)) {
      return CloudRunnerOptions.getInput(`GitSHA`);
    }
  }

  // ### ### ###
  // Cloud Runner parameters
  // ### ### ###

  static get cloudRunnerBuilderPlatform() {
    const input = CloudRunnerOptions.getInput('cloudRunnerBuilderPlatform');
    if (input) {
      return input;
    }
    if (CloudRunnerOptions.cloudRunnerCluster !== 'local') {
      return 'linux';
    }

    return;
  }

  static get cloudRunnerBranch() {
    return CloudRunnerOptions.getInput('cloudRunnerBranch') || 'main';
  }

  static get cloudRunnerCluster() {
    if (Cli.isCliMode) {
      return CloudRunnerOptions.getInput('cloudRunnerCluster') || 'aws';
    }

    return CloudRunnerOptions.getInput('cloudRunnerCluster') || 'local';
  }

  static get cloudRunnerCpu() {
    return CloudRunnerOptions.getInput('cloudRunnerCpu');
  }

  static get cloudRunnerMemory() {
    return CloudRunnerOptions.getInput('cloudRunnerMemory');
  }

  static get customJob() {
    return CloudRunnerOptions.getInput('customJob') || '';
  }

  // ### ### ###
  // Custom commands from files parameters
  // ### ### ###

  static get customStepFiles() {
    return CloudRunnerOptions.getInput('customStepFiles')?.split(`,`) || [];
  }

  static get customHookFiles() {
    return CloudRunnerOptions.getInput('customHookFiles')?.split(`,`) || [];
  }

  // ### ### ###
  // Custom commands from yaml parameters
  // ### ### ###

  static customJobHooks() {
    return CloudRunnerOptions.getInput('customJobHooks') || '';
  }

  static get postBuildSteps() {
    return CloudRunnerOptions.getInput('postBuildSteps') || '';
  }

  static get preBuildSteps() {
    return CloudRunnerOptions.getInput('preBuildSteps') || '';
  }

  // ### ### ###
  // Input override handling
  // ### ### ###

  static readInputFromOverrideList() {
    return CloudRunnerOptions.getInput('readInputFromOverrideList') || '';
  }

  static readInputOverrideCommand() {
    const value = CloudRunnerOptions.getInput('readInputOverrideCommand');

    if (value === 'gcp-secret-manager') {
      return 'gcloud secrets versions access 1 --secret="{0}"';
    } else if (value === 'aws-secret-manager') {
      return 'aws secretsmanager get-secret-value --secret-id {0}';
    }

    return value || '';
  }

  // ### ### ###
  // Aws
  // ### ### ###

  static get awsBaseStackName() {
    return CloudRunnerOptions.getInput('awsBaseStackName') || 'game-ci';
  }

  // ### ### ###
  // K8s
  // ### ### ###

  static get kubeConfig() {
    return CloudRunnerOptions.getInput('kubeConfig') || '';
  }

  static get kubeVolume() {
    return CloudRunnerOptions.getInput('kubeVolume') || '';
  }

  static get kubeVolumeSize() {
    return CloudRunnerOptions.getInput('kubeVolumeSize') || '5Gi';
  }

  static get kubeStorageClass(): string {
    return CloudRunnerOptions.getInput('kubeStorageClass') || '';
  }

  // ### ### ###
  // Caching
  // ### ### ###

  static get cacheKey(): string {
    return CloudRunnerOptions.getInput('cacheKey') || CloudRunnerOptions.branch;
  }

  // ### ### ###
  // Utility Parameters
  // ### ### ###

  static get cloudRunnerDebug(): boolean {
    return CloudRunnerOptions.getInput(`cloudRunnerTests`) || CloudRunnerOptions.getInput(`cloudRunnerDebug`) || false;
  }
  static get cloudRunnerDebugTree(): boolean {
    return CloudRunnerOptions.getInput(`cloudRunnerDebugTree`) || false;
  }
  static get cloudRunnerDebugEnv(): boolean {
    return CloudRunnerOptions.getInput(`cloudRunnerDebugEnv`) || false;
  }

  static get watchCloudRunnerToEnd(): boolean {
    if (CloudRunnerOptions.asyncCloudRunner) {
      return false;
    }

    return CloudRunnerOptions.getInput(`watchToEnd`) || true;
  }

  static get asyncCloudRunner(): boolean {
    return (CloudRunnerOptions.getInput('asyncCloudRunner') || `false`) === `true` || false;
  }

  public static get useSharedLargePackages(): boolean {
    return (CloudRunnerOptions.getInput(`useSharedLargePackages`) || 'false') === 'true';
  }

  public static get useSharedBuilder(): boolean {
    return (CloudRunnerOptions.getInput(`useSharedBuilder`) || 'true') === 'true';
  }

  public static get useLz4Compression(): boolean {
    return (CloudRunnerOptions.getInput(`useLz4Compression`) || 'false') === 'true';
  }

  public static get useCleanupCron(): boolean {
    return (CloudRunnerOptions.getInput(`useCleanupCron`) || 'true') === 'true';
  }

  // ### ### ###
  // Retained Workspace
  // ### ### ###

  public static get retainWorkspaces(): boolean {
    return CloudRunnerOptions.getInput(`retainWorkspaces`) || false;
  }

  static get maxRetainedWorkspaces(): number {
    return Number(CloudRunnerOptions.getInput(`maxRetainedWorkspaces`)) || 3;
  }

  // ### ### ###
  // Garbage Collection
  // ### ### ###

  static get constantGarbageCollection(): boolean {
    return CloudRunnerOptions.getInput(`constantGarbageCollection`) || true;
  }

  static get garbageCollectionMaxAge(): number {
    return Number(CloudRunnerOptions.getInput(`garbageCollectionMaxAge`)) || 24;
  }
}

export default CloudRunnerOptions;
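Taken together, getInput and ToEnvVarFormat above define the lookup order for every option: GitHub Action input first (when enabled), then CLI/query overrides, then the raw environment variable, then its UPPER_SNAKE_CASE form. A minimal illustrative sketch of that behaviour follows; the import path and the sample values are assumptions for the example only, not part of this changeset.

```ts
import CloudRunnerOptions from './src/model/cloud-runner/cloud-runner-options';

// camelCase option names map onto UPPER_SNAKE_CASE environment variables.
console.log(CloudRunnerOptions.ToEnvVarFormat('cloudRunnerMemory')); // CLOUD_RUNNER_MEMORY
console.log(CloudRunnerOptions.ToEnvVarFormat('GITHUB_REF')); // already upper-case, returned unchanged

// So either spelling can override an option when running outside GitHub Actions
// (assuming no Action input or CLI override takes precedence first):
process.env.CLOUD_RUNNER_MEMORY = '4096';
console.log(CloudRunnerOptions.getInput('cloudRunnerMemory')); // '4096'
```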
@@ -1,142 +0,0 @@ (entire test file removed)
import { BuildParameters, ImageTag } from '..';
import CloudRunner from './cloud-runner';
import Input from '../input';
import { CloudRunnerStatics } from './cloud-runner-statics';
import { TaskParameterSerializer } from './services/task-parameter-serializer';
import UnityVersioning from '../unity-versioning';
import { Cli } from '../cli/cli';
import CloudRunnerLogger from './services/cloud-runner-logger';
import { v4 as uuidv4 } from 'uuid';

describe('Cloud Runner', () => {
  it('responds', () => {});
});
describe('Cloud Runner', () => {
  const testSecretName = 'testSecretName';
  const testSecretValue = 'testSecretValue';
  if (Input.cloudRunnerTests) {
    it('All build parameters sent to cloud runner as env vars', async () => {
      // Build parameters
      Cli.options = {
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.read('test-project'),
        targetPlatform: 'StandaloneLinux64',
        customJob: `
      - name: 'step 1'
        image: 'alpine'
        commands: 'printenv'
        secrets:
          - name: '${testSecretName}'
            value: '${testSecretValue}'
      `,
      };
      Input.githubInputEnabled = false;

      // Setup parameters
      const buildParameter = await BuildParameters.create();
      Input.githubInputEnabled = true;
      const baseImage = new ImageTag(buildParameter);

      // Run the job
      const file = await CloudRunner.run(buildParameter, baseImage.toString());

      // Assert results
      expect(file).toContain(JSON.stringify(buildParameter));
      expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
      const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
      const newLinePurgedFile = file
        .replace(/\s+/g, '')
        .replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
      for (const element of environmentVariables) {
        if (element.value !== undefined && typeof element.value !== 'function') {
          if (typeof element.value === `string`) {
            element.value = element.value.replace(/\s+/g, '');
          }
          CloudRunnerLogger.log(`checking input/build param ${element.name} ${element.value}`);
        }
      }
      for (const element of environmentVariables) {
        if (element.value !== undefined && typeof element.value !== 'function') {
          expect(newLinePurgedFile).toContain(`${element.name}`);
          expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
        }
      }
      delete Cli.options;
    }, 1000000);
    it('Run one build it should not use cache, run subsequent build which should use cache', async () => {
      Cli.options = {
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
        targetPlatform: 'StandaloneLinux64',
        cacheKey: `test-case-${uuidv4()}`,
      };
      Input.githubInputEnabled = false;
      const buildParameter = await BuildParameters.create();
      const baseImage = new ImageTag(buildParameter);
      const results = await CloudRunner.run(buildParameter, baseImage.toString());
      const libraryString = 'Rebuilding Library because the asset database could not be found!';
      const buildSucceededString = 'Build succeeded';
      expect(results).toContain(libraryString);
      expect(results).toContain(buildSucceededString);
      CloudRunnerLogger.log(`run 1 succeeded`);
      const buildParameter2 = await BuildParameters.create();
      const baseImage2 = new ImageTag(buildParameter2);
      const results2 = await CloudRunner.run(buildParameter2, baseImage2.toString());
      CloudRunnerLogger.log(`run 2 succeeded`);
      expect(results2).toContain(buildSucceededString);
      expect(results2).toEqual(expect.not.stringContaining(libraryString));
      Input.githubInputEnabled = true;
      delete Cli.options;
    }, 1000000);
  }
  it('Local cloud runner returns commands', async () => {
    // Build parameters
    Cli.options = {
      versioning: 'None',
      projectPath: 'test-project',
      unityVersion: UnityVersioning.read('test-project'),
      cloudRunnerCluster: 'local-system',
      targetPlatform: 'StandaloneLinux64',
      customJob: `
    - name: 'step 1'
      image: 'alpine'
      commands: 'dir'
      secrets:
        - name: '${testSecretName}'
          value: '${testSecretValue}'
    `,
    };
    Input.githubInputEnabled = false;

    // Setup parameters
    const buildParameter = await BuildParameters.create();
    const baseImage = new ImageTag(buildParameter);

    // Run the job
    await expect(CloudRunner.run(buildParameter, baseImage.toString())).resolves.not.toThrow();
    Input.githubInputEnabled = true;
    delete Cli.options;
  }, 1000000);
  it('Test cloud runner returns commands', async () => {
    // Build parameters
    Cli.options = {
      versioning: 'None',
      projectPath: 'test-project',
      unityVersion: UnityVersioning.read('test-project'),
      cloudRunnerCluster: 'test',
      targetPlatform: 'StandaloneLinux64',
    };
    Input.githubInputEnabled = false;

    // Setup parameters
    const buildParameter = await BuildParameters.create();
    const baseImage = new ImageTag(buildParameter);

    // Run the job
    await expect(CloudRunner.run(buildParameter, baseImage.toString())).resolves.not.toThrow();
    Input.githubInputEnabled = true;
    delete Cli.options;
  }, 1000000);
});
@@ -12,31 +12,46 @@ import { ProviderInterface } from './providers/provider-interface';
 import CloudRunnerEnvironmentVariable from './services/cloud-runner-environment-variable';
 import TestCloudRunner from './providers/test';
 import LocalCloudRunner from './providers/local';
-import LocalDockerCloudRunner from './providers/local-docker';
+import LocalDockerCloudRunner from './providers/docker';
+import GitHub from '../github';
+import SharedWorkspaceLocking from './services/shared-workspace-locking';
+
 class CloudRunner {
   public static Provider: ProviderInterface;
-  static buildParameters: BuildParameters;
-  public static defaultSecrets: CloudRunnerSecret[];
-  public static cloudRunnerEnvironmentVariables: CloudRunnerEnvironmentVariable[];
-  private static setup(buildParameters: BuildParameters) {
+  public static buildParameters: BuildParameters;
+  private static defaultSecrets: CloudRunnerSecret[];
+  private static cloudRunnerEnvironmentVariables: CloudRunnerEnvironmentVariable[];
+  static lockedWorkspace: string | undefined;
+  public static readonly retainedWorkspacePrefix: string = `retained-workspace`;
+  public static githubCheckId;
+  public static setup(buildParameters: BuildParameters) {
     CloudRunnerLogger.setup();
+    CloudRunnerLogger.log(`Setting up cloud runner`);
     CloudRunner.buildParameters = buildParameters;
-    CloudRunner.setupBuildPlatform();
+    CloudRunner.setupSelectedBuildPlatform();
     CloudRunner.defaultSecrets = TaskParameterSerializer.readDefaultSecrets();
-    CloudRunner.cloudRunnerEnvironmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
-    if (!buildParameters.isCliMode) {
+    CloudRunner.cloudRunnerEnvironmentVariables =
+      TaskParameterSerializer.createCloudRunnerEnvironmentVariables(buildParameters);
+    if (GitHub.githubInputEnabled) {
       const buildParameterPropertyNames = Object.getOwnPropertyNames(buildParameters);
       for (const element of CloudRunner.cloudRunnerEnvironmentVariables) {
+        // CloudRunnerLogger.log(`Cloud Runner output ${Input.ToEnvVarFormat(element.name)} = ${element.value}`);
         core.setOutput(Input.ToEnvVarFormat(element.name), element.value);
       }
       for (const element of buildParameterPropertyNames) {
+        // CloudRunnerLogger.log(`Cloud Runner output ${Input.ToEnvVarFormat(element)} = ${buildParameters[element]}`);
         core.setOutput(Input.ToEnvVarFormat(element), buildParameters[element]);
       }
+      core.setOutput(
+        Input.ToEnvVarFormat(`buildArtifact`),
+        `build-${CloudRunner.buildParameters.buildGuid}.tar${
+          CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
+        }`,
+      );
     }
   }

-  private static setupBuildPlatform() {
+  private static setupSelectedBuildPlatform() {
     CloudRunnerLogger.log(`Cloud Runner platform selected ${CloudRunner.buildParameters.cloudRunnerCluster}`);
     switch (CloudRunner.buildParameters.cloudRunnerCluster) {
       case 'k8s':
@@ -48,31 +63,55 @@ class CloudRunner {
       case 'test':
         CloudRunner.Provider = new TestCloudRunner();
         break;
-      case 'local-system':
-        CloudRunner.Provider = new LocalCloudRunner();
-        break;
       case 'local-docker':
         CloudRunner.Provider = new LocalDockerCloudRunner();
         break;
+      case 'local-system':
+        CloudRunner.Provider = new LocalCloudRunner();
+        break;
     }
   }

   static async run(buildParameters: BuildParameters, baseImage: string) {
     CloudRunner.setup(buildParameters);
     try {
+      CloudRunner.githubCheckId = await GitHub.createGitHubCheck(CloudRunner.buildParameters.buildGuid);
+
+      if (buildParameters.retainWorkspace) {
+        CloudRunner.lockedWorkspace = `${CloudRunner.retainedWorkspacePrefix}-${CloudRunner.buildParameters.buildGuid}`;
+
+        const result = await SharedWorkspaceLocking.GetOrCreateLockedWorkspace(
+          CloudRunner.lockedWorkspace,
+          CloudRunner.buildParameters.buildGuid,
+          CloudRunner.buildParameters,
+        );
+
+        if (result) {
+          CloudRunnerLogger.logLine(`Using retained workspace ${CloudRunner.lockedWorkspace}`);
+          CloudRunner.cloudRunnerEnvironmentVariables = [
+            ...CloudRunner.cloudRunnerEnvironmentVariables,
+            { name: `LOCKED_WORKSPACE`, value: CloudRunner.lockedWorkspace },
+          ];
+        } else {
+          CloudRunnerLogger.log(`Max retained workspaces reached ${buildParameters.maxRetainedWorkspaces}`);
+          buildParameters.retainWorkspace = false;
+          CloudRunner.lockedWorkspace = undefined;
+        }
+      }
       if (!CloudRunner.buildParameters.isCliMode) core.startGroup('Setup shared cloud runner resources');
-      await CloudRunner.Provider.setup(
+      await CloudRunner.Provider.setupWorkflow(
         CloudRunner.buildParameters.buildGuid,
         CloudRunner.buildParameters,
         CloudRunner.buildParameters.branch,
         CloudRunner.defaultSecrets,
       );
       if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
+      await GitHub.updateGitHubCheck(CloudRunner.buildParameters.buildGuid, CloudRunner.buildParameters.buildGuid);
       const output = await new WorkflowCompositionRoot().run(
         new CloudRunnerStepState(baseImage, CloudRunner.cloudRunnerEnvironmentVariables, CloudRunner.defaultSecrets),
       );
       if (!CloudRunner.buildParameters.isCliMode) core.startGroup('Cleanup shared cloud runner resources');
-      await CloudRunner.Provider.cleanup(
+      await CloudRunner.Provider.cleanupWorkflow(
         CloudRunner.buildParameters.buildGuid,
         CloudRunner.buildParameters,
         CloudRunner.buildParameters.branch,
@@ -80,11 +119,26 @@ class CloudRunner {
       );
       CloudRunnerLogger.log(`Cleanup complete`);
       if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
+      await GitHub.updateGitHubCheck(CloudRunner.buildParameters.buildGuid, `success`, `success`, `completed`);
+
+      if (CloudRunner.buildParameters.retainWorkspace) {
+        await SharedWorkspaceLocking.ReleaseWorkspace(
+          CloudRunner.lockedWorkspace || ``,
+          CloudRunner.buildParameters.buildGuid,
+          CloudRunner.buildParameters,
+        );
+        CloudRunner.lockedWorkspace = undefined;
+      }
+
+      if (buildParameters.constantGarbageCollection) {
+        CloudRunner.Provider.garbageCollect(``, true, buildParameters.garbageCollectionMaxAge, true, true);
+      }
+
       return output;
     } catch (error) {
+      await GitHub.updateGitHubCheck(CloudRunner.buildParameters.buildGuid, error, `failure`, `completed`);
       if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
-      await CloudRunnerError.handleException(error);
+      await CloudRunnerError.handleException(error, CloudRunner.buildParameters, CloudRunner.defaultSecrets);
       throw error;
     }
   }
 }
@@ -1,16 +1,20 @@
 import CloudRunnerLogger from '../services/cloud-runner-logger';
 import * as core from '@actions/core';
 import CloudRunner from '../cloud-runner';
+import CloudRunnerSecret from '../services/cloud-runner-secret';
+import BuildParameters from '../../build-parameters';
+
 export class CloudRunnerError {
-  public static async handleException(error: unknown) {
+  public static async handleException(error: unknown, buildParameters: BuildParameters, secrets: CloudRunnerSecret[]) {
     CloudRunnerLogger.error(JSON.stringify(error, undefined, 4));
     core.setFailed('Cloud Runner failed');
-    await CloudRunner.Provider.cleanup(
-      CloudRunner.buildParameters.buildGuid,
-      CloudRunner.buildParameters,
-      CloudRunner.buildParameters.branch,
-      CloudRunner.defaultSecrets,
-    );
+    if (CloudRunner.Provider !== undefined) {
+      await CloudRunner.Provider.cleanupWorkflow(
+        buildParameters.buildGuid,
+        buildParameters,
+        buildParameters.branch,
+        secrets,
+      );
+    }
   }
 }
@@ -7,7 +7,7 @@ export class AWSError {
   static async handleStackCreationFailure(error: any, CF: SDK.CloudFormation, taskDefStackName: string) {
     CloudRunnerLogger.log('aws error: ');
     core.error(JSON.stringify(error, undefined, 4));
-    if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
+    if (CloudRunner.buildParameters.cloudRunnerDebug) {
       CloudRunnerLogger.log('Getting events and resources for task stack');
       const events = (await CF.describeStackEvents({ StackName: taskDefStackName }).promise()).StackEvents;
       CloudRunnerLogger.log(JSON.stringify(events, undefined, 4));
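The job-stack changes that follow splice extra resources into the CloudFormation template by calling AWSCloudFormationTemplates.insertAtTemplate at the comment markers left in the trimmed template (for example '# template resources logstream' and '# template resources secrets'). The helper's real implementation is not part of this diff; the following is only a rough, hypothetical sketch of the kind of marker-based string splice it presumably performs, with the function body assumed rather than taken from the repository.

```ts
// Hypothetical sketch only: insert `content` immediately after the line containing `marker`.
function insertAtTemplate(template: string, marker: string, content: string): string {
  const markerIndex = template.indexOf(marker);
  if (markerIndex === -1) {
    return template; // marker not found, leave the template untouched
  }
  const lineEnd = template.indexOf('\n', markerIndex);
  const insertAt = lineEnd === -1 ? template.length : lineEnd + 1;

  return template.slice(0, insertAt) + content + template.slice(insertAt);
}
```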
@@ -5,6 +5,9 @@ import { AWSCloudFormationTemplates } from './aws-cloud-formation-templates';
|
|||||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||||
import { AWSError } from './aws-error';
|
import { AWSError } from './aws-error';
|
||||||
import CloudRunner from '../../cloud-runner';
|
import CloudRunner from '../../cloud-runner';
|
||||||
|
import { CleanupCronFormation } from './cloud-formations/cleanup-cron-formation';
|
||||||
|
import CloudRunnerOptions from '../../cloud-runner-options';
|
||||||
|
import { TaskDefinitionFormation } from './cloud-formations/task-definition-formation';
|
||||||
|
|
||||||
export class AWSJobStack {
|
export class AWSJobStack {
|
||||||
private baseStackName: string;
|
private baseStackName: string;
|
||||||
@@ -38,6 +41,13 @@ export class AWSJobStack {
|
|||||||
`ContainerMemory:
|
`ContainerMemory:
|
||||||
Default: ${Number.parseInt(memory)}`,
|
Default: ${Number.parseInt(memory)}`,
|
||||||
);
|
);
|
||||||
|
if (CloudRunnerOptions.watchCloudRunnerToEnd) {
|
||||||
|
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
|
||||||
|
taskDefCloudFormation,
|
||||||
|
'# template resources logstream',
|
||||||
|
TaskDefinitionFormation.streamLogs,
|
||||||
|
);
|
||||||
|
}
|
||||||
for (const secret of secrets) {
|
for (const secret of secrets) {
|
||||||
secret.ParameterKey = `${buildGuid.replace(/[^\dA-Za-z]/g, '')}${secret.ParameterKey.replace(
|
secret.ParameterKey = `${buildGuid.replace(/[^\dA-Za-z]/g, '')}${secret.ParameterKey.replace(
|
||||||
/[^\dA-Za-z]/g,
|
/[^\dA-Za-z]/g,
|
||||||
@@ -57,7 +67,7 @@ export class AWSJobStack {
|
|||||||
);
|
);
|
||||||
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
|
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
|
||||||
taskDefCloudFormation,
|
taskDefCloudFormation,
|
||||||
'p2 - secret',
|
'# template resources secrets',
|
||||||
AWSCloudFormationTemplates.getSecretTemplate(`${secret.ParameterKey}`),
|
AWSCloudFormationTemplates.getSecretTemplate(`${secret.ParameterKey}`),
|
||||||
);
|
);
|
||||||
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
|
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
|
||||||
@@ -69,6 +79,7 @@ export class AWSJobStack {
|
|||||||
const secretsMappedToCloudFormationParameters = secrets.map((x) => {
|
const secretsMappedToCloudFormationParameters = secrets.map((x) => {
|
||||||
return { ParameterKey: x.ParameterKey.replace(/[^\dA-Za-z]/g, ''), ParameterValue: x.ParameterValue };
|
return { ParameterKey: x.ParameterKey.replace(/[^\dA-Za-z]/g, ''), ParameterValue: x.ParameterValue };
|
||||||
});
|
});
|
||||||
|
const logGroupName = `${this.baseStackName}/${taskDefStackName}`;
|
||||||
const parameters = [
|
const parameters = [
|
||||||
{
|
{
|
||||||
ParameterKey: 'EnvironmentName',
|
ParameterKey: 'EnvironmentName',
|
||||||
@@ -82,6 +93,10 @@ export class AWSJobStack {
|
|||||||
ParameterKey: 'ServiceName',
|
ParameterKey: 'ServiceName',
|
||||||
ParameterValue: taskDefStackName,
|
ParameterValue: taskDefStackName,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
ParameterKey: 'LogGroupName',
|
||||||
|
ParameterValue: logGroupName,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
ParameterKey: 'Command',
|
ParameterKey: 'Command',
|
||||||
ParameterValue: 'echo "this template should be overwritten when running a task"',
|
ParameterValue: 'echo "this template should be overwritten when running a task"',
|
||||||
@@ -115,6 +130,7 @@ export class AWSJobStack {
|
|||||||
if (element.StackName === taskDefStackName && element.StackStatus !== 'DELETE_COMPLETE') {
|
if (element.StackName === taskDefStackName && element.StackStatus !== 'DELETE_COMPLETE') {
|
||||||
previousStackExists = true;
|
previousStackExists = true;
|
||||||
CloudRunnerLogger.log(`Previous stack still exists: ${JSON.stringify(element)}`);
|
CloudRunnerLogger.log(`Previous stack still exists: ${JSON.stringify(element)}`);
|
||||||
|
await new Promise((promise) => setTimeout(promise, 5000));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -126,14 +142,53 @@ export class AWSJobStack {
|
|||||||
};
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
CloudRunnerLogger.log(`Creating job aws formation ${taskDefStackName}`);
|
||||||
await CF.createStack(createStackInput).promise();
|
await CF.createStack(createStackInput).promise();
|
||||||
CloudRunnerLogger.log('Creating cloud runner job');
|
|
||||||
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
|
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
await AWSError.handleStackCreationFailure(error, CF, taskDefStackName);
|
await AWSError.handleStackCreationFailure(error, CF, taskDefStackName);
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const createCleanupStackInput: SDK.CloudFormation.CreateStackInput = {
|
||||||
|
StackName: `${taskDefStackName}-cleanup`,
|
||||||
|
TemplateBody: CleanupCronFormation.formation,
|
||||||
|
Capabilities: ['CAPABILITY_IAM'],
|
||||||
|
Parameters: [
|
||||||
|
{
|
||||||
|
ParameterKey: 'StackName',
|
||||||
|
ParameterValue: taskDefStackName,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
ParameterKey: 'DeleteStackName',
|
||||||
|
ParameterValue: `${taskDefStackName}-cleanup`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
ParameterKey: 'TTL',
|
||||||
|
ParameterValue: `1080`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
ParameterKey: 'BUILDGUID',
|
||||||
|
ParameterValue: CloudRunner.buildParameters.buildGuid,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
ParameterKey: 'EnvironmentName',
|
||||||
|
ParameterValue: this.baseStackName,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
if (CloudRunnerOptions.useCleanupCron) {
|
||||||
|
try {
|
||||||
|
CloudRunnerLogger.log(`Creating job cleanup formation`);
|
||||||
|
CF.createStack(createCleanupStackInput).promise();
|
||||||
|
|
||||||
|
// await CF.waitFor('stackCreateComplete', { StackName: createCleanupStackInput.StackName }).promise();
|
||||||
|
} catch (error) {
|
||||||
|
await AWSError.handleStackCreationFailure(error, CF, taskDefStackName);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const taskDefResources = (
|
const taskDefResources = (
|
||||||
await CF.describeStackResources({
|
await CF.describeStackResources({
|
||||||
StackName: taskDefStackName,
|
StackName: taskDefStackName,
|
||||||
|
|||||||
@@ -6,18 +6,20 @@ import * as zlib from 'zlib';
|
|||||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||||
import { Input } from '../../..';
|
import { Input } from '../../..';
|
||||||
import CloudRunner from '../../cloud-runner';
|
import CloudRunner from '../../cloud-runner';
|
||||||
import { CloudRunnerBuildCommandProcessor } from '../../services/cloud-runner-build-command-process';
|
import { CloudRunnerCustomHooks } from '../../services/cloud-runner-custom-hooks';
|
||||||
import { FollowLogStreamService } from '../../services/follow-log-stream-service';
|
import { FollowLogStreamService } from '../../services/follow-log-stream-service';
|
||||||
|
import CloudRunnerOptions from '../../cloud-runner-options';
|
||||||
|
import GitHub from '../../../github';
|
||||||
|
|
||||||
class AWSTaskRunner {
|
class AWSTaskRunner {
|
||||||
|
public static ECS: AWS.ECS;
|
||||||
|
public static Kinesis: AWS.Kinesis;
|
||||||
|
private static readonly encodedUnderscore = `$252F`;
|
||||||
static async runTask(
|
static async runTask(
|
||||||
taskDef: CloudRunnerAWSTaskDef,
|
taskDef: CloudRunnerAWSTaskDef,
|
||||||
ECS: AWS.ECS,
|
|
||||||
CF: AWS.CloudFormation,
|
|
||||||
environment: CloudRunnerEnvironmentVariable[],
|
environment: CloudRunnerEnvironmentVariable[],
|
||||||
buildGuid: string,
|
|
||||||
commands: string,
|
commands: string,
|
||||||
) {
|
): Promise<{ output: string; shouldCleanup: boolean }> {
|
||||||
const cluster = taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ECSCluster')?.PhysicalResourceId || '';
|
const cluster = taskDef.baseResources?.find((x) => x.LogicalResourceId === 'ECSCluster')?.PhysicalResourceId || '';
|
||||||
const taskDefinition =
|
const taskDefinition =
|
||||||
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'TaskDefinition')?.PhysicalResourceId || '';
|
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'TaskDefinition')?.PhysicalResourceId || '';
|
||||||
@@ -30,7 +32,7 @@ class AWSTaskRunner {
|
|||||||
const streamName =
|
const streamName =
|
||||||
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'KinesisStream')?.PhysicalResourceId || '';
|
taskDef.taskDefResources?.find((x) => x.LogicalResourceId === 'KinesisStream')?.PhysicalResourceId || '';
|
||||||
|
|
||||||
const task = await ECS.runTask({
|
const task = await AWSTaskRunner.ECS.runTask({
|
||||||
cluster,
|
cluster,
|
||||||
taskDefinition,
|
taskDefinition,
|
||||||
platformVersion: '1.4.0',
|
platformVersion: '1.4.0',
|
||||||
@@ -39,7 +41,7 @@ class AWSTaskRunner {
|
|||||||
{
|
{
|
||||||
name: taskDef.taskDefStackName,
|
name: taskDef.taskDefStackName,
|
||||||
environment,
|
environment,
|
||||||
command: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(commands, CloudRunner.buildParameters)],
|
command: ['-c', CloudRunnerCustomHooks.ApplyHooksToCommands(commands, CloudRunner.buildParameters)],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
@@ -54,20 +56,27 @@ class AWSTaskRunner {
|
|||||||
}).promise();
|
}).promise();
|
||||||
const taskArn = task.tasks?.[0].taskArn || '';
|
const taskArn = task.tasks?.[0].taskArn || '';
|
||||||
CloudRunnerLogger.log('Cloud runner job is starting');
|
CloudRunnerLogger.log('Cloud runner job is starting');
|
||||||
await AWSTaskRunner.waitUntilTaskRunning(ECS, taskArn, cluster);
|
await AWSTaskRunner.waitUntilTaskRunning(taskArn, cluster);
|
||||||
CloudRunnerLogger.log(
|
CloudRunnerLogger.log(
|
||||||
`Cloud runner job status is running ${(await AWSTaskRunner.describeTasks(ECS, cluster, taskArn))?.lastStatus}`,
|
`Cloud runner job status is running ${(await AWSTaskRunner.describeTasks(cluster, taskArn))?.lastStatus} Watch:${
|
||||||
|
CloudRunnerOptions.watchCloudRunnerToEnd
|
||||||
|
} Async:${CloudRunnerOptions.asyncCloudRunner}`,
|
||||||
);
|
);
|
||||||
const { output, shouldCleanup } = await this.streamLogsUntilTaskStops(
|
if (!CloudRunnerOptions.watchCloudRunnerToEnd) {
|
||||||
ECS,
|
const shouldCleanup: boolean = false;
|
||||||
CF,
|
const output: string = '';
|
||||||
taskDef,
|
CloudRunnerLogger.log(`Watch Cloud Runner To End: false`);
|
||||||
cluster,
|
|
||||||
taskArn,
|
return { output, shouldCleanup };
|
||||||
streamName,
|
}
|
||||||
);
|
|
||||||
const taskData = await AWSTaskRunner.describeTasks(ECS, cluster, taskArn);
|
CloudRunnerLogger.log(`Streaming...`);
|
||||||
const exitCode = taskData.containers?.[0].exitCode;
|
const { output, shouldCleanup } = await this.streamLogsUntilTaskStops(cluster, taskArn, streamName);
|
||||||
|
await new Promise((resolve) => resolve(5000));
|
||||||
|
const taskData = await AWSTaskRunner.describeTasks(cluster, taskArn);
|
||||||
|
const containerState = taskData.containers?.[0];
|
||||||
|
const exitCode = containerState?.exitCode || undefined;
|
||||||
|
CloudRunnerLogger.log(`Container State: ${JSON.stringify(containerState, undefined, 4)}`);
|
||||||
const wasSuccessful = exitCode === 0 || (exitCode === undefined && taskData.lastStatus === 'RUNNING');
|
const wasSuccessful = exitCode === 0 || (exitCode === undefined && taskData.lastStatus === 'RUNNING');
|
||||||
if (wasSuccessful) {
|
if (wasSuccessful) {
|
||||||
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);
|
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);
|
||||||
@@ -85,15 +94,15 @@ class AWSTaskRunner {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static async waitUntilTaskRunning(ECS: AWS.ECS, taskArn: string, cluster: string) {
|
private static async waitUntilTaskRunning(taskArn: string, cluster: string) {
|
||||||
try {
|
try {
|
||||||
await ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
|
await AWSTaskRunner.ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
|
||||||
} catch (error_) {
|
} catch (error_) {
|
||||||
const error = error_ as Error;
|
const error = error_ as Error;
|
||||||
await new Promise((resolve) => setTimeout(resolve, 3000));
|
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||||
CloudRunnerLogger.log(
|
CloudRunnerLogger.log(
|
||||||
`Cloud runner job has ended ${
|
`Cloud runner job has ended ${
|
||||||
(await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].lastStatus
|
(await AWSTaskRunner.describeTasks(cluster, taskArn)).containers?.[0].lastStatus
|
||||||
}`,
|
}`,
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -102,8 +111,8 @@ class AWSTaskRunner {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static async describeTasks(ECS: AWS.ECS, clusterName: string, taskArn: string) {
|
static async describeTasks(clusterName: string, taskArn: string) {
|
||||||
const tasks = await ECS.describeTasks({
|
const tasks = await AWSTaskRunner.ECS.describeTasks({
|
||||||
cluster: clusterName,
|
cluster: clusterName,
|
||||||
tasks: [taskArn],
|
tasks: [taskArn],
|
||||||
}).promise();
|
}).promise();
|
||||||
@@ -114,33 +123,26 @@ class AWSTaskRunner {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static async streamLogsUntilTaskStops(
|
static async streamLogsUntilTaskStops(clusterName: string, taskArn: string, kinesisStreamName: string) {
|
||||||
ECS: AWS.ECS,
|
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||||
CF: AWS.CloudFormation,
|
CloudRunnerLogger.log(`Streaming...`);
|
||||||
taskDef: CloudRunnerAWSTaskDef,
|
const stream = await AWSTaskRunner.getLogStream(kinesisStreamName);
|
||||||
clusterName: string,
|
let iterator = await AWSTaskRunner.getLogIterator(stream);
|
||||||
taskArn: string,
|
|
||||||
kinesisStreamName: string,
|
|
||||||
) {
|
|
||||||
const kinesis = new AWS.Kinesis();
|
|
||||||
const stream = await AWSTaskRunner.getLogStream(kinesis, kinesisStreamName);
|
|
||||||
let iterator = await AWSTaskRunner.getLogIterator(kinesis, stream);
|
|
||||||
|
|
||||||
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${Input.region}#logsV2:log-groups/log-group/${CloudRunner.buildParameters.awsBaseStackName}-${CloudRunner.buildParameters.buildGuid}`;
|
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${Input.region}#logsV2:log-groups/log-group/${CloudRunner.buildParameters.awsBaseStackName}${AWSTaskRunner.encodedUnderscore}${CloudRunner.buildParameters.awsBaseStackName}-${CloudRunner.buildParameters.buildGuid}`;
|
||||||
CloudRunnerLogger.log(`You view the log stream on AWS Cloud Watch: ${logBaseUrl}`);
|
CloudRunnerLogger.log(`You view the log stream on AWS Cloud Watch: ${logBaseUrl}`);
|
||||||
|
await GitHub.updateGitHubCheck(`You view the log stream on AWS Cloud Watch: ${logBaseUrl}`, ``);
|
||||||
let shouldReadLogs = true;
|
let shouldReadLogs = true;
|
||||||
let shouldCleanup = true;
|
let shouldCleanup = true;
|
||||||
let timestamp: number = 0;
|
let timestamp: number = 0;
|
||||||
let output = '';
|
let output = '';
|
||||||
while (shouldReadLogs) {
|
while (shouldReadLogs) {
|
||||||
await new Promise((resolve) => setTimeout(resolve, 1500));
|
await new Promise((resolve) => setTimeout(resolve, 1500));
|
||||||
const taskData = await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
|
const taskData = await AWSTaskRunner.describeTasks(clusterName, taskArn);
|
||||||
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
|
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
|
||||||
({ iterator, shouldReadLogs, output, shouldCleanup } = await AWSTaskRunner.handleLogStreamIteration(
|
({ iterator, shouldReadLogs, output, shouldCleanup } = await AWSTaskRunner.handleLogStreamIteration(
|
||||||
kinesis,
|
|
||||||
iterator,
|
iterator,
|
||||||
shouldReadLogs,
|
shouldReadLogs,
|
||||||
taskDef,
|
|
||||||
output,
|
output,
|
||||||
shouldCleanup,
|
shouldCleanup,
|
||||||
));
|
));
|
||||||
@@ -150,23 +152,18 @@ class AWSTaskRunner {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private static async handleLogStreamIteration(
|
private static async handleLogStreamIteration(
|
||||||
kinesis: AWS.Kinesis,
|
|
||||||
iterator: string,
|
iterator: string,
|
||||||
shouldReadLogs: boolean,
|
shouldReadLogs: boolean,
|
||||||
taskDef: CloudRunnerAWSTaskDef,
|
|
||||||
output: string,
|
output: string,
|
||||||
shouldCleanup: boolean,
|
shouldCleanup: boolean,
|
||||||
) {
|
) {
|
||||||
const records = await kinesis
|
const records = await AWSTaskRunner.Kinesis.getRecords({
|
||||||
.getRecords({
|
ShardIterator: iterator,
|
||||||
ShardIterator: iterator,
|
}).promise();
|
||||||
})
|
|
||||||
.promise();
|
|
||||||
iterator = records.NextShardIterator || '';
|
iterator = records.NextShardIterator || '';
|
||||||
({ shouldReadLogs, output, shouldCleanup } = AWSTaskRunner.logRecords(
|
({ shouldReadLogs, output, shouldCleanup } = AWSTaskRunner.logRecords(
|
||||||
records,
|
records,
|
||||||
iterator,
|
iterator,
|
||||||
taskDef,
|
|
||||||
shouldReadLogs,
|
shouldReadLogs,
|
||||||
output,
|
output,
|
||||||
shouldCleanup,
|
shouldCleanup,
|
||||||
@@ -197,7 +194,6 @@ class AWSTaskRunner {
|
|||||||
private static logRecords(
|
private static logRecords(
|
||||||
records,
|
records,
|
||||||
iterator: string,
|
iterator: string,
|
||||||
taskDef: CloudRunnerAWSTaskDef,
|
|
||||||
shouldReadLogs: boolean,
|
shouldReadLogs: boolean,
|
||||||
output: string,
|
output: string,
|
||||||
shouldCleanup: boolean,
|
shouldCleanup: boolean,
|
||||||
@@ -224,24 +220,20 @@ class AWSTaskRunner {
|
|||||||
return { shouldReadLogs, output, shouldCleanup };
|
return { shouldReadLogs, output, shouldCleanup };
|
||||||
}
|
}
|
||||||
|
|
||||||
private static async getLogStream(kinesis: AWS.Kinesis, kinesisStreamName: string) {
|
private static async getLogStream(kinesisStreamName: string) {
|
||||||
return await kinesis
|
return await AWSTaskRunner.Kinesis.describeStream({
|
||||||
.describeStream({
|
StreamName: kinesisStreamName,
|
||||||
StreamName: kinesisStreamName,
|
}).promise();
|
||||||
})
|
|
||||||
.promise();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static async getLogIterator(kinesis: AWS.Kinesis, stream) {
|
private static async getLogIterator(stream) {
|
||||||
return (
|
return (
|
||||||
(
|
(
|
||||||
await kinesis
|
await AWSTaskRunner.Kinesis.getShardIterator({
|
||||||
.getShardIterator({
|
ShardIteratorType: 'TRIM_HORIZON',
|
||||||
ShardIteratorType: 'TRIM_HORIZON',
|
StreamName: stream.StreamDescription.StreamName,
|
||||||
StreamName: stream.StreamDescription.StreamName,
|
ShardId: stream.StreamDescription.Shards[0].ShardId,
|
||||||
ShardId: stream.StreamDescription.Shards[0].ShardId,
|
}).promise()
|
||||||
})
|
|
||||||
.promise()
|
|
||||||
).ShardIterator || ''
|
).ShardIterator || ''
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -47,6 +47,11 @@ Resources:
|
|||||||
EnableDnsHostnames: true
|
EnableDnsHostnames: true
|
||||||
CidrBlock: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
|
CidrBlock: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
|
||||||
|
|
||||||
|
MainBucket:
|
||||||
|
Type: "AWS::S3::Bucket"
|
||||||
|
Properties:
|
||||||
|
BucketName: !Ref EnvironmentName
|
||||||
|
|
||||||
EFSServerSecurityGroup:
|
EFSServerSecurityGroup:
|
||||||
Type: AWS::EC2::SecurityGroup
|
Type: AWS::EC2::SecurityGroup
|
||||||
Properties:
|
Properties:
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
AWSTemplateFormatVersion: '2010-09-09'
|
export class CleanupCronFormation {
|
||||||
|
public static readonly formation: string = `AWSTemplateFormatVersion: '2010-09-09'
|
||||||
Description: Schedule automatic deletion of CloudFormation stacks
|
Description: Schedule automatic deletion of CloudFormation stacks
|
||||||
Metadata:
|
Metadata:
|
||||||
AWS::CloudFormation::Interface:
|
AWS::CloudFormation::Interface:
|
||||||
@@ -64,10 +65,10 @@ Resources:
|
|||||||
stackName: !Ref 'StackName'
|
stackName: !Ref 'StackName'
|
||||||
deleteStackName: !Ref 'DeleteStackName'
|
deleteStackName: !Ref 'DeleteStackName'
|
||||||
Handler: "index.handler"
|
Handler: "index.handler"
|
||||||
Runtime: "python3.6"
|
Runtime: "python3.9"
|
||||||
Timeout: "5"
|
Timeout: "5"
|
||||||
Role:
|
Role:
|
||||||
'Fn::ImportValue': !Sub '${EnvironmentName}:DeleteCFNLambdaExecutionRole'
|
'Fn::ImportValue': !Sub '\${EnvironmentName}:DeleteCFNLambdaExecutionRole'
|
||||||
DeleteStackEventRule:
|
DeleteStackEventRule:
|
||||||
DependsOn:
|
DependsOn:
|
||||||
- DeleteCFNLambda
|
- DeleteCFNLambda
|
||||||
@@ -130,10 +131,10 @@ Resources:
|
|||||||
status = cfnresponse.FAILED
|
status = cfnresponse.FAILED
|
||||||
cfnresponse.send(event, context, status, {}, None)
|
cfnresponse.send(event, context, status, {}, None)
|
||||||
Handler: "index.handler"
|
Handler: "index.handler"
|
||||||
Runtime: "python3.6"
|
Runtime: "python3.9"
|
||||||
Timeout: "5"
|
Timeout: "5"
|
||||||
Role:
|
Role:
|
||||||
'Fn::ImportValue': !Sub '${EnvironmentName}:DeleteCFNLambdaExecutionRole'
|
'Fn::ImportValue': !Sub '\${EnvironmentName}:DeleteCFNLambdaExecutionRole'
|
||||||
GenerateCronExpression:
|
GenerateCronExpression:
|
||||||
Type: "Custom::GenerateCronExpression"
|
Type: "Custom::GenerateCronExpression"
|
||||||
Version: "1.0"
|
Version: "1.0"
|
||||||
@@ -141,3 +142,5 @@ Resources:
|
|||||||
Name: !Join [ "", [ 'GenerateCronExpression', !Ref BUILDGUID ] ]
|
Name: !Join [ "", [ 'GenerateCronExpression', !Ref BUILDGUID ] ]
|
||||||
ServiceToken: !GetAtt GenerateCronExpLambda.Arn
|
ServiceToken: !GetAtt GenerateCronExpLambda.Arn
|
||||||
ttl: !Ref 'TTL'
|
ttl: !Ref 'TTL'
|
||||||
|
`;
|
||||||
|
}
|
||||||
@@ -11,6 +11,10 @@ Parameters:
|
|||||||
Type: String
|
Type: String
|
||||||
Default: example
|
Default: example
|
||||||
Description: A name for the service
|
Description: A name for the service
|
||||||
|
LogGroupName:
|
||||||
|
Type: String
|
||||||
|
Default: example
|
||||||
|
Description: Name to use for the log group created for this task
|
||||||
ImageUrl:
|
ImageUrl:
|
||||||
Type: String
|
Type: String
|
||||||
Default: nginx
|
Default: nginx
|
||||||
@@ -68,36 +72,14 @@ Resources:
|
|||||||
LogGroup:
|
LogGroup:
|
||||||
Type: 'AWS::Logs::LogGroup'
|
Type: 'AWS::Logs::LogGroup'
|
||||||
Properties:
|
Properties:
|
||||||
LogGroupName: !Ref ServiceName
|
LogGroupName: !Ref LogGroupName
|
||||||
Metadata:
|
Metadata:
|
||||||
'AWS::CloudFormation::Designer':
|
'AWS::CloudFormation::Designer':
|
||||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||||
SubscriptionFilter:
|
# template resources secrets
|
||||||
Type: 'AWS::Logs::SubscriptionFilter'
|
|
||||||
Properties:
|
# template resources logstream
|
||||||
FilterPattern: ''
|
|
||||||
RoleArn:
|
|
||||||
'Fn::ImportValue': !Sub '${'${EnvironmentName}'}:CloudWatchIAMRole'
|
|
||||||
LogGroupName: !Ref ServiceName
|
|
||||||
DestinationArn:
|
|
||||||
'Fn::GetAtt':
|
|
||||||
- KinesisStream
|
|
||||||
- Arn
|
|
||||||
Metadata:
|
|
||||||
'AWS::CloudFormation::Designer':
|
|
||||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
|
||||||
DependsOn:
|
|
||||||
- LogGroup
|
|
||||||
- KinesisStream
|
|
||||||
KinesisStream:
|
|
||||||
Type: 'AWS::Kinesis::Stream'
|
|
||||||
Properties:
|
|
||||||
Name: !Ref ServiceName
|
|
||||||
ShardCount: 1
|
|
||||||
Metadata:
|
|
||||||
'AWS::CloudFormation::Designer':
|
|
||||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
|
||||||
# template secrets p2 - secret
|
|
||||||
TaskDefinition:
|
TaskDefinition:
|
||||||
Type: 'AWS::ECS::TaskDefinition'
|
Type: 'AWS::ECS::TaskDefinition'
|
||||||
Properties:
|
Properties:
|
||||||
@@ -147,10 +129,37 @@ Resources:
  LogConfiguration:
  LogDriver: awslogs
  Options:
- awslogs-group: !Ref ServiceName
+ awslogs-group: !Ref LogGroupName
  awslogs-region: !Ref 'AWS::Region'
  awslogs-stream-prefix: !Ref ServiceName
  DependsOn:
  - LogGroup
+ `;
+ public static streamLogs = `
+ SubscriptionFilter:
+ Type: 'AWS::Logs::SubscriptionFilter'
+ Properties:
+ FilterPattern: ''
+ RoleArn:
+ 'Fn::ImportValue': !Sub '${'${EnvironmentName}'}:CloudWatchIAMRole'
+ LogGroupName: !Ref LogGroupName
+ DestinationArn:
+ 'Fn::GetAtt':
+ - KinesisStream
+ - Arn
+ Metadata:
+ 'AWS::CloudFormation::Designer':
+ id: 7f809e91-9e5d-4678-98c1-c5085956c480
+ DependsOn:
+ - LogGroup
+ - KinesisStream
+ KinesisStream:
+ Type: 'AWS::Kinesis::Stream'
+ Properties:
+ Name: !Ref ServiceName
+ ShardCount: 1
+ Metadata:
+ 'AWS::CloudFormation::Designer':
+ id: c6f18447-b879-4696-8873-f981b2cedd2b
  `;
  }
@@ -1,170 +0,0 @@
- import AWS from 'aws-sdk';
- import { CliFunction } from '../../../../cli/cli-functions-repository';
- import Input from '../../../../input';
- import CloudRunnerLogger from '../../../services/cloud-runner-logger';
- import { BaseStackFormation } from '../cloud-formations/base-stack-formation';
-
- export class AwsCliCommands {
- @CliFunction(`aws-list-all`, `List all resources`)
- static async awsListAll() {
- await AwsCliCommands.awsListStacks(undefined, true);
- await AwsCliCommands.awsListTasks();
- await AwsCliCommands.awsListLogGroups(undefined, true);
- }
- @CliFunction(`aws-garbage-collect`, `garbage collect aws resources not in use !WIP!`)
- static async garbageCollectAws() {
- await AwsCliCommands.cleanup(false);
- }
- @CliFunction(`aws-garbage-collect-all`, `garbage collect aws resources regardless of whether they are in use`)
- static async garbageCollectAwsAll() {
- await AwsCliCommands.cleanup(true);
- }
- @CliFunction(
- `aws-garbage-collect-all-1d-older`,
- `garbage collect aws resources created more than 1d ago (ignore if they are in use)`,
- )
- static async garbageCollectAwsAllOlderThanOneDay() {
- await AwsCliCommands.cleanup(true, true);
- }
- static isOlderThan1day(date: any) {
- const ageDate = new Date(date.getTime() - Date.now());
-
- return ageDate.getDay() > 0;
- }
- @CliFunction(`aws-list-stacks`, `List stacks`)
- static async awsListStacks(perResultCallback: any = false, verbose: boolean = false) {
- process.env.AWS_REGION = Input.region;
- const CF = new AWS.CloudFormation();
- const stacks =
- (await CF.listStacks().promise()).StackSummaries?.filter(
- (_x) => _x.StackStatus !== 'DELETE_COMPLETE', // &&
- // _x.TemplateDescription === TaskDefinitionFormation.description.replace('\n', ''),
- ) || [];
- CloudRunnerLogger.log(`Stacks ${stacks.length}`);
- for (const element of stacks) {
- const ageDate = new Date(element.CreationTime.getTime() - Date.now());
- if (verbose)
- CloudRunnerLogger.log(
- `Task Stack ${element.StackName} - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
- );
- if (perResultCallback) await perResultCallback(element);
- }
- const baseStacks =
- (await CF.listStacks().promise()).StackSummaries?.filter(
- (_x) =>
- _x.StackStatus !== 'DELETE_COMPLETE' && _x.TemplateDescription === BaseStackFormation.baseStackDecription,
- ) || [];
- CloudRunnerLogger.log(`Base Stacks ${baseStacks.length}`);
- for (const element of baseStacks) {
- const ageDate = new Date(element.CreationTime.getTime() - Date.now());
- if (verbose)
- CloudRunnerLogger.log(
- `Base Stack ${
- element.StackName
- } - Age D${ageDate.getHours()} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
- );
- if (perResultCallback) await perResultCallback(element);
- }
- if (stacks === undefined) {
- return;
- }
- }
- @CliFunction(`aws-list-tasks`, `List tasks`)
- static async awsListTasks(perResultCallback: any = false) {
- process.env.AWS_REGION = Input.region;
- const ecs = new AWS.ECS();
- const clusters = (await ecs.listClusters().promise()).clusterArns || [];
- CloudRunnerLogger.log(`Clusters ${clusters.length}`);
- for (const element of clusters) {
- const input: AWS.ECS.ListTasksRequest = {
- cluster: element,
- };
-
- const list = (await ecs.listTasks(input).promise()).taskArns || [];
- if (list.length > 0) {
- const describeInput: AWS.ECS.DescribeTasksRequest = { tasks: list, cluster: element };
- const describeList = (await ecs.describeTasks(describeInput).promise()).tasks || [];
- if (describeList === []) {
- continue;
- }
- CloudRunnerLogger.log(`Tasks ${describeList.length}`);
- for (const taskElement of describeList) {
- if (taskElement === undefined) {
- continue;
- }
- taskElement.overrides = {};
- taskElement.attachments = [];
- if (taskElement.createdAt === undefined) {
- CloudRunnerLogger.log(`Skipping ${taskElement.taskDefinitionArn} no createdAt date`);
- continue;
- }
- if (perResultCallback) await perResultCallback(taskElement, element);
- }
- }
- }
- }
- @CliFunction(`aws-list-log-groups`, `List tasks`)
- static async awsListLogGroups(perResultCallback: any = false, verbose: boolean = false) {
- process.env.AWS_REGION = Input.region;
- const ecs = new AWS.CloudWatchLogs();
- let logStreamInput: AWS.CloudWatchLogs.DescribeLogGroupsRequest = {
- /* logGroupNamePrefix: 'game-ci' */
- };
- let logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
- const logGroups = logGroupsDescribe.logGroups || [];
- while (logGroupsDescribe.nextToken) {
- logStreamInput = { /* logGroupNamePrefix: 'game-ci',*/ nextToken: logGroupsDescribe.nextToken };
- logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
- logGroups.push(...(logGroupsDescribe?.logGroups || []));
- }
-
- CloudRunnerLogger.log(`Log Groups ${logGroups.length}`);
- for (const element of logGroups) {
- if (element.creationTime === undefined) {
- CloudRunnerLogger.log(`Skipping ${element.logGroupName} no createdAt date`);
- continue;
- }
- const ageDate = new Date(new Date(element.creationTime).getTime() - Date.now());
- if (verbose)
- CloudRunnerLogger.log(
- `Log Group Name ${
- element.logGroupName
- } - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()} - 1d old ${AwsCliCommands.isOlderThan1day(
- new Date(element.creationTime),
- )}`,
- );
- if (perResultCallback) await perResultCallback(element, element);
- }
- }
-
- private static async cleanup(deleteResources = false, OneDayOlderOnly: boolean = false) {
- process.env.AWS_REGION = Input.region;
- const CF = new AWS.CloudFormation();
- const ecs = new AWS.ECS();
- const cwl = new AWS.CloudWatchLogs();
- await AwsCliCommands.awsListStacks(async (element) => {
- if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(element.CreationTime))) {
- if (element.StackName === 'game-ci' || element.TemplateDescription === 'Game-CI base stack') {
- CloudRunnerLogger.log(`Skipping ${element.StackName} ignore list`);
-
- return;
- }
- CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
- const deleteStackInput: AWS.CloudFormation.DeleteStackInput = { StackName: element.StackName };
- await CF.deleteStack(deleteStackInput).promise();
- }
- });
- await AwsCliCommands.awsListTasks(async (taskElement, element) => {
- if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(taskElement.CreatedAt))) {
- CloudRunnerLogger.log(`Stopping task ${taskElement.containers?.[0].name}`);
- await ecs.stopTask({ task: taskElement.taskArn || '', cluster: element }).promise();
- }
- });
- await AwsCliCommands.awsListLogGroups(async (element) => {
- if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(new Date(element.createdAt)))) {
- CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
- await cwl.deleteLogGroup({ logGroupName: element.logGroupName || '' }).promise();
- }
- });
- }
- }
@@ -2,13 +2,18 @@ import * as SDK from 'aws-sdk';
  import CloudRunnerSecret from '../../services/cloud-runner-secret';
  import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
  import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
- import AWSTaskRunner from './aws-task-runner';
+ import AwsTaskRunner from './aws-task-runner';
  import { ProviderInterface } from '../provider-interface';
  import BuildParameters from '../../../build-parameters';
  import CloudRunnerLogger from '../../services/cloud-runner-logger';
- import { AWSJobStack } from './aws-job-stack';
- import { AWSBaseStack } from './aws-base-stack';
+ import { AWSJobStack as AwsJobStack } from './aws-job-stack';
+ import { AWSBaseStack as AwsBaseStack } from './aws-base-stack';
  import { Input } from '../../..';
+ import { GarbageCollectionService } from './services/garbage-collection-service';
+ import { ProviderResource } from '../provider-resource';
+ import { ProviderWorkflow } from '../provider-workflow';
+ import { TaskService } from './services/task-service';
+ import CloudRunnerOptions from '../../cloud-runner-options';

  class AWSBuildEnvironment implements ProviderInterface {
  private baseStackName: string;
@@ -16,7 +21,42 @@ class AWSBuildEnvironment implements ProviderInterface {
  constructor(buildParameters: BuildParameters) {
  this.baseStackName = buildParameters.awsBaseStackName;
  }
- async cleanup(
+ async listResources(): Promise<ProviderResource[]> {
+ await TaskService.getCloudFormationJobStacks();
+ await TaskService.getLogGroups();
+ await TaskService.getTasks();
+
+ return [];
+ }
+ listWorkflow(): Promise<ProviderWorkflow[]> {
+ throw new Error('Method not implemented.');
+ }
+ async watchWorkflow(): Promise<string> {
+ return await TaskService.watch();
+ }
+
+ async listOtherResources(): Promise<string> {
+ await TaskService.getLogGroups();
+
+ return '';
+ }
+
+ async garbageCollect(
+ filter: string,
+ previewOnly: boolean,
+ // eslint-disable-next-line no-unused-vars
+ olderThan: Number,
+ // eslint-disable-next-line no-unused-vars
+ fullCache: boolean,
+ // eslint-disable-next-line no-unused-vars
+ baseDependencies: boolean,
+ ): Promise<string> {
+ await GarbageCollectionService.cleanup(!previewOnly);
+
+ return ``;
+ }
+
+ async cleanupWorkflow(
  // eslint-disable-next-line no-unused-vars
  buildGuid: string,
  // eslint-disable-next-line no-unused-vars
@@ -26,7 +66,7 @@ class AWSBuildEnvironment implements ProviderInterface {
  // eslint-disable-next-line no-unused-vars
  defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {}
- async setup(
+ async setupWorkflow(
  // eslint-disable-next-line no-unused-vars
  buildGuid: string,
  // eslint-disable-next-line no-unused-vars
@@ -37,7 +77,7 @@ class AWSBuildEnvironment implements ProviderInterface {
  defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {}

- async runTask(
+ async runTaskInWorkflow(
  buildGuid: string,
  image: string,
  commands: string,
@@ -49,12 +89,14 @@ class AWSBuildEnvironment implements ProviderInterface {
  process.env.AWS_REGION = Input.region;
  const ECS = new SDK.ECS();
  const CF = new SDK.CloudFormation();
+ AwsTaskRunner.ECS = ECS;
+ AwsTaskRunner.Kinesis = new SDK.Kinesis();
  CloudRunnerLogger.log(`AWS Region: ${CF.config.region}`);
  const entrypoint = ['/bin/sh'];
  const startTimeMs = Date.now();

- await new AWSBaseStack(this.baseStackName).setupBaseStack(CF);
- const taskDef = await new AWSJobStack(this.baseStackName).setupCloudFormations(
+ await new AwsBaseStack(this.baseStackName).setupBaseStack(CF);
+ const taskDef = await new AwsJobStack(this.baseStackName).setupCloudFormations(
  CF,
  buildGuid,
  image,
@@ -69,7 +111,7 @@ class AWSBuildEnvironment implements ProviderInterface {
  try {
  const postSetupStacksTimeMs = Date.now();
  CloudRunnerLogger.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
- const { output, shouldCleanup } = await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
+ const { output, shouldCleanup } = await AwsTaskRunner.runTask(taskDef, environment, commands);
  postRunTaskTimeMs = Date.now();
  CloudRunnerLogger.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
  if (shouldCleanup) {
@@ -81,6 +123,7 @@ class AWSBuildEnvironment implements ProviderInterface {

  return output;
  } catch (error) {
+ CloudRunnerLogger.log(`error running task ${error}`);
  await this.cleanupResources(CF, taskDef);
  throw error;
  }
@@ -91,6 +134,11 @@ class AWSBuildEnvironment implements ProviderInterface {
  await CF.deleteStack({
  StackName: taskDef.taskDefStackName,
  }).promise();
+ if (CloudRunnerOptions.useCleanupCron) {
+ await CF.deleteStack({
+ StackName: `${taskDef.taskDefStackName}-cleanup`,
+ }).promise();
+ }
+
  await CF.waitFor('stackDeleteComplete', {
  StackName: taskDef.taskDefStackName,
@@ -0,0 +1,66 @@
+ import AWS from 'aws-sdk';
+ import Input from '../../../../input';
+ import CloudRunnerLogger from '../../../services/cloud-runner-logger';
+ import { TaskService } from './task-service';
+
+ export class GarbageCollectionService {
+ static isOlderThan1day(date: any) {
+ const ageDate = new Date(date.getTime() - Date.now());
+
+ return ageDate.getDay() > 0;
+ }
+
+ public static async cleanup(deleteResources = false, OneDayOlderOnly: boolean = false) {
+ process.env.AWS_REGION = Input.region;
+ const CF = new AWS.CloudFormation();
+ const ecs = new AWS.ECS();
+ const cwl = new AWS.CloudWatchLogs();
+ const taskDefinitionsInUse = new Array();
+ const tasks = await TaskService.getTasks();
+ for (const task of tasks) {
+ const { taskElement, element } = task;
+ taskDefinitionsInUse.push(taskElement.taskDefinitionArn);
+ if (deleteResources && (!OneDayOlderOnly || GarbageCollectionService.isOlderThan1day(taskElement.CreatedAt))) {
+ CloudRunnerLogger.log(`Stopping task ${taskElement.containers?.[0].name}`);
+ await ecs.stopTask({ task: taskElement.taskArn || '', cluster: element }).promise();
+ }
+ }
+ const jobStacks = await TaskService.getCloudFormationJobStacks();
+ for (const element of jobStacks) {
+ if (
+ (await CF.describeStackResources({ StackName: element.StackName }).promise()).StackResources?.some(
+ (x) => x.ResourceType === 'AWS::ECS::TaskDefinition' && taskDefinitionsInUse.includes(x.PhysicalResourceId),
+ )
+ ) {
+ CloudRunnerLogger.log(`Skipping ${element.StackName} - active task was running not deleting`);
+
+ return;
+ }
+ if (deleteResources && (!OneDayOlderOnly || GarbageCollectionService.isOlderThan1day(element.CreationTime))) {
+ if (element.StackName === 'game-ci' || element.TemplateDescription === 'Game-CI base stack') {
+ CloudRunnerLogger.log(`Skipping ${element.StackName} ignore list`);
+
+ return;
+ }
+ CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
+ const deleteStackInput: AWS.CloudFormation.DeleteStackInput = { StackName: element.StackName };
+ await CF.deleteStack(deleteStackInput).promise();
+ }
+ }
+ const logGroups = await TaskService.getLogGroups();
+ for (const element of logGroups) {
+ if (
+ deleteResources &&
+ (!OneDayOlderOnly || GarbageCollectionService.isOlderThan1day(new Date(element.createdAt)))
+ ) {
+ CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
+ await cwl.deleteLogGroup({ logGroupName: element.logGroupName || '' }).promise();
+ }
+ }
+
+ const locks = await TaskService.getLocks();
+ for (const element of locks) {
+ CloudRunnerLogger.log(`Lock: ${element.Key}`);
+ }
+ }
+ }
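For orientation, and not part of the diff itself: a minimal sketch of how this new service is reached from the AWS provider, based on the `garbageCollect` implementation added in the `index.ts` hunk above; the CLI plumbing around it is assumed rather than shown in this compare view.

// Sketch only: mirrors the provider call GarbageCollectionService.cleanup(!previewOnly) shown above.
import { GarbageCollectionService } from './services/garbage-collection-service';

async function collectAwsGarbage(previewOnly: boolean): Promise<void> {
  // previewOnly === true keeps the run read-only; resources are only deleted when it is false.
  await GarbageCollectionService.cleanup(!previewOnly);
}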
src/model/cloud-runner/providers/aws/services/task-service.ts (new file, 168 lines)
@@ -0,0 +1,168 @@
+ import AWS from 'aws-sdk';
+ import Input from '../../../../input';
+ import CloudRunnerLogger from '../../../services/cloud-runner-logger';
+ import { BaseStackFormation } from '../cloud-formations/base-stack-formation';
+ import AwsTaskRunner from '../aws-task-runner';
+ import { ListObjectsRequest } from 'aws-sdk/clients/s3';
+ import CloudRunner from '../../../cloud-runner';
+
+ export class TaskService {
+ static async watch() {
+ // eslint-disable-next-line no-unused-vars
+ const { output, shouldCleanup } = await AwsTaskRunner.streamLogsUntilTaskStops(
+ process.env.cluster || ``,
+ process.env.taskArn || ``,
+ process.env.streamName || ``,
+ );
+
+ return output;
+ }
+ public static async getCloudFormationJobStacks() {
+ const result: any[] = [];
+ CloudRunnerLogger.log(``);
+ CloudRunnerLogger.log(`List Cloud Formation Stacks`);
+ process.env.AWS_REGION = Input.region;
+ const CF = new AWS.CloudFormation();
+ const stacks =
+ (await CF.listStacks().promise()).StackSummaries?.filter(
+ (_x) =>
+ _x.StackStatus !== 'DELETE_COMPLETE' && _x.TemplateDescription !== BaseStackFormation.baseStackDecription,
+ ) || [];
+ CloudRunnerLogger.log(``);
+ CloudRunnerLogger.log(`Cloud Formation Stacks ${stacks.length}`);
+ for (const element of stacks) {
+ const ageDate: Date = new Date(Date.now() - element.CreationTime.getTime());
+
+ CloudRunnerLogger.log(
+ `Task Stack ${element.StackName} - Age D${Math.floor(
+ ageDate.getHours() / 24,
+ )} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
+ );
+ result.push(element);
+ }
+ const baseStacks =
+ (await CF.listStacks().promise()).StackSummaries?.filter(
+ (_x) =>
+ _x.StackStatus !== 'DELETE_COMPLETE' && _x.TemplateDescription === BaseStackFormation.baseStackDecription,
+ ) || [];
+ CloudRunnerLogger.log(``);
+ CloudRunnerLogger.log(`Base Stacks ${baseStacks.length}`);
+ for (const element of baseStacks) {
+ const ageDate: Date = new Date(Date.now() - element.CreationTime.getTime());
+
+ CloudRunnerLogger.log(
+ `Task Stack ${element.StackName} - Age D${Math.floor(
+ ageDate.getHours() / 24,
+ )} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
+ );
+ result.push(element);
+ }
+ CloudRunnerLogger.log(``);
+
+ return result;
+ }
+ public static async getTasks() {
+ const result: any[] = [];
+ CloudRunnerLogger.log(``);
+ CloudRunnerLogger.log(`List Tasks`);
+ process.env.AWS_REGION = Input.region;
+ const ecs = new AWS.ECS();
+ const clusters = (await ecs.listClusters().promise()).clusterArns || [];
+ CloudRunnerLogger.log(`Task Clusters ${clusters.length}`);
+ for (const element of clusters) {
+ const input: AWS.ECS.ListTasksRequest = {
+ cluster: element,
+ };
+
+ const list = (await ecs.listTasks(input).promise()).taskArns || [];
+ if (list.length > 0) {
+ const describeInput: AWS.ECS.DescribeTasksRequest = { tasks: list, cluster: element };
+ const describeList = (await ecs.describeTasks(describeInput).promise()).tasks || [];
+ if (describeList.length === 0) {
+ CloudRunnerLogger.log(`No Tasks`);
+ continue;
+ }
+ CloudRunnerLogger.log(`Tasks ${describeList.length}`);
+ for (const taskElement of describeList) {
+ if (taskElement === undefined) {
+ continue;
+ }
+ taskElement.overrides = {};
+ taskElement.attachments = [];
+ if (taskElement.createdAt === undefined) {
+ CloudRunnerLogger.log(`Skipping ${taskElement.taskDefinitionArn} no createdAt date`);
+ continue;
+ }
+ result.push({ taskElement, element });
+ }
+ }
+ }
+ CloudRunnerLogger.log(``);
+
+ return result;
+ }
+ public static async awsDescribeJob(job: string) {
+ process.env.AWS_REGION = Input.region;
+ const CF = new AWS.CloudFormation();
+ const stack = (await CF.listStacks().promise()).StackSummaries?.find((_x) => _x.StackName === job) || undefined;
+ const stackInfo = (await CF.describeStackResources({ StackName: job }).promise()) || undefined;
+ const stackInfo2 = (await CF.describeStacks({ StackName: job }).promise()) || undefined;
+ if (stack === undefined) {
+ throw new Error('stack not defined');
+ }
+ const ageDate: Date = new Date(Date.now() - stack.CreationTime.getTime());
+ const message = `
+ Task Stack ${stack.StackName}
+ Age D${Math.floor(ageDate.getHours() / 24)} H${ageDate.getHours()} M${ageDate.getMinutes()}
+ ${JSON.stringify(stack, undefined, 4)}
+ ${JSON.stringify(stackInfo, undefined, 4)}
+ ${JSON.stringify(stackInfo2, undefined, 4)}
+ `;
+ CloudRunnerLogger.log(message);
+
+ return message;
+ }
+ public static async getLogGroups() {
+ const result: any[] = [];
+ process.env.AWS_REGION = Input.region;
+ const ecs = new AWS.CloudWatchLogs();
+ let logStreamInput: AWS.CloudWatchLogs.DescribeLogGroupsRequest = {
+ /* logGroupNamePrefix: 'game-ci' */
+ };
+ let logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
+ const logGroups = logGroupsDescribe.logGroups || [];
+ while (logGroupsDescribe.nextToken) {
+ logStreamInput = { /* logGroupNamePrefix: 'game-ci',*/ nextToken: logGroupsDescribe.nextToken };
+ logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
+ logGroups.push(...(logGroupsDescribe?.logGroups || []));
+ }
+
+ CloudRunnerLogger.log(`Log Groups ${logGroups.length}`);
+ for (const element of logGroups) {
+ if (element.creationTime === undefined) {
+ CloudRunnerLogger.log(`Skipping ${element.logGroupName} no createdAt date`);
+ continue;
+ }
+ const ageDate: Date = new Date(Date.now() - element.creationTime);
+
+ CloudRunnerLogger.log(
+ `Task Stack ${element.logGroupName} - Age D${Math.floor(
+ ageDate.getHours() / 24,
+ )} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
+ );
+ result.push(element);
+ }
+
+ return result;
+ }
+ public static async getLocks() {
+ process.env.AWS_REGION = Input.region;
+ const s3 = new AWS.S3();
+ const listRequest: ListObjectsRequest = {
+ Bucket: CloudRunner.buildParameters.awsBaseStackName,
+ };
+ const results = await s3.listObjects(listRequest).promise();
+
+ return results.Contents || [];
+ }
+ }
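For context, a small usage sketch that is not part of the diff: it mirrors the calls `AWSBuildEnvironment.listResources` makes in the earlier `index.ts` hunk. The helpers log what they find and also return arrays, so a caller could inspect the results directly.

// Sketch only: combines the listing helpers added in this file.
import { TaskService } from './task-service';

async function printAwsResourceSummary(): Promise<void> {
  const stacks = await TaskService.getCloudFormationJobStacks(); // job and base CloudFormation stacks with ages
  const logGroups = await TaskService.getLogGroups(); // CloudWatch log groups, paged via nextToken
  const tasks = await TaskService.getTasks(); // ECS tasks grouped with their cluster ARN
  console.log(`${stacks.length} stacks, ${logGroups.length} log groups, ${tasks.length} tasks`);
}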
src/model/cloud-runner/providers/docker/index.ts (new file, 156 lines)
@@ -0,0 +1,156 @@
+ import BuildParameters from '../../../build-parameters';
+ import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
+ import CloudRunnerLogger from '../../services/cloud-runner-logger';
+ import { ProviderInterface } from '../provider-interface';
+ import CloudRunnerSecret from '../../services/cloud-runner-secret';
+ import Docker from '../../../docker';
+ import { Action } from '../../..';
+ import { writeFileSync } from 'fs';
+ import CloudRunner from '../../cloud-runner';
+ import { ProviderResource } from '../provider-resource';
+ import { ProviderWorkflow } from '../provider-workflow';
+ import { CloudRunnerSystem } from '../../services/cloud-runner-system';
+ import * as fs from 'fs';
+
+ class LocalDockerCloudRunner implements ProviderInterface {
+ public buildParameters: BuildParameters | undefined;
+
+ listResources(): Promise<ProviderResource[]> {
+ return new Promise((resolve) => resolve([]));
+ }
+ listWorkflow(): Promise<ProviderWorkflow[]> {
+ throw new Error('Method not implemented.');
+ }
+ watchWorkflow(): Promise<string> {
+ throw new Error('Method not implemented.');
+ }
+ garbageCollect(
+ // eslint-disable-next-line no-unused-vars
+ filter: string,
+ // eslint-disable-next-line no-unused-vars
+ previewOnly: boolean,
+ // eslint-disable-next-line no-unused-vars
+ olderThan: Number,
+ // eslint-disable-next-line no-unused-vars
+ fullCache: boolean,
+ // eslint-disable-next-line no-unused-vars
+ baseDependencies: boolean,
+ ): Promise<string> {
+ return new Promise((result) => result(``));
+ }
+ async cleanupWorkflow(
+ buildGuid: string,
+ buildParameters: BuildParameters,
+ // eslint-disable-next-line no-unused-vars
+ branchName: string,
+ // eslint-disable-next-line no-unused-vars
+ defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
+ ) {
+ const { workspace } = Action;
+ if (
+ fs.existsSync(
+ `${workspace}/cloud-runner-cache/cache/build/build-${buildParameters.buildGuid}.tar${
+ CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
+ }`,
+ )
+ ) {
+ await CloudRunnerSystem.Run(`ls ${workspace}/cloud-runner-cache/cache/build/`);
+ await CloudRunnerSystem.Run(
+ `rm -r ${workspace}/cloud-runner-cache/cache/build/build-${buildParameters.buildGuid}.tar${
+ CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
+ }`,
+ );
+ }
+ }
+ setupWorkflow(
+ buildGuid: string,
+ buildParameters: BuildParameters,
+ // eslint-disable-next-line no-unused-vars
+ branchName: string,
+ // eslint-disable-next-line no-unused-vars
+ defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
+ ) {
+ this.buildParameters = buildParameters;
+ }
+
+ public async runTaskInWorkflow(
+ buildGuid: string,
+ image: string,
+ commands: string,
+ mountdir: string,
+ workingdir: string,
+ environment: CloudRunnerEnvironmentVariable[],
+ secrets: CloudRunnerSecret[],
+ ): Promise<string> {
+ CloudRunnerLogger.log(buildGuid);
+ CloudRunnerLogger.log(commands);
+
+ const { workspace, actionFolder } = Action;
+ const content: any[] = [];
+ for (const x of secrets) {
+ content.push({ name: x.EnvironmentVariable, value: x.ParameterValue });
+ }
+ for (const x of environment) {
+ content.push({ name: x.name, value: x.value });
+ }
+
+ // if (this.buildParameters?.cloudRunnerIntegrationTests) {
+ // core.info(JSON.stringify(content, undefined, 4));
+ // core.info(JSON.stringify(secrets, undefined, 4));
+ // core.info(JSON.stringify(environment, undefined, 4));
+ // }
+
+ // eslint-disable-next-line unicorn/no-for-loop
+ for (let index = 0; index < content.length; index++) {
+ if (content[index] === undefined) {
+ delete content[index];
+ }
+ }
+ let myOutput = '';
+ const sharedFolder = `/data/`;
+
+ // core.info(JSON.stringify({ workspace, actionFolder, ...this.buildParameters, ...content }, undefined, 4));
+ const entrypointFilePath = `start.sh`;
+ const fileContents = `#!/bin/bash
+ set -e
+
+ mkdir -p /github/workspace/cloud-runner-cache
+ mkdir -p /data/cache
+ cp -a /github/workspace/cloud-runner-cache/. ${sharedFolder}
+ ${commands}
+ cp -a ${sharedFolder}. /github/workspace/cloud-runner-cache/
+ `;
+ writeFileSync(`${workspace}/${entrypointFilePath}`, fileContents, {
+ flag: 'w',
+ });
+
+ if (CloudRunner.buildParameters.cloudRunnerDebug) {
+ CloudRunnerLogger.log(`Running local-docker: \n ${fileContents}`);
+ }
+
+ if (fs.existsSync(`${workspace}/cloud-runner-cache`)) {
+ await CloudRunnerSystem.Run(`ls ${workspace}/cloud-runner-cache && du -sh ${workspace}/cloud-runner-cache`);
+ }
+ await Docker.run(
+ image,
+ { workspace, actionFolder, ...this.buildParameters },
+ false,
+ `chmod +x /github/workspace/${entrypointFilePath} && /github/workspace/${entrypointFilePath}`,
+ content,
+ {
+ listeners: {
+ stdout: (data: Buffer) => {
+ myOutput += data.toString();
+ },
+ stderr: (data: Buffer) => {
+ myOutput += `[LOCAL-DOCKER-ERROR]${data.toString()}`;
+ },
+ },
+ },
+ true,
+ );
+
+ return myOutput;
+ }
+ }
+ export default LocalDockerCloudRunner;
@@ -1,5 +1,5 @@
  import * as k8s from '@kubernetes/client-node';
- import { BuildParameters, Output } from '../../..';
+ import { BuildParameters } from '../../..';
  import * as core from '@actions/core';
  import { ProviderInterface } from '../provider-interface';
  import CloudRunnerSecret from '../../services/cloud-runner-secret';
@@ -7,39 +7,84 @@ import KubernetesStorage from './kubernetes-storage';
  import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
  import KubernetesTaskRunner from './kubernetes-task-runner';
  import KubernetesSecret from './kubernetes-secret';
- import waitUntil from 'async-wait-until';
  import KubernetesJobSpecFactory from './kubernetes-job-spec-factory';
  import KubernetesServiceAccount from './kubernetes-service-account';
  import CloudRunnerLogger from '../../services/cloud-runner-logger';
  import { CoreV1Api } from '@kubernetes/client-node';
- import DependencyOverrideService from '../../services/depdency-override-service';
+ import CloudRunner from '../../cloud-runner';
+ import { ProviderResource } from '../provider-resource';
+ import { ProviderWorkflow } from '../provider-workflow';
+ import KubernetesPods from './kubernetes-pods';
+
  class Kubernetes implements ProviderInterface {
- private kubeConfig: k8s.KubeConfig;
- private kubeClient: k8s.CoreV1Api;
- private kubeClientBatch: k8s.BatchV1Api;
- private buildGuid: string = '';
- private buildParameters: BuildParameters;
- private pvcName: string = '';
- private secretName: string = '';
- private jobName: string = '';
- private namespace: string;
- private podName: string = '';
- private containerName: string = '';
- private cleanupCronJobName: string = '';
- private serviceAccountName: string = '';
+ public static Instance: Kubernetes;
+ public kubeConfig!: k8s.KubeConfig;
+ public kubeClient!: k8s.CoreV1Api;
+ public kubeClientBatch!: k8s.BatchV1Api;
+ public buildGuid: string = '';
+ public buildParameters!: BuildParameters;
+ public pvcName: string = '';
+ public secretName: string = '';
+ public jobName: string = '';
+ public namespace!: string;
+ public podName: string = '';
+ public containerName: string = '';
+ public cleanupCronJobName: string = '';
+ public serviceAccountName: string = '';
+
+ // eslint-disable-next-line no-unused-vars
  constructor(buildParameters: BuildParameters) {
+ Kubernetes.Instance = this;
  this.kubeConfig = new k8s.KubeConfig();
  this.kubeConfig.loadFromDefault();
  this.kubeClient = this.kubeConfig.makeApiClient(k8s.CoreV1Api);
  this.kubeClientBatch = this.kubeConfig.makeApiClient(k8s.BatchV1Api);
- CloudRunnerLogger.log('Loaded default Kubernetes configuration for this environment');

  this.namespace = 'default';
- this.buildParameters = buildParameters;
+ CloudRunnerLogger.log('Loaded default Kubernetes configuration for this environment');
  }
- public async setup(
+ async listResources(): Promise<ProviderResource[]> {
+ const pods = await this.kubeClient.listNamespacedPod(this.namespace);
+ const serviceAccounts = await this.kubeClient.listNamespacedServiceAccount(this.namespace);
+ const secrets = await this.kubeClient.listNamespacedSecret(this.namespace);
+ const jobs = await this.kubeClientBatch.listNamespacedJob(this.namespace);
+
+ return [
+ ...pods.body.items.map((x) => {
+ return { Name: x.metadata?.name || `` };
+ }),
+ ...serviceAccounts.body.items.map((x) => {
+ return { Name: x.metadata?.name || `` };
+ }),
+ ...secrets.body.items.map((x) => {
+ return { Name: x.metadata?.name || `` };
+ }),
+ ...jobs.body.items.map((x) => {
+ return { Name: x.metadata?.name || `` };
+ }),
+ ];
+ }
+ listWorkflow(): Promise<ProviderWorkflow[]> {
+ throw new Error('Method not implemented.');
+ }
+ watchWorkflow(): Promise<string> {
+ throw new Error('Method not implemented.');
+ }
+ garbageCollect(
+ // eslint-disable-next-line no-unused-vars
+ filter: string,
+ // eslint-disable-next-line no-unused-vars
+ previewOnly: boolean,
+ // eslint-disable-next-line no-unused-vars
+ olderThan: Number,
+ // eslint-disable-next-line no-unused-vars
+ fullCache: boolean,
+ // eslint-disable-next-line no-unused-vars
+ baseDependencies: boolean,
+ ): Promise<string> {
+ return new Promise((result) => result(``));
+ }
+ public async setupWorkflow(
  buildGuid: string,
  buildParameters: BuildParameters,
  // eslint-disable-next-line no-unused-vars
@@ -48,12 +93,11 @@ class Kubernetes implements ProviderInterface {
  defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {
  try {
- this.pvcName = `unity-builder-pvc-${buildGuid}`;
- this.cleanupCronJobName = `unity-builder-cronjob-${buildGuid}`;
- this.serviceAccountName = `service-account-${buildGuid}`;
- if (await DependencyOverrideService.CheckHealth()) {
- await DependencyOverrideService.TryStartDependencies();
- }
+ this.buildParameters = buildParameters;
+ const id = buildParameters.retainWorkspace ? CloudRunner.lockedWorkspace : buildParameters.buildGuid;
+ this.pvcName = `unity-builder-pvc-${id}`;
+ this.cleanupCronJobName = `unity-builder-cronjob-${id}`;
+ this.serviceAccountName = `service-account-${buildParameters.buildGuid}`;
  await KubernetesStorage.createPersistentVolumeClaim(
  buildParameters,
  this.pvcName,
@@ -67,7 +111,7 @@ class Kubernetes implements ProviderInterface {
  }
  }

- async runTask(
+ async runTaskInWorkflow(
  buildGuid: string,
  image: string,
  commands: string,
@@ -77,40 +121,22 @@ class Kubernetes implements ProviderInterface {
  secrets: CloudRunnerSecret[],
  ): Promise<string> {
  try {
+ CloudRunnerLogger.log('Cloud Runner K8s workflow!');
+
  // Setup
  this.buildGuid = buildGuid;
- this.secretName = `build-credentials-${buildGuid}`;
- this.jobName = `unity-builder-job-${buildGuid}`;
+ this.secretName = `build-credentials-${this.buildGuid}`;
+ this.jobName = `unity-builder-job-${this.buildGuid}`;
  this.containerName = `main`;
  await KubernetesSecret.createSecret(secrets, this.secretName, this.namespace, this.kubeClient);
- const jobSpec = KubernetesJobSpecFactory.getJobSpec(
- commands,
- image,
- mountdir,
- workingdir,
- environment,
- secrets,
- this.buildGuid,
- this.buildParameters,
- this.secretName,
- this.pvcName,
- this.jobName,
- k8s,
- );
-
- // Run
- const jobResult = await this.kubeClientBatch.createNamespacedJob(this.namespace, jobSpec);
- CloudRunnerLogger.log(`Creating build job ${JSON.stringify(jobResult.body.metadata, undefined, 4)}`);
-
- await new Promise((promise) => setTimeout(promise, 5000));
- CloudRunnerLogger.log('Job created');
+ await this.createNamespacedJob(commands, image, mountdir, workingdir, environment, secrets);
  this.setPodNameAndContainerName(await Kubernetes.findPodFromJob(this.kubeClient, this.jobName, this.namespace));
  CloudRunnerLogger.log('Watching pod until running');
+ await KubernetesTaskRunner.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
  let output = '';
  // eslint-disable-next-line no-constant-condition
  while (true) {
  try {
- await KubernetesTaskRunner.watchUntilPodRunning(this.kubeClient, this.podName, this.namespace);
  CloudRunnerLogger.log('Pod running, streaming logs');
  output = await KubernetesTaskRunner.runTask(
  this.kubeConfig,
@@ -120,16 +146,42 @@ class Kubernetes implements ProviderInterface {
  'main',
  this.namespace,
  );
- break;
+ const running = await KubernetesPods.IsPodRunning(this.podName, this.namespace, this.kubeClient);
+
+ if (!running) {
+ CloudRunnerLogger.log(`Pod not found, assumed ended!`);
+ break;
+ } else {
+ CloudRunnerLogger.log('Pod still running, recovering stream...');
+ }
+ await this.cleanupTaskResources();
  } catch (error: any) {
- if (error.message.includes(`HTTP`)) {
+ let errorParsed;
+ try {
+ errorParsed = JSON.parse(error);
+ } catch {
+ errorParsed = error;
+ }
+
+ const reason = errorParsed.reason || errorParsed.response?.body?.reason || ``;
+ const errorMessage = errorParsed.message || reason;
+
+ const continueStreaming =
+ errorMessage.includes(`dial timeout, backstop`) ||
+ errorMessage.includes(`HttpError: HTTP request failed`) ||
+ errorMessage.includes(`an error occurred when try to find container`) ||
+ errorMessage.includes(`not found`) ||
+ errorMessage.includes(`Not Found`);
+ if (continueStreaming) {
+ CloudRunnerLogger.log('Log Stream Container Not Found');
+ await new Promise((resolve) => resolve(5000));
  continue;
  } else {
+ CloudRunnerLogger.log(`error running k8s workflow ${error}`);
  throw error;
  }
  }
  }
- await this.cleanupTaskResources();

  return output;
  } catch (error) {
@@ -140,6 +192,44 @@ class Kubernetes implements ProviderInterface {
  }
  }
+
+ private async createNamespacedJob(
+ commands: string,
+ image: string,
+ mountdir: string,
+ workingdir: string,
+ environment: CloudRunnerEnvironmentVariable[],
+ secrets: CloudRunnerSecret[],
+ ) {
+ for (let index = 0; index < 3; index++) {
+ try {
+ const jobSpec = KubernetesJobSpecFactory.getJobSpec(
+ commands,
+ image,
+ mountdir,
+ workingdir,
+ environment,
+ secrets,
+ this.buildGuid,
+ this.buildParameters,
+ this.secretName,
+ this.pvcName,
+ this.jobName,
+ k8s,
+ );
+ await new Promise((promise) => setTimeout(promise, 15000));
+ await this.kubeClientBatch.createNamespacedJob(this.namespace, jobSpec);
+ CloudRunnerLogger.log(`Build job created`);
+ await new Promise((promise) => setTimeout(promise, 5000));
+ CloudRunnerLogger.log('Job created');
+
+ return;
+ } catch (error) {
+ CloudRunnerLogger.log(`Error occured creating job: ${error}`);
+ throw error;
+ }
+ }
+ }
+
  setPodNameAndContainerName(pod: k8s.V1Pod) {
  this.podName = pod.metadata?.name || '';
  this.containerName = pod.status?.containerStatuses?.[0].name || '';
@@ -150,32 +240,24 @@ class Kubernetes implements ProviderInterface {
  try {
  await this.kubeClientBatch.deleteNamespacedJob(this.jobName, this.namespace);
  await this.kubeClient.deleteNamespacedPod(this.podName, this.namespace);
- await this.kubeClient.deleteNamespacedSecret(this.secretName, this.namespace);
- await new Promise((promise) => setTimeout(promise, 5000));
- } catch (error) {
- CloudRunnerLogger.log('Failed to cleanup, error:');
- core.error(JSON.stringify(error, undefined, 4));
- CloudRunnerLogger.log('Abandoning cleanup, build error:');
- throw error;
+ } catch (error: any) {
+ CloudRunnerLogger.log(`Failed to cleanup`);
+ if (error.response.body.reason !== `NotFound`) {
+ CloudRunnerLogger.log(`Wasn't a not found error: ${error.response.body.reason}`);
+ throw error;
+ }
  }
  try {
- await waitUntil(
- async () => {
- const jobBody = (await this.kubeClientBatch.readNamespacedJob(this.jobName, this.namespace)).body;
- const podBody = (await this.kubeClient.readNamespacedPod(this.podName, this.namespace)).body;
-
- return (jobBody === null || jobBody.status?.active === 0) && podBody === null;
- },
- {
- timeout: 500000,
- intervalBetweenAttempts: 15000,
- },
- );
- // eslint-disable-next-line no-empty
- } catch {}
+ await this.kubeClient.deleteNamespacedSecret(this.secretName, this.namespace);
+ } catch (error: any) {
+ CloudRunnerLogger.log(`Failed to cleanup secret`);
+ CloudRunnerLogger.log(error.response.body.reason);
+ }
+ CloudRunnerLogger.log('cleaned up Secret, Job and Pod');
+ CloudRunnerLogger.log('cleaning up finished');
  }

- async cleanup(
+ async cleanupWorkflow(
  buildGuid: string,
  buildParameters: BuildParameters,
  // eslint-disable-next-line no-unused-vars
@@ -183,11 +265,19 @@ class Kubernetes implements ProviderInterface {
  // eslint-disable-next-line no-unused-vars
  defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
  ) {
+ if (buildParameters.retainWorkspace) {
+ return;
+ }
  CloudRunnerLogger.log(`deleting PVC`);
- await this.kubeClient.deleteNamespacedPersistentVolumeClaim(this.pvcName, this.namespace);
- await Output.setBuildVersion(buildParameters.buildVersion);
- // eslint-disable-next-line unicorn/no-process-exit
- process.exit();
+ try {
+ await this.kubeClient.deleteNamespacedPersistentVolumeClaim(this.pvcName, this.namespace);
+ await this.kubeClient.deleteNamespacedServiceAccount(this.serviceAccountName, this.namespace);
+ CloudRunnerLogger.log('cleaned up PVC and Service Account');
+ } catch (error: any) {
+ CloudRunnerLogger.log(`Cleanup failed ${JSON.stringify(error, undefined, 4)}`);
+ throw error;
+ }
  }

  static async findPodFromJob(kubeClient: CoreV1Api, jobName: string, namespace: string) {
@@ -1,6 +1,6 @@
  import { V1EnvVar, V1EnvVarSource, V1SecretKeySelector } from '@kubernetes/client-node';
  import BuildParameters from '../../../build-parameters';
- import { CloudRunnerBuildCommandProcessor } from '../../services/cloud-runner-build-command-process';
+ import { CloudRunnerCustomHooks } from '../../services/cloud-runner-custom-hooks';
  import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
  import CloudRunnerSecret from '../../services/cloud-runner-secret';
  import CloudRunner from '../../cloud-runner';
@@ -103,7 +103,7 @@ class KubernetesJobSpecFactory {
  name: 'main',
  image,
  command: ['/bin/sh'],
- args: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(command, CloudRunner.buildParameters)],
+ args: ['-c', CloudRunnerCustomHooks.ApplyHooksToCommands(command, CloudRunner.buildParameters)],

  workingDir: `${workingDirectory}`,
  resources: {
@@ -158,6 +158,8 @@ class KubernetesJobSpecFactory {
  },
  };
+
+ job.spec.template.spec.containers[0].resources.requests[`ephemeral-storage`] = '5Gi';

  return job;
  }
  }
src/model/cloud-runner/providers/k8s/kubernetes-pods.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
+ import CloudRunnerLogger from '../../services/cloud-runner-logger';
+ import { CoreV1Api } from '@kubernetes/client-node';
+ class KubernetesPods {
+ public static async IsPodRunning(podName: string, namespace: string, kubeClient: CoreV1Api) {
+ const pods = (await kubeClient.listNamespacedPod(namespace)).body.items.filter((x) => podName === x.metadata?.name);
+ const running = pods.length > 0 && (pods[0].status?.phase === `Running` || pods[0].status?.phase === `Pending`);
+ const phase = pods[0]?.status?.phase || 'undefined status';
+ CloudRunnerLogger.log(`Getting pod status: ${phase}`);
+ if (phase === `Failed`) {
+ throw new Error(`K8s pod failed`);
+ }
+
+ return running;
+ }
+ }
+
+ export default KubernetesPods;
@@ -1,6 +1,7 @@
  import { CoreV1Api } from '@kubernetes/client-node';
  import CloudRunnerSecret from '../../services/cloud-runner-secret';
  import * as k8s from '@kubernetes/client-node';
+ import CloudRunnerLogger from '../../services/cloud-runner-logger';
  const base64 = require('base-64');

  class KubernetesSecret {
@@ -10,19 +11,34 @@ class KubernetesSecret {
  namespace: string,
  kubeClient: CoreV1Api,
  ) {
+ try {
  const secret = new k8s.V1Secret();
  secret.apiVersion = 'v1';
  secret.kind = 'Secret';
  secret.type = 'Opaque';
  secret.metadata = {
  name: secretName,
  };
  secret.data = {};
  for (const buildSecret of secrets) {
  secret.data[buildSecret.ParameterKey] = base64.encode(buildSecret.ParameterValue);
  }
+ CloudRunnerLogger.log(`Creating secret: ${secretName}`);
+ const existingSecrets = await kubeClient.listNamespacedSecret(namespace);
+ const mappedSecrets = existingSecrets.body.items.map((x) => {
+ return x.metadata?.name || `no name`;
+ });
+
- return kubeClient.createNamespacedSecret(namespace, secret);
+ CloudRunnerLogger.log(
+ `ExistsAlready: ${mappedSecrets.includes(secretName)} SecretsCount: ${mappedSecrets.length}`,
+ );
+ await new Promise((promise) => setTimeout(promise, 15000));
+ await kubeClient.createNamespacedSecret(namespace, secret);
+ CloudRunnerLogger.log('Created secret');
+ } catch (error) {
+ CloudRunnerLogger.log(`Created secret failed ${error}`);
+ throw new Error(`Failed to create kubernetes secret`);
+ }
  }
  }

@@ -3,8 +3,8 @@ import * as core from '@actions/core';
import * as k8s from '@kubernetes/client-node';
import BuildParameters from '../../../build-parameters';
import CloudRunnerLogger from '../../services/cloud-runner-logger';
-import YAML from 'yaml';
import { IncomingMessage } from 'http';
+import GitHub from '../../../github';

class KubernetesStorage {
public static async createPersistentVolumeClaim(
@@ -13,20 +13,19 @@
kubeClient: k8s.CoreV1Api,
namespace: string,
) {
-if (buildParameters.kubeVolume) {
-CloudRunnerLogger.log(buildParameters.kubeVolume);
+if (buildParameters.kubeVolume !== ``) {
+CloudRunnerLogger.log(`Kube Volume was input was set ${buildParameters.kubeVolume} overriding ${pvcName}`);
pvcName = buildParameters.kubeVolume;

return;
}
-const pvcList = (await kubeClient.listNamespacedPersistentVolumeClaim(namespace)).body.items.map(
-(x) => x.metadata?.name,
-);
+const allPvc = (await kubeClient.listNamespacedPersistentVolumeClaim(namespace)).body.items;
+const pvcList = allPvc.map((x) => x.metadata?.name);
CloudRunnerLogger.log(`Current PVCs in namespace ${namespace}`);
CloudRunnerLogger.log(JSON.stringify(pvcList, undefined, 4));
if (pvcList.includes(pvcName)) {
CloudRunnerLogger.log(`pvc ${pvcName} already exists`);
-if (!buildParameters.isCliMode) {
+if (GitHub.githubInputEnabled) {
core.setOutput('volume', pvcName);
}

@@ -95,9 +94,6 @@
},
},
};
-if (process.env.K8s_STORAGE_PVC_SPEC) {
-YAML.parse(process.env.K8s_STORAGE_PVC_SPEC);
-}
const result = await kubeClient.createNamespacedPersistentVolumeClaim(namespace, pvc);

return result;
@@ -1,49 +0,0 @@
-import BuildParameters from '../../../build-parameters';
-import { CloudRunnerSystem } from '../../services/cloud-runner-system';
-import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
-import CloudRunnerLogger from '../../services/cloud-runner-logger';
-import { ProviderInterface } from '../provider-interface';
-import CloudRunnerSecret from '../../services/cloud-runner-secret';
-
-class LocalDockerCloudRunner implements ProviderInterface {
-cleanup(
-// eslint-disable-next-line no-unused-vars
-buildGuid: string,
-// eslint-disable-next-line no-unused-vars
-buildParameters: BuildParameters,
-// eslint-disable-next-line no-unused-vars
-branchName: string,
-// eslint-disable-next-line no-unused-vars
-defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
-) {}
-setup(
-// eslint-disable-next-line no-unused-vars
-buildGuid: string,
-// eslint-disable-next-line no-unused-vars
-buildParameters: BuildParameters,
-// eslint-disable-next-line no-unused-vars
-branchName: string,
-// eslint-disable-next-line no-unused-vars
-defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
-) {}
-public runTask(
-commands: string,
-buildGuid: string,
-// eslint-disable-next-line no-unused-vars
-image: string,
-// eslint-disable-next-line no-unused-vars
-mountdir: string,
-// eslint-disable-next-line no-unused-vars
-workingdir: string,
-// eslint-disable-next-line no-unused-vars
-environment: CloudRunnerEnvironmentVariable[],
-// eslint-disable-next-line no-unused-vars
-secrets: CloudRunnerSecret[],
-): Promise<string> {
-CloudRunnerLogger.log(buildGuid);
-CloudRunnerLogger.log(commands);
-
-return CloudRunnerSystem.Run(commands, false, false);
-}
-}
-export default LocalDockerCloudRunner;
@@ -4,9 +4,34 @@ import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environm
import CloudRunnerLogger from '../../services/cloud-runner-logger';
import { ProviderInterface } from '../provider-interface';
import CloudRunnerSecret from '../../services/cloud-runner-secret';
+import { ProviderResource } from '../provider-resource';
+import { ProviderWorkflow } from '../provider-workflow';

class LocalCloudRunner implements ProviderInterface {
-cleanup(
+listResources(): Promise<ProviderResource[]> {
+throw new Error('Method not implemented.');
+}
+listWorkflow(): Promise<ProviderWorkflow[]> {
+throw new Error('Method not implemented.');
+}
+watchWorkflow(): Promise<string> {
+throw new Error('Method not implemented.');
+}
+garbageCollect(
+// eslint-disable-next-line no-unused-vars
+filter: string,
+// eslint-disable-next-line no-unused-vars
+previewOnly: boolean,
+// eslint-disable-next-line no-unused-vars
+olderThan: Number,
+// eslint-disable-next-line no-unused-vars
+fullCache: boolean,
+// eslint-disable-next-line no-unused-vars
+baseDependencies: boolean,
+): Promise<string> {
+throw new Error('Method not implemented.');
+}
+cleanupWorkflow(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
@@ -16,7 +41,7 @@ class LocalCloudRunner implements ProviderInterface {
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {}
-public setup(
+public setupWorkflow(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
@@ -26,7 +51,7 @@ class LocalCloudRunner implements ProviderInterface {
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {}
-public async runTask(
+public async runTaskInWorkflow(
buildGuid: string,
image: string,
commands: string,
@@ -1,9 +1,11 @@
import BuildParameters from '../../build-parameters';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerSecret from '../services/cloud-runner-secret';
+import { ProviderResource } from './provider-resource';
+import { ProviderWorkflow } from './provider-workflow';

export interface ProviderInterface {
-cleanup(
+cleanupWorkflow(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
@@ -13,7 +15,7 @@ export interface ProviderInterface {
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
);
-setup(
+setupWorkflow(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
@@ -23,7 +25,7 @@ export interface ProviderInterface {
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
);
-runTask(
+runTaskInWorkflow(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
@@ -39,4 +41,19 @@ export interface ProviderInterface {
// eslint-disable-next-line no-unused-vars
secrets: CloudRunnerSecret[],
): Promise<string>;
+garbageCollect(
+// eslint-disable-next-line no-unused-vars
+filter: string,
+// eslint-disable-next-line no-unused-vars
+previewOnly: boolean,
+// eslint-disable-next-line no-unused-vars
+olderThan: Number,
+// eslint-disable-next-line no-unused-vars
+fullCache: boolean,
+// eslint-disable-next-line no-unused-vars
+baseDependencies: boolean,
+): Promise<string>;
+listResources(): Promise<ProviderResource[]>;
+listWorkflow(): Promise<ProviderWorkflow[]>;
+watchWorkflow(): Promise<string>;
}
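A hypothetical consumer of the members added to ProviderInterface above; only the method names, the garbageCollect parameter order and the Name field of ProviderResource/ProviderWorkflow come from the diff, while the logging and the argument values are illustrative assumptions.

// Sketch only: enumerate provider resources/workflows and run a preview-only garbage collection.
import { ProviderInterface } from './provider-interface';

export async function describeProvider(provider: ProviderInterface): Promise<void> {
  const resources = await provider.listResources();
  const workflows = await provider.listWorkflow();
  console.log(`resources: ${resources.map((x) => x.Name).join(', ')}`);
  console.log(`workflows: ${workflows.map((x) => x.Name).join(', ')}`);
  // filter '', previewOnly true, olderThan 7, fullCache false, baseDependencies false (values assumed)
  console.log(await provider.garbageCollect('', true, 7, false, false));
}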
src/model/cloud-runner/providers/provider-resource.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
+export class ProviderResource {
+public Name!: string;
+}
src/model/cloud-runner/providers/provider-workflow.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
+export class ProviderWorkflow {
+public Name!: string;
+}
@@ -3,9 +3,28 @@ import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environm
import CloudRunnerLogger from '../../services/cloud-runner-logger';
import { ProviderInterface } from '../provider-interface';
import CloudRunnerSecret from '../../services/cloud-runner-secret';
+import { ProviderResource } from '../provider-resource';
+import { ProviderWorkflow } from '../provider-workflow';

class TestCloudRunner implements ProviderInterface {
-cleanup(
+listResources(): Promise<ProviderResource[]> {
+throw new Error('Method not implemented.');
+}
+listWorkflow(): Promise<ProviderWorkflow[]> {
+throw new Error('Method not implemented.');
+}
+watchWorkflow(): Promise<string> {
+throw new Error('Method not implemented.');
+}
+garbageCollect(
+// eslint-disable-next-line no-unused-vars
+filter: string,
+// eslint-disable-next-line no-unused-vars
+previewOnly: boolean,
+): Promise<string> {
+throw new Error('Method not implemented.');
+}
+cleanupWorkflow(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
@@ -15,7 +34,7 @@ class TestCloudRunner implements ProviderInterface {
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {}
-setup(
+setupWorkflow(
// eslint-disable-next-line no-unused-vars
buildGuid: string,
// eslint-disable-next-line no-unused-vars
@@ -25,7 +44,7 @@ class TestCloudRunner implements ProviderInterface {
// eslint-disable-next-line no-unused-vars
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
) {}
-public async runTask(
+public async runTaskInWorkflow(
commands: string,
buildGuid: string,
image: string,
@@ -46,40 +46,53 @@ export class Caching {
public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheArtifactName: string) {
cacheArtifactName = cacheArtifactName.replace(' ', '');
const startPath = process.cwd();
+let compressionSuffix = '';
+if (CloudRunner.buildParameters.useLz4Compression === true) {
+compressionSuffix = `.lz4`;
+}
+CloudRunnerLogger.log(`Compression: ${CloudRunner.buildParameters.useLz4Compression} ${compressionSuffix}`);
try {
if (!(await fileExists(cacheFolder))) {
await CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
}
process.chdir(path.resolve(sourceFolder, '..'));

-if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
+if (CloudRunner.buildParameters.cloudRunnerDebug) {
CloudRunnerLogger.log(
`Hashed cache folder ${await LfsHashing.hashAllFiles(sourceFolder)} ${sourceFolder} ${path.basename(
sourceFolder,
)}`,
);
}
-// eslint-disable-next-line func-style
-const formatFunction = function (format: string) {
-const arguments_ = Array.prototype.slice.call(
-[path.resolve(sourceFolder, '..'), cacheFolder, cacheArtifactName],
-1,
-);
-
-return format.replace(/{(\d+)}/g, function (match, number) {
-return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
-});
-};
-await CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar ${path.basename(sourceFolder)}`);
-assert(await fileExists(`${cacheArtifactName}.tar`), 'cache archive exists');
-assert(await fileExists(path.basename(sourceFolder)), 'source folder exists');
-if (CloudRunner.buildParameters.cachePushOverrideCommand) {
-await CloudRunnerSystem.Run(formatFunction(CloudRunner.buildParameters.cachePushOverrideCommand));
+const contents = await fs.promises.readdir(path.basename(sourceFolder));
+CloudRunnerLogger.log(
+`There is ${contents.length} files/dir in the source folder ${path.basename(sourceFolder)}`,
+);
+
+if (CloudRunner.buildParameters.cloudRunnerDebug) {
+// await CloudRunnerSystem.Run(`tree -L 2 ./..`);
+// await CloudRunnerSystem.Run(`tree -L 2`);
}
-await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar ${cacheFolder}`);
+if (contents.length === 0) {
+CloudRunnerLogger.log(
+`Did not push source folder to cache because it was empty ${path.basename(sourceFolder)}`,
+);
+process.chdir(`${startPath}`);
+
+return;
+}
+
+await CloudRunnerSystem.Run(
+`tar -cf ${cacheArtifactName}.tar${compressionSuffix} ${path.basename(sourceFolder)}`,
+);
+await CloudRunnerSystem.Run(`du ${cacheArtifactName}.tar${compressionSuffix}`);
+assert(await fileExists(`${cacheArtifactName}.tar${compressionSuffix}`), 'cache archive exists');
+assert(await fileExists(path.basename(sourceFolder)), 'source folder exists');
+await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar${compressionSuffix} ${cacheFolder}`);
RemoteClientLogger.log(`moved cache entry ${cacheArtifactName} to ${cacheFolder}`);
assert(
-await fileExists(`${path.join(cacheFolder, cacheArtifactName)}.tar`),
+await fileExists(`${path.join(cacheFolder, cacheArtifactName)}.tar${compressionSuffix}`),
'cache archive exists inside cache folder',
);
} catch (error) {
@@ -90,8 +103,12 @@ export class Caching {
}
public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheArtifactName: string = ``) {
cacheArtifactName = cacheArtifactName.replace(' ', '');
+let compressionSuffix = '';
+if (CloudRunner.buildParameters.useLz4Compression === true) {
+compressionSuffix = `.lz4`;
+}
const startPath = process.cwd();
-RemoteClientLogger.log(`Caching for ${path.basename(destinationFolder)}`);
+RemoteClientLogger.log(`Caching for (lz4 ${compressionSuffix}) ${path.basename(destinationFolder)}`);
try {
if (!(await fileExists(cacheFolder))) {
await fs.promises.mkdir(cacheFolder);
@@ -101,38 +118,26 @@
await fs.promises.mkdir(destinationFolder);
}

-const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar$ | head -1`))
+const latestInBranch = await (
+await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar${compressionSuffix}$ | head -1`)
+)
.replace(/\n/g, ``)
-.replace('.tar', '');
+.replace(`.tar${compressionSuffix}`, '');

process.chdir(cacheFolder);

const cacheSelection =
-cacheArtifactName !== `` && (await fileExists(`${cacheArtifactName}.tar`)) ? cacheArtifactName : latestInBranch;
+cacheArtifactName !== `` && (await fileExists(`${cacheArtifactName}.tar${compressionSuffix}`))
+? cacheArtifactName
+: latestInBranch;
await CloudRunnerLogger.log(`cache key ${cacheArtifactName} selection ${cacheSelection}`);

-// eslint-disable-next-line func-style
-const formatFunction = function (format: string) {
-const arguments_ = Array.prototype.slice.call(
-[path.resolve(destinationFolder, '..'), cacheFolder, cacheArtifactName],
-1,
-);
-
-return format.replace(/{(\d+)}/g, function (match, number) {
-return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
-});
-};
-
-if (CloudRunner.buildParameters.cachePullOverrideCommand) {
-await CloudRunnerSystem.Run(formatFunction(CloudRunner.buildParameters.cachePullOverrideCommand));
-}
-
-if (await fileExists(`${cacheSelection}.tar`)) {
+if (await fileExists(`${cacheSelection}.tar${compressionSuffix}`)) {
const resultsFolder = `results${CloudRunner.buildParameters.buildGuid}`;
await CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);
-RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar`);
+RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar${compressionSuffix}`);
const fullResultsFolder = path.join(cacheFolder, resultsFolder);
-await CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar -C ${fullResultsFolder}`);
+await CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar${compressionSuffix} -C ${fullResultsFolder}`);
RemoteClientLogger.log(`cache item extracted to ${fullResultsFolder}`);
assert(await fileExists(fullResultsFolder), `cache extraction results folder exists`);
const destinationParentFolder = path.resolve(destinationFolder, '..');
@@ -152,15 +157,18 @@
} else {
RemoteClientLogger.logWarning(`cache item ${cacheArtifactName} doesn't exist ${destinationFolder}`);
if (cacheSelection !== ``) {
-RemoteClientLogger.logWarning(`cache item ${cacheArtifactName}.tar doesn't exist ${destinationFolder}`);
+RemoteClientLogger.logWarning(
+`cache item ${cacheArtifactName}.tar${compressionSuffix} doesn't exist ${destinationFolder}`,
+);
+await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
}
}
} catch (error) {
-process.chdir(`${startPath}`);
+process.chdir(startPath);
throw error;
}
-process.chdir(`${startPath}`);
+process.chdir(startPath);
}

public static async handleCachePurging() {
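A small, hypothetical call sequence for the cache helpers changed above; the folder paths and the import path are placeholders, and only the PushToCache/PullFromCache signatures and the lz4 suffix behaviour are taken from the diff.

// Sketch only: push a Library folder into the cache, then restore it into another workspace.
// When CloudRunner.buildParameters.useLz4Compression is true the helpers read/write
// <cacheArtifactName>.tar.lz4 instead of <cacheArtifactName>.tar, as implemented above.
import { Caching } from './caching';

export async function roundTripLibraryCache(): Promise<void> {
  await Caching.PushToCache('/data/cache/example-key/Library', '/example-repo/Library', 'library-cache');
  await Caching.PullFromCache('/data/cache/example-key/Library', '/other-workspace/Library', 'library-cache');
}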
@@ -9,34 +9,43 @@ import { assert } from 'console';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { CliFunction } from '../../cli/cli-functions-repository';
import { CloudRunnerSystem } from '../services/cloud-runner-system';
+import YAML from 'yaml';

export class RemoteClient {
public static async bootstrapRepository() {
try {
-await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
-await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.repoPathAbsolute}`);
-await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.cacheFolderFull}`);
-process.chdir(CloudRunnerFolders.repoPathAbsolute);
+await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute)}`);
+await CloudRunnerSystem.Run(
+`mkdir -p ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.cacheFolderForCacheKeyFull)}`,
+);
+process.chdir(CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute));
await RemoteClient.cloneRepoWithoutLFSFiles();
-await RemoteClient.sizeOfFolder('repo before lfs cache pull', CloudRunnerFolders.repoPathAbsolute);
+RemoteClient.replaceLargePackageReferencesWithSharedReferences();
+await RemoteClient.sizeOfFolder(
+'repo before lfs cache pull',
+CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute),
+);
const lfsHashes = await LfsHashing.createLFSHashFiles();
if (fs.existsSync(CloudRunnerFolders.libraryFolderAbsolute)) {
RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
}
await Caching.PullFromCache(
-CloudRunnerFolders.lfsCacheFolderFull,
-CloudRunnerFolders.lfsFolderAbsolute,
+CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.lfsCacheFolderFull),
+CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.lfsFolderAbsolute),
`${lfsHashes.lfsGuidSum}`,
);
await RemoteClient.sizeOfFolder('repo after lfs cache pull', CloudRunnerFolders.repoPathAbsolute);
await RemoteClient.pullLatestLFS();
await RemoteClient.sizeOfFolder('repo before lfs git pull', CloudRunnerFolders.repoPathAbsolute);
await Caching.PushToCache(
-CloudRunnerFolders.lfsCacheFolderFull,
-CloudRunnerFolders.lfsFolderAbsolute,
+CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.lfsCacheFolderFull),
+CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.lfsFolderAbsolute),
`${lfsHashes.lfsGuidSum}`,
);
-await Caching.PullFromCache(CloudRunnerFolders.libraryCacheFolderFull, CloudRunnerFolders.libraryFolderAbsolute);
+await Caching.PullFromCache(
+CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.libraryCacheFolderFull),
+CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.libraryFolderAbsolute),
+);
await RemoteClient.sizeOfFolder('repo after library cache pull', CloudRunnerFolders.repoPathAbsolute);
await Caching.handleCachePurging();
} catch (error) {
@@ -45,37 +54,69 @@ export class RemoteClient {
}

private static async sizeOfFolder(message: string, folder: string) {
-if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
+if (CloudRunner.buildParameters.cloudRunnerDebug) {
CloudRunnerLogger.log(`Size of ${message}`);
await CloudRunnerSystem.Run(`du -sh ${folder}`);
}
}

private static async cloneRepoWithoutLFSFiles() {
+process.chdir(`${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
+
+if (
+CloudRunner.buildParameters.retainWorkspace &&
+fs.existsSync(path.join(CloudRunnerFolders.repoPathAbsolute, `.git`))
+) {
+process.chdir(CloudRunnerFolders.repoPathAbsolute);
+RemoteClientLogger.log(
+`${CloudRunnerFolders.repoPathAbsolute} repo exists - skipping clone - retained workspace mode ${CloudRunner.buildParameters.retainWorkspace}`,
+);
+await CloudRunnerSystem.Run(`git fetch && git reset --hard ${CloudRunner.buildParameters.gitSha}`);
+
+return;
+}
+
+if (fs.existsSync(CloudRunnerFolders.repoPathAbsolute)) {
+RemoteClientLogger.log(`${CloudRunnerFolders.repoPathAbsolute} repo exists cleaning up`);
+await CloudRunnerSystem.Run(`rm -r ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute)}`);
+}
+
try {
-process.chdir(`${CloudRunnerFolders.repoPathAbsolute}`);
RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
await CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
RemoteClientLogger.log(`Cloning the repository being built:`);
await CloudRunnerSystem.Run(`git config --global filter.lfs.smudge "git-lfs smudge --skip -- %f"`);
await CloudRunnerSystem.Run(`git config --global filter.lfs.process "git-lfs filter-process --skip"`);
await CloudRunnerSystem.Run(
-`git clone -q ${CloudRunnerFolders.targetBuildRepoUrl} ${path.resolve(
-`..`,
-path.basename(CloudRunnerFolders.repoPathAbsolute),
-)}`,
+`git clone -q ${CloudRunnerFolders.targetBuildRepoUrl} ${path.basename(CloudRunnerFolders.repoPathAbsolute)}`,
);
+process.chdir(CloudRunnerFolders.repoPathAbsolute);
await CloudRunnerSystem.Run(`git lfs install`);
assert(fs.existsSync(`.git`), 'git folder exists');
RemoteClientLogger.log(`${CloudRunner.buildParameters.branch}`);
await CloudRunnerSystem.Run(`git checkout ${CloudRunner.buildParameters.branch}`);
+await CloudRunnerSystem.Run(`git checkout ${CloudRunner.buildParameters.gitSha}`);
assert(fs.existsSync(path.join(`.git`, `lfs`)), 'LFS folder should not exist before caching');
-RemoteClientLogger.log(`Checked out ${process.env.GITHUB_SHA}`);
+RemoteClientLogger.log(`Checked out ${CloudRunner.buildParameters.branch}`);
} catch (error) {
+await CloudRunnerSystem.Run(`tree -L 2 ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
throw error;
}
}

+static replaceLargePackageReferencesWithSharedReferences() {
+if (CloudRunner.buildParameters.useSharedLargePackages) {
+const filePath = path.join(CloudRunnerFolders.projectPathAbsolute, `Packages/manifest.json`);
+let manifest = fs.readFileSync(filePath, 'utf8');
+manifest = manifest.replace(/LargeContent/g, '../../../LargeContent');
+fs.writeFileSync(filePath, manifest);
+if (CloudRunner.buildParameters.cloudRunnerDebug) {
+CloudRunnerLogger.log(`Package Manifest`);
+CloudRunnerLogger.log(manifest);
+}
+}
+}
+
private static async pullLatestLFS() {
process.chdir(CloudRunnerFolders.repoPathAbsolute);
await CloudRunnerSystem.Run(`git config --global filter.lfs.smudge "git-lfs smudge -- %f"`);
@@ -85,13 +126,36 @@ export class RemoteClient {
assert(fs.existsSync(CloudRunnerFolders.lfsFolderAbsolute));
}

-@CliFunction(`remote-cli`, `sets up a repository, usually before a game-ci build`)
+@CliFunction(`remote-cli-pre-build`, `sets up a repository, usually before a game-ci build`)
static async runRemoteClientJob() {
-const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
-RemoteClientLogger.log(`Build Params:
-${JSON.stringify(buildParameter, undefined, 4)}
-`);
-CloudRunner.buildParameters = buildParameter;
+// await CloudRunnerSystem.Run(`tree -L 2 ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
+RemoteClient.handleRetainedWorkspace();
+
+// await CloudRunnerSystem.Run(`tree -L 2 ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
+
await RemoteClient.bootstrapRepository();
+
+// await CloudRunnerSystem.Run(`tree -L 2 ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
+await RemoteClient.runCustomHookFiles(`before-build`);
+
+// await CloudRunnerSystem.Run(`tree -L 2 ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
+}
+static async runCustomHookFiles(hookLifecycle: string) {
+RemoteClientLogger.log(`RunCustomHookFiles: ${hookLifecycle}`);
+const gameCiCustomHooksPath = path.join(CloudRunnerFolders.repoPathAbsolute, `game-ci`, `hooks`);
+const files = fs.readdirSync(gameCiCustomHooksPath);
+for (const file of files) {
+const fileContents = fs.readFileSync(path.join(gameCiCustomHooksPath, file), `utf8`);
+const fileContentsObject = YAML.parse(fileContents.toString());
+if (fileContentsObject.hook === hookLifecycle) {
+RemoteClientLogger.log(`Active Hook File ${file} \n \n file contents: \n ${fileContents}`);
+await CloudRunnerSystem.Run(fileContentsObject.commands);
+}
+}
+}
+static handleRetainedWorkspace() {
+if (!CloudRunner.buildParameters.retainWorkspace) {
+return;
+}
+RemoteClientLogger.log(`Retained Workspace: ${CloudRunner.lockedWorkspace}`);
}
}
@@ -1,43 +0,0 @@
-import { BuildParameters } from '../..';
-import YAML from 'yaml';
-import CloudRunnerSecret from './cloud-runner-secret';
-import CloudRunner from '../cloud-runner';
-
-export class CloudRunnerBuildCommandProcessor {
-public static ProcessCommands(commands: string, buildParameters: BuildParameters): string {
-const hooks = CloudRunnerBuildCommandProcessor.getHooks(buildParameters.customJobHooks).filter((x) =>
-x.step.includes(`all`),
-);
-
-return `echo "---"
-echo "start cloud runner init"
-${CloudRunner.buildParameters.cloudRunnerIntegrationTests ? '' : '#'} printenv
-echo "start of cloud runner job"
-${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
-${commands}
-${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
-echo "end of cloud runner job"
-echo "---${buildParameters.logId}"`;
-}
-
-public static getHooks(customJobHooks): Hook[] {
-const experimentHooks = customJobHooks;
-let output = new Array<Hook>();
-if (experimentHooks && experimentHooks !== '') {
-try {
-output = YAML.parse(experimentHooks);
-} catch (error) {
-throw error;
-}
-}
-
-return output.filter((x) => x.step !== undefined && x.hook !== undefined && x.hook.length > 0);
-}
-}
-export class Hook {
-public commands;
-public secrets: CloudRunnerSecret[] = new Array<CloudRunnerSecret>();
-public name;
-public hook!: string[];
-public step!: string[];
-}
src/model/cloud-runner/services/cloud-runner-custom-hooks.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
+import { BuildParameters, Input } from '../..';
+import YAML from 'yaml';
+import CloudRunnerSecret from './cloud-runner-secret';
+import { RemoteClientLogger } from '../remote-client/remote-client-logger';
+import path from 'path';
+import CloudRunnerOptions from '../cloud-runner-options';
+import * as fs from 'fs';
+
+// import CloudRunnerLogger from './cloud-runner-logger';
+
+export class CloudRunnerCustomHooks {
+// TODO also accept hooks as yaml files in the repo
+public static ApplyHooksToCommands(commands: string, buildParameters: BuildParameters): string {
+const hooks = CloudRunnerCustomHooks.getHooks(buildParameters.customJobHooks).filter((x) => x.step.includes(`all`));
+
+return `echo "---"
+echo "start cloud runner init"
+${CloudRunnerOptions.cloudRunnerDebugEnv ? `printenv` : `#`}
+echo "start of cloud runner job"
+${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
+${commands}
+${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
+echo "end of cloud runner job"
+echo "---${buildParameters.logId}"`;
+}
+
+public static getHooks(customJobHooks): Hook[] {
+const experimentHooks = customJobHooks;
+let output = new Array<Hook>();
+if (experimentHooks && experimentHooks !== '') {
+try {
+output = YAML.parse(experimentHooks);
+} catch (error) {
+throw error;
+}
+}
+
+return output.filter((x) => x.step !== undefined && x.hook !== undefined && x.hook.length > 0);
+}
+
+static GetCustomHooksFromFiles(hookLifecycle: string): Hook[] {
+const results: Hook[] = [];
+RemoteClientLogger.log(`GetCustomStepFiles: ${hookLifecycle}`);
+try {
+const gameCiCustomStepsPath = path.join(process.cwd(), `game-ci`, `hooks`);
+const files = fs.readdirSync(gameCiCustomStepsPath);
+for (const file of files) {
+if (!CloudRunnerOptions.customHookFiles.includes(file.replace(`.yaml`, ``))) {
+continue;
+}
+const fileContents = fs.readFileSync(path.join(gameCiCustomStepsPath, file), `utf8`);
+const fileContentsObject = CloudRunnerCustomHooks.ParseHooks(fileContents)[0];
+if (fileContentsObject.hook.includes(hookLifecycle)) {
+results.push(fileContentsObject);
+}
+}
+} catch (error) {
+RemoteClientLogger.log(`Failed Getting: ${hookLifecycle} \n ${JSON.stringify(error, undefined, 4)}`);
+}
+RemoteClientLogger.log(`Active Steps From Files: \n ${JSON.stringify(results, undefined, 4)}`);
+
+return results;
+}
+
+private static ConvertYamlSecrets(object) {
+if (object.secrets === undefined) {
+object.secrets = [];
+
+return;
+}
+object.secrets = object.secrets.map((x) => {
+return {
+ParameterKey: x.name,
+EnvironmentVariable: Input.ToEnvVarFormat(x.name),
+ParameterValue: x.value,
+};
+});
+}
+
+public static ParseHooks(steps: string): Hook[] {
+if (steps === '') {
+return [];
+}
+
+// if (CloudRunner.buildParameters?.cloudRunnerIntegrationTests) {
+
+// CloudRunnerLogger.log(`Parsing build hooks: ${steps}`);
+
+// }
+const isArray = steps.replace(/\s/g, ``)[0] === `-`;
+const object: Hook[] = isArray ? YAML.parse(steps) : [YAML.parse(steps)];
+for (const hook of object) {
+CloudRunnerCustomHooks.ConvertYamlSecrets(hook);
+if (hook.secrets === undefined) {
+hook.secrets = [];
+}
+}
+if (object === undefined) {
+throw new Error(`Failed to parse ${steps}`);
+}
+
+return object;
+}
+
+public static getSecrets(hooks) {
+const secrets = hooks.map((x) => x.secrets).filter((x) => x !== undefined && x.length > 0);
+
+// eslint-disable-next-line unicorn/no-array-reduce
+return secrets.length > 0 ? secrets.reduce((x, y) => [...x, ...y]) : [];
+}
+}
+export class Hook {
+public commands;
+public secrets: CloudRunnerSecret[] = new Array<CloudRunnerSecret>();
+public name;
+public hook!: string[];
+public step!: string[];
+}
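To make the hook format concrete, here is a hypothetical game-ci/hooks entry run through the parser above, written as TypeScript with an embedded YAML string; the field names follow ParseHooks and ConvertYamlSecrets, while the hook contents themselves are invented for illustration.

// Sketch only: ParseHooks wraps a single-document string in an array, and ConvertYamlSecrets
// maps each secret's name/value onto ParameterKey/EnvironmentVariable/ParameterValue.
import { CloudRunnerCustomHooks } from './cloud-runner-custom-hooks';

const exampleHookYaml = `
name: print-build-info
hook:
  - before
step:
  - all
commands: echo "about to run the cloud runner job"
secrets:
  - name: exampleToken
    value: not-a-real-secret
`;

const hooks = CloudRunnerCustomHooks.ParseHooks(exampleHookYaml);
console.log(JSON.stringify(hooks, undefined, 4));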
269
src/model/cloud-runner/services/cloud-runner-custom-steps.ts
Normal file
269
src/model/cloud-runner/services/cloud-runner-custom-steps.ts
Normal file
@@ -0,0 +1,269 @@
|
|||||||
|
import YAML from 'yaml';
|
||||||
|
import CloudRunner from '../cloud-runner';
|
||||||
|
import * as core from '@actions/core';
|
||||||
|
import { CustomWorkflow } from '../workflows/custom-workflow';
|
||||||
|
import { RemoteClientLogger } from '../remote-client/remote-client-logger';
|
||||||
|
import path from 'path';
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import Input from '../../input';
|
||||||
|
import CloudRunnerOptions from '../cloud-runner-options';
|
||||||
|
import CloudRunnerLogger from './cloud-runner-logger';
|
||||||
|
import { CustomStep } from './custom-step';
|
||||||
|
|
||||||
|
export class CloudRunnerCustomSteps {
|
||||||
|
static GetCustomStepsFromFiles(hookLifecycle: string): CustomStep[] {
|
||||||
|
const results: CustomStep[] = [];
|
||||||
|
RemoteClientLogger.log(
|
||||||
|
`GetCustomStepFiles: ${hookLifecycle} CustomStepFiles: ${CloudRunnerOptions.customStepFiles}`,
|
||||||
|
);
|
||||||
|
try {
|
||||||
|
const gameCiCustomStepsPath = path.join(process.cwd(), `game-ci`, `steps`);
|
||||||
|
const files = fs.readdirSync(gameCiCustomStepsPath);
|
||||||
|
for (const file of files) {
|
||||||
|
if (!CloudRunnerOptions.customStepFiles.includes(file.replace(`.yaml`, ``))) {
|
||||||
|
RemoteClientLogger.log(`Skipping CustomStepFile: ${file}`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const fileContents = fs.readFileSync(path.join(gameCiCustomStepsPath, file), `utf8`);
|
||||||
|
const fileContentsObject = CloudRunnerCustomSteps.ParseSteps(fileContents)[0];
|
||||||
|
if (fileContentsObject.hook === hookLifecycle) {
|
||||||
|
results.push(fileContentsObject);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
RemoteClientLogger.log(`Failed Getting: ${hookLifecycle} \n ${JSON.stringify(error, undefined, 4)}`);
|
||||||
|
}
|
||||||
|
RemoteClientLogger.log(`Active Steps From Files: \n ${JSON.stringify(results, undefined, 4)}`);
|
||||||
|
|
||||||
|
const builtInCustomSteps: CustomStep[] = CloudRunnerCustomSteps.ParseSteps(
|
||||||
|
`- name: aws-s3-upload-build
|
||||||
|
image: amazon/aws-cli
|
||||||
|
hook: after
|
||||||
|
commands: |
|
||||||
|
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||||
|
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||||
|
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||||
|
aws s3 cp /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
|
||||||
|
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
|
||||||
|
} s3://${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID.tar${
|
||||||
|
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
|
||||||
|
}
|
||||||
|
rm /data/cache/$CACHE_KEY/build/build-${CloudRunner.buildParameters.buildGuid}.tar${
|
||||||
|
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
|
||||||
|
}
|
||||||
|
secrets:
|
||||||
|
- name: awsAccessKeyId
|
||||||
|
value: ${process.env.AWS_ACCESS_KEY_ID || ``}
|
||||||
|
- name: awsSecretAccessKey
|
||||||
|
value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
|
||||||
|
- name: awsDefaultRegion
|
||||||
|
value: ${process.env.AWS_REGION || ``}
|
||||||
|
- name: aws-s3-pull-build
|
||||||
|
image: amazon/aws-cli
|
||||||
|
commands: |
|
||||||
|
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||||
|
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||||
|
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||||
|
aws s3 ls ${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/ || true
|
||||||
|
aws s3 ls ${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/build || true
|
||||||
|
aws s3 cp s3://${
|
||||||
|
CloudRunner.buildParameters.awsBaseStackName
|
||||||
|
}/cloud-runner-cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
|
||||||
|
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
|
||||||
|
} /data/cache/$CACHE_KEY/build/build-$BUILD_GUID_TARGET.tar${
|
||||||
|
CloudRunner.buildParameters.useLz4Compression ? '.lz4' : ''
|
||||||
|
}
|
||||||
|
secrets:
|
||||||
|
- name: awsAccessKeyId
|
||||||
|
- name: awsSecretAccessKey
|
||||||
|
- name: awsDefaultRegion
|
||||||
|
- name: BUILD_GUID_TARGET
|
||||||
|
- name: steam-deploy-client
|
||||||
|
image: steamcmd/steamcmd
|
||||||
|
commands: |
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y curl tar coreutils git tree > /dev/null
|
||||||
|
curl -s https://gist.githubusercontent.com/frostebite/1d56f5505b36b403b64193b7a6e54cdc/raw/fa6639ed4ef750c4268ea319d63aa80f52712ffb/deploy-client-steam.sh | bash
|
||||||
|
secrets:
|
||||||
|
- name: STEAM_USERNAME
|
||||||
|
- name: STEAM_PASSWORD
|
||||||
|
- name: STEAM_APPID
|
||||||
|
- name: STEAM_SSFN_FILE_NAME
|
||||||
|
- name: STEAM_SSFN_FILE_CONTENTS
|
||||||
|
- name: STEAM_CONFIG_VDF_1
|
||||||
|
- name: STEAM_CONFIG_VDF_2
|
||||||
|
- name: STEAM_CONFIG_VDF_3
|
||||||
|
- name: STEAM_CONFIG_VDF_4
|
||||||
|
- name: BUILD_GUID_TARGET
|
||||||
|
- name: RELEASE_BRANCH
|
||||||
|
- name: steam-deploy-project
|
||||||
|
image: steamcmd/steamcmd
|
||||||
|
commands: |
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y curl tar coreutils git tree > /dev/null
|
||||||
|
curl -s https://gist.githubusercontent.com/frostebite/969da6a41002a0e901174124b643709f/raw/02403e53fb292026cba81ddcf4ff35fc1eba111d/steam-deploy-project.sh | bash
|
||||||
|
secrets:
|
||||||
|
- name: STEAM_USERNAME
|
||||||
|
- name: STEAM_PASSWORD
|
||||||
|
- name: STEAM_APPID
|
||||||
|
- name: STEAM_SSFN_FILE_NAME
|
||||||
|
- name: STEAM_SSFN_FILE_CONTENTS
|
||||||
|
- name: STEAM_CONFIG_VDF_1
|
||||||
|
- name: STEAM_CONFIG_VDF_2
|
||||||
|
- name: STEAM_CONFIG_VDF_3
|
||||||
|
- name: STEAM_CONFIG_VDF_4
|
||||||
|
- name: BUILD_GUID_2
|
||||||
|
- name: RELEASE_BRANCH
|
||||||
|
- name: aws-s3-upload-cache
|
||||||
|
image: amazon/aws-cli
|
||||||
|
hook: after
|
||||||
|
commands: |
|
||||||
|
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||||
|
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||||
|
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||||
|
aws s3 cp --recursive /data/cache/$CACHE_KEY/lfs s3://${
|
||||||
|
CloudRunner.buildParameters.awsBaseStackName
|
||||||
|
}/cloud-runner-cache/$CACHE_KEY/lfs
|
||||||
|
rm -r /data/cache/$CACHE_KEY/lfs
|
||||||
|
aws s3 cp --recursive /data/cache/$CACHE_KEY/Library s3://${
|
||||||
|
CloudRunner.buildParameters.awsBaseStackName
|
||||||
|
}/cloud-runner-cache/$CACHE_KEY/Library
|
||||||
|
rm -r /data/cache/$CACHE_KEY/Library
|
||||||
|
secrets:
|
||||||
|
- name: awsAccessKeyId
|
||||||
|
value: ${process.env.AWS_ACCESS_KEY_ID || ``}
|
||||||
|
- name: awsSecretAccessKey
|
||||||
|
value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
|
||||||
|
- name: awsDefaultRegion
|
||||||
|
value: ${process.env.AWS_REGION || ``}
|
||||||
|
- name: aws-s3-pull-cache
|
||||||
|
image: amazon/aws-cli
|
||||||
|
hook: before
|
||||||
|
commands: |
|
||||||
|
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||||
|
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||||
|
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||||
|
aws s3 ls ${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/ || true
|
||||||
|
aws s3 ls ${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/ || true
|
||||||
|
BUCKET1="${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/Library/"
|
||||||
|
aws s3 ls $BUCKET1 || true
|
||||||
|
OBJECT1="$(aws s3 ls $BUCKET1 | sort | tail -n 1 | awk '{print $4}' || '')"
|
||||||
|
aws s3 cp s3://$BUCKET1$OBJECT1 /data/cache/$CACHE_KEY/Library/ || true
|
||||||
|
      BUCKET2="${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/$CACHE_KEY/lfs/"
      aws s3 ls $BUCKET2 || true
      OBJECT2="$(aws s3 ls $BUCKET2 | sort | tail -n 1 | awk '{print $4}' || '')"
      aws s3 cp s3://$BUCKET2$OBJECT2 /data/cache/$CACHE_KEY/lfs/ || true
    secrets:
      - name: awsAccessKeyId
        value: ${process.env.AWS_ACCESS_KEY_ID || ``}
      - name: awsSecretAccessKey
        value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
      - name: awsDefaultRegion
        value: ${process.env.AWS_REGION || ``}
  - name: debug-cache
    image: ubuntu
    hook: after
    commands: |
      apt-get update > /dev/null
      ${CloudRunnerOptions.cloudRunnerDebugTree ? `apt-get install -y tree > /dev/null` : `#`}
      ${CloudRunnerOptions.cloudRunnerDebugTree ? `tree -L 3 /data/cache` : `#`}
    secrets:
      - name: awsAccessKeyId
        value: ${process.env.AWS_ACCESS_KEY_ID || ``}
      - name: awsSecretAccessKey
        value: ${process.env.AWS_SECRET_ACCESS_KEY || ``}
      - name: awsDefaultRegion
        value: ${process.env.AWS_REGION || ``}`,
    ).filter((x) => CloudRunnerOptions.customStepFiles.includes(x.name) && x.hook === hookLifecycle);
    if (builtInCustomSteps.length > 0) {
      results.push(...builtInCustomSteps);
    }

    return results;
  }

  private static ConvertYamlSecrets(object) {
    if (object.secrets === undefined) {
      object.secrets = [];

      return;
    }
    object.secrets = object.secrets.map((x) => {
      return {
        ParameterKey: x.name,
        EnvironmentVariable: Input.ToEnvVarFormat(x.name),
        ParameterValue: x.value,
      };
    });
  }

  public static ParseSteps(steps: string): CustomStep[] {
    if (steps === '') {
      return [];
    }
    const isArray = steps.replace(/\s/g, ``)[0] === `-`;
    const object: CustomStep[] = isArray ? YAML.parse(steps) : [YAML.parse(steps)];
    for (const step of object) {
      CloudRunnerCustomSteps.ConvertYamlSecrets(step);
      if (step.secrets === undefined) {
        step.secrets = [];
      } else {
        for (const secret of step.secrets) {
          if (secret.ParameterValue === undefined && process.env[secret.EnvironmentVariable] !== undefined) {
            if (CloudRunner.buildParameters?.cloudRunnerDebug) {
              CloudRunnerLogger.log(`Injecting custom step ${step.name} from env var ${secret.ParameterKey}`);
            }
            secret.ParameterValue = process.env[secret.ParameterKey] || ``;
          }
        }
      }
      if (step.image === undefined) {
        step.image = `ubuntu`;
      }
    }
    if (object === undefined) {
      throw new Error(`Failed to parse ${steps}`);
    }

    return object;
  }

  static async RunPostBuildSteps(cloudRunnerStepState) {
    let output = ``;
    const steps: CustomStep[] = [
      ...CloudRunnerCustomSteps.ParseSteps(CloudRunner.buildParameters.postBuildSteps),
      ...CloudRunnerCustomSteps.GetCustomStepsFromFiles(`after`),
    ];

    if (steps.length > 0) {
      if (!CloudRunner.buildParameters.isCliMode) core.startGroup('post build steps');
      output += await CustomWorkflow.runCustomJob(
        steps,
        cloudRunnerStepState.environment,
        cloudRunnerStepState.secrets,
      );
      if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
    }

    return output;
  }
  static async RunPreBuildSteps(cloudRunnerStepState) {
    let output = ``;
    const steps: CustomStep[] = [
      ...CloudRunnerCustomSteps.ParseSteps(CloudRunner.buildParameters.preBuildSteps),
      ...CloudRunnerCustomSteps.GetCustomStepsFromFiles(`before`),
    ];

    if (steps.length > 0) {
      if (!CloudRunner.buildParameters.isCliMode) core.startGroup('pre build steps');
      output += await CustomWorkflow.runCustomJob(
        steps,
        cloudRunnerStepState.environment,
        cloudRunnerStepState.secrets,
      );
      if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
    }

    return output;
  }
}
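ParseSteps above accepts either a single YAML mapping or a YAML list and normalises both into CustomStep[]. A minimal usage sketch (the commands are illustrative and not part of this change):

// Illustrative only: a single mapping and a list both parse to CustomStep[].
const fromMapping = CloudRunnerCustomSteps.ParseSteps(`hook: after
image: ubuntu
commands: echo "after-build step test!"`);
const fromList = CloudRunnerCustomSteps.ParseSteps(`- hook: before
  commands: echo "before-build step test!"`);
// fromList[0].image is filled in with the default `ubuntu`, and any secrets are
// converted to { ParameterKey, EnvironmentVariable, ParameterValue } entries.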
@@ -1,16 +1,27 @@
 import path from 'path';
-import { CloudRunner } from '../..';
+import CloudRunnerOptions from '../cloud-runner-options';
+import CloudRunner from './../cloud-runner';
+
 export class CloudRunnerFolders {
   public static readonly repositoryFolder = 'repo';
 
+  public static ToLinuxFolder(folder: string) {
+    return folder.replace(/\\/g, `/`);
+  }
+
   // Only the following paths that do not start a path.join with another "Full" suffixed property need to start with an absolute /
 
   public static get uniqueCloudRunnerJobFolderAbsolute(): string {
-    return path.join(`/`, CloudRunnerFolders.buildVolumeFolder, CloudRunner.buildParameters.buildGuid);
+    return CloudRunner.buildParameters && CloudRunner.buildParameters.retainWorkspace && CloudRunner.lockedWorkspace
+      ? path.join(`/`, CloudRunnerFolders.buildVolumeFolder, CloudRunner.lockedWorkspace)
+      : path.join(`/`, CloudRunnerFolders.buildVolumeFolder, CloudRunner.buildParameters.buildGuid);
   }
 
-  public static get cacheFolderFull(): string {
+  public static get cacheFolderForAllFull(): string {
+    return path.join('/', CloudRunnerFolders.buildVolumeFolder, CloudRunnerFolders.cacheFolder);
+  }
+
+  public static get cacheFolderForCacheKeyFull(): string {
     return path.join(
       '/',
       CloudRunnerFolders.buildVolumeFolder,
@@ -20,7 +31,12 @@ export class CloudRunnerFolders {
   }
 
   public static get builderPathAbsolute(): string {
-    return path.join(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute, `builder`);
+    return path.join(
+      CloudRunnerOptions.useSharedBuilder
+        ? `/${CloudRunnerFolders.buildVolumeFolder}`
+        : CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute,
+      `builder`,
+    );
   }
 
   public static get repoPathAbsolute(): string {
@@ -48,11 +64,11 @@ export class CloudRunnerFolders {
   }
 
   public static get lfsCacheFolderFull() {
-    return path.join(CloudRunnerFolders.cacheFolderFull, `lfs`);
+    return path.join(CloudRunnerFolders.cacheFolderForCacheKeyFull, `lfs`);
   }
 
   public static get libraryCacheFolderFull() {
-    return path.join(CloudRunnerFolders.cacheFolderFull, `Library`);
+    return path.join(CloudRunnerFolders.cacheFolderForCacheKeyFull, `Library`);
   }
 
   public static get unityBuilderRepoUrl(): string {
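The new ToLinuxFolder helper only swaps path separators, so paths built with path.join on a Windows host stay usable inside the Linux build container. A small sketch of the intent:

// Sketch only: normalise Windows-style separators for use inside the container.
const linuxPath = CloudRunnerFolders.ToLinuxFolder('repo\\test-project\\Assets');
// linuxPath === 'repo/test-project/Assets'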
@@ -0,0 +1,10 @@
import Input from '../../input';
import CloudRunnerOptions from '../cloud-runner-options';

class CloudRunnerOptionsReader {
  static GetProperties() {
    return [...Object.getOwnPropertyNames(Input), ...Object.getOwnPropertyNames(CloudRunnerOptions)];
  }
}

export default CloudRunnerOptionsReader;
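A minimal sketch of how this reader is meant to be used: it merges the static property names of Input and CloudRunnerOptions so callers can enumerate every configurable option without importing both classes.

// Sketch only: list every option property name known to Input and CloudRunnerOptions.
const propertyNames = CloudRunnerOptionsReader.GetProperties();
console.log(`option properties: ${propertyNames.length}`);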
@@ -1,5 +1,6 @@
 import Input from '../../input';
 import { GenericInputReader } from '../../input-readers/generic-input-reader';
+import CloudRunnerOptions from '../cloud-runner-options';
 
 const formatFunction = (value, arguments_) => {
   for (const element of arguments_) {
@@ -12,6 +13,8 @@ const formatFunction = (value, arguments_) => {
 class CloudRunnerQueryOverride {
   static queryOverrides: any;
 
+  // TODO accept premade secret sources or custom secret source definition yamls
+
   public static query(key, alternativeKey) {
     if (CloudRunnerQueryOverride.queryOverrides && CloudRunnerQueryOverride.queryOverrides[key] !== undefined) {
       return CloudRunnerQueryOverride.queryOverrides[key];
@@ -28,11 +31,11 @@ class CloudRunnerQueryOverride {
   }
 
   private static shouldUseOverride(query) {
-    if (Input.readInputOverrideCommand() !== '') {
-      if (Input.readInputFromOverrideList() !== '') {
+    if (CloudRunnerOptions.readInputOverrideCommand() !== '') {
+      if (CloudRunnerOptions.readInputFromOverrideList() !== '') {
         const doesInclude =
-          Input.readInputFromOverrideList().split(',').includes(query) ||
-          Input.readInputFromOverrideList().split(',').includes(Input.ToEnvVarFormat(query));
+          CloudRunnerOptions.readInputFromOverrideList().split(',').includes(query) ||
+          CloudRunnerOptions.readInputFromOverrideList().split(',').includes(Input.ToEnvVarFormat(query));
 
         return doesInclude ? true : false;
       } else {
@@ -46,11 +49,13 @@ class CloudRunnerQueryOverride {
       throw new Error(`Should not be trying to run override query on ${query}`);
     }
 
-    return await GenericInputReader.Run(formatFunction(Input.readInputOverrideCommand(), [{ key: 0, value: query }]));
+    return await GenericInputReader.Run(
+      formatFunction(CloudRunnerOptions.readInputOverrideCommand(), [{ key: 0, value: query }]),
+    );
   }
 
   public static async PopulateQueryOverrideInput() {
-    const queries = Input.readInputFromOverrideList().split(',');
+    const queries = CloudRunnerOptions.readInputFromOverrideList().split(',');
     CloudRunnerQueryOverride.queryOverrides = new Array();
     for (const element of queries) {
       if (CloudRunnerQueryOverride.shouldUseOverride(element)) {
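A hedged sketch of the substitution the module-private formatFunction performs above: each { key, value } argument replaces its numbered placeholder in the override command string. The {0} placeholder syntax is an assumption based on the { key: 0, value: query } call, not something confirmed by this diff.

// Sketch under that assumption; 'fetch-secret' is a hypothetical command.
const command = formatFunction('fetch-secret {0}', [{ key: 0, value: 'UNITY_SERIAL' }]);
// command === 'fetch-secret UNITY_SERIAL', and shouldUseOverride only allows the
// lookup when UNITY_SERIAL appears in readInputFromOverrideList().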
@@ -2,6 +2,20 @@ import { exec } from 'child_process';
 import { RemoteClientLogger } from '../remote-client/remote-client-logger';
 
 export class CloudRunnerSystem {
+  public static async RunAndReadLines(command: string): Promise<string[]> {
+    const result = await CloudRunnerSystem.Run(command, false, true);
+
+    return result
+      .split(`\n`)
+      .map((x) => x.replace(`\r`, ``))
+      .filter((x) => x !== ``)
+      .map((x) => {
+        const lineValues = x.split(` `);
+
+        return lineValues[lineValues.length - 1];
+      });
+  }
+
   public static async Run(command: string, suppressError = false, suppressLogs = false) {
     for (const element of command.split(`\n`)) {
       if (!suppressLogs) {
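RunAndReadLines keeps only the last whitespace-separated token of each non-empty output line, which is how the workspace locking code turns `aws s3 ls` listings into object names. A hedged usage sketch (the bucket is hypothetical):

// Sketch only: for listing output such as "2022-01-01 10:00:00 1234 12345_run-1_lock",
// each returned entry is the trailing token, i.e. the object name.
const objectNames = await CloudRunnerSystem.RunAndReadLines(`aws s3 ls s3://my-example-bucket/locks/`);
for (const name of objectNames) {
  console.log(name);
}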
9
src/model/cloud-runner/services/custom-step.ts
Normal file
@@ -0,0 +1,9 @@
import CloudRunnerSecret from './cloud-runner-secret';

export class CustomStep {
  public commands;
  public secrets: CloudRunnerSecret[] = new Array<CloudRunnerSecret>();
  public name;
  public image: string = `ubuntu`;
  public hook!: string;
}
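A minimal sketch (not part of the diff) of the object shape ParseSteps produces for a YAML step, using the CustomStep class above; the name and command are hypothetical:

const step = new CustomStep();
step.name = 'my-example-step'; // hypothetical name
step.hook = 'before';
step.commands = 'echo "hello from a custom step"';
// image keeps the default `ubuntu`; secrets default to an empty array.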
@@ -1,22 +0,0 @@
-import Input from '../../input';
-import { CloudRunnerSystem } from './cloud-runner-system';
-
-class DependencyOverrideService {
-  public static async CheckHealth() {
-    if (Input.checkDependencyHealthOverride) {
-      try {
-        await CloudRunnerSystem.Run(Input.checkDependencyHealthOverride);
-      } catch {
-        return false;
-      }
-    }
-
-    return true;
-  }
-  public static async TryStartDependencies() {
-    if (Input.startDependenciesOverride) {
-      await CloudRunnerSystem.Run(Input.startDependenciesOverride);
-    }
-  }
-}
-export default DependencyOverrideService;
@@ -2,6 +2,7 @@ import CloudRunnerLogger from './cloud-runner-logger';
 import * as core from '@actions/core';
 import CloudRunner from '../cloud-runner';
 import { CloudRunnerStatics } from '../cloud-runner-statics';
+import GitHub from '../../github';
 
 export class FollowLogStreamService {
   public static handleIteration(message, shouldReadLogs, shouldCleanup, output) {
@@ -9,25 +10,27 @@ export class FollowLogStreamService {
       CloudRunnerLogger.log('End of log transmission received');
       shouldReadLogs = false;
     } else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
+      GitHub.updateGitHubCheck(`Library was not found, importing new Library`, ``);
       core.warning('LIBRARY NOT FOUND!');
       core.setOutput('library-found', 'false');
     } else if (message.includes('Build succeeded')) {
+      GitHub.updateGitHubCheck(`Build succeeded`, `Build succeeded`);
       core.setOutput('build-result', 'success');
     } else if (message.includes('Build fail')) {
+      GitHub.updateGitHubCheck(`Build failed`, `Build failed`);
       core.setOutput('build-result', 'failed');
       core.setFailed('unity build failed');
       core.error('BUILD FAILED!');
-    } else if (CloudRunner.buildParameters.cloudRunnerIntegrationTests && message.includes(': Listening for Jobs')) {
+    } else if (CloudRunner.buildParameters.cloudRunnerDebug && message.includes(': Listening for Jobs')) {
       core.setOutput('cloud runner stop watching', 'true');
       shouldReadLogs = false;
       shouldCleanup = false;
       core.warning('cloud runner stop watching');
     }
-    message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
-    if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
-      output += message;
+    if (CloudRunner.buildParameters.cloudRunnerDebug) {
+      output += `${message}\n`;
     }
-    CloudRunnerLogger.log(message);
+    CloudRunnerLogger.log(`[${CloudRunnerStatics.logPrefix}] ${message}`);
 
     return { shouldReadLogs, shouldCleanup, output };
   }
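A hedged usage sketch (not from the diff) of feeding streamed log lines through handleIteration and carrying its returned state between iterations; the example lines are illustrative:

let shouldReadLogs = true;
let shouldCleanup = true;
let output = '';
for (const line of ['Build succeeded', 'End of log transmission received']) {
  ({ shouldReadLogs, shouldCleanup, output } = FollowLogStreamService.handleIteration(
    line,
    shouldReadLogs,
    shouldCleanup,
    output,
  ));
}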
287
src/model/cloud-runner/services/shared-workspace-locking.ts
Normal file
@@ -0,0 +1,287 @@
|
|||||||
|
import { CloudRunnerSystem } from './cloud-runner-system';
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import CloudRunnerLogger from './cloud-runner-logger';
|
||||||
|
import CloudRunnerOptions from '../cloud-runner-options';
|
||||||
|
import BuildParameters from '../../build-parameters';
|
||||||
|
import CloudRunner from '../cloud-runner';
|
||||||
|
export class SharedWorkspaceLocking {
|
||||||
|
private static get workspaceBucketRoot() {
|
||||||
|
return `s3://${CloudRunner.buildParameters.awsBaseStackName}/`;
|
||||||
|
}
|
||||||
|
private static get workspaceRoot() {
|
||||||
|
return `${SharedWorkspaceLocking.workspaceBucketRoot}locks/`;
|
||||||
|
}
|
||||||
|
public static async GetAllWorkspaces(buildParametersContext: BuildParameters): Promise<string[]> {
|
||||||
|
if (!(await SharedWorkspaceLocking.DoesWorkspaceTopLevelExist(buildParametersContext))) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
await SharedWorkspaceLocking.ReadLines(
|
||||||
|
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/`,
|
||||||
|
)
|
||||||
|
).map((x) => x.replace(`/`, ``));
|
||||||
|
}
|
||||||
|
public static async DoesWorkspaceTopLevelExist(buildParametersContext: BuildParameters) {
|
||||||
|
await SharedWorkspaceLocking.ReadLines(`aws s3 ls ${SharedWorkspaceLocking.workspaceBucketRoot}`);
|
||||||
|
|
||||||
|
return (await SharedWorkspaceLocking.ReadLines(`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}`))
|
||||||
|
.map((x) => x.replace(`/`, ``))
|
||||||
|
.includes(buildParametersContext.cacheKey);
|
||||||
|
}
|
||||||
|
public static async GetAllLocks(workspace: string, buildParametersContext: BuildParameters): Promise<string[]> {
|
||||||
|
if (!(await SharedWorkspaceLocking.DoesWorkspaceExist(workspace, buildParametersContext))) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
await SharedWorkspaceLocking.ReadLines(
|
||||||
|
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${workspace}/`,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.map((x) => x.replace(`/`, ``))
|
||||||
|
.filter((x) => x.includes(`_lock`));
|
||||||
|
}
|
||||||
|
public static async GetOrCreateLockedWorkspace(
|
||||||
|
workspace: string,
|
||||||
|
runId: string,
|
||||||
|
buildParametersContext: BuildParameters,
|
||||||
|
) {
|
||||||
|
if (!CloudRunnerOptions.retainWorkspaces) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (await SharedWorkspaceLocking.DoesWorkspaceTopLevelExist(buildParametersContext)) {
|
||||||
|
const workspaces = await SharedWorkspaceLocking.GetFreeWorkspaces(buildParametersContext);
|
||||||
|
CloudRunnerLogger.log(
|
||||||
|
`run agent ${runId} is trying to access a workspace, free: ${JSON.stringify(workspaces)}`,
|
||||||
|
);
|
||||||
|
for (const element of workspaces) {
|
||||||
|
await new Promise((promise) => setTimeout(promise, 1000));
|
||||||
|
const lockResult = await SharedWorkspaceLocking.LockWorkspace(element, runId, buildParametersContext);
|
||||||
|
CloudRunnerLogger.log(`run agent: ${runId} try lock workspace: ${element} result: ${lockResult}`);
|
||||||
|
|
||||||
|
if (lockResult) {
|
||||||
|
CloudRunner.lockedWorkspace = element;
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const createResult = await SharedWorkspaceLocking.CreateWorkspace(workspace, buildParametersContext, runId);
|
||||||
|
CloudRunnerLogger.log(
|
||||||
|
`run agent ${runId} didn't find a free workspace so created: ${workspace} createWorkspaceSuccess: ${createResult}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return createResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async DoesWorkspaceExist(workspace: string, buildParametersContext: BuildParameters) {
|
||||||
|
return (await SharedWorkspaceLocking.GetAllWorkspaces(buildParametersContext)).includes(workspace);
|
||||||
|
}
|
||||||
|
public static async HasWorkspaceLock(
|
||||||
|
workspace: string,
|
||||||
|
runId: string,
|
||||||
|
buildParametersContext: BuildParameters,
|
||||||
|
): Promise<boolean> {
|
||||||
|
if (!(await SharedWorkspaceLocking.DoesWorkspaceExist(workspace, buildParametersContext))) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const locks = (await SharedWorkspaceLocking.GetAllLocks(workspace, buildParametersContext))
|
||||||
|
.map((x) => {
|
||||||
|
return {
|
||||||
|
name: x,
|
||||||
|
timestamp: Number(x.split(`_`)[0]),
|
||||||
|
};
|
||||||
|
})
|
||||||
|
.sort((x) => x.timestamp);
|
||||||
|
const lockMatches = locks.filter((x) => x.name.includes(runId));
|
||||||
|
const includesRunLock = lockMatches.length > 0 && locks.indexOf(lockMatches[0]) === 0;
|
||||||
|
CloudRunnerLogger.log(
|
||||||
|
`Checking has workspace lock, runId: ${runId}, workspace: ${workspace}, success: ${includesRunLock} \n- Num of locks created by Run Agent: ${
|
||||||
|
lockMatches.length
|
||||||
|
} Num of Locks: ${locks.length}, Time ordered index for Run Agent: ${locks.indexOf(lockMatches[0])} \n \n`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return includesRunLock;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async GetFreeWorkspaces(buildParametersContext: BuildParameters): Promise<string[]> {
|
||||||
|
const result: string[] = [];
|
||||||
|
const workspaces = await SharedWorkspaceLocking.GetAllWorkspaces(buildParametersContext);
|
||||||
|
for (const element of workspaces) {
|
||||||
|
await new Promise((promise) => setTimeout(promise, 1500));
|
||||||
|
const isLocked = await SharedWorkspaceLocking.IsWorkspaceLocked(element, buildParametersContext);
|
||||||
|
const isBelowMax = await SharedWorkspaceLocking.IsWorkspaceBelowMax(element, buildParametersContext);
|
||||||
|
if (!isLocked && isBelowMax) {
|
||||||
|
result.push(element);
|
||||||
|
CloudRunnerLogger.log(`workspace ${element} is free`);
|
||||||
|
} else {
|
||||||
|
CloudRunnerLogger.log(`workspace ${element} is NOT free ${!isLocked} ${isBelowMax}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async IsWorkspaceBelowMax(
|
||||||
|
workspace: string,
|
||||||
|
buildParametersContext: BuildParameters,
|
||||||
|
): Promise<boolean> {
|
||||||
|
const workspaces = await SharedWorkspaceLocking.GetAllWorkspaces(buildParametersContext);
|
||||||
|
if (workspace === ``) {
|
||||||
|
return (
|
||||||
|
workspaces.length < buildParametersContext.maxRetainedWorkspaces ||
|
||||||
|
buildParametersContext.maxRetainedWorkspaces === 0
|
||||||
|
);
|
||||||
|
}
|
||||||
|
const ordered: any[] = [];
|
||||||
|
for (const ws of workspaces) {
|
||||||
|
ordered.push({
|
||||||
|
name: ws,
|
||||||
|
timestamp: await SharedWorkspaceLocking.GetWorkspaceTimestamp(ws, buildParametersContext),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
ordered.sort((x) => x.timestamp);
|
||||||
|
const matches = ordered.filter((x) => x.name.includes(workspace));
|
||||||
|
const isWorkspaceBelowMax =
|
||||||
|
matches.length > 0 &&
|
||||||
|
(ordered.indexOf(matches[0]) < buildParametersContext.maxRetainedWorkspaces ||
|
||||||
|
buildParametersContext.maxRetainedWorkspaces === 0);
|
||||||
|
|
||||||
|
return isWorkspaceBelowMax;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async GetWorkspaceTimestamp(
|
||||||
|
workspace: string,
|
||||||
|
buildParametersContext: BuildParameters,
|
||||||
|
): Promise<Number> {
|
||||||
|
if (workspace.split(`_`).length > 0) {
|
||||||
|
return Number(workspace.split(`_`)[1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(await SharedWorkspaceLocking.DoesWorkspaceExist(workspace, buildParametersContext))) {
|
||||||
|
throw new Error("Workspace doesn't exist, can't call get all locks");
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
await SharedWorkspaceLocking.ReadLines(
|
||||||
|
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${workspace}/`,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.map((x) => x.replace(`/`, ``))
|
||||||
|
.filter((x) => x.includes(`_workspace`))
|
||||||
|
.map((x) => Number(x))[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async IsWorkspaceLocked(workspace: string, buildParametersContext: BuildParameters): Promise<boolean> {
|
||||||
|
if (!(await SharedWorkspaceLocking.DoesWorkspaceExist(workspace, buildParametersContext))) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const files = await SharedWorkspaceLocking.ReadLines(
|
||||||
|
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${workspace}/`,
|
||||||
|
);
|
||||||
|
|
||||||
|
const workspaceFileDoesNotExists =
|
||||||
|
files.filter((x) => {
|
||||||
|
return x.includes(`_workspace`);
|
||||||
|
}).length === 0;
|
||||||
|
|
||||||
|
const lockFilesExist =
|
||||||
|
files.filter((x) => {
|
||||||
|
return x.includes(`_lock`);
|
||||||
|
}).length > 0;
|
||||||
|
|
||||||
|
return workspaceFileDoesNotExists || lockFilesExist;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async CreateWorkspace(
|
||||||
|
workspace: string,
|
||||||
|
buildParametersContext: BuildParameters,
|
||||||
|
lockId: string = ``,
|
||||||
|
): Promise<boolean> {
|
||||||
|
if (lockId !== ``) {
|
||||||
|
await SharedWorkspaceLocking.LockWorkspace(workspace, lockId, buildParametersContext);
|
||||||
|
}
|
||||||
|
const timestamp = Date.now();
|
||||||
|
const file = `${timestamp}_workspace`;
|
||||||
|
fs.writeFileSync(file, '');
|
||||||
|
await CloudRunnerSystem.Run(
|
||||||
|
`aws s3 cp ./${file} ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${workspace}/${file}`,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
fs.rmSync(file);
|
||||||
|
|
||||||
|
const workspaces = await SharedWorkspaceLocking.ReadLines(
|
||||||
|
`aws s3 ls ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/`,
|
||||||
|
);
|
||||||
|
|
||||||
|
CloudRunnerLogger.log(`All workspaces ${workspaces}`);
|
||||||
|
if (!(await SharedWorkspaceLocking.IsWorkspaceBelowMax(workspace, buildParametersContext))) {
|
||||||
|
CloudRunnerLogger.log(`Workspace is below max ${workspaces} ${buildParametersContext.maxRetainedWorkspaces}`);
|
||||||
|
await SharedWorkspaceLocking.CleanupWorkspace(workspace, buildParametersContext);
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async LockWorkspace(
|
||||||
|
workspace: string,
|
||||||
|
runId: string,
|
||||||
|
buildParametersContext: BuildParameters,
|
||||||
|
): Promise<boolean> {
|
||||||
|
const file = `${Date.now()}_${runId}_lock`;
|
||||||
|
fs.writeFileSync(file, '');
|
||||||
|
await CloudRunnerSystem.Run(
|
||||||
|
`aws s3 cp ./${file} ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${workspace}/${file}`,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
fs.rmSync(file);
|
||||||
|
|
||||||
|
return SharedWorkspaceLocking.HasWorkspaceLock(workspace, runId, buildParametersContext);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async ReleaseWorkspace(
|
||||||
|
workspace: string,
|
||||||
|
runId: string,
|
||||||
|
buildParametersContext: BuildParameters,
|
||||||
|
): Promise<boolean> {
|
||||||
|
const file = (await SharedWorkspaceLocking.GetAllLocks(workspace, buildParametersContext)).filter((x) =>
|
||||||
|
x.includes(`_${runId}_lock`),
|
||||||
|
);
|
||||||
|
CloudRunnerLogger.log(`Deleting lock ${workspace}/${file}`);
|
||||||
|
CloudRunnerLogger.log(
|
||||||
|
`aws s3 rm ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${workspace}/${file}`,
|
||||||
|
);
|
||||||
|
await CloudRunnerSystem.Run(
|
||||||
|
`aws s3 rm ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${workspace}/${file}`,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
|
||||||
|
return !SharedWorkspaceLocking.HasWorkspaceLock(workspace, runId, buildParametersContext);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async CleanupWorkspace(workspace: string, buildParametersContext: BuildParameters) {
|
||||||
|
await CloudRunnerSystem.Run(
|
||||||
|
`aws s3 rm ${SharedWorkspaceLocking.workspaceRoot}${buildParametersContext.cacheKey}/${workspace} --recursive`,
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static async ReadLines(command: string): Promise<string[]> {
|
||||||
|
return CloudRunnerSystem.RunAndReadLines(command);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default SharedWorkspaceLocking;
|
||||||
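The SharedWorkspaceLocking service above coordinates retained workspaces through plain S3 objects: a `<timestamp>_workspace` object marks a workspace and `<timestamp>_<runId>_lock` objects mark locks, with the earliest lock winning. A minimal sketch of that naming convention (the run id and workspace name are hypothetical):

// Hypothetical values for illustration only.
const runId = 'run-agent-1';
const workspace = 'example-workspace';
const timestamp = Date.now();
// Objects written by CreateWorkspace and LockWorkspace respectively:
const workspaceMarker = `${timestamp}_workspace`;
const lockObject = `${timestamp}_${runId}_lock`;
// HasWorkspaceLock sorts all `_lock` objects by timestamp and only treats this run
// as the owner when its lock is the earliest one in the listing.
console.log(workspaceMarker, lockObject);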
@@ -1,72 +1,137 @@
|
|||||||
import { CloudRunner, Input } from '../..';
|
import { Input } from '../..';
|
||||||
import ImageEnvironmentFactory from '../../image-environment-factory';
|
|
||||||
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
|
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
|
||||||
import { CloudRunnerBuildCommandProcessor } from './cloud-runner-build-command-process';
|
import { CloudRunnerCustomHooks } from './cloud-runner-custom-hooks';
|
||||||
import CloudRunnerSecret from './cloud-runner-secret';
|
import CloudRunnerSecret from './cloud-runner-secret';
|
||||||
import CloudRunnerQueryOverride from './cloud-runner-query-override';
|
import CloudRunnerQueryOverride from './cloud-runner-query-override';
|
||||||
|
import CloudRunnerOptionsReader from './cloud-runner-options-reader';
|
||||||
|
import BuildParameters from '../../build-parameters';
|
||||||
|
import CloudRunnerOptions from '../cloud-runner-options';
|
||||||
|
import * as core from '@actions/core';
|
||||||
|
|
||||||
export class TaskParameterSerializer {
|
export class TaskParameterSerializer {
|
||||||
public static readBuildEnvironmentVariables(): CloudRunnerEnvironmentVariable[] {
|
static readonly blocked = new Set(['0', 'length', 'prototype', '', 'unityVersion']);
|
||||||
return [
|
public static createCloudRunnerEnvironmentVariables(
|
||||||
{
|
buildParameters: BuildParameters,
|
||||||
name: 'ContainerMemory',
|
): CloudRunnerEnvironmentVariable[] {
|
||||||
value: CloudRunner.buildParameters.cloudRunnerMemory,
|
const result = this.uniqBy(
|
||||||
},
|
[
|
||||||
{
|
{
|
||||||
name: 'ContainerCpu',
|
name: 'ContainerMemory',
|
||||||
value: CloudRunner.buildParameters.cloudRunnerCpu,
|
value: buildParameters.cloudRunnerMemory,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'BUILD_TARGET',
|
name: 'ContainerCpu',
|
||||||
value: CloudRunner.buildParameters.targetPlatform,
|
value: buildParameters.cloudRunnerCpu,
|
||||||
},
|
},
|
||||||
...TaskParameterSerializer.serializeBuildParamsAndInput,
|
{
|
||||||
];
|
name: 'BUILD_TARGET',
|
||||||
}
|
value: buildParameters.targetPlatform,
|
||||||
private static get serializeBuildParamsAndInput() {
|
},
|
||||||
let array = new Array();
|
...TaskParameterSerializer.serializeFromObject(buildParameters),
|
||||||
array = TaskParameterSerializer.readBuildParameters(array);
|
...TaskParameterSerializer.readInput(),
|
||||||
array = TaskParameterSerializer.readInput(array);
|
...CloudRunnerCustomHooks.getSecrets(CloudRunnerCustomHooks.getHooks(buildParameters.customJobHooks)),
|
||||||
const configurableHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks);
|
]
|
||||||
const secrets = configurableHooks.map((x) => x.secrets).filter((x) => x !== undefined && x.length > 0);
|
.filter(
|
||||||
if (secrets.length > 0) {
|
(x) =>
|
||||||
// eslint-disable-next-line unicorn/no-array-reduce
|
!TaskParameterSerializer.blocked.has(x.name) &&
|
||||||
array.push(secrets.reduce((x, y) => [...x, ...y]));
|
x.value !== '' &&
|
||||||
}
|
x.value !== undefined &&
|
||||||
|
x.name !== `CUSTOM_JOB` &&
|
||||||
|
x.name !== `GAMECI_CUSTOM_JOB` &&
|
||||||
|
x.value !== `undefined`,
|
||||||
|
)
|
||||||
|
.map((x) => {
|
||||||
|
x.name = TaskParameterSerializer.ToEnvVarFormat(x.name);
|
||||||
|
x.value = `${x.value}`;
|
||||||
|
|
||||||
array = array.filter(
|
if (buildParameters.cloudRunnerDebug && Number(x.name) === Number.NaN) {
|
||||||
(x) => x.value !== undefined && x.name !== '0' && x.value !== '' && x.name !== 'prototype' && x.name !== 'length',
|
core.info(`[ERROR] found a number in task param serializer ${JSON.stringify(x)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return x;
|
||||||
|
}),
|
||||||
|
(item) => item.name,
|
||||||
);
|
);
|
||||||
array = array.map((x) => {
|
|
||||||
x.name = Input.ToEnvVarFormat(x.name);
|
|
||||||
x.value = `${x.value}`;
|
|
||||||
|
|
||||||
return x;
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
static uniqBy(a, key) {
|
||||||
|
const seen = {};
|
||||||
|
|
||||||
|
return a.filter(function (item) {
|
||||||
|
const k = key(item);
|
||||||
|
|
||||||
|
return seen.hasOwnProperty(k) ? false : (seen[k] = true);
|
||||||
});
|
});
|
||||||
|
|
||||||
return array;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static readBuildParameters(array: any[]) {
|
public static readBuildParameterFromEnvironment(): BuildParameters {
|
||||||
const keys = Object.keys(CloudRunner.buildParameters);
|
const buildParameters = new BuildParameters();
|
||||||
|
const keys = [
|
||||||
|
...new Set(
|
||||||
|
Object.getOwnPropertyNames(process.env)
|
||||||
|
.filter((x) => !this.blocked.has(x) && x.startsWith('GAMECI_'))
|
||||||
|
.map((x) => TaskParameterSerializer.UndoEnvVarFormat(x)),
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
for (const element of keys) {
|
for (const element of keys) {
|
||||||
array.push({
|
if (element !== `customJob`) {
|
||||||
name: element,
|
buildParameters[element] = process.env[`GAMECI_${TaskParameterSerializer.ToEnvVarFormat(element)}`];
|
||||||
value: CloudRunner.buildParameters[element],
|
}
|
||||||
});
|
}
|
||||||
|
|
||||||
|
return buildParameters;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static readInput() {
|
||||||
|
return TaskParameterSerializer.serializeFromType(Input);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static ToEnvVarFormat(input): string {
|
||||||
|
return CloudRunnerOptions.ToEnvVarFormat(input);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static UndoEnvVarFormat(element): string {
|
||||||
|
return this.camelize(element.replace('GAMECI_', '').toLowerCase().replace(/_+/g, ' '));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static camelize(string) {
|
||||||
|
return string
|
||||||
|
.replace(/^\w|[A-Z]|\b\w/g, function (word, index) {
|
||||||
|
return index === 0 ? word.toLowerCase() : word.toUpperCase();
|
||||||
|
})
|
||||||
|
.replace(/\s+/g, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
private static serializeFromObject(buildParameters) {
|
||||||
|
const array: any[] = [];
|
||||||
|
const keys = Object.getOwnPropertyNames(buildParameters).filter((x) => !this.blocked.has(x));
|
||||||
|
for (const element of keys) {
|
||||||
|
array.push(
|
||||||
|
{
|
||||||
|
name: `GAMECI_${TaskParameterSerializer.ToEnvVarFormat(element)}`,
|
||||||
|
value: buildParameters[element],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: element,
|
||||||
|
value: buildParameters[element],
|
||||||
|
},
|
||||||
|
);
|
||||||
}
|
}
|
||||||
array.push({ name: 'buildParameters', value: JSON.stringify(CloudRunner.buildParameters) });
|
|
||||||
|
|
||||||
return array;
|
return array;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static readInput(array: any[]) {
|
private static serializeFromType(type) {
|
||||||
const input = Object.getOwnPropertyNames(Input);
|
const array: any[] = [];
|
||||||
|
const input = CloudRunnerOptionsReader.GetProperties();
|
||||||
for (const element of input) {
|
for (const element of input) {
|
||||||
if (typeof Input[element] !== 'function' && array.filter((x) => x.name === element).length === 0) {
|
if (typeof type[element] !== 'function' && array.filter((x) => x.name === element).length === 0) {
|
||||||
array.push({
|
array.push({
|
||||||
name: element,
|
name: element,
|
||||||
value: `${Input[element]}`,
|
value: `${type[element]}`,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -79,17 +144,8 @@ export class TaskParameterSerializer {
|
|||||||
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_SERIAL');
|
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_SERIAL');
|
||||||
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_EMAIL');
|
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_EMAIL');
|
||||||
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_PASSWORD');
|
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_PASSWORD');
|
||||||
array.push(
|
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_LICENSE');
|
||||||
...ImageEnvironmentFactory.getEnvironmentVariables(CloudRunner.buildParameters)
|
array = TaskParameterSerializer.tryAddInput(array, 'GIT_PRIVATE_TOKEN');
|
||||||
.filter((x) => array.every((y) => y.ParameterKey !== x.name))
|
|
||||||
.map((x) => {
|
|
||||||
return {
|
|
||||||
ParameterKey: x.name,
|
|
||||||
EnvironmentVariable: x.name,
|
|
||||||
ParameterValue: x.value,
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
return array;
|
return array;
|
||||||
}
|
}
|
||||||
@@ -102,7 +158,7 @@ export class TaskParameterSerializer {
|
|||||||
s;
|
s;
|
||||||
private static tryAddInput(array, key): CloudRunnerSecret[] {
|
private static tryAddInput(array, key): CloudRunnerSecret[] {
|
||||||
const value = TaskParameterSerializer.getValue(key);
|
const value = TaskParameterSerializer.getValue(key);
|
||||||
if (value !== undefined && value !== '') {
|
if (value !== undefined && value !== '' && value !== 'null') {
|
||||||
array.push({
|
array.push({
|
||||||
ParameterKey: key,
|
ParameterKey: key,
|
||||||
EnvironmentVariable: key,
|
EnvironmentVariable: key,
|
||||||
|
|||||||
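The serializer above maps every build parameter into a GAMECI_-prefixed environment variable and reads them back with UndoEnvVarFormat. A hedged sketch of that round trip, assuming ToEnvVarFormat produces upper snake case (which is what UndoEnvVarFormat reverses); the value is hypothetical:

const name = 'cloudRunnerMemory';
const envVar = `GAMECI_${TaskParameterSerializer.ToEnvVarFormat(name)}`; // e.g. GAMECI_CLOUD_RUNNER_MEMORY
process.env[envVar] = '3072'; // hypothetical value
// readBuildParameterFromEnvironment() walks GAMECI_* variables and undoes the formatting,
// so the value surfaces again as buildParameters.cloudRunnerMemory.
const restored = TaskParameterSerializer.UndoEnvVarFormat(envVar); // 'cloudRunnerMemory'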
@@ -0,0 +1,33 @@
|
|||||||
|
import { BuildParameters, ImageTag } from '../..';
|
||||||
|
import CloudRunner from '../cloud-runner';
|
||||||
|
import UnityVersioning from '../../unity-versioning';
|
||||||
|
import { Cli } from '../../cli/cli';
|
||||||
|
import CloudRunnerOptions from '../cloud-runner-options';
|
||||||
|
import setups from './cloud-runner-suite.test';
|
||||||
|
|
||||||
|
async function CreateParameters(overrides) {
|
||||||
|
if (overrides) Cli.options = overrides;
|
||||||
|
|
||||||
|
return BuildParameters.create();
|
||||||
|
}
|
||||||
|
describe('Cloud Runner Async Workflows', () => {
|
||||||
|
setups();
|
||||||
|
it('Responds', () => {});
|
||||||
|
|
||||||
|
if (CloudRunnerOptions.cloudRunnerDebug && CloudRunnerOptions.cloudRunnerCluster !== `local-docker`) {
|
||||||
|
it('Async Workflows', async () => {
|
||||||
|
// Setup parameters
|
||||||
|
const buildParameter = await CreateParameters({
|
||||||
|
versioning: 'None',
|
||||||
|
projectPath: 'test-project',
|
||||||
|
unityVersion: UnityVersioning.read('test-project'),
|
||||||
|
asyncCloudRunner: `true`,
|
||||||
|
githubChecks: `true`,
|
||||||
|
});
|
||||||
|
const baseImage = new ImageTag(buildParameter);
|
||||||
|
|
||||||
|
// Run the job
|
||||||
|
await CloudRunner.run(buildParameter, baseImage.toString());
|
||||||
|
}, 1_000_000_000);
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
import { BuildParameters } from '../..';
|
||||||
|
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
|
||||||
|
import UnityVersioning from '../../unity-versioning';
|
||||||
|
import { Cli } from '../../cli/cli';
|
||||||
|
import GitHub from '../../github';
|
||||||
|
import setups from './cloud-runner-suite.test';
|
||||||
|
|
||||||
|
async function CreateParameters(overrides) {
|
||||||
|
if (overrides) {
|
||||||
|
Cli.options = overrides;
|
||||||
|
}
|
||||||
|
const originalValue = GitHub.githubInputEnabled;
|
||||||
|
GitHub.githubInputEnabled = false;
|
||||||
|
const results = await BuildParameters.create();
|
||||||
|
GitHub.githubInputEnabled = originalValue;
|
||||||
|
delete Cli.options;
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
describe('Cloud Runner Environment Serializer', () => {
|
||||||
|
setups();
|
||||||
|
const testSecretName = 'testSecretName';
|
||||||
|
const testSecretValue = 'testSecretValue';
|
||||||
|
it('Cloud Runner Parameter Serialization', async () => {
|
||||||
|
// Setup parameters
|
||||||
|
const buildParameter = await CreateParameters({
|
||||||
|
versioning: 'None',
|
||||||
|
projectPath: 'test-project',
|
||||||
|
unityVersion: UnityVersioning.read('test-project'),
|
||||||
|
customJob: `
|
||||||
|
- name: 'step 1'
|
||||||
|
image: 'alpine'
|
||||||
|
commands: 'printenv'
|
||||||
|
secrets:
|
||||||
|
- name: '${testSecretName}'
|
||||||
|
value: '${testSecretValue}'
|
||||||
|
`,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = TaskParameterSerializer.createCloudRunnerEnvironmentVariables(buildParameter);
|
||||||
|
expect(result.find((x) => Number.parseInt(x.name)) !== undefined).toBeFalsy();
|
||||||
|
const result2 = TaskParameterSerializer.createCloudRunnerEnvironmentVariables(buildParameter);
|
||||||
|
expect(result2.find((x) => Number.parseInt(x.name)) !== undefined).toBeFalsy();
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -2,19 +2,16 @@ import fs from 'fs';
|
|||||||
import path from 'path';
|
import path from 'path';
|
||||||
import BuildParameters from '../../build-parameters';
|
import BuildParameters from '../../build-parameters';
|
||||||
import { Cli } from '../../cli/cli';
|
import { Cli } from '../../cli/cli';
|
||||||
import Input from '../../input';
|
|
||||||
import UnityVersioning from '../../unity-versioning';
|
import UnityVersioning from '../../unity-versioning';
|
||||||
import CloudRunner from '../cloud-runner';
|
import CloudRunner from '../cloud-runner';
|
||||||
import { CloudRunnerSystem } from '../services/cloud-runner-system';
|
import { CloudRunnerSystem } from '../services/cloud-runner-system';
|
||||||
import { Caching } from './caching';
|
import { Caching } from '../remote-client/caching';
|
||||||
import { v4 as uuidv4 } from 'uuid';
|
import { v4 as uuidv4 } from 'uuid';
|
||||||
|
import GitHub from '../../github';
|
||||||
describe('Cloud Runner Caching', () => {
|
describe('Cloud Runner (Remote Client) Caching', () => {
|
||||||
it('responds', () => {});
|
it('responds', () => {});
|
||||||
});
|
|
||||||
describe('Cloud Runner Caching', () => {
|
|
||||||
if (process.platform === 'linux') {
|
if (process.platform === 'linux') {
|
||||||
it('Simple caching works', async () => {
|
it.skip('Simple caching works', async () => {
|
||||||
Cli.options = {
|
Cli.options = {
|
||||||
versioning: 'None',
|
versioning: 'None',
|
||||||
projectPath: 'test-project',
|
projectPath: 'test-project',
|
||||||
@@ -22,7 +19,7 @@ describe('Cloud Runner Caching', () => {
|
|||||||
targetPlatform: 'StandaloneLinux64',
|
targetPlatform: 'StandaloneLinux64',
|
||||||
cacheKey: `test-case-${uuidv4()}`,
|
cacheKey: `test-case-${uuidv4()}`,
|
||||||
};
|
};
|
||||||
Input.githubInputEnabled = false;
|
GitHub.githubInputEnabled = false;
|
||||||
const buildParameter = await BuildParameters.create();
|
const buildParameter = await BuildParameters.create();
|
||||||
CloudRunner.buildParameters = buildParameter;
|
CloudRunner.buildParameters = buildParameter;
|
||||||
|
|
||||||
@@ -46,8 +43,6 @@ describe('Cloud Runner Caching', () => {
|
|||||||
`${Cli.options.cacheKey}`,
|
`${Cli.options.cacheKey}`,
|
||||||
);
|
);
|
||||||
await CloudRunnerSystem.Run(`du -h ${__dirname}`);
|
await CloudRunnerSystem.Run(`du -h ${__dirname}`);
|
||||||
await CloudRunnerSystem.Run(`tree ${testFolder}`);
|
|
||||||
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
|
|
||||||
|
|
||||||
// Compare validity to original hash
|
// Compare validity to original hash
|
||||||
expect(fs.readFileSync(path.resolve(testFolder, 'test.txt'), { encoding: 'utf8' }).toString()).toContain(
|
expect(fs.readFileSync(path.resolve(testFolder, 'test.txt'), { encoding: 'utf8' }).toString()).toContain(
|
||||||
@@ -56,7 +51,7 @@ describe('Cloud Runner Caching', () => {
|
|||||||
fs.rmdirSync(testFolder, { recursive: true });
|
fs.rmdirSync(testFolder, { recursive: true });
|
||||||
fs.rmdirSync(cacheFolder, { recursive: true });
|
fs.rmdirSync(cacheFolder, { recursive: true });
|
||||||
|
|
||||||
Input.githubInputEnabled = true;
|
GitHub.githubInputEnabled = true;
|
||||||
delete Cli.options;
|
delete Cli.options;
|
||||||
}, 1000000);
|
}, 1000000);
|
||||||
}
|
}
|
||||||
@@ -0,0 +1,78 @@
|
|||||||
|
import CloudRunner from '../cloud-runner';
|
||||||
|
import { BuildParameters, ImageTag } from '../..';
|
||||||
|
import UnityVersioning from '../../unity-versioning';
|
||||||
|
import { Cli } from '../../cli/cli';
|
||||||
|
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||||
|
import { v4 as uuidv4 } from 'uuid';
|
||||||
|
import CloudRunnerOptions from '../cloud-runner-options';
|
||||||
|
import setups from './cloud-runner-suite.test';
|
||||||
|
import { CloudRunnerCustomSteps } from '../services/cloud-runner-custom-steps';
|
||||||
|
|
||||||
|
async function CreateParameters(overrides) {
|
||||||
|
if (overrides) {
|
||||||
|
Cli.options = overrides;
|
||||||
|
}
|
||||||
|
|
||||||
|
return await BuildParameters.create();
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('Cloud Runner Custom Hooks And Steps', () => {
|
||||||
|
it('Responds', () => {});
|
||||||
|
setups();
|
||||||
|
it('Check parsing and reading of steps', async () => {
|
||||||
|
const yamlString = `hook: before
|
||||||
|
commands: echo "test"`;
|
||||||
|
const yamlString2 = `- hook: before
|
||||||
|
commands: echo "test"`;
|
||||||
|
const overrides = {
|
||||||
|
versioning: 'None',
|
||||||
|
projectPath: 'test-project',
|
||||||
|
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
|
||||||
|
targetPlatform: 'StandaloneLinux64',
|
||||||
|
cacheKey: `test-case-${uuidv4()}`,
|
||||||
|
};
|
||||||
|
CloudRunner.setup(await CreateParameters(overrides));
|
||||||
|
const stringObject = CloudRunnerCustomSteps.ParseSteps(yamlString);
|
||||||
|
const stringObject2 = CloudRunnerCustomSteps.ParseSteps(yamlString2);
|
||||||
|
|
||||||
|
CloudRunnerLogger.log(yamlString);
|
||||||
|
CloudRunnerLogger.log(JSON.stringify(stringObject, undefined, 4));
|
||||||
|
|
||||||
|
expect(stringObject.length).toBe(1);
|
||||||
|
expect(stringObject[0].hook).toBe(`before`);
|
||||||
|
expect(stringObject2.length).toBe(1);
|
||||||
|
expect(stringObject2[0].hook).toBe(`before`);
|
||||||
|
|
||||||
|
const getCustomStepsFromFiles = CloudRunnerCustomSteps.GetCustomStepsFromFiles(`before`);
|
||||||
|
CloudRunnerLogger.log(JSON.stringify(getCustomStepsFromFiles, undefined, 4));
|
||||||
|
});
|
||||||
|
if (CloudRunnerOptions.cloudRunnerDebug && CloudRunnerOptions.cloudRunnerCluster !== `k8s`) {
|
||||||
|
it('Run build once - check for pre and post custom hooks run contents', async () => {
|
||||||
|
const overrides = {
|
||||||
|
versioning: 'None',
|
||||||
|
projectPath: 'test-project',
|
||||||
|
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
|
||||||
|
targetPlatform: 'StandaloneLinux64',
|
||||||
|
cacheKey: `test-case-${uuidv4()}`,
|
||||||
|
customStepFiles: `my-test-step-pre-build,my-test-step-post-build`,
|
||||||
|
};
|
||||||
|
const buildParameter2 = await CreateParameters(overrides);
|
||||||
|
const baseImage2 = new ImageTag(buildParameter2);
|
||||||
|
const results2 = await CloudRunner.run(buildParameter2, baseImage2.toString());
|
||||||
|
CloudRunnerLogger.log(`run 2 succeeded`);
|
||||||
|
|
||||||
|
const build2ContainsBuildSucceeded = results2.includes('Build succeeded');
|
||||||
|
const build2ContainsPreBuildHookRunMessage = results2.includes('before-build hook test!');
|
||||||
|
const build2ContainsPostBuildHookRunMessage = results2.includes('after-build hook test!');
|
||||||
|
|
||||||
|
const build2ContainsPreBuildStepMessage = results2.includes('before-build step test!');
|
||||||
|
const build2ContainsPostBuildStepMessage = results2.includes('after-build step test!');
|
||||||
|
|
||||||
|
expect(build2ContainsBuildSucceeded).toBeTruthy();
|
||||||
|
expect(build2ContainsPreBuildHookRunMessage).toBeTruthy();
|
||||||
|
expect(build2ContainsPostBuildHookRunMessage).toBeTruthy();
|
||||||
|
expect(build2ContainsPreBuildStepMessage).toBeTruthy();
|
||||||
|
expect(build2ContainsPostBuildStepMessage).toBeTruthy();
|
||||||
|
}, 1_000_000_000);
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -0,0 +1,78 @@
|
|||||||
|
import CloudRunner from '../cloud-runner';
|
||||||
|
import { BuildParameters, ImageTag } from '../..';
|
||||||
|
import UnityVersioning from '../../unity-versioning';
|
||||||
|
import { Cli } from '../../cli/cli';
|
||||||
|
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||||
|
import { v4 as uuidv4 } from 'uuid';
|
||||||
|
import CloudRunnerOptions from '../cloud-runner-options';
|
||||||
|
import setups from './cloud-runner-suite.test';
|
||||||
|
import * as fs from 'fs';
|
||||||
|
|
||||||
|
async function CreateParameters(overrides) {
|
||||||
|
if (overrides) {
|
||||||
|
Cli.options = overrides;
|
||||||
|
}
|
||||||
|
|
||||||
|
return await BuildParameters.create();
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('Cloud Runner Caching', () => {
|
||||||
|
it('Responds', () => {});
|
||||||
|
setups();
|
||||||
|
if (CloudRunnerOptions.cloudRunnerDebug) {
|
||||||
|
it('Run one build it should not use cache, run subsequent build which should use cache', async () => {
|
||||||
|
const overrides = {
|
||||||
|
versioning: 'None',
|
||||||
|
projectPath: 'test-project',
|
||||||
|
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
|
||||||
|
targetPlatform: 'StandaloneLinux64',
|
||||||
|
cacheKey: `test-case-${uuidv4()}`,
|
||||||
|
customStepFiles: `debug-cache`,
|
||||||
|
};
|
||||||
|
if (CloudRunnerOptions.cloudRunnerCluster === `k8s`) {
|
||||||
|
overrides.customStepFiles += `,aws-s3-pull-cache,aws-s3-upload-cache`;
|
||||||
|
}
|
||||||
|
const buildParameter = await CreateParameters(overrides);
|
||||||
|
expect(buildParameter.projectPath).toEqual(overrides.projectPath);
|
||||||
|
|
||||||
|
const baseImage = new ImageTag(buildParameter);
|
||||||
|
const results = await CloudRunner.run(buildParameter, baseImage.toString());
|
||||||
|
const libraryString = 'Rebuilding Library because the asset database could not be found!';
|
||||||
|
const cachePushFail = 'Did not push source folder to cache because it was empty Library';
|
||||||
|
const buildSucceededString = 'Build succeeded';
|
||||||
|
|
||||||
|
expect(results).toContain(libraryString);
|
||||||
|
expect(results).toContain(buildSucceededString);
|
||||||
|
expect(results).not.toContain(cachePushFail);
|
||||||
|
|
||||||
|
CloudRunnerLogger.log(`run 1 succeeded`);
|
||||||
|
|
||||||
|
if (CloudRunnerOptions.cloudRunnerCluster === `local-docker`) {
|
||||||
|
const cacheFolderExists = fs.existsSync(`cloud-runner-cache/cache/${overrides.cacheKey}`);
|
||||||
|
expect(cacheFolderExists).toBeTruthy();
|
||||||
|
}
|
||||||
|
const buildParameter2 = await CreateParameters(overrides);
|
||||||
|
|
||||||
|
buildParameter2.cacheKey = buildParameter.cacheKey;
|
||||||
|
const baseImage2 = new ImageTag(buildParameter2);
|
||||||
|
const results2 = await CloudRunner.run(buildParameter2, baseImage2.toString());
|
||||||
|
CloudRunnerLogger.log(`run 2 succeeded`);
|
||||||
|
|
||||||
|
const build2ContainsCacheKey = results2.includes(buildParameter.cacheKey);
|
||||||
|
const build2ContainsBuildSucceeded = results2.includes(buildSucceededString);
|
||||||
|
const build2NotContainsNoLibraryMessage = !results2.includes(libraryString);
|
||||||
|
const build2NotContainsZeroLibraryCacheFilesMessage = !results2.includes(
|
||||||
|
'There is 0 files/dir in the cache pulled contents for Library',
|
||||||
|
);
|
||||||
|
const build2NotContainsZeroLFSCacheFilesMessage = !results2.includes(
|
||||||
|
'There is 0 files/dir in the cache pulled contents for LFS',
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(build2ContainsCacheKey).toBeTruthy();
|
||||||
|
expect(build2ContainsBuildSucceeded).toBeTruthy();
|
||||||
|
expect(build2NotContainsZeroLibraryCacheFilesMessage).toBeTruthy();
|
||||||
|
expect(build2NotContainsZeroLFSCacheFilesMessage).toBeTruthy();
|
||||||
|
expect(build2NotContainsNoLibraryMessage).toBeTruthy();
|
||||||
|
}, 1_000_000_000);
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -0,0 +1,94 @@
|
|||||||
|
import CloudRunner from '../cloud-runner';
|
||||||
|
import { BuildParameters, ImageTag } from '../..';
|
||||||
|
import UnityVersioning from '../../unity-versioning';
|
||||||
|
import { Cli } from '../../cli/cli';
|
||||||
|
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||||
|
import { v4 as uuidv4 } from 'uuid';
|
||||||
|
import CloudRunnerOptions from '../cloud-runner-options';
|
||||||
|
import setups from './cloud-runner-suite.test';
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
|
||||||
|
import SharedWorkspaceLocking from '../services/shared-workspace-locking';
|
||||||
|
|
||||||
|
async function CreateParameters(overrides) {
|
||||||
|
if (overrides) {
|
||||||
|
Cli.options = overrides;
|
||||||
|
}
|
||||||
|
|
||||||
|
return await BuildParameters.create();
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('Cloud Runner Retain Workspace', () => {
|
||||||
|
it('Responds', () => {});
|
||||||
|
setups();
|
||||||
|
if (CloudRunnerOptions.cloudRunnerDebug) {
|
||||||
|
it('Run one build it should not already be retained, run subsequent build which should use retained workspace', async () => {
|
||||||
|
const overrides = {
|
||||||
|
versioning: 'None',
|
||||||
|
projectPath: 'test-project',
|
||||||
|
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
|
||||||
|
targetPlatform: 'StandaloneLinux64',
|
||||||
|
cacheKey: `test-case-${uuidv4()}`,
|
||||||
|
retainWorkspaces: true,
|
||||||
|
};
|
||||||
|
const buildParameter = await CreateParameters(overrides);
|
||||||
|
expect(buildParameter.projectPath).toEqual(overrides.projectPath);
|
||||||
|
|
||||||
|
const baseImage = new ImageTag(buildParameter);
|
||||||
|
const results = await CloudRunner.run(buildParameter, baseImage.toString());
|
||||||
|
const libraryString = 'Rebuilding Library because the asset database could not be found!';
|
||||||
|
const cachePushFail = 'Did not push source folder to cache because it was empty Library';
|
||||||
|
const buildSucceededString = 'Build succeeded';
|
||||||
|
|
||||||
|
expect(results).toContain(libraryString);
|
||||||
|
expect(results).toContain(buildSucceededString);
|
||||||
|
expect(results).not.toContain(cachePushFail);
|
||||||
|
|
||||||
|
if (CloudRunnerOptions.cloudRunnerCluster === `local-docker`) {
|
||||||
|
const cacheFolderExists = fs.existsSync(`cloud-runner-cache/cache/${overrides.cacheKey}`);
|
||||||
|
expect(cacheFolderExists).toBeTruthy();
|
||||||
|
}
|
||||||
|
|
||||||
|
CloudRunnerLogger.log(`run 1 succeeded`);
|
||||||
|
const buildParameter2 = await CreateParameters(overrides);
|
||||||
|
|
||||||
|
buildParameter2.cacheKey = buildParameter.cacheKey;
|
||||||
|
const baseImage2 = new ImageTag(buildParameter2);
|
||||||
|
const results2 = await CloudRunner.run(buildParameter2, baseImage2.toString());
|
||||||
|
CloudRunnerLogger.log(`run 2 succeeded`);
|
||||||
|
|
||||||
|
const build2ContainsCacheKey = results2.includes(buildParameter.cacheKey);
|
||||||
|
const build2ContainsBuildGuid1FromRetainedWorkspace = results2.includes(buildParameter.buildGuid);
|
||||||
|
const build2ContainsRetainedWorkspacePhrase = results2.includes(`Retained Workspace:`);
|
||||||
|
const build2ContainsWorkspaceExistsAlreadyPhrase = results2.includes(`Retained Workspace Already Exists!`);
|
||||||
|
const build2ContainsBuildSucceeded = results2.includes(buildSucceededString);
|
||||||
|
const build2NotContainsNoLibraryMessage = !results2.includes(libraryString);
|
||||||
|
const build2NotContainsZeroLibraryCacheFilesMessage = !results2.includes(
|
||||||
|
'There is 0 files/dir in the cache pulled contents for Library',
|
||||||
|
);
|
||||||
|
const build2NotContainsZeroLFSCacheFilesMessage = !results2.includes(
|
||||||
|
'There is 0 files/dir in the cache pulled contents for LFS',
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(build2ContainsCacheKey).toBeTruthy();
|
||||||
|
expect(build2ContainsRetainedWorkspacePhrase).toBeTruthy();
|
||||||
|
expect(build2ContainsWorkspaceExistsAlreadyPhrase).toBeTruthy();
|
||||||
|
expect(build2ContainsBuildGuid1FromRetainedWorkspace).toBeTruthy();
|
||||||
|
expect(build2ContainsBuildSucceeded).toBeTruthy();
|
||||||
|
expect(build2NotContainsZeroLibraryCacheFilesMessage).toBeTruthy();
|
||||||
|
expect(build2NotContainsZeroLFSCacheFilesMessage).toBeTruthy();
|
||||||
|
expect(build2NotContainsNoLibraryMessage).toBeTruthy();
|
||||||
|
}, 1_000_000_000);
|
||||||
|
afterAll(async () => {
|
||||||
|
await SharedWorkspaceLocking.CleanupWorkspace(CloudRunner.lockedWorkspace || ``, CloudRunner.buildParameters);
|
||||||
|
if (
|
||||||
|
fs.existsSync(`./cloud-runner-cache/${path.basename(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute)}`)
|
||||||
|
) {
|
||||||
|
CloudRunnerLogger.log(
|
||||||
|
`Cleaning up ./cloud-runner-cache/${path.basename(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute)}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -0,0 +1,46 @@
import CloudRunner from '../cloud-runner';
import { BuildParameters, ImageTag } from '../..';
import UnityVersioning from '../../unity-versioning';
import { Cli } from '../../cli/cli';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { v4 as uuidv4 } from 'uuid';
import CloudRunnerOptions from '../cloud-runner-options';
import setups from './cloud-runner-suite.test';
import { CloudRunnerSystem } from '../services/cloud-runner-system';

async function CreateParameters(overrides) {
  if (overrides) {
    Cli.options = overrides;
  }

  return await BuildParameters.create();
}

describe('Cloud Runner pre-built S3 steps', () => {
  it('Responds', () => {});
  setups();
  if (CloudRunnerOptions.cloudRunnerDebug && CloudRunnerOptions.cloudRunnerCluster !== `local-docker`) {
    it('Run build and prebuilt s3 cache pull, cache push and upload build', async () => {
      const overrides = {
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
        targetPlatform: 'StandaloneLinux64',
        cacheKey: `test-case-${uuidv4()}`,
        customStepFiles: `aws-s3-pull-cache,aws-s3-upload-cache,aws-s3-upload-build`,
      };
      const buildParameter2 = await CreateParameters(overrides);
      const baseImage2 = new ImageTag(buildParameter2);
      const results2 = await CloudRunner.run(buildParameter2, baseImage2.toString());
      CloudRunnerLogger.log(`run 2 succeeded`);

      const build2ContainsBuildSucceeded = results2.includes('Build succeeded');
      expect(build2ContainsBuildSucceeded).toBeTruthy();

      const results = await CloudRunnerSystem.RunAndReadLines(
        `aws s3 ls s3://${CloudRunner.buildParameters.awsBaseStackName}/cloud-runner-cache/${buildParameter2.cacheKey}/`,
      );
      CloudRunnerLogger.log(results.join(`,`));
    }, 1_000_000_000);
  }
});
src/model/cloud-runner/tests/cloud-runner-suite.test.ts (Normal file, 25 lines)
@@ -0,0 +1,25 @@
import { Cli } from '../../cli/cli';
import GitHub from '../../github';

describe('Cloud Runner', () => {
  it('Responds', () => {});
});

const setups = () => {
  beforeAll(() => {
    GitHub.githubInputEnabled = false;
  });
  beforeEach(() => {
    Cli.options = {};
  });
  afterEach(() => {
    if (Cli.options !== undefined) {
      delete Cli.options;
    }
  });
  afterAll(() => {
    GitHub.githubInputEnabled = true;
  });
};

export default setups;
@@ -0,0 +1,77 @@
import { BuildParameters, ImageTag } from '../..';
import CloudRunner from '../cloud-runner';
import Input from '../../input';
import { CloudRunnerStatics } from '../cloud-runner-statics';
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
import UnityVersioning from '../../unity-versioning';
import { Cli } from '../../cli/cli';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerOptions from '../cloud-runner-options';
import setups from './cloud-runner-suite.test';

async function CreateParameters(overrides) {
  if (overrides) Cli.options = overrides;

  return BuildParameters.create();
}
describe('Cloud Runner Sync Environments', () => {
  setups();
  const testSecretName = 'testSecretName';
  const testSecretValue = 'testSecretValue';
  it('Responds', () => {});

  if (CloudRunnerOptions.cloudRunnerDebug) {
    it('All build parameters sent to cloud runner as env vars', async () => {
      // Setup parameters
      const buildParameter = await CreateParameters({
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.read('test-project'),
        customJob: `
        - name: 'step 1'
          image: 'ubuntu'
          commands: 'printenv'
          secrets:
            - name: '${testSecretName}'
              value: '${testSecretValue}'
        `,
      });
      const baseImage = new ImageTag(buildParameter);

      // Run the job
      const file = await CloudRunner.run(buildParameter, baseImage.toString());

      // Assert results
      // expect(file).toContain(JSON.stringify(buildParameter));
      expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
      const environmentVariables = TaskParameterSerializer.createCloudRunnerEnvironmentVariables(buildParameter);
      const secrets = TaskParameterSerializer.readDefaultSecrets().map((x) => {
        return {
          name: x.EnvironmentVariable,
          value: x.ParameterValue,
        };
      });
      const combined = [...environmentVariables, ...secrets]
        .filter((element) => element.value !== undefined && element.value !== '' && typeof element.value !== 'function')
        .map((x) => {
          if (typeof x.value === `string`) {
            x.value = x.value.replace(/\s+/g, '');
          }

          return x;
        })
        .filter((element) => {
          return !['UNITY_LICENSE', 'CUSTOM_JOB'].includes(element.name);
        });
      const newLinePurgedFile = file
        .replace(/\s+/g, '')
        .replace(new RegExp(`\\[${CloudRunnerStatics.logPrefix}\\]`, 'g'), '');
      for (const element of combined) {
        expect(newLinePurgedFile).toContain(`${element.name}`);
        CloudRunnerLogger.log(`Contains ${element.name}`);
        const fullNameEqualValue = `${element.name}=${element.value}`;
        expect(newLinePurgedFile).toContain(fullNameEqualValue);
      }
    }, 1_000_000_000);
  }
});
src/model/cloud-runner/tests/shared-workspace-locking.test.ts (Normal file, 101 lines)
@@ -0,0 +1,101 @@
import SharedWorkspaceLocking from '../services/shared-workspace-locking';
import { Cli } from '../../cli/cli';
import setups from './cloud-runner-suite.test';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { v4 as uuidv4 } from 'uuid';
import CloudRunnerOptions from '../cloud-runner-options';
import UnityVersioning from '../../unity-versioning';
import BuildParameters from '../../build-parameters';
import CloudRunner from '../cloud-runner';

async function CreateParameters(overrides) {
  if (overrides) {
    Cli.options = overrides;
  }

  return await BuildParameters.create();
}

describe('Cloud Runner Locking', () => {
  setups();
  it('Responds', () => {});
  if (CloudRunnerOptions.cloudRunnerDebug) {
    it(`Simple Locking Flow`, async () => {
      Cli.options.retainWorkspaces = true;
      const overrides: any = {
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
        targetPlatform: 'StandaloneLinux64',
        cacheKey: `test-case-${uuidv4()}`,
      };
      const buildParameters = await CreateParameters(overrides);

      const newWorkspaceName = `test-workspace-${uuidv4()}`;
      const runId = uuidv4();
      CloudRunner.buildParameters = buildParameters;
      await SharedWorkspaceLocking.CreateWorkspace(newWorkspaceName, buildParameters);
      const isExpectedUnlockedBeforeLocking =
        (await SharedWorkspaceLocking.IsWorkspaceLocked(newWorkspaceName, buildParameters)) === false;
      expect(isExpectedUnlockedBeforeLocking).toBeTruthy();
      await SharedWorkspaceLocking.LockWorkspace(newWorkspaceName, runId, buildParameters);
      const isExpectedLockedAfterLocking =
        (await SharedWorkspaceLocking.IsWorkspaceLocked(newWorkspaceName, buildParameters)) === true;
      expect(isExpectedLockedAfterLocking).toBeTruthy();
      const locksBeforeRelease = await SharedWorkspaceLocking.GetAllLocks(newWorkspaceName, buildParameters);
      CloudRunnerLogger.log(JSON.stringify(locksBeforeRelease, undefined, 4));
      expect(locksBeforeRelease.length).toBe(1);
      await SharedWorkspaceLocking.ReleaseWorkspace(newWorkspaceName, runId, buildParameters);
      const locks = await SharedWorkspaceLocking.GetAllLocks(newWorkspaceName, buildParameters);
      expect(locks.length).toBe(0);
      const isExpectedLockedAfterReleasing =
        (await SharedWorkspaceLocking.IsWorkspaceLocked(newWorkspaceName, buildParameters)) === false;
      expect(isExpectedLockedAfterReleasing).toBeTruthy();
    }, 150000);
    it.skip('All Locking Actions', async () => {
      Cli.options.retainWorkspaces = true;
      const overrides: any = {
        versioning: 'None',
        projectPath: 'test-project',
        unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
        targetPlatform: 'StandaloneLinux64',
        cacheKey: `test-case-${uuidv4()}`,
      };
      const buildParameters = await CreateParameters(overrides);

      CloudRunnerLogger.log(
        `GetAllWorkspaces ${JSON.stringify(await SharedWorkspaceLocking.GetAllWorkspaces(buildParameters))}`,
      );
      CloudRunnerLogger.log(
        `GetFreeWorkspaces ${JSON.stringify(await SharedWorkspaceLocking.GetFreeWorkspaces(buildParameters))}`,
      );
      CloudRunnerLogger.log(
        `IsWorkspaceLocked ${JSON.stringify(
          await SharedWorkspaceLocking.IsWorkspaceLocked(`test-workspace-${uuidv4()}`, buildParameters),
        )}`,
      );
      CloudRunnerLogger.log(
        `GetFreeWorkspaces ${JSON.stringify(await SharedWorkspaceLocking.GetFreeWorkspaces(buildParameters))}`,
      );
      CloudRunnerLogger.log(
        `LockWorkspace ${JSON.stringify(
          await SharedWorkspaceLocking.LockWorkspace(`test-workspace-${uuidv4()}`, uuidv4(), buildParameters),
        )}`,
      );
      CloudRunnerLogger.log(
        `CreateLockableWorkspace ${JSON.stringify(
          await SharedWorkspaceLocking.CreateWorkspace(`test-workspace-${uuidv4()}`, buildParameters),
        )}`,
      );
      CloudRunnerLogger.log(
        `GetLockedWorkspace ${JSON.stringify(
          await SharedWorkspaceLocking.GetOrCreateLockedWorkspace(
            `test-workspace-${uuidv4()}`,
            uuidv4(),
            buildParameters,
          ),
        )}`,
      );
    }, 3000000);
  }
});
src/model/cloud-runner/workflows/async-workflow.ts (Normal file, 60 lines)
@@ -0,0 +1,60 @@
import CloudRunnerSecret from '../services/cloud-runner-secret';
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
import CloudRunner from '../cloud-runner';

export class AsyncWorkflow {
  public static async runAsyncWorkflow(
    environmentVariables: CloudRunnerEnvironmentVariable[],
    secrets: CloudRunnerSecret[],
  ): Promise<string> {
    try {
      CloudRunnerLogger.log(`Cloud Runner is running async mode`);

      let output = '';

      output += await CloudRunner.Provider.runTaskInWorkflow(
        CloudRunner.buildParameters.buildGuid,
        `ubuntu`,
        `apt-get update > /dev/null
        apt-get install -y curl tar tree npm git git-lfs jq git > /dev/null
        mkdir /builder
        printenv
        git config --global advice.detachedHead false
        git config --global filter.lfs.smudge "git-lfs smudge --skip -- %f"
        git config --global filter.lfs.process "git-lfs filter-process --skip"
        git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${CloudRunnerFolders.unityBuilderRepoUrl} /builder
        git clone -q -b ${CloudRunner.buildParameters.branch} ${CloudRunnerFolders.targetBuildRepoUrl} /repo
        cd /repo
        curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
        unzip awscliv2.zip
        ./aws/install
        aws --version
        node /builder/dist/index.js -m async-workflow`,
        `/${CloudRunnerFolders.buildVolumeFolder}`,
        `/${CloudRunnerFolders.buildVolumeFolder}/`,
        environmentVariables,
        [
          ...secrets,
          ...[
            {
              ParameterKey: `AWS_ACCESS_KEY_ID`,
              EnvironmentVariable: `AWS_ACCESS_KEY_ID`,
              ParameterValue: process.env.AWS_ACCESS_KEY_ID || ``,
            },
            {
              ParameterKey: `AWS_SECRET_ACCESS_KEY`,
              EnvironmentVariable: `AWS_SECRET_ACCESS_KEY`,
              ParameterValue: process.env.AWS_SECRET_ACCESS_KEY || ``,
            },
          ],
        ],
      );

      return output;
    } catch (error) {
      throw error;
    }
  }
}
@@ -1,32 +1,31 @@
 import CloudRunnerLogger from '../services/cloud-runner-logger';
 import { CloudRunnerFolders } from '../services/cloud-runner-folders';
 import { CloudRunnerStepState } from '../cloud-runner-step-state';
-import { CustomWorkflow } from './custom-workflow';
 import { WorkflowInterface } from './workflow-interface';
 import * as core from '@actions/core';
-import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
+import { CloudRunnerCustomHooks } from '../services/cloud-runner-custom-hooks';
 import path from 'path';
 import CloudRunner from '../cloud-runner';
+import CloudRunnerOptions from '../cloud-runner-options';
+import { CloudRunnerCustomSteps } from '../services/cloud-runner-custom-steps';

 export class BuildAutomationWorkflow implements WorkflowInterface {
   async run(cloudRunnerStepState: CloudRunnerStepState) {
     try {
-      return await BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image);
+      return await BuildAutomationWorkflow.standardBuildAutomation(cloudRunnerStepState.image, cloudRunnerStepState);
     } catch (error) {
       throw error;
     }
   }

-  private static async standardBuildAutomation(baseImage: any) {
+  private static async standardBuildAutomation(baseImage: any, cloudRunnerStepState: CloudRunnerStepState) {
+    // TODO accept post and pre build steps as yaml files in the repo
     try {
       CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);

-      if (!CloudRunner.buildParameters.isCliMode) core.startGroup('pre build steps');
       let output = '';
-      if (CloudRunner.buildParameters.preBuildSteps !== '') {
-        output += await CustomWorkflow.runCustomJob(CloudRunner.buildParameters.preBuildSteps);
-      }
-      if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
+      output += await CloudRunnerCustomSteps.RunPreBuildSteps(cloudRunnerStepState);
       CloudRunnerLogger.logWithTime('Configurable pre build step(s) time');

       if (!CloudRunner.buildParameters.isCliMode) core.startGroup('build');
@@ -34,23 +33,19 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
       CloudRunnerLogger.logLine(` `);
       CloudRunnerLogger.logLine('Starting build automation job');

-      output += await CloudRunner.Provider.runTask(
+      output += await CloudRunner.Provider.runTaskInWorkflow(
         CloudRunner.buildParameters.buildGuid,
         baseImage.toString(),
         BuildAutomationWorkflow.BuildWorkflow,
         `/${CloudRunnerFolders.buildVolumeFolder}`,
         `/${CloudRunnerFolders.buildVolumeFolder}/`,
-        CloudRunner.cloudRunnerEnvironmentVariables,
-        CloudRunner.defaultSecrets,
+        cloudRunnerStepState.environment,
+        cloudRunnerStepState.secrets,
       );
       if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
       CloudRunnerLogger.logWithTime('Build time');

-      if (!CloudRunner.buildParameters.isCliMode) core.startGroup('post build steps');
-      if (CloudRunner.buildParameters.postBuildSteps !== '') {
-        output += await CustomWorkflow.runCustomJob(CloudRunner.buildParameters.postBuildSteps);
-      }
-      if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
+      output += await CloudRunnerCustomSteps.RunPostBuildSteps(cloudRunnerStepState);
       CloudRunnerLogger.logWithTime('Configurable post build step(s) time');

       CloudRunnerLogger.log(`Cloud Runner finished running standard build automation`);
@@ -62,62 +57,83 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
     }

   private static get BuildWorkflow() {
-    const setupHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks).filter(
-      (x) => x.step.includes(`setup`),
+    const setupHooks = CloudRunnerCustomHooks.getHooks(CloudRunner.buildParameters.customJobHooks).filter((x) =>
+      x.step.includes(`setup`),
     );
-    const buildHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks).filter(
-      (x) => x.step.includes(`build`),
+    const buildHooks = CloudRunnerCustomHooks.getHooks(CloudRunner.buildParameters.customJobHooks).filter((x) =>
+      x.step.includes(`build`),
+    );
+    const builderPath = CloudRunnerFolders.ToLinuxFolder(
+      path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', `index.js`),
     );
-    const builderPath = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', `index.js`).replace(/\\/g, `/`);

     return `apt-get update > /dev/null
-    apt-get install -y tar tree npm git-lfs jq git > /dev/null
-    npm install -g n > /dev/null
-    n stable > /dev/null
+    apt-get install -y curl tar tree npm git-lfs jq git > /dev/null
+    npm i -g n > /dev/null
+    n 16.15.1 > /dev/null
+    npm --version
+    node --version
+    ${BuildAutomationWorkflow.TreeCommand}
     ${setupHooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
-    export GITHUB_WORKSPACE="${CloudRunnerFolders.repoPathAbsolute.replace(/\\/g, `/`)}"
+    export GITHUB_WORKSPACE="${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.repoPathAbsolute)}"
     ${BuildAutomationWorkflow.setupCommands(builderPath)}
     ${setupHooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
+    ${BuildAutomationWorkflow.TreeCommand}
     ${buildHooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
-    ${BuildAutomationWorkflow.BuildCommands(builderPath, CloudRunner.buildParameters.buildGuid)}
-    ${buildHooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}`;
+    ${BuildAutomationWorkflow.BuildCommands(builderPath)}
+    ${buildHooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
+    ${BuildAutomationWorkflow.TreeCommand}`;
   }

   private static setupCommands(builderPath) {
-    return `export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
-    echo "game ci cloud runner clone"
-    mkdir -p ${CloudRunnerFolders.builderPathAbsolute.replace(/\\/g, `/`)}
-    git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${
-      CloudRunnerFolders.unityBuilderRepoUrl
-    } "${CloudRunnerFolders.builderPathAbsolute.replace(/\\/g, `/`)}"
-    chmod +x ${builderPath}
-    echo "game ci cloud runner bootstrap"
-    node ${builderPath} -m remote-cli`;
+    const commands = `mkdir -p ${CloudRunnerFolders.ToLinuxFolder(
+      CloudRunnerFolders.builderPathAbsolute,
+    )} && git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${
+      CloudRunnerFolders.unityBuilderRepoUrl
+    } "${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.builderPathAbsolute)}" && chmod +x ${builderPath}`;
+
+    const retainedWorkspaceCommands = `if [ -e "${CloudRunnerFolders.ToLinuxFolder(
+      CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute,
+    )}" ] && [ -e "${CloudRunnerFolders.ToLinuxFolder(
+      path.join(CloudRunnerFolders.repoPathAbsolute, `.git`),
+    )}" ]; then echo "Retained Workspace Already Exists!" ; fi`;
+
+    const cloneBuilderCommands = `if [ -e "${CloudRunnerFolders.ToLinuxFolder(
+      CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute,
+    )}" ] && [ -e "${CloudRunnerFolders.ToLinuxFolder(
+      path.join(CloudRunnerFolders.builderPathAbsolute, `.git`),
+    )}" ]; then echo "Builder Already Exists!"; else ${commands}; fi`;
+
+    return `export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
+    echo "downloading game-ci..."
+    ${retainedWorkspaceCommands}
+    ${cloneBuilderCommands}
+    echo "bootstrap game ci cloud runner..."
+    node ${builderPath} -m remote-cli-pre-build`;
   }

-  private static BuildCommands(builderPath, guid) {
-    const linuxCacheFolder = CloudRunnerFolders.cacheFolderFull.replace(/\\/g, `/`);
+  private static BuildCommands(builderPath) {
     const distFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist');
     const ubuntuPlatformsFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', 'platforms', 'ubuntu');

-    return `echo "game ci cloud runner init"
-    mkdir -p ${`${CloudRunnerFolders.projectBuildFolderAbsolute}/build`.replace(/\\/g, `/`)}
-    cd ${CloudRunnerFolders.projectPathAbsolute}
-    cp -r "${path.join(distFolder, 'default-build-script').replace(/\\/g, `/`)}" "/UnityBuilderAction"
-    cp -r "${path.join(ubuntuPlatformsFolder, 'entrypoint.sh').replace(/\\/g, `/`)}" "/entrypoint.sh"
-    cp -r "${path.join(ubuntuPlatformsFolder, 'steps').replace(/\\/g, `/`)}" "/steps"
+    return `echo "game ci cloud runner initalized"
+    mkdir -p ${`${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectBuildFolderAbsolute)}/build`}
+    cd ${CloudRunnerFolders.ToLinuxFolder(CloudRunnerFolders.projectPathAbsolute)}
+    cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(distFolder, 'default-build-script'))}" "/UnityBuilderAction"
+    cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(ubuntuPlatformsFolder, 'entrypoint.sh'))}" "/entrypoint.sh"
+    cp -r "${CloudRunnerFolders.ToLinuxFolder(path.join(ubuntuPlatformsFolder, 'steps'))}" "/steps"
     chmod -R +x "/entrypoint.sh"
     chmod -R +x "/steps"
-    echo "game ci cloud runner start"
+    echo "game ci start"
     /entrypoint.sh
-    echo "game ci cloud runner push library to cache"
+    echo "game ci caching results"
     chmod +x ${builderPath}
-    node ${builderPath} -m cache-push --cachePushFrom ${
-      CloudRunnerFolders.libraryFolderAbsolute
-    } --artifactName lib-${guid} --cachePushTo ${linuxCacheFolder}/Library
-    echo "game ci cloud runner push build to cache"
-    node ${builderPath} -m cache-push --cachePushFrom ${
-      CloudRunnerFolders.projectBuildFolderAbsolute
-    } --artifactName build-${guid} --cachePushTo ${`${linuxCacheFolder}/build`.replace(/\\/g, `/`)}`;
+    node ${builderPath} -m remote-cli-post-build`;
+  }
+
+  private static get TreeCommand(): string {
+    return CloudRunnerOptions.cloudRunnerDebugTree
+      ? `tree -L 2 ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute} && tree -L 2 ${CloudRunnerFolders.cacheFolderForCacheKeyFull} && du -h -s /${CloudRunnerFolders.buildVolumeFolder}/ && du -h -s ${CloudRunnerFolders.cacheFolderForAllFull}`
+      : `#`;
   }
 }
@@ -1,41 +1,45 @@
 import CloudRunnerLogger from '../services/cloud-runner-logger';
 import CloudRunnerSecret from '../services/cloud-runner-secret';
 import { CloudRunnerFolders } from '../services/cloud-runner-folders';
-import YAML from 'yaml';
-import { CloudRunner, Input } from '../..';
+import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
+import { CloudRunnerCustomSteps } from '../services/cloud-runner-custom-steps';
+import { CustomStep } from '../services/custom-step';
+import CloudRunner from '../cloud-runner';

 export class CustomWorkflow {
-  public static async runCustomJob(buildSteps) {
+  public static async runCustomJobFromString(
+    buildSteps: string,
+    environmentVariables: CloudRunnerEnvironmentVariable[],
+    secrets: CloudRunnerSecret[],
+  ): Promise<string> {
+    return await CustomWorkflow.runCustomJob(
+      CloudRunnerCustomSteps.ParseSteps(buildSteps),
+      environmentVariables,
+      secrets,
+    );
+  }
+
+  public static async runCustomJob(
+    buildSteps: CustomStep[],
+    environmentVariables: CloudRunnerEnvironmentVariable[],
+    secrets: CloudRunnerSecret[],
+  ) {
     try {
       CloudRunnerLogger.log(`Cloud Runner is running in custom job mode`);
-      if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
-        CloudRunnerLogger.log(`Parsing build steps: ${buildSteps}`);
-      }
-      try {
-        buildSteps = YAML.parse(buildSteps);
-      } catch (error) {
-        CloudRunnerLogger.log(`failed to parse a custom job "${buildSteps}"`);
-        throw error;
-      }
       let output = '';
-      for (const step of buildSteps) {
-        const stepSecrets: CloudRunnerSecret[] = step.secrets.map((x) => {
-          const secret: CloudRunnerSecret = {
-            ParameterKey: x.name,
-            EnvironmentVariable: Input.ToEnvVarFormat(x.name),
-            ParameterValue: x.value,
-          };

-          return secret;
-        });
-        output += await CloudRunner.Provider.runTask(
+      // if (CloudRunner.buildParameters?.cloudRunnerDebug) {
+      //   CloudRunnerLogger.log(`Custom Job Description \n${JSON.stringify(buildSteps, undefined, 4)}`);
+      // }
+      for (const step of buildSteps) {
+        output += await CloudRunner.Provider.runTaskInWorkflow(
           CloudRunner.buildParameters.buildGuid,
-          step['image'],
-          step['commands'],
+          step.image,
+          step.commands,
           `/${CloudRunnerFolders.buildVolumeFolder}`,
-          `/${CloudRunnerFolders.buildVolumeFolder}/`,
-          CloudRunner.cloudRunnerEnvironmentVariables,
-          [...CloudRunner.defaultSecrets, ...stepSecrets],
+          `/${CloudRunnerFolders.projectPathAbsolute}/`,
+          environmentVariables,
+          [...secrets, ...step.secrets],
         );
       }
@@ -3,24 +3,30 @@ import { CustomWorkflow } from './custom-workflow';
 import { WorkflowInterface } from './workflow-interface';
 import { BuildAutomationWorkflow } from './build-automation-workflow';
 import CloudRunner from '../cloud-runner';
+import CloudRunnerOptions from '../cloud-runner-options';
+import { AsyncWorkflow } from './async-workflow';

 export class WorkflowCompositionRoot implements WorkflowInterface {
   async run(cloudRunnerStepState: CloudRunnerStepState) {
     try {
-      return await WorkflowCompositionRoot.runJob(cloudRunnerStepState.image.toString());
-    } catch (error) {
-      throw error;
-    }
-  }
-
-  private static async runJob(baseImage: any) {
-    try {
+      if (CloudRunnerOptions.asyncCloudRunner) {
+        return await AsyncWorkflow.runAsyncWorkflow(cloudRunnerStepState.environment, cloudRunnerStepState.secrets);
+      }
+
       if (CloudRunner.buildParameters.customJob !== '') {
-        return await CustomWorkflow.runCustomJob(CloudRunner.buildParameters.customJob);
+        return await CustomWorkflow.runCustomJobFromString(
+          CloudRunner.buildParameters.customJob,
+          cloudRunnerStepState.environment,
+          cloudRunnerStepState.secrets,
+        );
       }

       return await new BuildAutomationWorkflow().run(
-        new CloudRunnerStepState(baseImage, CloudRunner.cloudRunnerEnvironmentVariables, CloudRunner.defaultSecrets),
+        new CloudRunnerStepState(
+          cloudRunnerStepState.image.toString(),
+          cloudRunnerStepState.environment,
+          cloudRunnerStepState.secrets,
+        ),
       );
     } catch (error) {
       throw error;
@@ -4,30 +4,50 @@ import { existsSync, mkdirSync } from 'fs';
 import path from 'path';

 class Docker {
-  static async run(image, parameters, silent = false) {
+  static async run(
+    image,
+    parameters,
+    silent = false,
+    overrideCommands = '',
+    additionalVariables: any[] = [],
+    options: any = false,
+    entrypointBash: boolean = false,
+  ) {
     let runCommand = '';
     switch (process.platform) {
       case 'linux':
-        runCommand = this.getLinuxCommand(image, parameters);
+        runCommand = this.getLinuxCommand(image, parameters, overrideCommands, additionalVariables, entrypointBash);
         break;
       case 'win32':
         runCommand = this.getWindowsCommand(image, parameters);
     }
-    await exec(runCommand, undefined, { silent });
+    if (options !== false) {
+      options.silent = silent;
+      await exec(runCommand, undefined, options);
+    } else {
+      await exec(runCommand, undefined, { silent });
+    }
   }

-  static getLinuxCommand(image, parameters): string {
+  static getLinuxCommand(
+    image,
+    parameters,
+    overrideCommands = '',
+    additionalVariables: any[] = [],
+    entrypointBash: boolean = false,
+  ): string {
     const { workspace, actionFolder, runnerTempPath, sshAgent, gitPrivateToken } = parameters;

     const githubHome = path.join(runnerTempPath, '_github_home');
     if (!existsSync(githubHome)) mkdirSync(githubHome);
     const githubWorkflow = path.join(runnerTempPath, '_github_workflow');
     if (!existsSync(githubWorkflow)) mkdirSync(githubWorkflow);
+    const commandPrefix = image === `alpine` ? `/bin/sh` : `/bin/bash`;

     return `docker run \
             --workdir /github/workspace \
             --rm \
-            ${ImageEnvironmentFactory.getEnvVarString(parameters)} \
+            ${ImageEnvironmentFactory.getEnvVarString(parameters, additionalVariables)} \
             --env UNITY_SERIAL \
             --env GITHUB_WORKSPACE=/github/workspace \
             ${gitPrivateToken ? `--env GIT_PRIVATE_TOKEN="${gitPrivateToken}"` : ''} \
@@ -38,17 +58,20 @@ class Docker {
             --volume "${actionFolder}/default-build-script:/UnityBuilderAction:z" \
             --volume "${actionFolder}/platforms/ubuntu/steps:/steps:z" \
             --volume "${actionFolder}/platforms/ubuntu/entrypoint.sh:/entrypoint.sh:z" \
+            --volume "${actionFolder}/unity-config:/usr/share/unity3d/config/:z" \
             ${sshAgent ? `--volume ${sshAgent}:/ssh-agent` : ''} \
             ${sshAgent ? '--volume /home/runner/.ssh/known_hosts:/root/.ssh/known_hosts:ro' : ''} \
+            ${entrypointBash ? `--entrypoint ${commandPrefix}` : ``} \
             ${image} \
-            /bin/bash -c /entrypoint.sh`;
+            ${entrypointBash ? `-c` : `${commandPrefix} -c`} \
+            "${overrideCommands !== '' ? overrideCommands : `/entrypoint.sh`}"`;
   }

   static getWindowsCommand(image: any, parameters: any): string {
     const { workspace, actionFolder, unitySerial, gitPrivateToken } = parameters;

     return `docker run \
-            --workdir /github/workspace \
+            --workdir c:/github/workspace \
             --rm \
             ${ImageEnvironmentFactory.getEnvVarString(parameters)} \
             --env UNITY_SERIAL="${unitySerial}" \
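To make the new entrypoint handling easier to follow, here is a hedged sketch (illustrative only, not part of this changeset) of roughly what the Linux template above expands to; the image name and command are placeholders, and `parameters` is assumed to be the same object docker.ts already receives.

```ts
// Illustrative sketch only — derived from the template above.
const command = Docker.getLinuxCommand(`ubuntu`, parameters, `printenv`, [], true);
// Expands roughly to:
//   docker run --workdir /github/workspace --rm \
//     ...env and volume flags... \
//     --entrypoint /bin/bash \
//     ubuntu \
//     -c "printenv"
// With the defaults (entrypointBash = false, no overrideCommands) the image keeps its
// own entrypoint and the command ends with: /bin/bash -c "/entrypoint.sh".
```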
src/model/github.ts (Normal file, 168 lines)
@@ -0,0 +1,168 @@
import CloudRunnerLogger from './cloud-runner/services/cloud-runner-logger';
import CloudRunner from './cloud-runner/cloud-runner';
import CloudRunnerOptions from './cloud-runner/cloud-runner-options';
import * as core from '@actions/core';
import { Octokit } from '@octokit/core';
class GitHub {
  private static readonly asyncChecksApiWorkflowName = `Async Checks API`;
  public static githubInputEnabled: boolean = true;
  private static longDescriptionContent: string = ``;
  private static startedDate: string;
  private static endedDate: string;
  private static get octokitDefaultToken() {
    return new Octokit({
      auth: process.env.GITHUB_TOKEN,
    });
  }
  private static get octokitPAT() {
    return new Octokit({
      auth: CloudRunner.buildParameters.gitPrivateToken,
    });
  }
  private static get sha() {
    return CloudRunner.buildParameters.gitSha;
  }

  private static get checkName() {
    return `Cloud Runner (${CloudRunner.buildParameters.buildGuid})`;
  }

  private static get nameReadable() {
    return GitHub.checkName;
  }

  private static get checkRunId() {
    return CloudRunner.githubCheckId;
  }

  private static get owner() {
    return CloudRunnerOptions.githubOwner;
  }

  private static get repo() {
    return CloudRunnerOptions.githubRepoName;
  }

  public static async createGitHubCheck(summary) {
    if (!CloudRunnerOptions.githubChecks) {
      return ``;
    }
    GitHub.startedDate = new Date().toISOString();

    CloudRunnerLogger.log(`POST /repos/${GitHub.owner}/${GitHub.repo}/check-runs`);

    const data = {
      owner: GitHub.owner,
      repo: GitHub.repo,
      name: GitHub.checkName,
      // eslint-disable-next-line camelcase
      head_sha: GitHub.sha,
      status: 'queued',
      // eslint-disable-next-line camelcase
      external_id: CloudRunner.buildParameters.buildGuid,
      // eslint-disable-next-line camelcase
      started_at: GitHub.startedDate,
      output: {
        title: GitHub.nameReadable,
        summary,
        text: '',
        images: [
          {
            alt: 'Game-CI',
            // eslint-disable-next-line camelcase
            image_url: 'https://game.ci/assets/images/game-ci-brand-logo-wordmark.svg',
          },
        ],
      },
    };
    const result = await GitHub.createGitHubCheckRequest(data);

    return result.data.id;
  }

  public static async updateGitHubCheck(longDescription, summary, result = `neutral`, status = `in_progress`) {
    if (!CloudRunnerOptions.githubChecks) {
      return;
    }
    GitHub.longDescriptionContent += `\n${longDescription}`;

    const data: any = {
      owner: GitHub.owner,
      repo: GitHub.repo,
      // eslint-disable-next-line camelcase
      check_run_id: GitHub.checkRunId,
      name: GitHub.checkName,
      // eslint-disable-next-line camelcase
      head_sha: GitHub.sha,
      // eslint-disable-next-line camelcase
      started_at: GitHub.startedDate,
      status,
      output: {
        title: GitHub.nameReadable,
        summary,
        text: GitHub.longDescriptionContent,
        annotations: [],
      },
    };

    if (status === `completed`) {
      if (GitHub.endedDate !== undefined) {
        GitHub.endedDate = new Date().toISOString();
      }
      // eslint-disable-next-line camelcase
      data.completed_at = GitHub.endedDate || GitHub.startedDate;
      data.conclusion = result;
    }

    if (await CloudRunnerOptions.asyncCloudRunner) {
      await GitHub.runUpdateAsyncChecksWorkflow(data, `update`);

      return;
    }
    await GitHub.updateGitHubCheckRequest(data);
  }

  public static async updateGitHubCheckRequest(data) {
    return await GitHub.octokitDefaultToken.request(`PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}`, data);
  }

  public static async createGitHubCheckRequest(data) {
    return await GitHub.octokitDefaultToken.request(`POST /repos/{owner}/{repo}/check-runs`, data);
  }

  public static async runUpdateAsyncChecksWorkflow(data, mode) {
    if (mode === `create`) {
      throw new Error(`Not supported: only use update`);
    }
    const workflowsResult = await GitHub.octokitDefaultToken.request(
      `GET /repos/${GitHub.owner}/${GitHub.repo}/actions/workflows`,
      {
        owner: GitHub.owner,
        repo: GitHub.repo,
      },
    );
    const workflows = workflowsResult.data.workflows;
    let selectedId = ``;
    for (let index = 0; index < workflowsResult.data.total_count; index++) {
      if (workflows[index].name === GitHub.asyncChecksApiWorkflowName) {
        selectedId = workflows[index].id;
      }
    }
    if (selectedId === ``) {
      core.info(JSON.stringify(workflows));
      throw new Error(`no workflow with name "${GitHub.asyncChecksApiWorkflowName}"`);
    }
    await GitHub.octokitPAT.request(`POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches`, {
      owner: GitHub.owner,
      repo: GitHub.repo,
      // eslint-disable-next-line camelcase
      workflow_id: selectedId,
      ref: CloudRunnerOptions.branch,
      inputs: {
        checksObject: JSON.stringify({ data, mode }),
      },
    });
  }
}

export default GitHub;
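A minimal, hedged sketch of how the check-run helpers above are intended to be driven; it assumes CloudRunner.githubCheckId is where the id returned by createGitHubCheck gets stored, and the summary strings are placeholders.

```ts
// Illustrative only — assumes GitHub checks are enabled and a token is available.
async function reportBuildStatus() {
  const checkId = await GitHub.createGitHubCheck(`Build queued`);
  CloudRunner.githubCheckId = checkId; // assumed storage for the id read back by checkRunId

  await GitHub.updateGitHubCheck(`Build started`, `Build in progress`);
  // ... run the build ...
  await GitHub.updateGitHubCheck(`Build finished`, `Build finished`, `success`, `completed`);
}
```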
@@ -7,8 +7,8 @@ class Parameter {
 }

 class ImageEnvironmentFactory {
-  public static getEnvVarString(parameters) {
-    const environmentVariables = ImageEnvironmentFactory.getEnvironmentVariables(parameters);
+  public static getEnvVarString(parameters, additionalVariables: any[] = []) {
+    const environmentVariables = ImageEnvironmentFactory.getEnvironmentVariables(parameters, additionalVariables);
     let string = '';
     for (const p of environmentVariables) {
       if (p.value === '' || p.value === undefined) {
@@ -16,6 +16,7 @@ class ImageEnvironmentFactory {
       }
       if (p.name !== 'ANDROID_KEYSTORE_BASE64' && p.value.toString().includes(`\n`)) {
         string += `--env ${p.name} `;
+        process.env[p.name] = p.value.toString();
         continue;
       }

@@ -24,13 +25,14 @@ class ImageEnvironmentFactory {

     return string;
   }
-  public static getEnvironmentVariables(parameters: BuildParameters) {
-    const environmentVariables: Parameter[] = [
+  public static getEnvironmentVariables(parameters: BuildParameters, additionalVariables: any[] = []) {
+    let environmentVariables: Parameter[] = [
       { name: 'UNITY_LICENSE', value: process.env.UNITY_LICENSE || ReadLicense() },
       { name: 'UNITY_LICENSE_FILE', value: process.env.UNITY_LICENSE_FILE },
       { name: 'UNITY_EMAIL', value: process.env.UNITY_EMAIL },
       { name: 'UNITY_PASSWORD', value: process.env.UNITY_PASSWORD },
       { name: 'UNITY_SERIAL', value: parameters.unitySerial },
+      { name: 'UNITY_LICENSING_SERVER', value: parameters.unityLicensingServer },
       { name: 'UNITY_VERSION', value: parameters.editorVersion },
       { name: 'USYM_UPLOAD_AUTH_TOKEN', value: process.env.USYM_UPLOAD_AUTH_TOKEN },
       { name: 'PROJECT_PATH', value: parameters.projectPath },
@@ -58,7 +60,6 @@ class ImageEnvironmentFactory {
       { name: 'GITHUB_HEAD_REF', value: process.env.GITHUB_HEAD_REF },
       { name: 'GITHUB_BASE_REF', value: process.env.GITHUB_BASE_REF },
       { name: 'GITHUB_EVENT_NAME', value: process.env.GITHUB_EVENT_NAME },
-      { name: 'GITHUB_WORKSPACE', value: '/github/workspace' },
       { name: 'GITHUB_ACTION', value: process.env.GITHUB_ACTION },
       { name: 'GITHUB_EVENT_PATH', value: process.env.GITHUB_EVENT_PATH },
       { name: 'RUNNER_OS', value: process.env.RUNNER_OS },
@@ -66,6 +67,26 @@ class ImageEnvironmentFactory {
       { name: 'RUNNER_TEMP', value: process.env.RUNNER_TEMP },
       { name: 'RUNNER_WORKSPACE', value: process.env.RUNNER_WORKSPACE },
     ];
+    if (parameters.cloudRunnerCluster === 'local-docker') {
+      for (const element of additionalVariables) {
+        if (
+          environmentVariables.find(
+            (x) => element !== undefined && element.name !== undefined && x.name === element.name,
+          ) === undefined
+        ) {
+          environmentVariables.push(element);
+        }
+      }
+      for (const variable of environmentVariables) {
+        if (
+          environmentVariables.find(
+            (x) => variable !== undefined && variable.name !== undefined && x.name === variable.name,
+          ) === undefined
+        ) {
+          environmentVariables = environmentVariables.filter((x) => x !== variable);
+        }
+      }
+    }
     if (parameters.sshAgent) environmentVariables.push({ name: 'SSH_AUTH_SOCK', value: '/ssh-agent' });

     return environmentVariables;
@@ -1,6 +1,7 @@
 import Platform from './platform';

 import BuildParameters from './build-parameters';
+import Input from './input';

 class ImageTag {
   public repository: string;
@@ -83,7 +84,7 @@ class ImageTag {
       case Platform.types.StandaloneWindows:
       case Platform.types.StandaloneWindows64:
         // Can only build windows-il2cpp on a windows based system
-        if (process.platform === 'win32') {
+        if (Input.useIL2Cpp && process.platform === 'win32') {
           // Unity versions before 2019.3 do not support il2cpp
           if (major >= 2020 || (major === 2019 && minor >= 3)) {
             return windowsIl2cpp;
@@ -96,7 +97,7 @@ class ImageTag {
         return windows;
       case Platform.types.StandaloneLinux64: {
         // Unity versions before 2019.3 do not support il2cpp
-        if (major >= 2020 || (major === 2019 && minor >= 3)) {
+        if ((Input.useIL2Cpp && major >= 2020) || (major === 2019 && minor >= 3)) {
           return linuxIl2cpp;
         }

@@ -1,9 +1,9 @@
 import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
-import Input from '../input';
+import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';

 export class GenericInputReader {
   public static async Run(command) {
-    if (Input.cloudRunnerCluster === 'local') {
+    if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
       return '';
     }

@@ -1,8 +1,24 @@
 import { GitRepoReader } from './git-repo';
+import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
+import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';

 describe(`git repo tests`, () => {
   it(`Branch value parsed from CLI to not contain illegal characters`, async () => {
     expect(await GitRepoReader.GetBranch()).not.toContain(`\n`);
     expect(await GitRepoReader.GetBranch()).not.toContain(` `);
   });

+  it(`returns valid branch name when using https`, async () => {
+    const mockValue = 'https://github.com/example/example.git';
+    await jest.spyOn(CloudRunnerSystem, 'Run').mockReturnValue(Promise.resolve(mockValue));
+    await jest.spyOn(CloudRunnerOptions, 'cloudRunnerCluster', 'get').mockReturnValue('not-local');
+    expect(await GitRepoReader.GetRemote()).toEqual(`example/example`);
+  });
+
+  it(`returns valid branch name when using ssh`, async () => {
+    const mockValue = 'git@github.com:example/example.git';
+    await jest.spyOn(CloudRunnerSystem, 'Run').mockReturnValue(Promise.resolve(mockValue));
+    await jest.spyOn(CloudRunnerOptions, 'cloudRunnerCluster', 'get').mockReturnValue('not-local');
+    expect(await GitRepoReader.GetRemote()).toEqual(`example/example`);
+  });
 });
@@ -2,28 +2,32 @@ import { assert } from 'console';
 import fs from 'fs';
 import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
 import CloudRunnerLogger from '../cloud-runner/services/cloud-runner-logger';
+import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';
 import Input from '../input';

 export class GitRepoReader {
   public static async GetRemote() {
-    if (Input.cloudRunnerCluster === 'local') {
+    if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
       return '';
     }
     assert(fs.existsSync(`.git`));
-    const value = (await CloudRunnerSystem.Run(`git remote -v`, false, true)).replace(/ /g, ``);
+    const value = (await CloudRunnerSystem.Run(`cd ${Input.projectPath} && git remote -v`, false, true)).replace(
+      / /g,
+      ``,
+    );
     CloudRunnerLogger.log(`value ${value}`);
     assert(value.includes('github.com'));

-    return value.split('github.com/')[1].split('.git')[0];
+    return value.split('github.com')[1].split('.git')[0].slice(1);
   }

   public static async GetBranch() {
-    if (Input.cloudRunnerCluster === 'local') {
+    if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
       return '';
     }
     assert(fs.existsSync(`.git`));

-    return (await CloudRunnerSystem.Run(`git branch --show-current`, false, true))
+    return (await CloudRunnerSystem.Run(`cd ${Input.projectPath} && git branch --show-current`, false, true))
       .split('\n')[0]
       .replace(/ /g, ``)
       .replace('/head', '');
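A short worked example (illustrative only, not part of this changeset) of the new remote parsing above: with whitespace already stripped as GetRemote() does, both https and ssh remotes reduce to the same owner/repo value, which is what the new git-repo tests in this compare assert.

```ts
const httpsRemote = 'originhttps://github.com/example/example.git(fetch)';
const sshRemote = 'origingit@github.com:example/example.git(fetch)';

// split('github.com')[1] leaves '/example/example...' or ':example/example...';
// split('.git')[0] trims the suffix, and slice(1) drops the leading '/' or ':'.
console.log(httpsRemote.split('github.com')[1].split('.git')[0].slice(1)); // example/example
console.log(sshRemote.split('github.com')[1].split('.git')[0].slice(1)); // example/example
```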
@@ -1,10 +1,10 @@
 import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
 import * as core from '@actions/core';
-import Input from '../input';
+import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';

 export class GithubCliReader {
   static async GetGitHubAuthToken() {
-    if (Input.cloudRunnerCluster === 'local') {
+    if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
       return '';
     }
     try {
@@ -1,10 +1,10 @@
 import path from 'path';
 import fs from 'fs';
 import YAML from 'yaml';
-import Input from '../input';
+import CloudRunnerOptions from '../cloud-runner/cloud-runner-options';

 export function ReadLicense() {
-  if (Input.cloudRunnerCluster === 'local') {
+  if (CloudRunnerOptions.cloudRunnerCluster === 'local') {
     return '';
   }
   const pipelineFile = path.join(__dirname, `.github`, `workflows`, `cloud-runner-k8s-pipeline.yml`);
@@ -3,6 +3,7 @@ import path from 'path';
|
|||||||
import { Cli } from './cli/cli';
|
import { Cli } from './cli/cli';
|
||||||
import CloudRunnerQueryOverride from './cloud-runner/services/cloud-runner-query-override';
|
import CloudRunnerQueryOverride from './cloud-runner/services/cloud-runner-query-override';
|
||||||
import Platform from './platform';
|
import Platform from './platform';
|
||||||
|
import GitHub from './github';
|
||||||
|
|
||||||
const core = require('@actions/core');
|
const core = require('@actions/core');
|
||||||
|
|
||||||
@@ -14,10 +15,8 @@
  * Todo: rename to UserInput and remove anything that is not direct input from the user / ci workflow
  */
 class Input {
-  public static githubInputEnabled: boolean = true;
-
   public static getInput(query) {
-    if (Input.githubInputEnabled) {
+    if (GitHub.githubInputEnabled) {
       const coreInput = core.getInput(query);
       if (coreInput && coreInput !== '') {
         return coreInput;
@@ -61,17 +60,6 @@
       return '';
     }
   }
-  static get cloudRunnerBuilderPlatform() {
-    const input = Input.getInput('cloudRunnerBuilderPlatform');
-    if (input) {
-      return input;
-    }
-    if (Input.cloudRunnerCluster !== 'local') {
-      return 'linux';
-    }
-
-    return;
-  }
 
   static get gitSha() {
     if (Input.getInput(`GITHUB_SHA`)) {
@@ -81,6 +69,10 @@
     }
   }
 
+  static get useIL2Cpp() {
+    return Input.getInput(`useIL2Cpp`) || true;
+  }
+
   static get runNumber() {
     return Input.getInput('GITHUB_RUN_NUMBER') || '0';
   }
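One detail of the new useIL2Cpp getter: `Input.getInput(`useIL2Cpp`) || true` returns whatever string the workflow supplies (a value of 'false' comes back as the truthy string 'false') and only falls back to true when the input is empty. A strict-boolean variant would look roughly like the following hypothetical TypeScript helper, which is not part of this change set:

// Hypothetical helper: turn a raw workflow input into a strict boolean,
// defaulting to true when the input is empty or unset.
function parseUseIL2Cpp(rawInput: string | undefined): boolean {
  return (rawInput ?? '').toLowerCase() !== 'false';
}

// parseUseIL2Cpp(undefined) -> true (default)
// parseUseIL2Cpp('false')   -> false
// parseUseIL2Cpp('true')    -> true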
@@ -117,6 +109,10 @@
     return Input.getInput('buildsPath') || 'build';
   }
 
+  static get unityLicensingServer() {
+    return Input.getInput('unityLicensingServer') || '';
+  }
+
   static get buildMethod() {
     return Input.getInput('buildMethod') || ''; // Processed in docker file
   }
@@ -175,34 +171,6 @@
     return core.getInput('gitPrivateToken') || false;
   }
 
-  static get customJob() {
-    return Input.getInput('customJob') || '';
-  }
-
-  static customJobHooks() {
-    return Input.getInput('customJobHooks') || '';
-  }
-
-  static cachePushOverrideCommand() {
-    return Input.getInput('cachePushOverrideCommand') || '';
-  }
-
-  static cachePullOverrideCommand() {
-    return Input.getInput('cachePullOverrideCommand') || '';
-  }
-
-  static readInputFromOverrideList() {
-    return Input.getInput('readInputFromOverrideList') || '';
-  }
-
-  static readInputOverrideCommand() {
-    return Input.getInput('readInputOverrideCommand') || '';
-  }
-
-  static get cloudRunnerBranch() {
-    return Input.getInput('cloudRunnerBranch') || 'cloud-runner-develop';
-  }
-
   static get chownFilesTo() {
     return Input.getInput('chownFilesTo') || '';
   }
@@ -213,66 +181,6 @@
     return input === 'true';
   }
 
-  static get postBuildSteps() {
-    return Input.getInput('postBuildSteps') || '';
-  }
-
-  static get preBuildSteps() {
-    return Input.getInput('preBuildSteps') || '';
-  }
-
-  static get awsBaseStackName() {
-    return Input.getInput('awsBaseStackName') || 'game-ci';
-  }
-
-  static get cloudRunnerCluster() {
-    if (Cli.isCliMode) {
-      return Input.getInput('cloudRunnerCluster') || 'aws';
-    }
-
-    return Input.getInput('cloudRunnerCluster') || 'local';
-  }
-
-  static get cloudRunnerCpu() {
-    return Input.getInput('cloudRunnerCpu');
-  }
-
-  static get cloudRunnerMemory() {
-    return Input.getInput('cloudRunnerMemory');
-  }
-
-  static get kubeConfig() {
-    return Input.getInput('kubeConfig') || '';
-  }
-
-  static get kubeVolume() {
-    return Input.getInput('kubeVolume') || '';
-  }
-
-  static get kubeVolumeSize() {
-    return Input.getInput('kubeVolumeSize') || '5Gi';
-  }
-
-  static get kubeStorageClass(): string {
-    return Input.getInput('kubeStorageClass') || '';
-  }
-
-  static get checkDependencyHealthOverride(): string {
-    return Input.getInput('checkDependencyHealthOverride') || '';
-  }
-
-  static get startDependenciesOverride(): string {
-    return Input.getInput('startDependenciesOverride') || '';
-  }
-
-  static get cacheKey(): string {
-    return Input.getInput('cacheKey') || Input.branch;
-  }
-
-  static get cloudRunnerTests(): boolean {
-    return Input.getInput(`cloudRunnerTests`) || false;
-  }
-
   public static ToEnvVarFormat(input: string) {
     if (input.toUpperCase() === input) {
       return input;
@@ -7,3 +7,11 @@ describe('Output', () => {
     });
   });
 });
+
+describe('Output', () => {
+  describe('setAndroidVersionCode', () => {
+    it('does not throw', async () => {
+      await expect(Output.setAndroidVersionCode('1000')).resolves.not.toThrow();
+    });
+  });
+});
@@ -4,6 +4,10 @@ class Output {
   static async setBuildVersion(buildVersion) {
     await core.setOutput('buildVersion', buildVersion);
   }
+
+  static async setAndroidVersionCode(androidVersionCode) {
+    await core.setOutput('androidVersionCode', androidVersionCode);
+  }
 }
 
 export default Output;
@@ -1,9 +1,13 @@
+import fs from 'fs';
+import * as core from '@actions/core';
 import { BuildParameters } from '.';
-import { SetupMac, SetupWindows } from './platform-setup/';
+import { SetupMac, SetupWindows, SetupAndroid } from './platform-setup/';
 import ValidateWindows from './platform-validation/validate-windows';
 
 class PlatformSetup {
   static async setup(buildParameters: BuildParameters, actionFolder: string) {
+    PlatformSetup.SetupShared(buildParameters, actionFolder);
+
     switch (process.platform) {
       case 'win32':
         ValidateWindows.validate(buildParameters);
@@ -16,6 +20,22 @@
       // Add other baseOS's here
     }
   }
+
+  private static SetupShared(buildParameters: BuildParameters, actionFolder: string) {
+    const servicesConfigPath = `${actionFolder}/unity-config/services-config.json`;
+    const servicesConfigPathTemplate = `${servicesConfigPath}.template`;
+    if (!fs.existsSync(servicesConfigPathTemplate)) {
+      core.error(`Missing services config ${servicesConfigPathTemplate}`);
+
+      return;
+    }
+
+    let servicesConfig = fs.readFileSync(servicesConfigPathTemplate).toString();
+    servicesConfig = servicesConfig.replace('%URL%', buildParameters.unityLicensingServer);
+    fs.writeFileSync(servicesConfigPath, servicesConfig);
+
+    SetupAndroid.setup(buildParameters);
+  }
 }
 
 export default PlatformSetup;
@@ -1,4 +1,5 @@
 import SetupWindows from './setup-windows';
 import SetupMac from './setup-mac';
+import SetupAndroid from './setup-android';
 
-export { SetupWindows, SetupMac };
+export { SetupWindows, SetupMac, SetupAndroid };
src/model/platform-setup/setup-android.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
+import fs from 'fs';
+import path from 'path';
+import { BuildParameters } from '..';
+
+class SetupAndroid {
+  public static async setup(buildParameters: BuildParameters) {
+    const { targetPlatform, androidKeystoreBase64, androidKeystoreName, projectPath } = buildParameters;
+
+    if (targetPlatform === 'Android' && androidKeystoreBase64 !== '' && androidKeystoreName !== '') {
+      SetupAndroid.setupAndroidRun(androidKeystoreBase64, androidKeystoreName, projectPath);
+    }
+  }
+
+  private static setupAndroidRun(androidKeystoreBase64: string, androidKeystoreName: string, projectPath: string) {
+    const decodedKeystore = Buffer.from(androidKeystoreBase64, 'base64').toString('binary');
+    const githubWorkspace = process.env.GITHUB_WORKSPACE || '';
+    fs.writeFileSync(path.join(githubWorkspace, projectPath, androidKeystoreName), decodedKeystore, 'binary');
+  }
+}
+
+export default SetupAndroid;
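For context on setupAndroidRun above: the keystore arrives as a base64 string, is decoded to a 'binary' string, and is written back out with the 'binary' encoding so the original bytes land on disk unchanged. A standalone TypeScript sketch of the same step (the file names and the environment variable below are hypothetical; unity-builder derives its paths from GITHUB_WORKSPACE, projectPath and androidKeystoreName):

import fs from 'fs';
import path from 'path';

// Sketch: restore a binary keystore from a base64-encoded secret.
// Decoding straight to a Buffer and writing that Buffer produces the same
// bytes as the binary-string round trip used in the diff above.
function writeKeystore(keystoreBase64: string, destinationDir: string, fileName: string): string {
  const keystorePath = path.join(destinationDir, fileName);
  fs.writeFileSync(keystorePath, Buffer.from(keystoreBase64, 'base64'));

  return keystorePath;
}

// Example: ANDROID_KEYSTORE_BASE64 would hold the output of `base64 user.keystore`.
writeKeystore(process.env.ANDROID_KEYSTORE_BASE64 ?? '', '.', 'user.keystore');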
@@ -9,9 +9,11 @@ class SetupMac {
   public static async setup(buildParameters: BuildParameters, actionFolder: string) {
     const unityEditorPath = `/Applications/Unity/Hub/Editor/${buildParameters.editorVersion}/Unity.app/Contents/MacOS/Unity`;
 
-    // Only install unity if the editor doesn't already exist
-    if (!fs.existsSync(unityEditorPath)) {
+    if (!fs.existsSync(this.unityHubPath)) {
       await SetupMac.installUnityHub();
+    }
+
+    if (!fs.existsSync(unityEditorPath)) {
       await SetupMac.installUnity(buildParameters);
     }
 
@@ -32,11 +34,31 @@
 
   private static async installUnity(buildParameters: BuildParameters, silent = false) {
     const unityChangeset = await getUnityChangeset(buildParameters.editorVersion);
-    const command = `${this.unityHubPath} -- --headless install \
+    let command = `${this.unityHubPath} -- --headless install \
                     --version ${buildParameters.editorVersion} \
-                    --changeset ${unityChangeset.changeset} \
-                    --module mac-il2cpp \
-                    --childModules`;
+                    --changeset ${unityChangeset.changeset} `;
+
+    switch (buildParameters.targetPlatform) {
+      case 'iOS':
+        command += `--module ios `;
+        break;
+      case 'tvOS':
+        command += '--module tvos ';
+        break;
+      case 'StandaloneOSX':
+        command += `--module mac-il2cpp `;
+        break;
+      case 'Android':
+        command += `--module android `;
+        break;
+      case 'WebGL':
+        command += '--module webgl ';
+        break;
+      default:
+        throw new Error(`Unsupported module for target platform: ${buildParameters.targetPlatform}.`);
+    }
+
+    command += `--childModules`;
 
     // Ignoring return code because the log seems to overflow the internal buffer which triggers
     // a false error
@@ -52,6 +74,7 @@
     process.env.ACTION_FOLDER = actionFolder;
     process.env.UNITY_VERSION = buildParameters.editorVersion;
     process.env.UNITY_SERIAL = buildParameters.unitySerial;
+    process.env.UNITY_LICENSING_SERVER = buildParameters.unityLicensingServer;
     process.env.PROJECT_PATH = buildParameters.projectPath;
     process.env.BUILD_TARGET = buildParameters.targetPlatform;
     process.env.BUILD_NAME = buildParameters.buildName;
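The switch added to installUnity maps the build's targetPlatform to a single Unity Hub module argument and fails fast on anything it does not recognise. The same mapping can be sketched as a lookup table in TypeScript (an alternative illustration only, not what this branch actually does):

// Sketch: the targetPlatform -> Unity Hub module mapping from the switch
// above, expressed as a lookup table.
const moduleByTargetPlatform: Record<string, string> = {
  iOS: 'ios',
  tvOS: 'tvos',
  StandaloneOSX: 'mac-il2cpp',
  Android: 'android',
  WebGL: 'webgl',
};

function moduleArgumentFor(targetPlatform: string): string {
  const module = moduleByTargetPlatform[targetPlatform];
  if (module === undefined) {
    throw new Error(`Unsupported module for target platform: ${targetPlatform}.`);
  }

  return `--module ${module} `;
}

// moduleArgumentFor('StandaloneOSX') -> '--module mac-il2cpp '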
yarn.lock (18 changed lines)
@@ -2937,9 +2937,9 @@ html-escaper@^2.0.0:
   integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==
 
 http-cache-semantics@^4.0.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390"
-  integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a"
+  integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==
 
 http-proxy-agent@^4.0.1:
   version "4.0.1"
@@ -3825,9 +3825,9 @@ json5@2.x, json5@^2.1.2:
     minimist "^1.2.5"
 
 json5@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz"
-  integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593"
+  integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==
   dependencies:
     minimist "^1.2.0"
 
@@ -4049,9 +4049,9 @@ minimatch@^3.0.4:
     brace-expansion "^1.1.7"
 
 minimist@^1.2.0, minimist@^1.2.5:
-  version "1.2.6"
-  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
-  integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
+  version "1.2.7"
+  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18"
+  integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==
 
 minipass@^3.0.0:
   version "3.1.6"