Mirror of https://github.com/game-ci/unity-builder.git (synced 2026-01-29 20:39:07 +08:00)
Compare commits
19 Commits
579daa93a6
1adfabf512
cdee7d1d9a
fb633660a4
c15f955669
2684be2dee
db2d8b6dbd
c068855899
5b8bf1c8b7
47b25cf3b1
36891ec921
2652cb78a7
f77696efae
4556fc4ff1
d066039c26
8abce48a48
5ae03dfef6
4be5d2ddf4
a61c02481f
@@ -1,10 +1,15 @@
{
  "plugins": ["jest", "@typescript-eslint", "prettier", "unicorn"],
- "extends": ["plugin:unicorn/recommended", "plugin:github/recommended", "prettier"],
+ "extends": ["plugin:unicorn/recommended", "plugin:github/recommended", "plugin:prettier/recommended"],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
-   "ecmaVersion": 9,
-   "sourceType": "module"
+   "ecmaVersion": 2020,
+   "sourceType": "module",
+   "extraFileExtensions": [".mjs"],
+   "ecmaFeatures": {
+     "impliedStrict": true
+   },
+   "project": "./tsconfig.json"
  },
  "env": {
    "node": true,
@@ -12,9 +17,44 @@
    "jest/globals": true
  },
  "rules": {
    // Error out for code formatting errors
    "prettier/prettier": "error",
    "import/no-extraneous-dependencies": 0,
    // Namespaces or sometimes needed
    "import/no-namespace": "off",
    "no-undef": "off" // TODO: REMOVE THIS LINE WHEN UPDATING ESLINT RULES
    // Properly format comments
    "spaced-comment": ["error", "always"],
    "lines-around-comment": [
      "error",
      {
        "beforeBlockComment": true,
        "beforeLineComment": true,
        "allowBlockStart": true,
        "allowObjectStart": true,
        "allowArrayStart": true,
        "allowClassStart": true,
        "ignorePattern": "pragma|ts-ignore"
      }
    ],
    // Mandatory spacing
    "padding-line-between-statements": [
      "error",
      { "blankLine": "always", "prev": "*", "next": "return" },
      { "blankLine": "always", "prev": "directive", "next": "*" },
      { "blankLine": "any", "prev": "directive", "next": "directive" }
    ],
    // Enforce camelCase
    "camelcase": "error",
    // Allow forOfStatements
    "no-restricted-syntax": ["error", "ForInStatement", "LabeledStatement", "WithStatement"],
    // Continue is viable in forOf loops in generators
    "no-continue": "off",
    // From experience, named exports are almost always desired. I got tired of this rule
    "import/prefer-default-export": "off",
    // Unused vars are useful to keep method signatures consistent and documented
    "@typescript-eslint/no-unused-vars": "off",
    // For this project only use kebab-case
    "unicorn/filename-case": ["error", { "cases": { "kebabCase": true } }],
    // Allow Array.from(set) mitigate TS2569 which would require '--downlevelIteration'
    "unicorn/prefer-spread": "off"
  }
}
.github/FUNDING.yml (vendored, 4 lines changed)
@@ -1,8 +1,8 @@
# These are supported funding model platforms

- github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
+ github: game-ci
patreon: # Replace with a single Patreon username
- open_collective: game-ci
+ open_collective: # replace with a single OpenCollective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
.github/workflows/cats.yml (vendored, 2 lines changed)
@@ -11,6 +11,6 @@ jobs:
    name: A cat for your effort!
    runs-on: ubuntu-latest
    steps:
-     - uses: ruairidhwm/action-cats@1.0.1
+     - uses: ruairidhwm/action-cats@1.0.2
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/cleanup.yml (vendored, 23 lines changed)
@@ -12,3 +12,26 @@ jobs:
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          expire-in: 21 days
+  cleanupCloudRunner:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        if: github.event.event_type != 'pull_request_target'
+        with:
+          lfs: true
+      - uses: actions/setup-node@v2
+        with:
+          node-version: 12.x
+      - run: yarn
+      - run: yarn run cli --help
+        env:
+          AWS_REGION: eu-west-2
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          AWS_DEFAULT_REGION: eu-west-2
+      - run: yarn run cli -m aws-list-all
+        env:
+          AWS_REGION: eu-west-2
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          AWS_DEFAULT_REGION: eu-west-2
.github/workflows/cloud-runner-aws-pipeline.yml (vendored, deleted, 111 lines)
@@ -1,111 +0,0 @@
|
||||
name: Cloud Runner - AWS Tests
|
||||
|
||||
on:
|
||||
push: { branches: [main, cloud-runner-develop] }
|
||||
|
||||
env:
|
||||
GKE_ZONE: 'us-central1'
|
||||
GKE_REGION: 'us-central1'
|
||||
GKE_PROJECT: 'unitykubernetesbuilder'
|
||||
GKE_CLUSTER: 'unity-builder-cluster'
|
||||
GCP_LOGGING: true
|
||||
GCP_PROJECT: unitykubernetesbuilder
|
||||
AWS_REGION: eu-west-2
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: eu-west-2
|
||||
AWS_BASE_STACK_NAME: game-ci-github-pipelines
|
||||
CLOUD_RUNNER_BRANCH: ${{ github.ref }}
|
||||
CLOUD_RUNNER_TESTS: true
|
||||
DEBUG: true
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
|
||||
jobs:
|
||||
buildForAllPlatforms:
|
||||
name: AWS Fargate Build
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
projectPath:
|
||||
- test-project
|
||||
unityVersion:
|
||||
# - 2019.2.11f1
|
||||
- 2019.3.15f1
|
||||
targetPlatform:
|
||||
#- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
|
||||
- StandaloneWindows64 # Build a Windows 64-bit standalone.
|
||||
- StandaloneLinux64 # Build a Linux 64-bit standalone.
|
||||
#- iOS # Build an iOS player.
|
||||
#- Android # Build an Android .apk.
|
||||
#- WebGL # WebGL.
|
||||
# - StandaloneWindows # Build a Windows standalone.
|
||||
# - WSAPlayer # Build an Windows Store Apps player.
|
||||
# - PS4 # Build a PS4 Standalone.
|
||||
# - XboxOne # Build a Xbox One Standalone.
|
||||
# - tvOS # Build to Apple's tvOS platform.
|
||||
# - Switch # Build a Nintendo Switch player
|
||||
# steps
|
||||
steps:
|
||||
- name: Checkout (default)
|
||||
uses: actions/checkout@v2
|
||||
if: github.event.event_type != 'pull_request_target'
|
||||
with:
|
||||
lfs: true
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: eu-west-2
|
||||
- run: yarn
|
||||
- run: yarn run cli --help
|
||||
- run: yarn run test-i-aws
|
||||
env:
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
PROJECT_PATH: ${{ matrix.projectPath }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TARGET_PLATFORM: ${{ matrix.targetPlatform }}
|
||||
- uses: ./
|
||||
id: aws-fargate-unity-build
|
||||
timeout-minutes: 25
|
||||
with:
|
||||
cloudRunnerCluster: aws
|
||||
versioning: None
|
||||
projectPath: ${{ matrix.projectPath }}
|
||||
unityVersion: ${{ matrix.unityVersion }}
|
||||
targetPlatform: ${{ matrix.targetPlatform }}
|
||||
githubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||
postBuildSteps: |
|
||||
- name: upload
|
||||
image: amazon/aws-cli
|
||||
commands: |
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||
aws s3 ls
|
||||
aws s3 ls game-ci-test-storage
|
||||
ls /data/cache/$CACHE_KEY
|
||||
echo "/data/cache/$CACHE_KEY/build-$BUILD_GUID.zip s3://game-ci-test-storage/$CACHE_KEY/$BUILD_FILE"
|
||||
aws s3 cp /data/cache/$CACHE_KEY/build-$BUILD_GUID.zip s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.zip
|
||||
aws s3 cp /data/cache/$CACHE_KEY s3://game-ci-test-storage/$CACHE_KEY/$BUILD_GUID
|
||||
secrets:
|
||||
- name: awsAccessKeyId
|
||||
value: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
- name: awsSecretAccessKey
|
||||
value: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
- name: awsDefaultRegion
|
||||
value: eu-west-2
|
||||
- run: |
|
||||
aws s3 cp s3://game-ci-test-storage/${{ steps.aws-fargate-unity-build.outputs.BRANCH }}/build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.zip build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.zip
|
||||
ls
|
||||
###########################
|
||||
# Upload #
|
||||
###########################
|
||||
# download from cloud storage
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: AWS Build (${{ matrix.targetPlatform }})
|
||||
path: build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.zip
|
||||
retention-days: 14
|
||||
.github/workflows/cloud-runner-k8s-pipeline.yml (vendored, deleted, 125 lines)
@@ -1,125 +0,0 @@
|
||||
name: Cloud Runner - K8s Tests
|
||||
|
||||
on:
|
||||
push: { branches: [cloud-runner-develop] }
|
||||
# push: { branches: [main] }
|
||||
# pull_request:
|
||||
# paths-ignore:
|
||||
# - '.github/**'
|
||||
|
||||
env:
|
||||
GKE_ZONE: 'us-central1'
|
||||
GKE_REGION: 'us-central1'
|
||||
GKE_PROJECT: 'unitykubernetesbuilder'
|
||||
GKE_CLUSTER: 'game-ci-github-pipelines'
|
||||
GCP_LOGGING: true
|
||||
GCP_PROJECT: unitykubernetesbuilder
|
||||
GCP_LOG_FILE: ${{ github.workspace }}/cloud-runner-logs.txt
|
||||
AWS_REGION: eu-west-2
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: eu-west-2
|
||||
AWS_BASE_STACK_NAME: game-ci-github-pipelines
|
||||
CLOUD_RUNNER_BRANCH: ${{ github.ref }}
|
||||
CLOUD_RUNNER_TESTS: true
|
||||
DEBUG: true
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
|
||||
jobs:
|
||||
k8sBuilds:
|
||||
name: K8s (GKE Autopilot) build for ${{ matrix.targetPlatform }} on version ${{ matrix.unityVersion }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
unityVersion:
|
||||
# - 2019.2.11f1
|
||||
- 2019.3.15f1
|
||||
targetPlatform:
|
||||
# - StandaloneWindows64
|
||||
- StandaloneLinux64
|
||||
steps:
|
||||
###########################
|
||||
# Checkout #
|
||||
###########################
|
||||
- uses: actions/checkout@v2
|
||||
if: github.event.event_type != 'pull_request_target'
|
||||
with:
|
||||
lfs: true
|
||||
|
||||
###########################
|
||||
# Setup #
|
||||
###########################
|
||||
- uses: google-github-actions/setup-gcloud@master
|
||||
with:
|
||||
version: '288.0.0'
|
||||
service_account_email: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_EMAIL }}
|
||||
service_account_key: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
|
||||
- name: Get GKE cluster credentials
|
||||
run: gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
|
||||
|
||||
###########################
|
||||
# Cloud Runner Test Suite #
|
||||
###########################
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 12.x
|
||||
- run: yarn
|
||||
- run: yarn run cli --help
|
||||
- name: Cloud Runner Test Suite
|
||||
run: yarn run test-i-k8s --detectOpenHandles --forceExit
|
||||
env:
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
PROJECT_PATH: ${{ matrix.projectPath }}
|
||||
TARGET_PLATFORM: ${{ matrix.targetPlatform }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
KUBE_CONFIG: ${{ steps.read-base64.outputs.base64 }}
|
||||
unityVersion: ${{ matrix.unityVersion }}
|
||||
|
||||
###########################
|
||||
# Cloud Runner Build Test #
|
||||
###########################
|
||||
- name: Cloud Runner Build Test
|
||||
uses: ./
|
||||
id: k8s-unity-build
|
||||
timeout-minutes: 30
|
||||
with:
|
||||
cloudRunnerCluster: k8s
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
targetPlatform: ${{ matrix.targetPlatform }}
|
||||
kubeConfig: ${{ steps.read-base64.outputs.base64 }}
|
||||
githubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||
projectPath: test-project
|
||||
unityVersion: ${{ matrix.unityVersion }}
|
||||
versioning: None
|
||||
postBuildSteps: |
|
||||
- name: upload
|
||||
image: amazon/aws-cli
|
||||
commands: |
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||
aws s3 ls
|
||||
aws s3 ls game-ci-test-storage
|
||||
ls /data/cache/$BRANCH
|
||||
echo "/data/cache/$BRANCH/build-$BUILD_GUID.zip s3://game-ci-test-storage/$BRANCH/$BUILD_FILE"
|
||||
aws s3 cp /data/cache/$BRANCH/build-$BUILD_GUID.zip s3://game-ci-test-storage/$BRANCH/build-$BUILD_GUID.zip
|
||||
secrets:
|
||||
- name: awsAccessKeyId
|
||||
value: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
- name: awsSecretAccessKey
|
||||
value: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
- name: awsDefaultRegion
|
||||
value: eu-west-2
|
||||
- run: |
|
||||
aws s3 cp s3://game-ci-test-storage/${{ steps.k8s-unity-build.outputs.BRANCH }}/build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.zip build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.zip
|
||||
ls
|
||||
###########################
|
||||
# Upload #
|
||||
###########################
|
||||
# download from cloud storage
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: K8s Build (${{ matrix.targetPlatform }})
|
||||
path: build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.zip
|
||||
retention-days: 14
|
||||
.github/workflows/cloud-runner-pipeline.yml (vendored, new file, 218 lines)
@@ -0,0 +1,218 @@
|
||||
name: Cloud Runner
|
||||
|
||||
on:
|
||||
push: { branches: [cloud-runner-develop, main] }
|
||||
# push: { branches: [main] }
|
||||
# pull_request:
|
||||
# paths-ignore:
|
||||
# - '.github/**'
|
||||
|
||||
env:
|
||||
GKE_ZONE: 'us-central1'
|
||||
GKE_REGION: 'us-central1'
|
||||
GKE_PROJECT: 'unitykubernetesbuilder'
|
||||
GKE_CLUSTER: 'game-ci-github-pipelines'
|
||||
GCP_LOGGING: true
|
||||
GCP_PROJECT: unitykubernetesbuilder
|
||||
GCP_LOG_FILE: ${{ github.workspace }}/cloud-runner-logs.txt
|
||||
AWS_REGION: eu-west-2
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: eu-west-2
|
||||
AWS_BASE_STACK_NAME: game-ci-github-pipelines
|
||||
CLOUD_RUNNER_BRANCH: ${{ github.ref }}
|
||||
CLOUD_RUNNER_TESTS: true
|
||||
DEBUG: true
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
|
||||
jobs:
|
||||
awsBuild:
|
||||
name: AWS Fargate Build
|
||||
if: github.event.pull_request.draft == false
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
projectPath:
|
||||
- test-project
|
||||
unityVersion:
|
||||
# - 2019.2.11f1
|
||||
- 2019.3.15f1
|
||||
targetPlatform:
|
||||
#- StandaloneOSX # Build a macOS standalone (Intel 64-bit).
|
||||
- StandaloneWindows64 # Build a Windows 64-bit standalone.
|
||||
- StandaloneLinux64 # Build a Linux 64-bit standalone.
|
||||
- WebGL # WebGL.
|
||||
#- iOS # Build an iOS player.
|
||||
#- Android # Build an Android .apk.
|
||||
# - StandaloneWindows # Build a Windows standalone.
|
||||
# - WSAPlayer # Build an Windows Store Apps player.
|
||||
# - PS4 # Build a PS4 Standalone.
|
||||
# - XboxOne # Build a Xbox One Standalone.
|
||||
# - tvOS # Build to Apple's tvOS platform.
|
||||
# - Switch # Build a Nintendo Switch player
|
||||
# steps
|
||||
steps:
|
||||
- name: Checkout (default)
|
||||
uses: actions/checkout@v2
|
||||
if: github.event.event_type != 'pull_request_target'
|
||||
with:
|
||||
lfs: true
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: eu-west-2
|
||||
- run: yarn
|
||||
- run: yarn run cli --help
|
||||
- run: yarn run test "caching"
|
||||
- run: yarn run test-i-aws
|
||||
env:
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
PROJECT_PATH: ${{ matrix.projectPath }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TARGET_PLATFORM: ${{ matrix.targetPlatform }}
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
- uses: ./
|
||||
id: aws-fargate-unity-build
|
||||
timeout-minutes: 25
|
||||
with:
|
||||
cloudRunnerCluster: aws
|
||||
versioning: None
|
||||
projectPath: ${{ matrix.projectPath }}
|
||||
unityVersion: ${{ matrix.unityVersion }}
|
||||
targetPlatform: ${{ matrix.targetPlatform }}
|
||||
githubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||
postBuildSteps: |
|
||||
- name: upload
|
||||
image: amazon/aws-cli
|
||||
commands: |
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||
aws s3 ls
|
||||
aws s3 ls game-ci-test-storage
|
||||
ls /data/cache/$CACHE_KEY
|
||||
ls /data/cache/$CACHE_KEY/build
|
||||
aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.tar
|
||||
secrets:
|
||||
- name: awsAccessKeyId
|
||||
value: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
- name: awsSecretAccessKey
|
||||
value: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
- name: awsDefaultRegion
|
||||
value: eu-west-2
|
||||
- run: |
|
||||
aws s3 cp s3://game-ci-test-storage/${{ steps.aws-fargate-unity-build.outputs.CACHE_KEY }}/build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar
|
||||
ls
|
||||
- run: yarn run cli -m aws-garbage-collect
|
||||
###########################
|
||||
# Upload #
|
||||
###########################
|
||||
# download from cloud storage
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: AWS Build (${{ matrix.targetPlatform }})
|
||||
path: build-${{ steps.aws-fargate-unity-build.outputs.BUILD_GUID }}.tar
|
||||
retention-days: 14
|
||||
k8sBuilds:
|
||||
name: K8s (GKE Autopilot) build for ${{ matrix.targetPlatform }} on version ${{ matrix.unityVersion }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
unityVersion:
|
||||
# - 2019.2.11f1
|
||||
- 2019.3.15f1
|
||||
targetPlatform:
|
||||
# - StandaloneWindows64
|
||||
- StandaloneLinux64
|
||||
steps:
|
||||
###########################
|
||||
# Checkout #
|
||||
###########################
|
||||
- uses: actions/checkout@v2
|
||||
if: github.event.event_type != 'pull_request_target'
|
||||
with:
|
||||
lfs: true
|
||||
|
||||
###########################
|
||||
# Setup #
|
||||
###########################
|
||||
- uses: google-github-actions/setup-gcloud@v0
|
||||
with:
|
||||
version: '288.0.0'
|
||||
service_account_email: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_EMAIL }}
|
||||
service_account_key: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_KEY }}
|
||||
- name: Get GKE cluster credentials
|
||||
run: gcloud container clusters get-credentials $GKE_CLUSTER --zone $GKE_ZONE --project $GKE_PROJECT
|
||||
|
||||
###########################
|
||||
# Cloud Runner Test Suite #
|
||||
###########################
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 12.x
|
||||
- run: yarn
|
||||
- run: yarn run cli --help
|
||||
- run: yarn run test "caching"
|
||||
- name: Cloud Runner Test Suite
|
||||
run: yarn run test-i-k8s --detectOpenHandles --forceExit
|
||||
env:
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
PROJECT_PATH: ${{ matrix.projectPath }}
|
||||
TARGET_PLATFORM: ${{ matrix.targetPlatform }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
KUBE_CONFIG: ${{ steps.read-base64.outputs.base64 }}
|
||||
unityVersion: ${{ matrix.unityVersion }}
|
||||
cloudRunnerTests: true
|
||||
versioning: None
|
||||
|
||||
###########################
|
||||
# Cloud Runner Build Test #
|
||||
###########################
|
||||
- name: Cloud Runner Build Test
|
||||
uses: ./
|
||||
id: k8s-unity-build
|
||||
timeout-minutes: 30
|
||||
with:
|
||||
cloudRunnerCluster: k8s
|
||||
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
|
||||
targetPlatform: ${{ matrix.targetPlatform }}
|
||||
kubeConfig: ${{ steps.read-base64.outputs.base64 }}
|
||||
githubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||
projectPath: test-project
|
||||
unityVersion: ${{ matrix.unityVersion }}
|
||||
versioning: None
|
||||
postBuildSteps: |
|
||||
- name: upload
|
||||
image: amazon/aws-cli
|
||||
commands: |
|
||||
aws configure set aws_access_key_id $AWS_ACCESS_KEY_ID --profile default
|
||||
aws configure set aws_secret_access_key $AWS_SECRET_ACCESS_KEY --profile default
|
||||
aws configure set region $AWS_DEFAULT_REGION --profile default
|
||||
aws s3 ls
|
||||
aws s3 ls game-ci-test-storage
|
||||
ls /data/cache/$CACHE_KEY
|
||||
aws s3 cp /data/cache/$CACHE_KEY/build/build-$BUILD_GUID.tar s3://game-ci-test-storage/$CACHE_KEY/build-$BUILD_GUID.tar
|
||||
secrets:
|
||||
- name: awsAccessKeyId
|
||||
value: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
- name: awsSecretAccessKey
|
||||
value: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
- name: awsDefaultRegion
|
||||
value: eu-west-2
|
||||
- run: |
|
||||
aws s3 cp s3://game-ci-test-storage/${{ steps.k8s-unity-build.outputs.CACHE_KEY }}/build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar
|
||||
ls
|
||||
###########################
|
||||
# Upload #
|
||||
###########################
|
||||
# download from cloud storage
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: K8s Build (${{ matrix.targetPlatform }})
|
||||
path: build-${{ steps.k8s-unity-build.outputs.BUILD_GUID }}.tar
|
||||
retention-days: 14
|
||||
.github/workflows/integrity-check.yml (vendored, 6 lines changed)
@@ -12,10 +12,10 @@ jobs:
    name: Tests
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@v2
-     - uses: actions/setup-node@v2
+     - uses: actions/checkout@v3
+     - uses: actions/setup-node@v3
        with:
-         node-version: 12.x
+         node-version: '16'
      - run: yarn
      - run: yarn lint
      - run: yarn test --coverage
@@ -2,5 +2,6 @@
  "semi": true,
  "singleQuote": true,
  "trailingComma": "all",
- "printWidth": 120
+ "printWidth": 120,
+ "proseWrap": "always"
}
@@ -4,13 +4,12 @@

#### Code of Conduct

- This repository has adopted the Contributor Covenant as it's
- Code of Conduct. It is expected that participants adhere to it.
+ This repository has adopted the Contributor Covenant as it's Code of Conduct. It is expected that participants adhere to
+ it.

#### Proposing a Change

- If you are unsure about whether or not a change is desired,
- you can create an issue. This is useful because it creates
+ If you are unsure about whether or not a change is desired, you can create an issue. This is useful because it creates
the possibility for a discussion that's visible to everyone.

When fixing a bug it is fine to submit a pull request right away.
@@ -34,6 +33,11 @@ Please note that commit hooks will run automatically to perform some tasks;
- run tests
- build distributable files

+ #### Windows users
+
+ Make sure your editor and terminal that run the tests are set to `Powershell 7` or above with
+ `Git's Unix tools for Windows` installed. Some tests require you to be able to run `sh` and other unix commands.

#### License

By contributing to this repository, you agree that your contributions will be licensed under its MIT license.
action.yml (44 lines changed)
@@ -98,6 +98,10 @@ inputs:
    required: false
    default: ''
    description: 'Run a pre build job after the repository setup but before the build job (in yaml format with the keys image, secrets (name, value object array), command line string)'
  customJobHooks:
    required: false
    default: ''
    description: 'Specify custom commands and trigger hooks (injects commands into jobs)'
  customJob:
    required: false
    default: ''
@@ -111,17 +115,29 @@ inputs:
    required: false
    description: 'Either local, k8s or aws can be used to run builds on a remote cluster. Additional parameters must be configured.'
  cloudRunnerCpu:
-   default: '1.0'
+   default: ''
    required: false
    description: 'Amount of CPU time to assign the remote build container'
  cloudRunnerMemory:
    default: '750M'
    required: false
    description: 'Amount of memory to assign the remote build container'
  githubToken:
    default: ''
    required: false
    description: 'GitHub token for cloning, only needed when kubeconfig is used.'
    description: 'Amount of memory to assign the remote build container'
  cachePushOverrideCommand:
    default: ''
    required: false
    description: 'A command run every time a file is pushed to cache, formatted with input file path and remote cache path'
  cachePullOverrideCommand:
    default: ''
    required: false
    description: 'A command run every time before a file is being pulled from cache, formatted with request cache file and destination path'
  readInputFromOverrideList:
    default: ''
    required: false
    description: 'Comma separated list of input value names to read from "input override command"'
  readInputOverrideCommand:
    default: ''
    required: false
    description: 'Extend game ci by specifying a command to execute to pull input from external source e.g cloud provider secret managers'
  kubeConfig:
    default: ''
    required: false
@@ -130,10 +146,26 @@ inputs:
    default: ''
    required: false
    description: 'Supply a Persistent Volume Claim name to use for the Unity build.'
  kubeStorageClass:
    default: ''
    required: false
    description: 'Kubernetes storage class to use for cloud runner jobs, leave empty to install rook cluster.'
  kubeVolumeSize:
    default: '5Gi'
    required: false
    description: 'Amount of disc space to assign the Kubernetes Persistent Volume'
  cacheKey:
    default: ''
    required: false
    description: 'Cache key to indicate bucket for cache'
  checkDependencyHealthOverride:
    default: ''
    required: false
    description: 'Use to specify a way to check depdency services health to enable resilient self-starting jobs'
  startDependenciesOverride:
    default: ''
    required: false
    description: 'Use to specify a way to start depdency services health to enable resilient self-starting jobs'
outputs:
  volume:
    description: 'The Persistent Volume (PV) where the build artifacts have been stored by Kubernetes'
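The new override inputs above pair with the secrets scripts added to package.json further down in this diff (gcp-secrets-cli, aws-secrets-cli). As a rough, hypothetical sketch of how a caller workflow might wire them up (the step below is illustrative and not part of this diff; the action reference and values are assumptions):

  - uses: game-ci/unity-builder@main   # hypothetical reference to this action
    with:
      cloudRunnerCluster: aws
      # Pull these values from an external secret manager instead of passing them as plain inputs;
      # "{0}" is replaced with each name listed in readInputFromOverrideList, mirroring the package.json scripts.
      readInputFromOverrideList: UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD
      readInputOverrideCommand: gcloud secrets versions access 1 --secret="{0}"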
@@ -1,13 +0,0 @@
Cloud Runner platform selected AWS
Cloud Runner is running in custom job mode
AWS Region: eu-west-2
Parsing build steps:
- name: 'step 1'
  image: 'alpine'
  commands: 'printenv'
  secrets:
    - name: 'testSecretName'
      value: 'testSecretValue'
game-ci stack does not exist (["game-ci-github-automation-424-linux64-a9hz-cleanup","game-ci-github-automation-423-linux64-v34g-cleanup","game-ci-github-automation-423-linux64-v34g","game-ci-github-automation-422-linux64-7x6i-cleanup","game-ci-github-automation-422-linux64-7x6i","game-ci-github-automation-414-linux64-j21p-cleanup","game-ci-github-automation-414-linux64-j21p","game-ci-github-automation-413-linux64-tcih-cleanup","game-ci-github-automation-413-linux64-tcih","game-ci-github-automation-411-linux64-0s69-cleanup","game-ci-github-automation-411-linux64-0s69","game-ci-github-automation-410-linux64-1tli-cleanup","game-ci-github-automation-410-linux64-1tli","game-ci-github-automation-408-linux64-8pbw-cleanup","game-ci-github-automation-408-linux64-8pbw","game-ci-github-automation-407-linux64-21un-cleanup","game-ci-github-automation-407-linux64-21un","game-ci-github-automation-406-linux64-dizb-cleanup","game-ci-github-automation-406-linux64-dizb","game-ci-github-automation-405-linux64-9xj5-cleanup","game-ci-github-automation-405-linux64-9xj5","game-ci-github-automation-402-linux64-0bym-cleanup","game-ci-github-automation-402-linux64-0bym","game-ci-github-automation-400-linux64-arqv-cleanup","game-ci-github-automation-400-linux64-arqv","game-ci-github-automation-399-linux64-utkt-cleanup","game-ci-github-automation-399-linux64-utkt","game-ci-github-automation-397-linux64-xwfu-cleanup","game-ci-github-automation-397-linux64-xwfu","game-ci-github-automation-396-linux64-2g3q-cleanup","game-ci-github-automation-396-linux64-2g3q","game-ci-github-automation","game-ci-stack-integration-tests-390-linux64-mcdw-cleanup","game-ci-stack-integration-tests-390-linux64-mcdw","game-ci-stack-integration-tests-391-linux64-2arq-cleanup","game-ci-stack-integration-tests-391-linux64-2arq","game-ci-stack-integration-tests-390-linux64-awd0-cleanup","game-ci-stack-integration-tests-390-linux64-awd0","game-ci-stack-integration-tests"])
|
||||
created stack (version: eedce7440581ab2e8a80cee59e34ed64)
|
||||
dist/cloud-formations/base-setup.yml (vendored, deleted, 416 lines)
@@ -1,416 +0,0 @@
|
||||
AWSTemplateFormatVersion: '2010-09-09'
|
||||
Description: AWS Fargate cluster that can span public and private subnets. Supports
|
||||
public facing load balancers, private internal load balancers, and
|
||||
both internal and external service discovery namespaces.
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
Default: development
|
||||
Description: "Your deployment environment: DEV, QA , PROD"
|
||||
Version:
|
||||
Type: String
|
||||
Description: "hash of template"
|
||||
|
||||
# ContainerPort:
|
||||
# Type: Number
|
||||
# Default: 80
|
||||
# Description: What port number the application inside the docker container is binding to
|
||||
|
||||
|
||||
|
||||
Mappings:
|
||||
# Hard values for the subnet masks. These masks define
|
||||
# the range of internal IP addresses that can be assigned.
|
||||
# The VPC can have all IP's from 10.0.0.0 to 10.0.255.255
|
||||
# There are four subnets which cover the ranges:
|
||||
#
|
||||
# 10.0.0.0 - 10.0.0.255
|
||||
# 10.0.1.0 - 10.0.1.255
|
||||
# 10.0.2.0 - 10.0.2.255
|
||||
# 10.0.3.0 - 10.0.3.255
|
||||
|
||||
SubnetConfig:
|
||||
VPC:
|
||||
CIDR: '10.0.0.0/16'
|
||||
PublicOne:
|
||||
CIDR: '10.0.0.0/24'
|
||||
PublicTwo:
|
||||
CIDR: '10.0.1.0/24'
|
||||
|
||||
Resources:
|
||||
|
||||
|
||||
|
||||
# VPC in which containers will be networked.
|
||||
# It has two public subnets, and two private subnets.
|
||||
# We distribute the subnets across the first two available subnets
|
||||
# for the region, for high availability.
|
||||
VPC:
|
||||
Type: AWS::EC2::VPC
|
||||
Properties:
|
||||
EnableDnsSupport: true
|
||||
EnableDnsHostnames: true
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
|
||||
|
||||
EFSServerSecurityGroup:
|
||||
Type: AWS::EC2::SecurityGroup
|
||||
Properties:
|
||||
GroupName: "efs-server-endpoints"
|
||||
GroupDescription: Which client ip addrs are allowed to access EFS server
|
||||
VpcId: !Ref 'VPC'
|
||||
SecurityGroupIngress:
|
||||
- IpProtocol: tcp
|
||||
FromPort: 2049
|
||||
ToPort: 2049
|
||||
SourceSecurityGroupId: !Ref ContainerSecurityGroup
|
||||
#CidrIp: !FindInMap ['SubnetConfig', 'VPC', 'CIDR']
|
||||
# A security group for the containers we will run in Fargate.
|
||||
# Rules are added to this security group based on what ingress you
|
||||
# add for the cluster.
|
||||
ContainerSecurityGroup:
|
||||
Type: AWS::EC2::SecurityGroup
|
||||
Properties:
|
||||
GroupName: "task security group"
|
||||
GroupDescription: Access to the Fargate containers
|
||||
VpcId: !Ref 'VPC'
|
||||
# SecurityGroupIngress:
|
||||
# - IpProtocol: tcp
|
||||
# FromPort: !Ref ContainerPort
|
||||
# ToPort: !Ref ContainerPort
|
||||
# CidrIp: 0.0.0.0/0
|
||||
SecurityGroupEgress:
|
||||
- IpProtocol: -1
|
||||
FromPort: 2049
|
||||
ToPort: 2049
|
||||
CidrIp: "0.0.0.0/0"
|
||||
|
||||
|
||||
|
||||
|
||||
# Two public subnets, where containers can have public IP addresses
|
||||
PublicSubnetOne:
|
||||
Type: AWS::EC2::Subnet
|
||||
Properties:
|
||||
AvailabilityZone: !Select
|
||||
- 0
|
||||
- Fn::GetAZs: !Ref 'AWS::Region'
|
||||
VpcId: !Ref 'VPC'
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'PublicOne', 'CIDR']
|
||||
# MapPublicIpOnLaunch: true
|
||||
|
||||
PublicSubnetTwo:
|
||||
Type: AWS::EC2::Subnet
|
||||
Properties:
|
||||
AvailabilityZone: !Select
|
||||
- 1
|
||||
- Fn::GetAZs: !Ref 'AWS::Region'
|
||||
VpcId: !Ref 'VPC'
|
||||
CidrBlock: !FindInMap ['SubnetConfig', 'PublicTwo', 'CIDR']
|
||||
# MapPublicIpOnLaunch: true
|
||||
|
||||
|
||||
# Setup networking resources for the public subnets. Containers
|
||||
# in the public subnets have public IP addresses and the routing table
|
||||
# sends network traffic via the internet gateway.
|
||||
InternetGateway:
|
||||
Type: AWS::EC2::InternetGateway
|
||||
GatewayAttachement:
|
||||
Type: AWS::EC2::VPCGatewayAttachment
|
||||
Properties:
|
||||
VpcId: !Ref 'VPC'
|
||||
InternetGatewayId: !Ref 'InternetGateway'
|
||||
|
||||
# Attaching a Internet Gateway to route table makes it public.
|
||||
PublicRouteTable:
|
||||
Type: AWS::EC2::RouteTable
|
||||
Properties:
|
||||
VpcId: !Ref 'VPC'
|
||||
PublicRoute:
|
||||
Type: AWS::EC2::Route
|
||||
DependsOn: GatewayAttachement
|
||||
Properties:
|
||||
RouteTableId: !Ref 'PublicRouteTable'
|
||||
DestinationCidrBlock: '0.0.0.0/0'
|
||||
GatewayId: !Ref 'InternetGateway'
|
||||
|
||||
# Attaching a public route table makes a subnet public.
|
||||
PublicSubnetOneRouteTableAssociation:
|
||||
Type: AWS::EC2::SubnetRouteTableAssociation
|
||||
Properties:
|
||||
SubnetId: !Ref PublicSubnetOne
|
||||
RouteTableId: !Ref PublicRouteTable
|
||||
PublicSubnetTwoRouteTableAssociation:
|
||||
Type: AWS::EC2::SubnetRouteTableAssociation
|
||||
Properties:
|
||||
SubnetId: !Ref PublicSubnetTwo
|
||||
RouteTableId: !Ref PublicRouteTable
|
||||
|
||||
|
||||
|
||||
# ECS Resources
|
||||
ECSCluster:
|
||||
Type: AWS::ECS::Cluster
|
||||
|
||||
|
||||
|
||||
# A role used to allow AWS Autoscaling to inspect stats and adjust scaleable targets
|
||||
# on your AWS account
|
||||
AutoscalingRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [application-autoscaling.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: service-autoscaling
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- 'application-autoscaling:*'
|
||||
- 'cloudwatch:DescribeAlarms'
|
||||
- 'cloudwatch:PutMetricAlarm'
|
||||
- 'ecs:DescribeServices'
|
||||
- 'ecs:UpdateService'
|
||||
Resource: '*'
|
||||
|
||||
# This is an IAM role which authorizes ECS to manage resources on your
|
||||
# account on your behalf, such as updating your load balancer with the
|
||||
# details of where your containers are, so that traffic can reach your
|
||||
# containers.
|
||||
ECSRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [ecs.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: ecs-service
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
# Rules which allow ECS to attach network interfaces to instances
|
||||
# on your behalf in order for awsvpc networking mode to work right
|
||||
- 'ec2:AttachNetworkInterface'
|
||||
- 'ec2:CreateNetworkInterface'
|
||||
- 'ec2:CreateNetworkInterfacePermission'
|
||||
- 'ec2:DeleteNetworkInterface'
|
||||
- 'ec2:DeleteNetworkInterfacePermission'
|
||||
- 'ec2:Describe*'
|
||||
- 'ec2:DetachNetworkInterface'
|
||||
|
||||
# Rules which allow ECS to update load balancers on your behalf
|
||||
# with the information sabout how to send traffic to your containers
|
||||
- 'elasticloadbalancing:DeregisterInstancesFromLoadBalancer'
|
||||
- 'elasticloadbalancing:DeregisterTargets'
|
||||
- 'elasticloadbalancing:Describe*'
|
||||
- 'elasticloadbalancing:RegisterInstancesWithLoadBalancer'
|
||||
- 'elasticloadbalancing:RegisterTargets'
|
||||
Resource: '*'
|
||||
|
||||
# This is a role which is used by the ECS tasks themselves.
|
||||
ECSTaskExecutionRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [ecs-tasks.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: AmazonECSTaskExecutionRolePolicy
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
|
||||
# Allow the use of secret manager
|
||||
- 'secretsmanager:GetSecretValue'
|
||||
- 'kms:Decrypt'
|
||||
|
||||
# Allow the ECS Tasks to download images from ECR
|
||||
- 'ecr:GetAuthorizationToken'
|
||||
- 'ecr:BatchCheckLayerAvailability'
|
||||
- 'ecr:GetDownloadUrlForLayer'
|
||||
- 'ecr:BatchGetImage'
|
||||
|
||||
# Allow the ECS tasks to upload logs to CloudWatch
|
||||
- 'logs:CreateLogStream'
|
||||
- 'logs:PutLogEvents'
|
||||
Resource: '*'
|
||||
|
||||
DeleteCFNLambdaExecutionRole:
|
||||
Type: "AWS::IAM::Role"
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Version: "2012-10-17"
|
||||
Statement:
|
||||
- Effect: "Allow"
|
||||
Principal:
|
||||
Service: ["lambda.amazonaws.com"]
|
||||
Action: "sts:AssumeRole"
|
||||
Path: "/"
|
||||
Policies:
|
||||
- PolicyName: DeleteCFNLambdaExecutionRole
|
||||
PolicyDocument:
|
||||
Version: "2012-10-17"
|
||||
Statement:
|
||||
- Effect: "Allow"
|
||||
Action:
|
||||
- "logs:CreateLogGroup"
|
||||
- "logs:CreateLogStream"
|
||||
- "logs:PutLogEvents"
|
||||
Resource: "arn:aws:logs:*:*:*"
|
||||
- Effect: "Allow"
|
||||
Action:
|
||||
- "cloudformation:DeleteStack"
|
||||
- "kinesis:DeleteStream"
|
||||
- "secretsmanager:DeleteSecret"
|
||||
- "kinesis:DescribeStreamSummary"
|
||||
- "logs:DeleteLogGroup"
|
||||
- "logs:DeleteSubscriptionFilter"
|
||||
- "ecs:DeregisterTaskDefinition"
|
||||
- "lambda:DeleteFunction"
|
||||
- "lambda:InvokeFunction"
|
||||
- "events:RemoveTargets"
|
||||
- "events:DeleteRule"
|
||||
- "lambda:RemovePermission"
|
||||
Resource: "*"
|
||||
|
||||
### cloud watch to kinesis role
|
||||
|
||||
CloudWatchIAMRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: [logs.amazonaws.com]
|
||||
Action: ['sts:AssumeRole']
|
||||
Path: /
|
||||
Policies:
|
||||
- PolicyName: service-autoscaling
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- 'kinesis:PutRecord'
|
||||
Resource: '*'
|
||||
#####################EFS#####################
|
||||
|
||||
EfsFileStorage:
|
||||
Type: 'AWS::EFS::FileSystem'
|
||||
Properties:
|
||||
BackupPolicy:
|
||||
Status: ENABLED
|
||||
PerformanceMode: maxIO
|
||||
Encrypted: false
|
||||
|
||||
|
||||
FileSystemPolicy:
|
||||
Version: "2012-10-17"
|
||||
Statement:
|
||||
- Effect: "Allow"
|
||||
Action:
|
||||
- "elasticfilesystem:ClientMount"
|
||||
- "elasticfilesystem:ClientWrite"
|
||||
- "elasticfilesystem:ClientRootAccess"
|
||||
Principal:
|
||||
AWS: "*"
|
||||
|
||||
|
||||
MountTargetResource1:
|
||||
Type: AWS::EFS::MountTarget
|
||||
Properties:
|
||||
FileSystemId: !Ref EfsFileStorage
|
||||
SubnetId: !Ref PublicSubnetOne
|
||||
SecurityGroups:
|
||||
- !Ref EFSServerSecurityGroup
|
||||
|
||||
MountTargetResource2:
|
||||
Type: AWS::EFS::MountTarget
|
||||
Properties:
|
||||
FileSystemId: !Ref EfsFileStorage
|
||||
SubnetId: !Ref PublicSubnetTwo
|
||||
SecurityGroups:
|
||||
- !Ref EFSServerSecurityGroup
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Outputs:
|
||||
|
||||
EfsFileStorageId:
|
||||
Description: 'The connection endpoint for the database.'
|
||||
Value: !Ref EfsFileStorage
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:EfsFileStorageId
|
||||
ClusterName:
|
||||
Description: The name of the ECS cluster
|
||||
Value: !Ref 'ECSCluster'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ClusterName
|
||||
AutoscalingRole:
|
||||
Description: The ARN of the role used for autoscaling
|
||||
Value: !GetAtt 'AutoscalingRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:AutoscalingRole
|
||||
ECSRole:
|
||||
Description: The ARN of the ECS role
|
||||
Value: !GetAtt 'ECSRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ECSRole
|
||||
ECSTaskExecutionRole:
|
||||
Description: The ARN of the ECS role tsk execution role
|
||||
Value: !GetAtt 'ECSTaskExecutionRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ECSTaskExecutionRole
|
||||
|
||||
DeleteCFNLambdaExecutionRole:
|
||||
Description: Lambda execution role for cleaning up cloud formations
|
||||
Value: !GetAtt 'DeleteCFNLambdaExecutionRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:DeleteCFNLambdaExecutionRole
|
||||
|
||||
CloudWatchIAMRole:
|
||||
Description: The ARN of the CloudWatch role for subscription filter
|
||||
Value: !GetAtt 'CloudWatchIAMRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:CloudWatchIAMRole
|
||||
VpcId:
|
||||
Description: The ID of the VPC that this stack is deployed in
|
||||
Value: !Ref 'VPC'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:VpcId
|
||||
PublicSubnetOne:
|
||||
Description: Public subnet one
|
||||
Value: !Ref 'PublicSubnetOne'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:PublicSubnetOne
|
||||
PublicSubnetTwo:
|
||||
Description: Public subnet two
|
||||
Value: !Ref 'PublicSubnetTwo'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:PublicSubnetTwo
|
||||
|
||||
ContainerSecurityGroup:
|
||||
Description: A security group used to allow Fargate containers to receive traffic
|
||||
Value: !Ref 'ContainerSecurityGroup'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ContainerSecurityGroup
|
||||
dist/cloud-formations/task-def-formation.yml (vendored, deleted, 221 lines)
@@ -1,221 +0,0 @@
|
||||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: >-
|
||||
AWS Fargate cluster that can span public and private subnets. Supports public
|
||||
facing load balancers, private internal load balancers, and both internal and
|
||||
external service discovery namespaces.
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
Default: development
|
||||
Description: 'Your deployment environment: DEV, QA , PROD'
|
||||
ServiceName:
|
||||
Type: String
|
||||
Default: example
|
||||
Description: A name for the service
|
||||
ImageUrl:
|
||||
Type: String
|
||||
Default: nginx
|
||||
Description: >-
|
||||
The url of a docker image that contains the application process that will
|
||||
handle the traffic for this service
|
||||
ContainerPort:
|
||||
Type: Number
|
||||
Default: 80
|
||||
Description: What port number the application inside the docker container is binding to
|
||||
ContainerCpu:
|
||||
Type: Number
|
||||
Default: 1024
|
||||
Description: How much CPU to give the container. 1024 is 1 CPU
|
||||
ContainerMemory:
|
||||
Type: Number
|
||||
Default: 2048
|
||||
Description: How much memory in megabytes to give the container
|
||||
BUILDGUID:
|
||||
Type: String
|
||||
Default: ''
|
||||
Command:
|
||||
Type: String
|
||||
Default: 'ls'
|
||||
EntryPoint:
|
||||
Type: String
|
||||
Default: '/bin/sh'
|
||||
WorkingDirectory:
|
||||
Type: String
|
||||
Default: '/efsdata/'
|
||||
Role:
|
||||
Type: String
|
||||
Default: ''
|
||||
Description: >-
|
||||
(Optional) An IAM role to give the service's containers if the code within
|
||||
needs to access other AWS resources
|
||||
EFSMountDirectory:
|
||||
Type: String
|
||||
Default: '/efsdata'
|
||||
# template secrets p1 - input
|
||||
Mappings:
|
||||
SubnetConfig:
|
||||
VPC:
|
||||
CIDR: 10.0.0.0/16
|
||||
PublicOne:
|
||||
CIDR: 10.0.0.0/24
|
||||
PublicTwo:
|
||||
CIDR: 10.0.1.0/24
|
||||
Conditions:
|
||||
HasCustomRole: !Not
|
||||
- !Equals
|
||||
- Ref: Role
|
||||
- ''
|
||||
Resources:
|
||||
LogGroup:
|
||||
Type: 'AWS::Logs::LogGroup'
|
||||
Properties:
|
||||
LogGroupName: !Ref ServiceName
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
SubscriptionFilter:
|
||||
Type: 'AWS::Logs::SubscriptionFilter'
|
||||
Properties:
|
||||
FilterPattern: ''
|
||||
RoleArn:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:CloudWatchIAMRole'
|
||||
LogGroupName: !Ref ServiceName
|
||||
DestinationArn:
|
||||
'Fn::GetAtt':
|
||||
- KinesisStream
|
||||
- Arn
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
DependsOn:
|
||||
- LogGroup
|
||||
- KinesisStream
|
||||
KinesisStream:
|
||||
Type: 'AWS::Kinesis::Stream'
|
||||
Properties:
|
||||
Name: !Ref ServiceName
|
||||
ShardCount: 1
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
|
||||
# template secrets p2 - secret
|
||||
|
||||
TaskDefinition:
|
||||
Type: 'AWS::ECS::TaskDefinition'
|
||||
Properties:
|
||||
Family: !Ref ServiceName
|
||||
Cpu: !Ref ContainerCpu
|
||||
Memory: !Ref ContainerMemory
|
||||
NetworkMode: awsvpc
|
||||
Volumes:
|
||||
- Name: efs-data
|
||||
EFSVolumeConfiguration:
|
||||
FilesystemId:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:EfsFileStorageId'
|
||||
TransitEncryption: ENABLED
|
||||
RequiresCompatibilities:
|
||||
- FARGATE
|
||||
ExecutionRoleArn:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:ECSTaskExecutionRole'
|
||||
TaskRoleArn:
|
||||
'Fn::If':
|
||||
- HasCustomRole
|
||||
- !Ref Role
|
||||
- !Ref 'AWS::NoValue'
|
||||
ContainerDefinitions:
|
||||
- Name: !Ref ServiceName
|
||||
Cpu: !Ref ContainerCpu
|
||||
Memory: !Ref ContainerMemory
|
||||
Image: !Ref ImageUrl
|
||||
EntryPoint:
|
||||
Fn::Split:
|
||||
- ","
|
||||
- !Ref EntryPoint
|
||||
Command:
|
||||
Fn::Split:
|
||||
- ","
|
||||
- !Ref Command
|
||||
WorkingDirectory: !Ref WorkingDirectory
|
||||
Environment:
|
||||
- Name: ALLOW_EMPTY_PASSWORD
|
||||
Value: 'yes'
|
||||
# template - env vars
|
||||
MountPoints:
|
||||
- SourceVolume: efs-data
|
||||
ContainerPath: !Ref EFSMountDirectory
|
||||
ReadOnly: false
|
||||
Secrets:
|
||||
# template secrets p3 - container def
|
||||
LogConfiguration:
|
||||
LogDriver: awslogs
|
||||
Options:
|
||||
awslogs-group: !Ref ServiceName
|
||||
awslogs-region: !Ref 'AWS::Region'
|
||||
awslogs-stream-prefix: !Ref ServiceName
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
|
||||
DependsOn:
|
||||
- LogGroup
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
dabb0116-abe0-48a6-a8af-cf9111c879a5:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 270
|
||||
'y': 90
|
||||
z: 1
|
||||
embeds: []
|
||||
dependson:
|
||||
- aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
c6f18447-b879-4696-8873-f981b2cedd2b:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 270
|
||||
'y': 210
|
||||
z: 1
|
||||
embeds: []
|
||||
7f809e91-9e5d-4678-98c1-c5085956c480:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 60
|
||||
'y': 300
|
||||
z: 1
|
||||
embeds: []
|
||||
dependson:
|
||||
- aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
- c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
aece53ae-b82d-4267-bc16-ed964b05db27:
|
||||
size:
|
||||
width: 150
|
||||
height: 150
|
||||
position:
|
||||
x: 60
|
||||
'y': 90
|
||||
z: 1
|
||||
embeds: []
|
||||
4d2da56c-3643-46b8-aaee-e46e19f95fcc:
|
||||
source:
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
target:
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
z: 11
|
||||
14eb957b-f094-4653-93c4-77b2f851953c:
|
||||
source:
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
target:
|
||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
z: 12
|
||||
85c57444-e5bb-4230-bc85-e545cd4558f6:
|
||||
source:
|
||||
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
|
||||
target:
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
z: 13
|
||||
@@ -10,7 +10,10 @@ namespace UnityBuilderAction.Input
  {
    EditorUserBuildSettings.buildAppBundle = options["customBuildPath"].EndsWith(".aab");
    if (options.TryGetValue("androidKeystoreName", out string keystoreName) && !string.IsNullOrEmpty(keystoreName))
+   {
+     PlayerSettings.Android.useCustomKeystore = true;
      PlayerSettings.Android.keystoreName = keystoreName;
+   }
    if (options.TryGetValue("androidKeystorePass", out string keystorePass) && !string.IsNullOrEmpty(keystorePass))
      PlayerSettings.Android.keystorePass = keystorePass;
    if (options.TryGetValue("androidKeyaliasName", out string keyaliasName) && !string.IsNullOrEmpty(keyaliasName))
@@ -18,7 +21,19 @@ namespace UnityBuilderAction.Input
    if (options.TryGetValue("androidKeyaliasPass", out string keyaliasPass) && !string.IsNullOrEmpty(keyaliasPass))
      PlayerSettings.Android.keyaliasPass = keyaliasPass;
    if (options.TryGetValue("androidTargetSdkVersion", out string androidTargetSdkVersion) && !string.IsNullOrEmpty(androidTargetSdkVersion))
-     PlayerSettings.Android.targetSdkVersion = (AndroidSdkVersions) Enum.Parse(typeof(AndroidSdkVersions), androidTargetSdkVersion);
+   {
+     var targetSdkVersion = AndroidSdkVersions.AndroidApiLevelAuto;
+     try
+     {
+       targetSdkVersion =
+         (AndroidSdkVersions) Enum.Parse(typeof(AndroidSdkVersions), androidTargetSdkVersion);
+     }
+     catch
+     {
+       UnityEngine.Debug.Log("Failed to parse androidTargetSdkVersion! Fallback to AndroidApiLevelAuto");
+     }
+     PlayerSettings.Android.targetSdkVersion = targetSdkVersion;
+   }
    }
  }
}
dist/index.js (generated, vendored, 4100 lines changed): file diff suppressed because it is too large
dist/index.js.map (generated, vendored, 2 lines changed): file diff suppressed because one or more lines are too long
dist/platforms/mac/steps/build.sh (vendored, 9 lines changed)
@@ -126,10 +126,11 @@ echo ""
# Reference: https://docs.unity3d.com/2019.3/Documentation/Manual/CommandLineArguments.html

/Applications/Unity/Hub/Editor/$UNITY_VERSION/Unity.app/Contents/MacOS/Unity \
  -logfile /dev/stdout \
  -quit \
  -batchmode \
  -nographics \
  -username "$UNITY_EMAIL" \
  -password "$UNITY_PASSWORD" \
  -customBuildName "$BUILD_NAME" \
  -projectPath "$UNITY_PROJECT_PATH" \
  -buildTarget "$BUILD_TARGET" \
@@ -143,11 +144,15 @@ echo ""
  -androidKeyaliasName "$ANDROID_KEYALIAS_NAME" \
  -androidKeyaliasPass "$ANDROID_KEYALIAS_PASS" \
  -androidTargetSdkVersion "$ANDROID_TARGET_SDK_VERSION" \
- $CUSTOM_PARAMETERS
+ $CUSTOM_PARAMETERS \
+ > "$UNITY_PROJECT_PATH/out.log" 2>&1

# Catch exit code
BUILD_EXIT_CODE=$?

+ # Display logs
+ cat "$UNITY_PROJECT_PATH/out.log"

# Display results
if [ $BUILD_EXIT_CODE -eq 0 ]; then
  echo "Build succeeded";
dist/platforms/mac/steps/return_license.sh (vendored, 2 lines changed)
@@ -9,6 +9,8 @@ pushd "$ACTIVATE_LICENSE_PATH"
  -batchmode \
  -nographics \
  -quit \
+ -username "$UNITY_EMAIL" \
+ -password "$UNITY_PASSWORD" \
  -returnlicense \
  -projectPath "$ACTIVATE_LICENSE_PATH"
@@ -10,7 +10,7 @@ else
git config --global --replace-all url."https://token:$GIT_PRIVATE_TOKEN@github.com/".insteadOf ssh://git@github.com/
git config --global --add url."https://token:$GIT_PRIVATE_TOKEN@github.com/".insteadOf git@github.com

- git config --global url."https://token:$GIT_PRIVATE_TOKEN@github.com/".insteadOf "https://github.com/"
+ git config --global --add url."https://token:$GIT_PRIVATE_TOKEN@github.com/".insteadOf "https://github.com/"
git config --global url."https://ssh:$GIT_PRIVATE_TOKEN@github.com/".insteadOf "ssh://git@github.com/"
git config --global url."https://git:$GIT_PRIVATE_TOKEN@github.com/".insteadOf "git@github.com:"
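These insteadOf rewrites make every common form of a GitHub URL (ssh://, git@, https://) resolve through the token, so private dependencies can be fetched non-interactively inside the build container. A quick sanity check, shown only as an illustrative sketch and not part of the diff:

  # List every rewrite registered for the token-based URL prefix
  git config --global --get-all url."https://token:$GIT_PRIVATE_TOKEN@github.com/".insteadOf
  # A clone using an SSH-style remote is now transparently rewritten to HTTPS with the token
  git clone git@github.com:game-ci/unity-builder.git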
dist/platforms/windows/build.ps1 (vendored, 6 lines changed)
@@ -116,6 +116,12 @@ Write-Output ""
  -customBuildTarget $Env:BUILD_TARGET `
  -customBuildPath $Env:CUSTOM_BUILD_PATH `
  -buildVersion $Env:VERSION `
+ -androidVersionCode $Env:ANDROID_VERSION_CODE `
+ -androidKeystoreName $Env:ANDROID_KEYSTORE_NAME `
+ -androidKeystorePass $Env:ANDROID_KEYSTORE_PASS `
+ -androidKeyaliasName $Env:ANDROID_KEYALIAS_NAME `
+ -androidKeyaliasPass $Env:ANDROID_KEYALIAS_PASS `
+ -androidTargetSdkVersion $Env:ANDROID_TARGET_SDK_VERSION `
  $Env:CUSTOM_PARAMETERS `
  -logfile | Out-Host
@@ -6,13 +6,6 @@
color: true
extends: {}

- pre-push:
-   parallel: true
-   commands:
-     packages-audit:
-       tags: security
-       run: yarn audit

pre-commit:
  parallel: true
  commands:
package.json (10 lines changed)
@@ -7,11 +7,14 @@
  "author": "Webber <webber@takken.io>",
  "license": "MIT",
  "scripts": {
-   "prepare": "lefthook install",
+   "prepare": "lefthook install && npx husky uninstall -y",
    "build": "yarn && tsc && ncc build lib --source-map --license licenses.txt",
    "lint": "prettier --check \"src/**/*.{js,ts}\" && eslint src/**/*.ts",
    "format": "prettier --write \"src/**/*.{js,ts}\"",
    "cli": "yarn ts-node src/index.ts -m cli",
+   "gcp-secrets-tests": "cross-env cloudRunnerCluster=aws cloudRunnerTests=true readInputOverrideCommand=\"gcloud secrets versions access 1 --secret=\"{0}\"\" populateOverride=true readInputFromOverrideList=UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD yarn test -i -t \"cloud runner\"",
+   "gcp-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"gcloud secrets versions access 1 --secret=\"{0}\"\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
+   "aws-secrets-cli": "cross-env cloudRunnerTests=true readInputOverrideCommand=\"aws secretsmanager get-secret-value --secret-id {0}\" yarn ts-node src/index.ts -m cli --populateOverride true --readInputFromOverrideList UNITY_EMAIL,UNITY_SERIAL,UNITY_PASSWORD",
    "cli-aws": "cross-env cloudRunnerCluster=aws yarn run test-cli",
    "cli-k8s": "cross-env cloudRunnerCluster=k8s yarn run test-cli",
    "test-cli": "cross-env cloudRunnerTests=true yarn ts-node src/index.ts -m cli --projectPath test-project",
@@ -22,7 +25,7 @@
    "test-i-k8s": "cross-env cloudRunnerTests=true cloudRunnerCluster=k8s yarn test -i -t \"cloud runner\""
  },
  "dependencies": {
-   "@actions/core": "^1.6.0",
+   "@actions/core": "^1.9.1",
    "@actions/exec": "^1.1.0",
    "@actions/github": "^5.0.0",
    "@kubernetes/client-node": "^0.16.3",
@@ -37,12 +40,13 @@
    "reflect-metadata": "^0.1.13",
    "semver": "^7.3.5",
    "unity-changeset": "^1.6.0",
    "uuid": "^8.3.2",
    "yaml": "^1.10.2"
  },
  "devDependencies": {
    "@arkweid/lefthook": "^0.7.7",
    "@types/jest": "^27.4.1",
-   "@types/node": "^17.0.21",
+   "@types/node": "^17.0.23",
    "@types/semver": "^7.3.9",
    "@typescript-eslint/parser": "4.8.1",
    "@vercel/ncc": "^0.33.3",
22 src/index.ts
@@ -1,10 +1,15 @@
import * as core from '@actions/core';
import { Action, BuildParameters, Cache, Docker, ImageTag, Output, CloudRunner } from './model';
import { CLI } from './model/cli/cli';
import { Action, BuildParameters, Cache, CloudRunner, Docker, ImageTag, Output } from './model';
import { Cli } from './model/cli/cli';
import MacBuilder from './model/mac-builder';
import PlatformSetup from './model/platform-setup';
async function runMain() {
  try {
    if (Cli.InitCliMode()) {
      await Cli.RunCli();

      return;
    }
    Action.checkCompatibility();
    Cache.verify();

@@ -13,11 +18,7 @@ async function runMain() {
    const buildParameters = await BuildParameters.create();
    const baseImage = new ImageTag(buildParameters);

    if (
      buildParameters.cloudRunnerCluster &&
      buildParameters.cloudRunnerCluster !== '' &&
      buildParameters.cloudRunnerCluster !== 'local'
    ) {
    if (buildParameters.cloudRunnerCluster !== 'local') {
      await CloudRunner.run(buildParameters, baseImage.toString());
    } else {
      core.info('Building locally');
@@ -35,9 +36,4 @@ async function runMain() {
    core.setFailed((error as Error).message);
  }
}
const options = CLI.SetupCli();
if (CLI.isCliMode(options)) {
  CLI.RunCli(options);
} else {
  runMain();
}
runMain();
9 src/integrity.test.ts Normal file
@@ -0,0 +1,9 @@
import { stat } from 'fs/promises';

describe('Integrity tests', () => {
  describe('package-lock.json', () => {
    it('does not exist', async () => {
      await expect(stat(`${process.cwd()}/package-lock.json`)).rejects.toThrowError();
    });
  });
});
112 src/model/__data__/versions.ts Normal file
@@ -0,0 +1,112 @@
export const completelyValidSemanticVersions = [
  '0.0.4',
  '1.2.3',
  '10.20.30',
  '1.1.2-prerelease+meta',
  '1.1.2+meta',
  '1.1.2+meta-valid',
  '1.0.0-alpha',
  '1.0.0-beta',
  '1.0.0-alpha.beta',
  '1.0.0-alpha.beta.1',
  '1.0.0-alpha.1',
  '1.0.0-alpha0.valid',
  '1.0.0-alpha.0valid',
  '1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay',
  '1.0.0-rc.1+build.1',
  '2.0.0-rc.1+build.123',
  '1.2.3-beta',
  '10.2.3-DEV-SNAPSHOT',
  '1.2.3-SNAPSHOT-123',
  '1.0.0',
  '2.0.0',
  '1.1.7',
  '2.0.0+build.1848',
  '2.0.1-alpha.1227',
  '1.0.0-alpha+beta',
  '1.2.3----RC-SNAPSHOT.12.9.1--.12+788',
  '1.2.3----R-S.12.9.1--.12+meta',
  '1.2.3----RC-SNAPSHOT.12.9.1--.12',
  '1.0.0+0.build.1-rc.10000aaa-kk-0.1',
  '99999999999999999999999.999999999999999999.99999999999999999',
  '1.0.0-0A.is.legal',
];

export const notCompletelyValidSemanticVersions = [
  '1',
  '1.2',
  '1.2.3-0123',
  '1.2.3-0123.0123',
  '1.1.2+.123',
  '+invalid',
  '-invalid',
  '-invalid+invalid',
  '-invalid.01',
  'alpha',
  'alpha.beta',
  'alpha.beta.1',
  'alpha.1',
  'alpha+beta',
  'alpha_beta',
  'alpha.',
  'alpha..',
  'beta',
  '1.0.0-alpha_beta',
  '-alpha.',
  '1.0.0-alpha..',
  '1.0.0-alpha..1',
  '1.0.0-alpha...1',
  '1.0.0-alpha....1',
  '1.0.0-alpha.....1',
  '1.0.0-alpha......1',
  '1.0.0-alpha.......1',
  '01.1.1',
  '1.01.1',
  '1.1.01',
  '1.2',
  '1.2.3.DEV',
  '1.2-SNAPSHOT',
  '1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788',
  '1.2-RC-SNAPSHOT',
  '-1.0.3-gamma+b7718',
  '+justmeta',
  '9.8.7+meta+meta',
  '9.8.7-whatever+meta+meta',
  '99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12',
];

const addVariantsPrependingV = (array: string[]) => array.map((tag) => [tag, `v${tag}`]).flat();

/**
 * Array of versions that will be detected as version tags. Not all of these are
 * "semantic versions", but can be used to generate one. Especially using the
 * `versioning: Semantic` option.
 */
export const validVersionTagInputs = addVariantsPrependingV([
  '0',
  '1',
  '0.1',
  '1.0',
  '1.1.0',
  '1.2.3',
  ...completelyValidSemanticVersions,
]);

export const invalidVersionTagInputs = addVariantsPrependingV([
  '+invalid',
  '-invalid',
  '-invalid+invalid',
  '-invalid.01',
  'alpha',
  'alpha.beta',
  'alpha.beta.1',
  'alpha.1',
  'alpha+beta',
  'alpha_beta',
  'alpha.',
  'alpha..',
  'beta',
  '-alpha.',
  '-1.0.3-gamma+b7718',
  '+justmeta',
]);
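These fixture arrays mirror the example lists published alongside the semver.org regex, which is presumably how the versioning tests consume them; a minimal jest sketch under that assumption (the import path and the inlined regex are illustrative, not part of this diff):

import { completelyValidSemanticVersions, notCompletelyValidSemanticVersions } from './versions';

// The canonical pattern from semver.org, inlined here for the sketch.
const semverPattern =
  /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;

describe('semantic version fixtures', () => {
  test.each(completelyValidSemanticVersions)('matches %s', (version) => {
    expect(semverPattern.test(version)).toBe(true);
  });

  test.each(notCompletelyValidSemanticVersions)('does not match %s', (version) => {
    expect(semverPattern.test(version)).toBe(false);
  });
});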
@@ -6,12 +6,14 @@ export default class AndroidVersioning {
    if (!inputVersionCode) {
      return AndroidVersioning.versionToVersionCode(version);
    }

    return inputVersionCode;
  }

  static versionToVersionCode(version) {
    if (version === 'none') {
      core.info(`Versioning strategy is set to ${version}, so android version code should not be applied.`);

      return 0;
    }

@@ -19,6 +21,7 @@ export default class AndroidVersioning {

    if (!parsedVersion) {
      core.warning(`Could not parse "${version}" to semver, defaulting android version code to 1`);

      return 1;
    }

@@ -32,11 +35,13 @@ export default class AndroidVersioning {
      );
    }
    core.info(`Using android versionCode ${versionCode}`);

    return versionCode;
  }

  static determineSdkManagerParameters(targetSdkVersion) {
    const parsedVersion = Number.parseInt(targetSdkVersion.slice(-2), 10);

    return Number.isNaN(parsedVersion) ? '' : `platforms;android-${parsedVersion}`;
  }
}
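For reference, a small usage sketch of the sdkmanager helper shown above; the expected outputs follow directly from the slice/parse logic in this diff, while the input strings and import path are only illustrative:

import AndroidVersioning from './android-versioning';

// 'AndroidApiLevel30' -> last two characters '30' -> 'platforms;android-30'
AndroidVersioning.determineSdkManagerParameters('AndroidApiLevel30'); // 'platforms;android-30'

// A value whose last two characters are not numeric parses to NaN and yields ''.
AndroidVersioning.determineSdkManagerParameters('none'); // ''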
@@ -1,12 +1,14 @@
|
||||
import { customAlphabet } from 'nanoid';
|
||||
import * as core from '@actions/core';
|
||||
import AndroidVersioning from './android-versioning';
|
||||
import CloudRunnerConstants from './cloud-runner/services/cloud-runner-constants';
|
||||
import CloudRunnerNamespace from './cloud-runner/services/cloud-runner-namespace';
|
||||
import CloudRunnerBuildGuid from './cloud-runner/services/cloud-runner-guid';
|
||||
import Input from './input';
|
||||
import Platform from './platform';
|
||||
import UnityVersioning from './unity-versioning';
|
||||
import Versioning from './versioning';
|
||||
import { GitRepoReader } from './input-readers/git-repo';
|
||||
import { GithubCliReader } from './input-readers/github-cli';
|
||||
import { Cli } from './cli/cli';
|
||||
|
||||
class BuildParameters {
|
||||
public editorVersion!: string;
|
||||
@@ -33,15 +35,22 @@ class BuildParameters {
|
||||
public cloudRunnerCluster!: string;
|
||||
public awsBaseStackName!: string;
|
||||
public gitPrivateToken!: string;
|
||||
public remoteBuildCluster!: string;
|
||||
public awsStackName!: string;
|
||||
public kubeConfig!: string;
|
||||
public githubToken!: string;
|
||||
public cloudRunnerMemory!: string;
|
||||
public cloudRunnerCpu!: string;
|
||||
public kubeVolumeSize!: string;
|
||||
public kubeVolume!: string;
|
||||
public kubeStorageClass!: string;
|
||||
public chownFilesTo!: string;
|
||||
public customJobHooks!: string;
|
||||
public cachePushOverrideCommand!: string;
|
||||
public cachePullOverrideCommand!: string;
|
||||
public readInputFromOverrideList!: string;
|
||||
public readInputOverrideCommand!: string;
|
||||
public checkDependencyHealthOverride!: string;
|
||||
public startDependenciesOverride!: string;
|
||||
public cacheKey!: string;
|
||||
|
||||
public postBuildSteps!: string;
|
||||
public preBuildSteps!: string;
|
||||
@@ -52,6 +61,10 @@ class BuildParameters {
|
||||
public gitSha!: string;
|
||||
public logId!: string;
|
||||
public buildGuid!: string;
|
||||
public cloudRunnerBranch!: string;
|
||||
public cloudRunnerIntegrationTests!: boolean;
|
||||
public cloudRunnerBuilderPlatform!: string | undefined;
|
||||
public isCliMode!: boolean;
|
||||
|
||||
static async create(): Promise<BuildParameters> {
|
||||
const buildFile = this.parseBuildFile(Input.buildName, Input.targetPlatform, Input.androidAppBundle);
|
||||
@@ -63,8 +76,8 @@ class BuildParameters {
|
||||
// Todo - Don't use process.env directly, that's what the input model class is for.
|
||||
// ---
|
||||
let unitySerial = '';
|
||||
if (!process.env.UNITY_SERIAL) {
|
||||
//No serial was present so it is a personal license that we need to convert
|
||||
if (!process.env.UNITY_SERIAL && Input.githubInputEnabled) {
|
||||
// No serial was present, so it is a personal license that we need to convert
|
||||
if (!process.env.UNITY_LICENSE) {
|
||||
throw new Error(`Missing Unity License File and no Serial was found. If this
|
||||
is a personal license, make sure to follow the activation
|
||||
@@ -75,8 +88,6 @@ class BuildParameters {
|
||||
} else {
|
||||
unitySerial = process.env.UNITY_SERIAL!;
|
||||
}
|
||||
core.setSecret(unitySerial);
|
||||
// ---
|
||||
|
||||
return {
|
||||
editorVersion,
|
||||
@@ -101,12 +112,12 @@ class BuildParameters {
|
||||
androidSdkManagerParameters,
|
||||
customParameters: Input.customParameters,
|
||||
sshAgent: Input.sshAgent,
|
||||
gitPrivateToken: await Input.gitPrivateToken(),
|
||||
gitPrivateToken: Input.gitPrivateToken || (await GithubCliReader.GetGitHubAuthToken()),
|
||||
chownFilesTo: Input.chownFilesTo,
|
||||
cloudRunnerCluster: Input.cloudRunnerCluster,
|
||||
cloudRunnerBuilderPlatform: Input.cloudRunnerBuilderPlatform,
|
||||
awsBaseStackName: Input.awsBaseStackName,
|
||||
kubeConfig: Input.kubeConfig,
|
||||
githubToken: await Input.githubToken(),
|
||||
cloudRunnerMemory: Input.cloudRunnerMemory,
|
||||
cloudRunnerCpu: Input.cloudRunnerCpu,
|
||||
kubeVolumeSize: Input.kubeVolumeSize,
|
||||
@@ -115,14 +126,24 @@ class BuildParameters {
|
||||
preBuildSteps: Input.preBuildSteps,
|
||||
customJob: Input.customJob,
|
||||
runNumber: Input.runNumber,
|
||||
branch: await Input.branch(),
|
||||
// Todo - move this out of UserInput and into some class that determines additional information (as needed)
|
||||
githubRepo: await Input.githubRepo(),
|
||||
remoteBuildCluster: Input.cloudRunnerCluster,
|
||||
branch: Input.branch.replace('/head', '') || (await GitRepoReader.GetBranch()),
|
||||
cloudRunnerBranch: Input.cloudRunnerBranch.split('/').reverse()[0],
|
||||
cloudRunnerIntegrationTests: Input.cloudRunnerTests,
|
||||
githubRepo: Input.githubRepo || (await GitRepoReader.GetRemote()) || 'game-ci/unity-builder',
|
||||
isCliMode: Cli.isCliMode,
|
||||
awsStackName: Input.awsBaseStackName,
|
||||
gitSha: Input.gitSha,
|
||||
logId: customAlphabet(CloudRunnerConstants.alphabet, 9)(),
|
||||
buildGuid: CloudRunnerNamespace.generateBuildName(Input.runNumber, Input.targetPlatform),
|
||||
buildGuid: CloudRunnerBuildGuid.generateGuid(Input.runNumber, Input.targetPlatform),
|
||||
customJobHooks: Input.customJobHooks(),
|
||||
cachePullOverrideCommand: Input.cachePullOverrideCommand(),
|
||||
cachePushOverrideCommand: Input.cachePushOverrideCommand(),
|
||||
readInputOverrideCommand: Input.readInputOverrideCommand(),
|
||||
readInputFromOverrideList: Input.readInputFromOverrideList(),
|
||||
kubeStorageClass: Input.kubeStorageClass,
|
||||
checkDependencyHealthOverride: Input.checkDependencyHealthOverride,
|
||||
startDependenciesOverride: Input.startDependenciesOverride,
|
||||
cacheKey: Input.cacheKey,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -146,6 +167,7 @@ class BuildParameters {
|
||||
throw new Error(`License File was corrupted, unable to locate serial`);
|
||||
}
|
||||
const endIndex = license.indexOf(endKey, startIndex);
|
||||
|
||||
// Slice off the first 4 characters as they are garbage values
|
||||
return Buffer.from(license.slice(startIndex, endIndex), 'base64').toString('binary').slice(4);
|
||||
}
|
||||
|
||||
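The new BuildParameters.create wiring above repeatedly prefers an explicit input, then a value probed from the environment (the git repo or the GitHub CLI), then a hard default. A generic sketch of that resolution order with hypothetical names, assuming nothing beyond what the diff shows:

// Hypothetical helper illustrating the input -> probe -> default order used above.
async function resolveParameter(
  explicitInput: string | undefined,
  probe: () => Promise<string | undefined>,
  fallback: string,
): Promise<string> {
  if (explicitInput) {
    return explicitInput;
  }
  const probed = await probe();

  return probed || fallback;
}

// e.g. resolveParameter(Input.githubRepo, () => GitRepoReader.GetRemote(), 'game-ci/unity-builder');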
@@ -1,23 +0,0 @@
const targets = new Array();
export function CliFunction(key: string, description: string) {
  return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) {
    targets.push({
      target,
      propertyKey,
      descriptor,
      key,
      description,
    });
  };
}
export function GetCliFunctions(key) {
  return targets.find((x) => x.key === key);
}
export function GetAllCliModes() {
  return targets.map((x) => {
    return {
      key: x.key,
      description: x.description,
    };
  });
}
45 src/model/cli/cli-functions-repository.ts Normal file
@@ -0,0 +1,45 @@
export class CliFunctionsRepository {
  private static targets: any[] = [];
  public static PushCliFunction(
    target: any,
    propertyKey: string,
    descriptor: PropertyDescriptor,
    key: string,
    description: string,
  ) {
    CliFunctionsRepository.targets.push({
      target,
      propertyKey,
      descriptor,
      key,
      description,
    });
  }

  public static GetCliFunctions(key) {
    const results = CliFunctionsRepository.targets.find((x) => x.key === key);
    if (results === undefined || results.length === 0) {
      throw new Error(`no CLI mode found for ${key}`);
    }

    return results;
  }

  public static GetAllCliModes() {
    return CliFunctionsRepository.targets.map((x) => {
      return {
        key: x.key,
        description: x.description,
      };
    });
  }

  // eslint-disable-next-line no-unused-vars
  public static PushCliFunctionSource(cliFunction: any) {}
}

export function CliFunction(key: string, description: string) {
  return (target: any, propertyKey: string, descriptor: PropertyDescriptor) => {
    CliFunctionsRepository.PushCliFunction(target, propertyKey, descriptor, key, description);
  };
}
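The decorator and static repository above form a simple registry: decorating a static method records it under a key, and Cli.RunCli later dispatches to it by the --mode option. A minimal usage sketch (the example class and mode name are made up for illustration):

import { CliFunction, CliFunctionsRepository } from './cli-functions-repository';

class ExampleCommands {
  @CliFunction(`example-hello`, `prints a greeting`)
  static async hello() {
    console.log('hello from the example CLI mode');
  }
}

// Referencing the class mirrors Cli.InitCliMode and keeps the import (and its decorators) from being treated as unused.
CliFunctionsRepository.PushCliFunctionSource(ExampleCommands);

// Look the handler up by key and invoke it, as Cli.RunCli does for the selected --mode.
const entry = CliFunctionsRepository.GetCliFunctions('example-hello');
entry.target[entry.propertyKey]();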
@@ -3,86 +3,95 @@ import { BuildParameters, CloudRunner, ImageTag, Input } from '..';
|
||||
import * as core from '@actions/core';
|
||||
import { ActionYamlReader } from '../input-readers/action-yaml';
|
||||
import CloudRunnerLogger from '../cloud-runner/services/cloud-runner-logger';
|
||||
import { CliFunction, GetAllCliModes, GetCliFunctions } from './cli-decorator';
|
||||
import { RemoteClientLogger } from './remote-client/remote-client-services/remote-client-logger';
|
||||
import { CloudRunnerState } from '../cloud-runner/state/cloud-runner-state';
|
||||
import { SetupCloudRunnerRepository } from './remote-client/setup-cloud-runner-repository';
|
||||
import * as SDK from 'aws-sdk';
|
||||
import CloudRunnerQueryOverride from '../cloud-runner/services/cloud-runner-query-override';
|
||||
import { CliFunction, CliFunctionsRepository } from './cli-functions-repository';
|
||||
import { AwsCliCommands } from '../cloud-runner/providers/aws/commands/aws-cli-commands';
|
||||
import { Caching } from '../cloud-runner/remote-client/caching';
|
||||
import { LfsHashing } from '../cloud-runner/services/lfs-hashing';
|
||||
import { RemoteClient } from '../cloud-runner/remote-client';
|
||||
|
||||
export class CLI {
|
||||
static async RunCli(options: any): Promise<void> {
|
||||
Input.githubInputEnabled = false;
|
||||
|
||||
const results = GetCliFunctions(options.mode);
|
||||
|
||||
if (results === undefined || results.length === 0) {
|
||||
throw new Error('no CLI mode found');
|
||||
export class Cli {
|
||||
public static options;
|
||||
static get isCliMode() {
|
||||
return Cli.options !== undefined && Cli.options.mode !== undefined && Cli.options.mode !== '';
|
||||
}
|
||||
public static query(key, alternativeKey) {
|
||||
if (Cli.options && Cli.options[key] !== undefined) {
|
||||
return Cli.options[key];
|
||||
}
|
||||
if (Cli.options && alternativeKey && Cli.options[alternativeKey] !== undefined) {
|
||||
return Cli.options[alternativeKey];
|
||||
}
|
||||
|
||||
CloudRunnerLogger.log(`Entrypoint: ${results.key}`);
|
||||
|
||||
options.versioning = 'None';
|
||||
Input.cliOptions = options;
|
||||
return await results.target[results.propertyKey]();
|
||||
}
|
||||
static isCliMode(options: any) {
|
||||
return options.mode !== undefined && options.mode !== '';
|
||||
return;
|
||||
}
|
||||
|
||||
public static SetupCli() {
|
||||
public static InitCliMode() {
|
||||
CliFunctionsRepository.PushCliFunctionSource(AwsCliCommands);
|
||||
CliFunctionsRepository.PushCliFunctionSource(Caching);
|
||||
CliFunctionsRepository.PushCliFunctionSource(LfsHashing);
|
||||
CliFunctionsRepository.PushCliFunctionSource(RemoteClient);
|
||||
const program = new Command();
|
||||
program.version('0.0.1');
|
||||
const properties = Object.getOwnPropertyNames(Input);
|
||||
core.info(`\n`);
|
||||
core.info(`INPUT:`);
|
||||
const actionYamlReader: ActionYamlReader = new ActionYamlReader();
|
||||
for (const element of properties) {
|
||||
program.option(`--${element} <${element}>`, actionYamlReader.GetActionYamlValue(element));
|
||||
if (Input[element] !== undefined && Input[element] !== '' && typeof Input[element] !== `function`) {
|
||||
}
|
||||
program.option(
|
||||
'-m, --mode <mode>',
|
||||
CliFunctionsRepository.GetAllCliModes()
|
||||
.map((x) => `${x.key} (${x.description})`)
|
||||
.join(` | `),
|
||||
);
|
||||
program.option('--populateOverride <populateOverride>', 'should use override query to pull input false by default');
|
||||
program.option('--cachePushFrom <cachePushFrom>', 'cache push from source folder');
|
||||
program.option('--cachePushTo <cachePushTo>', 'cache push to caching folder');
|
||||
program.option('--artifactName <artifactName>', 'caching artifact name');
|
||||
program.parse(process.argv);
|
||||
Cli.options = program.opts();
|
||||
|
||||
return Cli.isCliMode;
|
||||
}
|
||||
|
||||
static async RunCli(): Promise<void> {
|
||||
Input.githubInputEnabled = false;
|
||||
if (Cli.options['populateOverride'] === `true`) {
|
||||
await CloudRunnerQueryOverride.PopulateQueryOverrideInput();
|
||||
}
|
||||
Cli.logInput();
|
||||
const results = CliFunctionsRepository.GetCliFunctions(Cli.options.mode);
|
||||
CloudRunnerLogger.log(`Entrypoint: ${results.key}`);
|
||||
Cli.options.versioning = 'None';
|
||||
|
||||
return await results.target[results.propertyKey]();
|
||||
}
|
||||
|
||||
@CliFunction(`print-input`, `prints all input`)
|
||||
private static logInput() {
|
||||
core.info(`\n`);
|
||||
core.info(`INPUT:`);
|
||||
const properties = Object.getOwnPropertyNames(Input);
|
||||
for (const element of properties) {
|
||||
if (
|
||||
Input[element] !== undefined &&
|
||||
Input[element] !== '' &&
|
||||
typeof Input[element] !== `function` &&
|
||||
element !== 'length' &&
|
||||
element !== 'cliOptions' &&
|
||||
element !== 'prototype'
|
||||
) {
|
||||
core.info(`${element} ${Input[element]}`);
|
||||
}
|
||||
}
|
||||
core.info(`\n`);
|
||||
program.option(
|
||||
'-m, --mode <mode>',
|
||||
GetAllCliModes()
|
||||
.map((x) => `${x.key} (${x.description})`)
|
||||
.join(` | `),
|
||||
);
|
||||
program.parse(process.argv);
|
||||
|
||||
return program.opts();
|
||||
}
|
||||
|
||||
@CliFunction(`cli`, `runs a cloud runner build`)
|
||||
public static async CLIBuild(): Promise<string> {
|
||||
const buildParameter = await BuildParameters.create();
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
|
||||
return await CloudRunner.run(buildParameter, baseImage.toString());
|
||||
}
|
||||
|
||||
@CliFunction(`remote-cli`, `sets up a repository, usually before a game-ci build`)
|
||||
static async runRemoteClientJob() {
|
||||
const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
|
||||
RemoteClientLogger.log(`Build Params:
|
||||
${JSON.stringify(buildParameter, undefined, 4)}
|
||||
`);
|
||||
CloudRunnerState.setup(buildParameter);
|
||||
await SetupCloudRunnerRepository.run();
|
||||
}
|
||||
|
||||
@CliFunction(`cach-push`, `push to cache`)
|
||||
static async cachePush() {}
|
||||
|
||||
@CliFunction(`cach-pull`, `pull from cache`)
|
||||
static async cachePull() {}
|
||||
|
||||
@CliFunction(`garbage-collect-aws`, `garbage collect aws`)
|
||||
static async garbageCollectAws() {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const CF = new SDK.CloudFormation();
|
||||
|
||||
const stacks = await CF.listStacks().promise();
|
||||
CloudRunnerLogger.log(JSON.stringify(stacks, undefined, 4));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,117 +0,0 @@
|
||||
import { assert } from 'console';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { Input } from '../../..';
|
||||
import CloudRunnerLogger from '../../../cloud-runner/services/cloud-runner-logger';
|
||||
import { CloudRunnerState } from '../../../cloud-runner/state/cloud-runner-state';
|
||||
import { CloudRunnerSystem } from './cloud-runner-system';
|
||||
import { LFSHashing } from './lfs-hashing';
|
||||
import { RemoteClientLogger } from './remote-client-logger';
|
||||
|
||||
export class Caching {
|
||||
public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheKey: string) {
|
||||
const startPath = process.cwd();
|
||||
try {
|
||||
if (!fs.existsSync(cacheFolder)) {
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
|
||||
}
|
||||
process.chdir(path.resolve(sourceFolder, '..'));
|
||||
|
||||
if (Input.cloudRunnerTests) {
|
||||
CloudRunnerLogger.log(
|
||||
`Hashed cache folder ${await LFSHashing.hashAllFiles(sourceFolder)} ${sourceFolder} ${path.basename(
|
||||
sourceFolder,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`ls ${path.basename(sourceFolder)}`);
|
||||
}
|
||||
await CloudRunnerSystem.Run(`zip ${cacheKey}.zip ${path.basename(sourceFolder)}`);
|
||||
assert(fs.existsSync(`${cacheKey}.zip`), 'cache zip exists');
|
||||
assert(fs.existsSync(path.basename(sourceFolder)), 'source folder exists');
|
||||
await CloudRunnerSystem.Run(`mv ${cacheKey}.zip ${cacheFolder}`);
|
||||
RemoteClientLogger.log(`moved ${cacheKey}.zip to ${cacheFolder}`);
|
||||
assert(fs.existsSync(`${path.join(cacheFolder, cacheKey)}.zip`), 'cache zip exists inside cache folder');
|
||||
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`ls ${cacheFolder}`);
|
||||
}
|
||||
} catch (error) {
|
||||
process.chdir(`${startPath}`);
|
||||
throw error;
|
||||
}
|
||||
process.chdir(`${startPath}`);
|
||||
}
|
||||
public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheKey: string = ``) {
|
||||
const startPath = process.cwd();
|
||||
RemoteClientLogger.log(`Caching for ${path.basename(destinationFolder)}`);
|
||||
try {
|
||||
if (!fs.existsSync(cacheFolder)) {
|
||||
fs.mkdirSync(cacheFolder);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(destinationFolder)) {
|
||||
fs.mkdirSync(destinationFolder);
|
||||
}
|
||||
|
||||
const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .zip$ | head -1`))
|
||||
.replace(/\n/g, ``)
|
||||
.replace('.zip', '');
|
||||
|
||||
process.chdir(cacheFolder);
|
||||
|
||||
const cacheSelection = cacheKey !== `` && fs.existsSync(`${cacheKey}.zip`) ? cacheKey : latestInBranch;
|
||||
await CloudRunnerLogger.log(`cache key ${cacheKey} selection ${cacheSelection}`);
|
||||
|
||||
if (fs.existsSync(`${cacheSelection}.zip`)) {
|
||||
const resultsFolder = `results${CloudRunnerState.buildParams.buildGuid}`;
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`tree ${destinationFolder}`);
|
||||
}
|
||||
RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.zip`);
|
||||
assert(`${fs.existsSync(destinationFolder)}`);
|
||||
assert(`${fs.existsSync(`${cacheSelection}.zip`)}`);
|
||||
const fullResultsFolder = path.join(cacheFolder, resultsFolder);
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
|
||||
}
|
||||
await CloudRunnerSystem.Run(`unzip ${cacheSelection}.zip -d ${path.basename(resultsFolder)}`);
|
||||
RemoteClientLogger.log(`cache item extracted to ${fullResultsFolder}`);
|
||||
assert(`${fs.existsSync(fullResultsFolder)}`);
|
||||
const destinationParentFolder = path.resolve(destinationFolder, '..');
|
||||
if (fs.existsSync(destinationFolder)) {
|
||||
fs.rmSync(destinationFolder, { recursive: true, force: true });
|
||||
}
|
||||
await CloudRunnerSystem.Run(
|
||||
`mv "${fullResultsFolder}/${path.basename(destinationFolder)}" "${destinationParentFolder}"`,
|
||||
);
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`tree ${destinationParentFolder}`);
|
||||
}
|
||||
} else {
|
||||
RemoteClientLogger.logWarning(`cache item ${cacheKey} doesn't exist ${destinationFolder}`);
|
||||
if (cacheSelection !== ``) {
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
|
||||
}
|
||||
RemoteClientLogger.logWarning(`cache item ${cacheKey}.zip doesn't exist ${destinationFolder}`);
|
||||
throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
process.chdir(`${startPath}`);
|
||||
throw error;
|
||||
}
|
||||
process.chdir(`${startPath}`);
|
||||
}
|
||||
|
||||
public static handleCachePurging() {
|
||||
if (process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined) {
|
||||
RemoteClientLogger.log(`purging ${CloudRunnerState.purgeRemoteCaching}`);
|
||||
fs.rmdirSync(CloudRunnerState.cacheFolder, { recursive: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,37 +0,0 @@
import { exec } from 'child_process';
import { RemoteClientLogger } from './remote-client-logger';

export class CloudRunnerSystem {
  public static async Run(command: string, suppressError = false) {
    for (const element of command.split(`\n`)) {
      RemoteClientLogger.log(element);
    }
    return await new Promise<string>((promise) => {
      let output = '';
      const child = exec(command, (error, stdout, stderr) => {
        if (error && !suppressError) {
          throw error;
        }
        if (stderr) {
          const diagnosticOutput = `${stderr.toString()}`;
          RemoteClientLogger.logCliDiagnostic(diagnosticOutput);
          output += diagnosticOutput;
          return;
        }
        const outputChunk = `${stdout}`;
        output += outputChunk;
      });
      child.on('close', function (code) {
        RemoteClientLogger.log(`[Exit code ${code}]`);
        if (code !== 0 && !suppressError) {
          throw new Error(output);
        }
        const outputLines = output.split(`\n`);
        for (const element of outputLines) {
          RemoteClientLogger.log(element);
        }
        promise(output);
      });
    });
  }
}
@@ -1,81 +0,0 @@
|
||||
import fs from 'fs';
|
||||
import { CloudRunnerState } from '../../cloud-runner/state/cloud-runner-state';
|
||||
import { Caching } from './remote-client-services/caching';
|
||||
import { LFSHashing } from './remote-client-services/lfs-hashing';
|
||||
import { CloudRunnerSystem } from './remote-client-services/cloud-runner-system';
|
||||
import { Input } from '../..';
|
||||
import { RemoteClientLogger } from './remote-client-services/remote-client-logger';
|
||||
import path from 'path';
|
||||
import { assert } from 'console';
|
||||
|
||||
export class SetupCloudRunnerRepository {
|
||||
public static async run() {
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.buildPathFull}`);
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.repoPathFull}`);
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerState.cacheFolderFull}`);
|
||||
|
||||
process.chdir(CloudRunnerState.repoPathFull);
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`ls -lh`);
|
||||
await CloudRunnerSystem.Run(`tree`);
|
||||
}
|
||||
await SetupCloudRunnerRepository.cloneRepoWithoutLFSFiles();
|
||||
if (Input.cloudRunnerTests) {
|
||||
await CloudRunnerSystem.Run(`ls -lh`);
|
||||
await CloudRunnerSystem.Run(`tree`);
|
||||
}
|
||||
const lfsHashes = await LFSHashing.createLFSHashFiles();
|
||||
if (fs.existsSync(CloudRunnerState.libraryFolderFull)) {
|
||||
RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
|
||||
}
|
||||
await Caching.PullFromCache(
|
||||
CloudRunnerState.lfsCacheFolderFull,
|
||||
CloudRunnerState.lfsDirectoryFull,
|
||||
`${lfsHashes.lfsGuid}`,
|
||||
);
|
||||
await SetupCloudRunnerRepository.pullLatestLFS();
|
||||
await Caching.PushToCache(
|
||||
CloudRunnerState.lfsCacheFolderFull,
|
||||
CloudRunnerState.lfsDirectoryFull,
|
||||
`${lfsHashes.lfsGuid}`,
|
||||
);
|
||||
await Caching.PullFromCache(CloudRunnerState.libraryCacheFolderFull, CloudRunnerState.libraryFolderFull);
|
||||
Caching.handleCachePurging();
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private static async cloneRepoWithoutLFSFiles() {
|
||||
try {
|
||||
process.chdir(`${CloudRunnerState.repoPathFull}`);
|
||||
RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
|
||||
await CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
|
||||
RemoteClientLogger.log(`Cloning the repository being built:`);
|
||||
await CloudRunnerSystem.Run(`git lfs install --skip-smudge`);
|
||||
await CloudRunnerSystem.Run(
|
||||
`git clone -b ${CloudRunnerState.branchName} ${CloudRunnerState.targetBuildRepoUrl} ${path.resolve(
|
||||
`..`,
|
||||
path.basename(CloudRunnerState.repoPathFull),
|
||||
)}`,
|
||||
);
|
||||
assert(fs.existsSync(`.git`));
|
||||
RemoteClientLogger.log(`${CloudRunnerState.buildParams.branch}`);
|
||||
await CloudRunnerSystem.Run(`git checkout ${CloudRunnerState.buildParams.branch}`);
|
||||
assert(fs.existsSync(path.join(`.git`, `lfs`)), 'LFS folder should not exist before caching');
|
||||
RemoteClientLogger.log(`Checked out ${process.env.GITHUB_SHA}`);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private static async pullLatestLFS() {
|
||||
await CloudRunnerSystem.Run(`ls -lh ${CloudRunnerState.lfsDirectoryFull}/..`);
|
||||
process.chdir(CloudRunnerState.repoPathFull);
|
||||
await CloudRunnerSystem.Run(`git lfs pull`);
|
||||
RemoteClientLogger.log(`pulled latest LFS files`);
|
||||
assert(fs.existsSync(CloudRunnerState.lfsDirectoryFull));
|
||||
await CloudRunnerSystem.Run(`ls -lh ${CloudRunnerState.lfsDirectoryFull}/..`);
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,3 @@
export class CloudRunnerStatics {
  public static readonly logPrefix = `Cloud-Runner-System`;
  public static readonly logPrefix = `Cloud-Runner`;
}
@@ -1,5 +1,5 @@
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
import CloudRunnerSecret from '../services/cloud-runner-secret';
import CloudRunnerEnvironmentVariable from './services/cloud-runner-environment-variable';
import CloudRunnerSecret from './services/cloud-runner-secret';

export class CloudRunnerStepState {
  public image: string;
@@ -4,6 +4,9 @@ import Input from '../input';
|
||||
import { CloudRunnerStatics } from './cloud-runner-statics';
|
||||
import { TaskParameterSerializer } from './services/task-parameter-serializer';
|
||||
import UnityVersioning from '../unity-versioning';
|
||||
import { Cli } from '../cli/cli';
|
||||
import CloudRunnerLogger from './services/cloud-runner-logger';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
describe('Cloud Runner', () => {
|
||||
it('responds', () => {});
|
||||
@@ -13,10 +16,12 @@ describe('Cloud Runner', () => {
|
||||
const testSecretValue = 'testSecretValue';
|
||||
if (Input.cloudRunnerTests) {
|
||||
it('All build parameters sent to cloud runner as env vars', async () => {
|
||||
Input.cliOptions = {
|
||||
// Build parameters
|
||||
Cli.options = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.read('test-project'),
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
customJob: `
|
||||
- name: 'step 1'
|
||||
image: 'alpine'
|
||||
@@ -27,9 +32,16 @@ describe('Cloud Runner', () => {
|
||||
`,
|
||||
};
|
||||
Input.githubInputEnabled = false;
|
||||
|
||||
// Setup parameters
|
||||
const buildParameter = await BuildParameters.create();
|
||||
Input.githubInputEnabled = true;
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
|
||||
// Run the job
|
||||
const file = await CloudRunner.run(buildParameter, baseImage.toString());
|
||||
|
||||
// Assert results
|
||||
expect(file).toContain(JSON.stringify(buildParameter));
|
||||
expect(file).toContain(`${Input.ToEnvVarFormat(testSecretName)}=${testSecretValue}`);
|
||||
const environmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
|
||||
@@ -41,10 +53,90 @@ describe('Cloud Runner', () => {
|
||||
if (typeof element.value === `string`) {
|
||||
element.value = element.value.replace(/\s+/g, '');
|
||||
}
|
||||
CloudRunnerLogger.log(`checking input/build param ${element.name} ${element.value}`);
|
||||
}
|
||||
}
|
||||
for (const element of environmentVariables) {
|
||||
if (element.value !== undefined && typeof element.value !== 'function') {
|
||||
expect(newLinePurgedFile).toContain(`${element.name}`);
|
||||
expect(newLinePurgedFile).toContain(`${element.name}=${element.value}`);
|
||||
}
|
||||
}
|
||||
delete Cli.options;
|
||||
}, 1000000);
|
||||
it('Run one build it should not use cache, run subsequent build which should use cache', async () => {
|
||||
Cli.options = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.determineUnityVersion('test-project', UnityVersioning.read('test-project')),
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
};
|
||||
Input.githubInputEnabled = false;
|
||||
const buildParameter = await BuildParameters.create();
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
const results = await CloudRunner.run(buildParameter, baseImage.toString());
|
||||
const libraryString = 'Rebuilding Library because the asset database could not be found!';
|
||||
const buildSucceededString = 'Build succeeded';
|
||||
expect(results).toContain(libraryString);
|
||||
expect(results).toContain(buildSucceededString);
|
||||
CloudRunnerLogger.log(`run 1 succeeded`);
|
||||
const buildParameter2 = await BuildParameters.create();
|
||||
const baseImage2 = new ImageTag(buildParameter2);
|
||||
const results2 = await CloudRunner.run(buildParameter2, baseImage2.toString());
|
||||
CloudRunnerLogger.log(`run 2 succeeded`);
|
||||
expect(results2).toContain(buildSucceededString);
|
||||
expect(results2).toEqual(expect.not.stringContaining(libraryString));
|
||||
Input.githubInputEnabled = true;
|
||||
delete Cli.options;
|
||||
}, 1000000);
|
||||
}
|
||||
it('Local cloud runner returns commands', async () => {
|
||||
// Build parameters
|
||||
Cli.options = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.read('test-project'),
|
||||
cloudRunnerCluster: 'local-system',
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
customJob: `
|
||||
- name: 'step 1'
|
||||
image: 'alpine'
|
||||
commands: 'dir'
|
||||
secrets:
|
||||
- name: '${testSecretName}'
|
||||
value: '${testSecretValue}'
|
||||
`,
|
||||
};
|
||||
Input.githubInputEnabled = false;
|
||||
|
||||
// Setup parameters
|
||||
const buildParameter = await BuildParameters.create();
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
|
||||
// Run the job
|
||||
await expect(CloudRunner.run(buildParameter, baseImage.toString())).resolves.not.toThrow();
|
||||
Input.githubInputEnabled = true;
|
||||
delete Cli.options;
|
||||
}, 1000000);
|
||||
it('Test cloud runner returns commands', async () => {
|
||||
// Build parameters
|
||||
Cli.options = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.read('test-project'),
|
||||
cloudRunnerCluster: 'test',
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
};
|
||||
Input.githubInputEnabled = false;
|
||||
|
||||
// Setup parameters
|
||||
const buildParameter = await BuildParameters.create();
|
||||
const baseImage = new ImageTag(buildParameter);
|
||||
|
||||
// Run the job
|
||||
await expect(CloudRunner.run(buildParameter, baseImage.toString())).resolves.not.toThrow();
|
||||
Input.githubInputEnabled = true;
|
||||
delete Cli.options;
|
||||
}, 1000000);
|
||||
});
|
||||
|
||||
@@ -1,35 +1,58 @@
|
||||
import AWSBuildPlatform from './aws';
|
||||
import { BuildParameters } from '..';
|
||||
import { CloudRunnerState } from './state/cloud-runner-state';
|
||||
import Kubernetes from './k8s';
|
||||
import AwsBuildPlatform from './providers/aws';
|
||||
import { BuildParameters, Input } from '..';
|
||||
import Kubernetes from './providers/k8s';
|
||||
import CloudRunnerLogger from './services/cloud-runner-logger';
|
||||
import { CloudRunnerStepState } from './state/cloud-runner-step-state';
|
||||
import { CloudRunnerStepState } from './cloud-runner-step-state';
|
||||
import { WorkflowCompositionRoot } from './workflows/workflow-composition-root';
|
||||
import { CloudRunnerError } from './error/cloud-runner-error';
|
||||
import { TaskParameterSerializer } from './services/task-parameter-serializer';
|
||||
import * as core from '@actions/core';
|
||||
import CloudRunnerSecret from './services/cloud-runner-secret';
|
||||
import { ProviderInterface } from './providers/provider-interface';
|
||||
import CloudRunnerEnvironmentVariable from './services/cloud-runner-environment-variable';
|
||||
import TestCloudRunner from './providers/test';
|
||||
import LocalCloudRunner from './providers/local';
|
||||
import LocalDockerCloudRunner from './providers/local-docker';
|
||||
|
||||
class CloudRunner {
|
||||
public static Provider: ProviderInterface;
|
||||
static buildParameters: BuildParameters;
|
||||
public static defaultSecrets: CloudRunnerSecret[];
|
||||
public static cloudRunnerEnvironmentVariables: CloudRunnerEnvironmentVariable[];
|
||||
private static setup(buildParameters: BuildParameters) {
|
||||
CloudRunnerLogger.setup();
|
||||
CloudRunnerState.setup(buildParameters);
|
||||
CloudRunner.buildParameters = buildParameters;
|
||||
CloudRunner.setupBuildPlatform();
|
||||
const parameters = TaskParameterSerializer.readBuildEnvironmentVariables();
|
||||
for (const element of parameters) {
|
||||
core.setOutput(element.name, element.value);
|
||||
CloudRunner.defaultSecrets = TaskParameterSerializer.readDefaultSecrets();
|
||||
CloudRunner.cloudRunnerEnvironmentVariables = TaskParameterSerializer.readBuildEnvironmentVariables();
|
||||
if (!buildParameters.isCliMode) {
|
||||
const buildParameterPropertyNames = Object.getOwnPropertyNames(buildParameters);
|
||||
for (const element of CloudRunner.cloudRunnerEnvironmentVariables) {
|
||||
core.setOutput(Input.ToEnvVarFormat(element.name), element.value);
|
||||
}
|
||||
for (const element of buildParameterPropertyNames) {
|
||||
core.setOutput(Input.ToEnvVarFormat(element), buildParameters[element]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static setupBuildPlatform() {
|
||||
switch (CloudRunnerState.buildParams.cloudRunnerCluster) {
|
||||
CloudRunnerLogger.log(`Cloud Runner platform selected ${CloudRunner.buildParameters.cloudRunnerCluster}`);
|
||||
switch (CloudRunner.buildParameters.cloudRunnerCluster) {
|
||||
case 'k8s':
|
||||
CloudRunnerLogger.log('Cloud Runner platform selected Kubernetes');
|
||||
CloudRunnerState.CloudRunnerProviderPlatform = new Kubernetes(CloudRunnerState.buildParams);
|
||||
CloudRunner.Provider = new Kubernetes(CloudRunner.buildParameters);
|
||||
break;
|
||||
default:
|
||||
case 'aws':
|
||||
CloudRunnerLogger.log('Cloud Runner platform selected AWS');
|
||||
CloudRunnerState.CloudRunnerProviderPlatform = new AWSBuildPlatform(CloudRunnerState.buildParams);
|
||||
CloudRunner.Provider = new AwsBuildPlatform(CloudRunner.buildParameters);
|
||||
break;
|
||||
case 'test':
|
||||
CloudRunner.Provider = new TestCloudRunner();
|
||||
break;
|
||||
case 'local-system':
|
||||
CloudRunner.Provider = new LocalCloudRunner();
|
||||
break;
|
||||
case 'local-docker':
|
||||
CloudRunner.Provider = new LocalDockerCloudRunner();
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -37,33 +60,30 @@ class CloudRunner {
|
||||
static async run(buildParameters: BuildParameters, baseImage: string) {
|
||||
CloudRunner.setup(buildParameters);
|
||||
try {
|
||||
core.startGroup('Setup remote runner');
|
||||
await CloudRunnerState.CloudRunnerProviderPlatform.setupSharedResources(
|
||||
CloudRunnerState.buildParams.buildGuid,
|
||||
CloudRunnerState.buildParams,
|
||||
CloudRunnerState.branchName,
|
||||
CloudRunnerState.defaultSecrets,
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.startGroup('Setup shared cloud runner resources');
|
||||
await CloudRunner.Provider.setup(
|
||||
CloudRunner.buildParameters.buildGuid,
|
||||
CloudRunner.buildParameters,
|
||||
CloudRunner.buildParameters.branch,
|
||||
CloudRunner.defaultSecrets,
|
||||
);
|
||||
core.endGroup();
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
|
||||
const output = await new WorkflowCompositionRoot().run(
|
||||
new CloudRunnerStepState(
|
||||
baseImage,
|
||||
TaskParameterSerializer.readBuildEnvironmentVariables(),
|
||||
CloudRunnerState.defaultSecrets,
|
||||
),
|
||||
new CloudRunnerStepState(baseImage, CloudRunner.cloudRunnerEnvironmentVariables, CloudRunner.defaultSecrets),
|
||||
);
|
||||
core.startGroup('Cleanup');
|
||||
await CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(
|
||||
CloudRunnerState.buildParams.buildGuid,
|
||||
CloudRunnerState.buildParams,
|
||||
CloudRunnerState.branchName,
|
||||
CloudRunnerState.defaultSecrets,
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.startGroup('Cleanup shared cloud runner resources');
|
||||
await CloudRunner.Provider.cleanup(
|
||||
CloudRunner.buildParameters.buildGuid,
|
||||
CloudRunner.buildParameters,
|
||||
CloudRunner.buildParameters.branch,
|
||||
CloudRunner.defaultSecrets,
|
||||
);
|
||||
CloudRunnerLogger.log(`Cleanup complete`);
|
||||
core.endGroup();
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
|
||||
|
||||
return output;
|
||||
} catch (error) {
|
||||
core.endGroup();
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
|
||||
await CloudRunnerError.handleException(error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
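Based only on the call sites visible in this diff, the new ProviderInterface must expose at least setup and cleanup with the argument order used by CloudRunner.run above. The sketch below is inferred from those calls and is not the actual contents of providers/provider-interface.ts, which likely also declares the step-execution method; import paths are assumptions:

import { BuildParameters } from '..';
import CloudRunnerSecret from './services/cloud-runner-secret';

// Inferred sketch only; member names and signatures come from CloudRunner.run's call sites.
export interface ProviderInterfaceSketch {
  setup(
    buildGuid: string,
    buildParameters: BuildParameters,
    branchName: string,
    defaultSecretsArray: CloudRunnerSecret[],
  ): Promise<unknown>;

  cleanup(
    buildGuid: string,
    buildParameters: BuildParameters,
    branchName: string,
    defaultSecretsArray: CloudRunnerSecret[],
  ): Promise<unknown>;
}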
@@ -1,16 +1,16 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { CloudRunnerState } from '../state/cloud-runner-state';
import * as core from '@actions/core';
import CloudRunner from '../cloud-runner';

export class CloudRunnerError {
  public static async handleException(error: unknown) {
    CloudRunnerLogger.error(JSON.stringify(error, undefined, 4));
    core.setFailed('Cloud Runner failed');
    await CloudRunnerState.CloudRunnerProviderPlatform.cleanupSharedResources(
      CloudRunnerState.buildParams.buildGuid,
      CloudRunnerState.buildParams,
      CloudRunnerState.branchName,
      CloudRunnerState.defaultSecrets,
    await CloudRunner.Provider.cleanup(
      CloudRunner.buildParameters.buildGuid,
      CloudRunner.buildParameters,
      CloudRunner.buildParameters.branch,
      CloudRunner.defaultSecrets,
    );
  }
}
@@ -1,8 +1,7 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerLogger from '../../services/cloud-runner-logger';
import * as core from '@actions/core';
import * as SDK from 'aws-sdk';
import * as fs from 'fs';
import path from 'path';
import { BaseStackFormation } from './cloud-formations/base-stack-formation';
const crypto = require('crypto');

export class AWSBaseStack {
@@ -14,7 +13,7 @@ export class AWSBaseStack {
  async setupBaseStack(CF: SDK.CloudFormation) {
    const baseStackName = this.baseStackName;

    const baseStack = fs.readFileSync(path.join(__dirname, 'cloud-formations', 'base-setup.yml'), 'utf8');
    const baseStack = BaseStackFormation.formation;

    // Cloud Formation Input
    const describeStackInput: SDK.CloudFormation.DescribeStacksInput = {
@@ -1,6 +1,6 @@
import * as fs from 'fs';
import { TaskDefinitionFormation } from './cloud-formations/task-definition-formation';

export class AWSTemplates {
export class AWSCloudFormationTemplates {
  public static getParameterTemplate(p1) {
    return `
  ${p1}:
@@ -29,10 +29,11 @@ export class AWSTemplates {
  public static insertAtTemplate(template, insertionKey, insertion) {
    const index = template.search(insertionKey) + insertionKey.length + '\n'.length;
    template = [template.slice(0, index), insertion, template.slice(index)].join('');

    return template;
  }

  public static readTaskCloudFormationTemplate(): string {
    return fs.readFileSync(`${__dirname}/cloud-formations/task-def-formation.yml`, 'utf8');
    return TaskDefinitionFormation.formation;
  }
}
@@ -1,13 +1,13 @@
import CloudRunnerLogger from '../services/cloud-runner-logger';
import CloudRunnerLogger from '../../services/cloud-runner-logger';
import * as SDK from 'aws-sdk';
import * as core from '@actions/core';
import { Input } from '../..';
import CloudRunner from '../../cloud-runner';

export class AWSError {
  static async handleStackCreationFailure(error: any, CF: SDK.CloudFormation, taskDefStackName: string) {
    CloudRunnerLogger.log('aws error: ');
    core.error(JSON.stringify(error, undefined, 4));
    if (Input.cloudRunnerTests) {
    if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
      CloudRunnerLogger.log('Getting events and resources for task stack');
      const events = (await CF.describeStackEvents({ StackName: taskDefStackName }).promise()).StackEvents;
      CloudRunnerLogger.log(JSON.stringify(events, undefined, 4));
@@ -1,9 +1,10 @@
|
||||
import * as SDK from 'aws-sdk';
|
||||
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import { AWSTemplates } from './aws-templates';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import CloudRunnerSecret from '../../services/cloud-runner-secret';
|
||||
import { AWSCloudFormationTemplates } from './aws-cloud-formation-templates';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import { AWSError } from './aws-error';
|
||||
import CloudRunner from '../../cloud-runner';
|
||||
|
||||
export class AWSJobStack {
|
||||
private baseStackName: string;
|
||||
@@ -22,7 +23,21 @@ export class AWSJobStack {
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<CloudRunnerAWSTaskDef> {
|
||||
const taskDefStackName = `${this.baseStackName}-${buildGuid}`;
|
||||
let taskDefCloudFormation = AWSTemplates.readTaskCloudFormationTemplate();
|
||||
let taskDefCloudFormation = AWSCloudFormationTemplates.readTaskCloudFormationTemplate();
|
||||
const cpu = CloudRunner.buildParameters.cloudRunnerCpu || '1024';
|
||||
const memory = CloudRunner.buildParameters.cloudRunnerMemory || '3072';
|
||||
taskDefCloudFormation = taskDefCloudFormation.replace(
|
||||
`ContainerCpu:
|
||||
Default: 1024`,
|
||||
`ContainerCpu:
|
||||
Default: ${Number.parseInt(cpu)}`,
|
||||
);
|
||||
taskDefCloudFormation = taskDefCloudFormation.replace(
|
||||
`ContainerMemory:
|
||||
Default: 2048`,
|
||||
`ContainerMemory:
|
||||
Default: ${Number.parseInt(memory)}`,
|
||||
);
|
||||
for (const secret of secrets) {
|
||||
secret.ParameterKey = `${buildGuid.replace(/[^\dA-Za-z]/g, '')}${secret.ParameterKey.replace(
|
||||
/[^\dA-Za-z]/g,
|
||||
@@ -35,20 +50,20 @@ export class AWSJobStack {
|
||||
secrets = secrets.filter((x) => x !== secret);
|
||||
continue;
|
||||
}
|
||||
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p1 - input',
|
||||
AWSTemplates.getParameterTemplate(secret.ParameterKey),
|
||||
AWSCloudFormationTemplates.getParameterTemplate(secret.ParameterKey),
|
||||
);
|
||||
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p2 - secret',
|
||||
AWSTemplates.getSecretTemplate(`${secret.ParameterKey}`),
|
||||
AWSCloudFormationTemplates.getSecretTemplate(`${secret.ParameterKey}`),
|
||||
);
|
||||
taskDefCloudFormation = AWSTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation = AWSCloudFormationTemplates.insertAtTemplate(
|
||||
taskDefCloudFormation,
|
||||
'p3 - container def',
|
||||
AWSTemplates.getSecretDefinitionTemplate(secret.EnvironmentVariable, secret.ParameterKey),
|
||||
AWSCloudFormationTemplates.getSecretDefinitionTemplate(secret.EnvironmentVariable, secret.ParameterKey),
|
||||
);
|
||||
}
|
||||
const secretsMappedToCloudFormationParameters = secrets.map((x) => {
|
||||
@@ -85,7 +100,9 @@ export class AWSJobStack {
|
||||
},
|
||||
...secretsMappedToCloudFormationParameters,
|
||||
];
|
||||
|
||||
CloudRunnerLogger.log(
|
||||
`Starting AWS job with memory: ${CloudRunner.buildParameters.cloudRunnerMemory} cpu: ${CloudRunner.buildParameters.cloudRunnerCpu}`,
|
||||
);
|
||||
let previousStackExists = true;
|
||||
while (previousStackExists) {
|
||||
previousStackExists = false;
|
||||
@@ -101,25 +118,19 @@ export class AWSJobStack {
|
||||
}
|
||||
}
|
||||
}
|
||||
const createStackInput: SDK.CloudFormation.CreateStackInput = {
|
||||
StackName: taskDefStackName,
|
||||
TemplateBody: taskDefCloudFormation,
|
||||
Capabilities: ['CAPABILITY_IAM'],
|
||||
Parameters: parameters,
|
||||
};
|
||||
|
||||
try {
|
||||
await CF.createStack({
|
||||
StackName: taskDefStackName,
|
||||
TemplateBody: taskDefCloudFormation,
|
||||
Capabilities: ['CAPABILITY_IAM'],
|
||||
Parameters: parameters,
|
||||
}).promise();
|
||||
await CF.createStack(createStackInput).promise();
|
||||
CloudRunnerLogger.log('Creating cloud runner job');
|
||||
await CF.waitFor('stackCreateComplete', { StackName: taskDefStackName }).promise();
|
||||
} catch (error) {
|
||||
await AWSError.handleStackCreationFailure(
|
||||
error,
|
||||
CF,
|
||||
taskDefStackName,
|
||||
//taskDefCloudFormation,
|
||||
//parameters,
|
||||
//secrets,
|
||||
);
|
||||
await AWSError.handleStackCreationFailure(error, CF, taskDefStackName);
|
||||
throw error;
|
||||
}
|
||||
|
||||
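The secret wiring above leans on AWSCloudFormationTemplates.insertAtTemplate, which splices a snippet into the template right after a marker comment. A small illustration of that behaviour (the marker, template text, secret name, and import path are made up for the sketch):

import { AWSCloudFormationTemplates } from './aws-cloud-formation-templates';

const template = `Parameters:
  # p1 - input
  BuildGuid:
    Type: String`;

// Inserts the generated parameter block on the line following the 'p1 - input' marker.
const withSecret = AWSCloudFormationTemplates.insertAtTemplate(
  template,
  'p1 - input',
  AWSCloudFormationTemplates.getParameterTemplate('MySecretKey'),
);
console.log(withSecret);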
@@ -1,13 +1,13 @@
|
||||
import * as AWS from 'aws-sdk';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
|
||||
import * as core from '@actions/core';
|
||||
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
|
||||
import * as zlib from 'zlib';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { Input } from '../..';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerStatics } from '../cloud-runner-statics';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import { Input } from '../../..';
|
||||
import CloudRunner from '../../cloud-runner';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../../services/cloud-runner-build-command-process';
|
||||
import { FollowLogStreamService } from '../../services/follow-log-stream-service';
|
||||
|
||||
class AWSTaskRunner {
|
||||
static async runTask(
|
||||
@@ -39,7 +39,7 @@ class AWSTaskRunner {
|
||||
{
|
||||
name: taskDef.taskDefStackName,
|
||||
environment,
|
||||
command: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(commands, CloudRunnerState.buildParams)],
|
||||
command: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(commands, CloudRunner.buildParameters)],
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -52,10 +52,40 @@ class AWSTaskRunner {
|
||||
},
|
||||
},
|
||||
}).promise();
|
||||
|
||||
CloudRunnerLogger.log('Cloud runner job is starting');
|
||||
const taskArn = task.tasks?.[0].taskArn || '';
|
||||
CloudRunnerLogger.log('Cloud runner job is starting');
|
||||
await AWSTaskRunner.waitUntilTaskRunning(ECS, taskArn, cluster);
|
||||
CloudRunnerLogger.log(
|
||||
`Cloud runner job status is running ${(await AWSTaskRunner.describeTasks(ECS, cluster, taskArn))?.lastStatus}`,
|
||||
);
|
||||
const { output, shouldCleanup } = await this.streamLogsUntilTaskStops(
|
||||
ECS,
|
||||
CF,
|
||||
taskDef,
|
||||
cluster,
|
||||
taskArn,
|
||||
streamName,
|
||||
);
|
||||
const taskData = await AWSTaskRunner.describeTasks(ECS, cluster, taskArn);
|
||||
const exitCode = taskData.containers?.[0].exitCode;
|
||||
const wasSuccessful = exitCode === 0 || (exitCode === undefined && taskData.lastStatus === 'RUNNING');
|
||||
if (wasSuccessful) {
|
||||
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);
|
||||
|
||||
return { output, shouldCleanup };
|
||||
} else {
|
||||
if (taskData.stoppedReason === 'Essential container in task exited' && exitCode === 1) {
|
||||
throw new Error('Container exited with code 1');
|
||||
}
|
||||
const message = `Cloud runner job exit code ${exitCode}`;
|
||||
taskData.overrides = undefined;
|
||||
taskData.attachments = undefined;
|
||||
CloudRunnerLogger.log(`${message} ${JSON.stringify(taskData, undefined, 4)}`);
|
||||
throw new Error(message);
|
||||
}
|
||||
}
|
||||
|
||||
private static async waitUntilTaskRunning(ECS: AWS.ECS, taskArn: string, cluster: string) {
|
||||
try {
|
||||
await ECS.waitFor('tasksRunning', { tasks: [taskArn], cluster }).promise();
|
||||
} catch (error_) {
|
||||
@@ -70,24 +100,6 @@ class AWSTaskRunner {
|
||||
core.setFailed(error);
|
||||
core.error(error);
|
||||
}
|
||||
CloudRunnerLogger.log(`Cloud runner job is running`);
|
||||
|
||||
const output = await this.streamLogsUntilTaskStops(ECS, CF, taskDef, cluster, taskArn, streamName);
|
||||
const exitCode = (await AWSTaskRunner.describeTasks(ECS, cluster, taskArn)).containers?.[0].exitCode;
|
||||
CloudRunnerLogger.log(`Cloud runner job exit code ${exitCode}`);
|
||||
if (exitCode !== 0 && exitCode !== undefined) {
|
||||
core.error(
|
||||
`job failed with exit code ${exitCode} ${JSON.stringify(
|
||||
await ECS.describeTasks({ tasks: [taskArn], cluster }).promise(),
|
||||
undefined,
|
||||
4,
|
||||
)}`,
|
||||
);
|
||||
throw new Error(`job failed with exit code ${exitCode}`);
|
||||
} else {
|
||||
CloudRunnerLogger.log(`Cloud runner job has finished successfully`);
|
||||
return output;
|
||||
}
|
||||
}
|
||||
|
||||
static async describeTasks(ECS: AWS.ECS, clusterName: string, taskArn: string) {
|
||||
@@ -114,28 +126,27 @@ class AWSTaskRunner {
|
||||
const stream = await AWSTaskRunner.getLogStream(kinesis, kinesisStreamName);
|
||||
let iterator = await AWSTaskRunner.getLogIterator(kinesis, stream);
|
||||
|
||||
CloudRunnerLogger.log(
|
||||
`Cloud runner job status is ${(await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn))?.lastStatus}`,
|
||||
);
|
||||
|
||||
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${CF.config.region}#logsV2:log-groups/log-group/${taskDef.taskDefStackName}`;
|
||||
CloudRunnerLogger.log(`You can also see the logs at AWS Cloud Watch: ${logBaseUrl}`);
|
||||
const logBaseUrl = `https://${Input.region}.console.aws.amazon.com/cloudwatch/home?region=${Input.region}#logsV2:log-groups/log-group/${CloudRunner.buildParameters.awsBaseStackName}-${CloudRunner.buildParameters.buildGuid}`;
|
||||
CloudRunnerLogger.log(`You can view the log stream on AWS CloudWatch: ${logBaseUrl}`);
|
||||
let shouldReadLogs = true;
|
||||
let shouldCleanup = true;
|
||||
let timestamp: number = 0;
|
||||
let output = '';
|
||||
while (shouldReadLogs) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 1500));
|
||||
const taskData = await AWSTaskRunner.describeTasks(ECS, clusterName, taskArn);
|
||||
({ timestamp, shouldReadLogs } = AWSTaskRunner.checkStreamingShouldContinue(taskData, timestamp, shouldReadLogs));
|
||||
({ iterator, shouldReadLogs, output } = await AWSTaskRunner.handleLogStreamIteration(
|
||||
({ iterator, shouldReadLogs, output, shouldCleanup } = await AWSTaskRunner.handleLogStreamIteration(
|
||||
kinesis,
|
||||
iterator,
|
||||
shouldReadLogs,
|
||||
taskDef,
|
||||
output,
|
||||
shouldCleanup,
|
||||
));
|
||||
}
|
||||
return output;
|
||||
|
||||
return { output, shouldCleanup };
|
||||
}
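streamLogsUntilTaskStops polls the task roughly every 1.5 seconds and advances a Kinesis shard iterator between reads until the streaming flag is cleared. A generic, self-contained sketch of that polling shape (callback name and delay are illustrative):

// Keep iterating with a fixed delay until the per-iteration callback says to stop.
async function pollUntilDone(iterate: () => Promise<{ done: boolean }>, delayMs = 1500): Promise<void> {
  let done = false;
  while (!done) {
    await new Promise((resolve) => setTimeout(resolve, delayMs));
    ({ done } = await iterate());
  }
}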
|
||||
|
||||
private static async handleLogStreamIteration(
|
||||
@@ -144,6 +155,7 @@ class AWSTaskRunner {
|
||||
shouldReadLogs: boolean,
|
||||
taskDef: CloudRunnerAWSTaskDef,
|
||||
output: string,
|
||||
shouldCleanup: boolean,
|
||||
) {
|
||||
const records = await kinesis
|
||||
.getRecords({
|
||||
@@ -151,11 +163,22 @@ class AWSTaskRunner {
|
||||
})
|
||||
.promise();
|
||||
iterator = records.NextShardIterator || '';
|
||||
({ shouldReadLogs, output } = AWSTaskRunner.logRecords(records, iterator, taskDef, shouldReadLogs, output));
|
||||
return { iterator, shouldReadLogs, output };
|
||||
({ shouldReadLogs, output, shouldCleanup } = AWSTaskRunner.logRecords(
|
||||
records,
|
||||
iterator,
|
||||
taskDef,
|
||||
shouldReadLogs,
|
||||
output,
|
||||
shouldCleanup,
|
||||
));
|
||||
|
||||
return { iterator, shouldReadLogs, output, shouldCleanup };
|
||||
}
|
||||
|
||||
private static checkStreamingShouldContinue(taskData: AWS.ECS.Task, timestamp: number, shouldReadLogs: boolean) {
|
||||
if (taskData?.lastStatus === 'UNKNOWN') {
|
||||
CloudRunnerLogger.log('## Cloud runner job unknown');
|
||||
}
|
||||
if (taskData?.lastStatus !== 'RUNNING') {
|
||||
if (timestamp === 0) {
|
||||
CloudRunnerLogger.log('## Cloud runner job stopped, streaming end of logs');
|
||||
@@ -167,6 +190,7 @@ class AWSTaskRunner {
|
||||
}
|
||||
CloudRunnerLogger.log(`## Status of job: ${taskData.lastStatus}`);
|
||||
}
|
||||
|
||||
return { timestamp, shouldReadLogs };
|
||||
}
|
||||
|
||||
@@ -176,6 +200,7 @@ class AWSTaskRunner {
|
||||
taskDef: CloudRunnerAWSTaskDef,
|
||||
shouldReadLogs: boolean,
|
||||
output: string,
|
||||
shouldCleanup: boolean,
|
||||
) {
|
||||
if (records.Records.length > 0 && iterator) {
|
||||
for (let index = 0; index < records.Records.length; index++) {
|
||||
@@ -184,23 +209,19 @@ class AWSTaskRunner {
|
||||
);
|
||||
if (json.messageType === 'DATA_MESSAGE') {
|
||||
for (let logEventsIndex = 0; logEventsIndex < json.logEvents.length; logEventsIndex++) {
|
||||
let message = json.logEvents[logEventsIndex].message;
|
||||
if (json.logEvents[logEventsIndex].message.includes(`---${CloudRunnerState.buildParams.logId}`)) {
|
||||
CloudRunnerLogger.log('End of log transmission received');
|
||||
shouldReadLogs = false;
|
||||
} else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
|
||||
core.warning('LIBRARY NOT FOUND!');
|
||||
}
|
||||
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
|
||||
if (Input.cloudRunnerTests) {
|
||||
output += message;
|
||||
}
|
||||
CloudRunnerLogger.log(message);
|
||||
const message = json.logEvents[logEventsIndex].message;
|
||||
({ shouldReadLogs, shouldCleanup, output } = FollowLogStreamService.handleIteration(
|
||||
message,
|
||||
shouldReadLogs,
|
||||
shouldCleanup,
|
||||
output,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return { shouldReadLogs, output };
|
||||
|
||||
return { shouldReadLogs, output, shouldCleanup };
|
||||
}
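Both log readers (the ECS runner above and the Kubernetes runner later in this diff) now delegate per-message handling to FollowLogStreamService.handleIteration, whose body is not part of this diff. Based on the inline handling it replaces, a plausible reducer of the same shape might look like the sketch below; the end-of-transmission marker is a placeholder value, not the real one:

// Sketch only: the real FollowLogStreamService is not shown in this diff.
const endOfTransmissionMarker = '---end-of-log-transmission'; // placeholder value

class FollowLogStreamServiceSketch {
  static handleIteration(message: string, shouldReadLogs: boolean, shouldCleanup: boolean, output: string) {
    if (message.includes(endOfTransmissionMarker)) {
      // Stop reading once the job signals that log transmission is complete.
      shouldReadLogs = false;
    }
    output += `${message}\n`;

    return { shouldReadLogs, shouldCleanup, output };
  }
}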
|
||||
|
||||
private static async getLogStream(kinesis: AWS.Kinesis, kinesisStreamName: string) {
|
||||
@@ -1,7 +1,7 @@
|
||||
AWSTemplateFormatVersion: '2010-09-09'
|
||||
Description: AWS Fargate cluster that can span public and private subnets. Supports
|
||||
public facing load balancers, private internal load balancers, and
|
||||
both internal and external service discovery namespaces.
|
||||
export class BaseStackFormation {
|
||||
public static readonly baseStackDecription = `Game-CI base stack`;
|
||||
public static readonly formation: string = `AWSTemplateFormatVersion: '2010-09-09'
|
||||
Description: ${BaseStackFormation.baseStackDecription}
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
@@ -335,57 +335,58 @@ Outputs:
|
||||
Description: 'The EFS file storage ID.'
|
||||
Value: !Ref EfsFileStorage
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:EfsFileStorageId
|
||||
Name: !Sub ${'${EnvironmentName}'}:EfsFileStorageId
|
||||
ClusterName:
|
||||
Description: The name of the ECS cluster
|
||||
Value: !Ref 'ECSCluster'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ClusterName
|
||||
Name: !Sub${' ${EnvironmentName}'}:ClusterName
|
||||
AutoscalingRole:
|
||||
Description: The ARN of the role used for autoscaling
|
||||
Value: !GetAtt 'AutoscalingRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:AutoscalingRole
|
||||
Name: !Sub ${'${EnvironmentName}'}:AutoscalingRole
|
||||
ECSRole:
|
||||
Description: The ARN of the ECS role
|
||||
Value: !GetAtt 'ECSRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ECSRole
|
||||
Name: !Sub ${'${EnvironmentName}'}:ECSRole
|
||||
ECSTaskExecutionRole:
|
||||
Description: The ARN of the ECS task execution role
|
||||
Value: !GetAtt 'ECSTaskExecutionRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ECSTaskExecutionRole
|
||||
Name: !Sub ${'${EnvironmentName}'}:ECSTaskExecutionRole
|
||||
|
||||
DeleteCFNLambdaExecutionRole:
|
||||
Description: Lambda execution role for cleaning up cloud formations
|
||||
Value: !GetAtt 'DeleteCFNLambdaExecutionRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:DeleteCFNLambdaExecutionRole
|
||||
Name: !Sub ${'${EnvironmentName}'}:DeleteCFNLambdaExecutionRole
|
||||
|
||||
CloudWatchIAMRole:
|
||||
Description: The ARN of the CloudWatch role for subscription filter
|
||||
Value: !GetAtt 'CloudWatchIAMRole.Arn'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:CloudWatchIAMRole
|
||||
Name: !Sub ${'${EnvironmentName}'}:CloudWatchIAMRole
|
||||
VpcId:
|
||||
Description: The ID of the VPC that this stack is deployed in
|
||||
Value: !Ref 'VPC'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:VpcId
|
||||
Name: !Sub ${'${EnvironmentName}'}:VpcId
|
||||
PublicSubnetOne:
|
||||
Description: Public subnet one
|
||||
Value: !Ref 'PublicSubnetOne'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:PublicSubnetOne
|
||||
Name: !Sub ${'${EnvironmentName}'}:PublicSubnetOne
|
||||
PublicSubnetTwo:
|
||||
Description: Public subnet two
|
||||
Value: !Ref 'PublicSubnetTwo'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:PublicSubnetTwo
|
||||
|
||||
Name: !Sub ${'${EnvironmentName}'}:PublicSubnetTwo
|
||||
ContainerSecurityGroup:
|
||||
Description: A security group used to allow Fargate containers to receive traffic
|
||||
Value: !Ref 'ContainerSecurityGroup'
|
||||
Export:
|
||||
Name: !Sub ${EnvironmentName}:ContainerSecurityGroup
|
||||
Name: !Sub ${'${EnvironmentName}'}:ContainerSecurityGroup
|
||||
`;
|
||||
}
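Because the CloudFormation YAML now lives inside a TypeScript template literal, every literal ${EnvironmentName} has to be escaped so TypeScript does not try to interpolate it. The ${'${EnvironmentName}'} form does exactly that: the inner string is evaluated and emitted verbatim. A minimal demonstration:

// The inner '${EnvironmentName}' is a plain string, so the output keeps the CloudFormation placeholder.
const exportName = `Name: !Sub ${'${EnvironmentName}'}:ClusterName`;

// Logs: Name: !Sub ${EnvironmentName}:ClusterName
console.log(exportName);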
|
||||
@@ -1,8 +1,7 @@
|
||||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: >-
|
||||
AWS Fargate cluster that can span public and private subnets. Supports public
|
||||
facing load balancers, private internal load balancers, and both internal and
|
||||
external service discovery namespaces.
|
||||
export class TaskDefinitionFormation {
|
||||
public static readonly description: string = `Game CI Cloud Runner Task Stack`;
|
||||
public static readonly formation: string = `AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: ${TaskDefinitionFormation.description}
|
||||
Parameters:
|
||||
EnvironmentName:
|
||||
Type: String
|
||||
@@ -23,12 +22,12 @@ Parameters:
|
||||
Default: 80
|
||||
Description: What port number the application inside the docker container is binding to
|
||||
ContainerCpu:
|
||||
Type: Number
|
||||
Default: 1024
|
||||
Type: Number
|
||||
Description: How much CPU to give the container. 1024 is 1 CPU
|
||||
ContainerMemory:
|
||||
Type: Number
|
||||
Default: 2048
|
||||
Type: Number
|
||||
Description: How much memory in megabytes to give the container
|
||||
BUILDGUID:
|
||||
Type: String
|
||||
@@ -78,7 +77,7 @@ Resources:
|
||||
Properties:
|
||||
FilterPattern: ''
|
||||
RoleArn:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:CloudWatchIAMRole'
|
||||
'Fn::ImportValue': !Sub '${'${EnvironmentName}'}:CloudWatchIAMRole'
|
||||
LogGroupName: !Ref ServiceName
|
||||
DestinationArn:
|
||||
'Fn::GetAtt':
|
||||
@@ -98,9 +97,7 @@ Resources:
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
|
||||
# template secrets p2 - secret
|
||||
|
||||
TaskDefinition:
|
||||
Type: 'AWS::ECS::TaskDefinition'
|
||||
Properties:
|
||||
@@ -112,12 +109,12 @@ Resources:
|
||||
- Name: efs-data
|
||||
EFSVolumeConfiguration:
|
||||
FilesystemId:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:EfsFileStorageId'
|
||||
'Fn::ImportValue': !Sub '${'${EnvironmentName}'}:EfsFileStorageId'
|
||||
TransitEncryption: ENABLED
|
||||
RequiresCompatibilities:
|
||||
- FARGATE
|
||||
ExecutionRoleArn:
|
||||
'Fn::ImportValue': !Sub '${EnvironmentName}:ECSTaskExecutionRole'
|
||||
'Fn::ImportValue': !Sub '${'${EnvironmentName}'}:ECSTaskExecutionRole'
|
||||
TaskRoleArn:
|
||||
'Fn::If':
|
||||
- HasCustomRole
|
||||
@@ -153,69 +150,7 @@ Resources:
|
||||
awslogs-group: !Ref ServiceName
|
||||
awslogs-region: !Ref 'AWS::Region'
|
||||
awslogs-stream-prefix: !Ref ServiceName
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
|
||||
DependsOn:
|
||||
- LogGroup
|
||||
Metadata:
|
||||
'AWS::CloudFormation::Designer':
|
||||
dabb0116-abe0-48a6-a8af-cf9111c879a5:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 270
|
||||
'y': 90
|
||||
z: 1
|
||||
embeds: []
|
||||
dependson:
|
||||
- aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
c6f18447-b879-4696-8873-f981b2cedd2b:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 270
|
||||
'y': 210
|
||||
z: 1
|
||||
embeds: []
|
||||
7f809e91-9e5d-4678-98c1-c5085956c480:
|
||||
size:
|
||||
width: 60
|
||||
height: 60
|
||||
position:
|
||||
x: 60
|
||||
'y': 300
|
||||
z: 1
|
||||
embeds: []
|
||||
dependson:
|
||||
- aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
- c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
aece53ae-b82d-4267-bc16-ed964b05db27:
|
||||
size:
|
||||
width: 150
|
||||
height: 150
|
||||
position:
|
||||
x: 60
|
||||
'y': 90
|
||||
z: 1
|
||||
embeds: []
|
||||
4d2da56c-3643-46b8-aaee-e46e19f95fcc:
|
||||
source:
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
target:
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
z: 11
|
||||
14eb957b-f094-4653-93c4-77b2f851953c:
|
||||
source:
|
||||
id: 7f809e91-9e5d-4678-98c1-c5085956c480
|
||||
target:
|
||||
id: c6f18447-b879-4696-8873-f981b2cedd2b
|
||||
z: 12
|
||||
85c57444-e5bb-4230-bc85-e545cd4558f6:
|
||||
source:
|
||||
id: dabb0116-abe0-48a6-a8af-cf9111c879a5
|
||||
target:
|
||||
id: aece53ae-b82d-4267-bc16-ed964b05db27
|
||||
z: 13
|
||||
`;
|
||||
}
|
||||
@@ -0,0 +1,170 @@
|
||||
import AWS from 'aws-sdk';
|
||||
import { CliFunction } from '../../../../cli/cli-functions-repository';
|
||||
import Input from '../../../../input';
|
||||
import CloudRunnerLogger from '../../../services/cloud-runner-logger';
|
||||
import { BaseStackFormation } from '../cloud-formations/base-stack-formation';
|
||||
|
||||
export class AwsCliCommands {
|
||||
@CliFunction(`aws-list-all`, `List all resources`)
|
||||
static async awsListAll() {
|
||||
await AwsCliCommands.awsListStacks(undefined, true);
|
||||
await AwsCliCommands.awsListTasks();
|
||||
await AwsCliCommands.awsListLogGroups(undefined, true);
|
||||
}
|
||||
@CliFunction(`aws-garbage-collect`, `garbage collect aws resources not in use !WIP!`)
|
||||
static async garbageCollectAws() {
|
||||
await AwsCliCommands.cleanup(false);
|
||||
}
|
||||
@CliFunction(`aws-garbage-collect-all`, `garbage collect aws resources regardless of whether they are in use`)
|
||||
static async garbageCollectAwsAll() {
|
||||
await AwsCliCommands.cleanup(true);
|
||||
}
|
||||
@CliFunction(
|
||||
`aws-garbage-collect-all-1d-older`,
|
||||
`garbage collect aws resources created more than 1d ago (ignore if they are in use)`,
|
||||
)
|
||||
static async garbageCollectAwsAllOlderThanOneDay() {
|
||||
await AwsCliCommands.cleanup(true, true);
|
||||
}
|
||||
static isOlderThan1day(date: any) {
|
||||
const ageDate = new Date(date.getTime() - Date.now());
|
||||
|
||||
return ageDate.getDay() > 0;
|
||||
}
|
||||
@CliFunction(`aws-list-stacks`, `List stacks`)
|
||||
static async awsListStacks(perResultCallback: any = false, verbose: boolean = false) {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const CF = new AWS.CloudFormation();
|
||||
const stacks =
|
||||
(await CF.listStacks().promise()).StackSummaries?.filter(
|
||||
(_x) => _x.StackStatus !== 'DELETE_COMPLETE', // &&
|
||||
// _x.TemplateDescription === TaskDefinitionFormation.description.replace('\n', ''),
|
||||
) || [];
|
||||
CloudRunnerLogger.log(`Stacks ${stacks.length}`);
|
||||
for (const element of stacks) {
|
||||
const ageDate = new Date(element.CreationTime.getTime() - Date.now());
|
||||
if (verbose)
|
||||
CloudRunnerLogger.log(
|
||||
`Task Stack ${element.StackName} - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
|
||||
);
|
||||
if (perResultCallback) await perResultCallback(element);
|
||||
}
|
||||
const baseStacks =
|
||||
(await CF.listStacks().promise()).StackSummaries?.filter(
|
||||
(_x) =>
|
||||
_x.StackStatus !== 'DELETE_COMPLETE' && _x.TemplateDescription === BaseStackFormation.baseStackDecription,
|
||||
) || [];
|
||||
CloudRunnerLogger.log(`Base Stacks ${baseStacks.length}`);
|
||||
for (const element of baseStacks) {
|
||||
const ageDate = new Date(element.CreationTime.getTime() - Date.now());
|
||||
if (verbose)
|
||||
CloudRunnerLogger.log(
|
||||
`Base Stack ${
|
||||
element.StackName
|
||||
} - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()}`,
|
||||
);
|
||||
if (perResultCallback) await perResultCallback(element);
|
||||
}
|
||||
if (stacks === undefined) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
@CliFunction(`aws-list-tasks`, `List tasks`)
|
||||
static async awsListTasks(perResultCallback: any = false) {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const ecs = new AWS.ECS();
|
||||
const clusters = (await ecs.listClusters().promise()).clusterArns || [];
|
||||
CloudRunnerLogger.log(`Clusters ${clusters.length}`);
|
||||
for (const element of clusters) {
|
||||
const input: AWS.ECS.ListTasksRequest = {
|
||||
cluster: element,
|
||||
};
|
||||
|
||||
const list = (await ecs.listTasks(input).promise()).taskArns || [];
|
||||
if (list.length > 0) {
|
||||
const describeInput: AWS.ECS.DescribeTasksRequest = { tasks: list, cluster: element };
|
||||
const describeList = (await ecs.describeTasks(describeInput).promise()).tasks || [];
|
||||
if (describeList.length === 0) {
|
||||
continue;
|
||||
}
|
||||
CloudRunnerLogger.log(`Tasks ${describeList.length}`);
|
||||
for (const taskElement of describeList) {
|
||||
if (taskElement === undefined) {
|
||||
continue;
|
||||
}
|
||||
taskElement.overrides = {};
|
||||
taskElement.attachments = [];
|
||||
if (taskElement.createdAt === undefined) {
|
||||
CloudRunnerLogger.log(`Skipping ${taskElement.taskDefinitionArn} no createdAt date`);
|
||||
continue;
|
||||
}
|
||||
if (perResultCallback) await perResultCallback(taskElement, element);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@CliFunction(`aws-list-log-groups`, `List tasks`)
|
||||
static async awsListLogGroups(perResultCallback: any = false, verbose: boolean = false) {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const ecs = new AWS.CloudWatchLogs();
|
||||
let logStreamInput: AWS.CloudWatchLogs.DescribeLogGroupsRequest = {
|
||||
/* logGroupNamePrefix: 'game-ci' */
|
||||
};
|
||||
let logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
|
||||
const logGroups = logGroupsDescribe.logGroups || [];
|
||||
while (logGroupsDescribe.nextToken) {
|
||||
logStreamInput = { /* logGroupNamePrefix: 'game-ci',*/ nextToken: logGroupsDescribe.nextToken };
|
||||
logGroupsDescribe = await ecs.describeLogGroups(logStreamInput).promise();
|
||||
logGroups.push(...(logGroupsDescribe?.logGroups || []));
|
||||
}
|
||||
|
||||
CloudRunnerLogger.log(`Log Groups ${logGroups.length}`);
|
||||
for (const element of logGroups) {
|
||||
if (element.creationTime === undefined) {
|
||||
CloudRunnerLogger.log(`Skipping ${element.logGroupName} no createdAt date`);
|
||||
continue;
|
||||
}
|
||||
const ageDate = new Date(new Date(element.creationTime).getTime() - Date.now());
|
||||
if (verbose)
|
||||
CloudRunnerLogger.log(
|
||||
`Log Group Name ${
|
||||
element.logGroupName
|
||||
} - Age D${ageDate.getDay()} H${ageDate.getHours()} M${ageDate.getMinutes()} - 1d old ${AwsCliCommands.isOlderThan1day(
|
||||
new Date(element.creationTime),
|
||||
)}`,
|
||||
);
|
||||
if (perResultCallback) await perResultCallback(element, element);
|
||||
}
|
||||
}
|
||||
|
||||
private static async cleanup(deleteResources = false, OneDayOlderOnly: boolean = false) {
|
||||
process.env.AWS_REGION = Input.region;
|
||||
const CF = new AWS.CloudFormation();
|
||||
const ecs = new AWS.ECS();
|
||||
const cwl = new AWS.CloudWatchLogs();
|
||||
await AwsCliCommands.awsListStacks(async (element) => {
|
||||
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(element.CreationTime))) {
|
||||
if (element.StackName === 'game-ci' || element.TemplateDescription === 'Game-CI base stack') {
|
||||
CloudRunnerLogger.log(`Skipping ${element.StackName} ignore list`);
|
||||
|
||||
return;
|
||||
}
|
||||
CloudRunnerLogger.log(`Deleting ${element.StackName}`);
|
||||
const deleteStackInput: AWS.CloudFormation.DeleteStackInput = { StackName: element.StackName };
|
||||
await CF.deleteStack(deleteStackInput).promise();
|
||||
}
|
||||
});
|
||||
await AwsCliCommands.awsListTasks(async (taskElement, element) => {
|
||||
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(taskElement.CreatedAt))) {
|
||||
CloudRunnerLogger.log(`Stopping task ${taskElement.containers?.[0].name}`);
|
||||
await ecs.stopTask({ task: taskElement.taskArn || '', cluster: element }).promise();
|
||||
}
|
||||
});
|
||||
await AwsCliCommands.awsListLogGroups(async (element) => {
|
||||
if (deleteResources && (!OneDayOlderOnly || AwsCliCommands.isOlderThan1day(new Date(element.createdAt)))) {
|
||||
CloudRunnerLogger.log(`Deleting ${element.logGroupName}`);
|
||||
await cwl.deleteLogGroup({ logGroupName: element.logGroupName || '' }).promise();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
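isOlderThan1day derives an age by constructing a Date from a millisecond difference and checking getDay(), which is hard to reason about at a glance. A more direct way to express the same intent, shown here only as an illustration and not as a change to the diff:

// Illustrative alternative: compare the raw millisecond difference directly.
function isOlderThanOneDay(createdAt: Date): boolean {
  const oneDayMs = 24 * 60 * 60 * 1000;

  return Date.now() - createdAt.getTime() > oneDayMs;
}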
|
||||
@@ -1,22 +1,22 @@
|
||||
import * as SDK from 'aws-sdk';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerSecret from '../../services/cloud-runner-secret';
|
||||
import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerAWSTaskDef from './cloud-runner-aws-task-def';
|
||||
import AWSTaskRunner from './aws-task-runner';
|
||||
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { ProviderInterface } from '../provider-interface';
|
||||
import BuildParameters from '../../../build-parameters';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import { AWSJobStack } from './aws-job-stack';
|
||||
import { AWSBaseStack } from './aws-base-stack';
|
||||
import { Input } from '../..';
|
||||
import { Input } from '../../..';
|
||||
|
||||
class AWSBuildEnvironment implements CloudRunnerProviderInterface {
|
||||
class AWSBuildEnvironment implements ProviderInterface {
|
||||
private baseStackName: string;
|
||||
|
||||
constructor(buildParameters: BuildParameters) {
|
||||
this.baseStackName = buildParameters.awsBaseStackName;
|
||||
}
|
||||
async cleanupSharedResources(
|
||||
async cleanup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -26,7 +26,7 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
async setupSharedResources(
|
||||
async setup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -66,20 +66,24 @@ class AWSBuildEnvironment implements CloudRunnerProviderInterface {
|
||||
);
|
||||
|
||||
let postRunTaskTimeMs;
|
||||
let output = '';
|
||||
try {
|
||||
const postSetupStacksTimeMs = Date.now();
|
||||
CloudRunnerLogger.log(`Setup job time: ${Math.floor((postSetupStacksTimeMs - startTimeMs) / 1000)}s`);
|
||||
output = await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
|
||||
const { output, shouldCleanup } = await AWSTaskRunner.runTask(taskDef, ECS, CF, environment, buildGuid, commands);
|
||||
postRunTaskTimeMs = Date.now();
|
||||
CloudRunnerLogger.log(`Run job time: ${Math.floor((postRunTaskTimeMs - postSetupStacksTimeMs) / 1000)}s`);
|
||||
} finally {
|
||||
await this.cleanupResources(CF, taskDef);
|
||||
if (shouldCleanup) {
|
||||
await this.cleanupResources(CF, taskDef);
|
||||
}
|
||||
const postCleanupTimeMs = Date.now();
|
||||
if (postRunTaskTimeMs !== undefined)
|
||||
CloudRunnerLogger.log(`Cleanup job time: ${Math.floor((postCleanupTimeMs - postRunTaskTimeMs) / 1000)}s`);
|
||||
|
||||
return output;
|
||||
} catch (error) {
|
||||
await this.cleanupResources(CF, taskDef);
|
||||
throw error;
|
||||
}
|
||||
return output;
|
||||
}
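The run path now skips stack teardown when the streamed logs asked to retain resources (shouldCleanup is false), while still tearing down on errors. The control flow, reduced to its essentials with placeholder function names:

// Reduced control-flow sketch; runJob and teardown are placeholders, not real APIs.
async function runWithConditionalCleanup(
  runJob: () => Promise<{ output: string; shouldCleanup: boolean }>,
  teardown: () => Promise<void>,
): Promise<string> {
  try {
    const { output, shouldCleanup } = await runJob();
    if (shouldCleanup) {
      await teardown();
    }

    return output;
  } catch (error) {
    await teardown();
    throw error;
  }
}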
|
||||
|
||||
async cleanupResources(CF: SDK.CloudFormation, taskDef: CloudRunnerAWSTaskDef) {
|
||||
@@ -1,19 +1,20 @@
|
||||
import * as k8s from '@kubernetes/client-node';
|
||||
import { BuildParameters, Output } from '../..';
|
||||
import { BuildParameters, Output } from '../../..';
|
||||
import * as core from '@actions/core';
|
||||
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import { ProviderInterface } from '../provider-interface';
|
||||
import CloudRunnerSecret from '../../services/cloud-runner-secret';
|
||||
import KubernetesStorage from './kubernetes-storage';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
|
||||
import KubernetesTaskRunner from './kubernetes-task-runner';
|
||||
import KubernetesSecret from './kubernetes-secret';
|
||||
import waitUntil from 'async-wait-until';
|
||||
import KubernetesJobSpecFactory from './kubernetes-job-spec-factory';
|
||||
import KubernetesServiceAccount from './kubernetes-service-account';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import { CoreV1Api } from '@kubernetes/client-node';
|
||||
import DependencyOverrideService from '../../services/depdency-override-service';
|
||||
|
||||
class Kubernetes implements CloudRunnerProviderInterface {
|
||||
class Kubernetes implements ProviderInterface {
|
||||
private kubeConfig: k8s.KubeConfig;
|
||||
private kubeClient: k8s.CoreV1Api;
|
||||
private kubeClientBatch: k8s.BatchV1Api;
|
||||
@@ -38,7 +39,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
this.namespace = 'default';
|
||||
this.buildParameters = buildParameters;
|
||||
}
|
||||
public async setupSharedResources(
|
||||
public async setup(
|
||||
buildGuid: string,
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -50,6 +51,9 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
this.pvcName = `unity-builder-pvc-${buildGuid}`;
|
||||
this.cleanupCronJobName = `unity-builder-cronjob-${buildGuid}`;
|
||||
this.serviceAccountName = `service-account-${buildGuid}`;
|
||||
if (await DependencyOverrideService.CheckHealth()) {
|
||||
await DependencyOverrideService.TryStartDependencies();
|
||||
}
|
||||
await KubernetesStorage.createPersistentVolumeClaim(
|
||||
buildParameters,
|
||||
this.pvcName,
|
||||
@@ -73,7 +77,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<string> {
|
||||
try {
|
||||
// setup
|
||||
// Setup
|
||||
this.buildGuid = buildGuid;
|
||||
this.secretName = `build-credentials-${buildGuid}`;
|
||||
this.jobName = `unity-builder-job-${buildGuid}`;
|
||||
@@ -94,7 +98,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
k8s,
|
||||
);
|
||||
|
||||
//run
|
||||
// Run
|
||||
const jobResult = await this.kubeClientBatch.createNamespacedJob(this.namespace, jobSpec);
|
||||
CloudRunnerLogger.log(`Creating build job ${JSON.stringify(jobResult.body.metadata, undefined, 4)}`);
|
||||
|
||||
@@ -115,7 +119,6 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
this.podName,
|
||||
'main',
|
||||
this.namespace,
|
||||
CloudRunnerLogger.log,
|
||||
);
|
||||
break;
|
||||
} catch (error: any) {
|
||||
@@ -127,6 +130,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
}
|
||||
}
|
||||
await this.cleanupTaskResources();
|
||||
|
||||
return output;
|
||||
} catch (error) {
|
||||
CloudRunnerLogger.log('Running job failed');
|
||||
@@ -159,6 +163,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
async () => {
|
||||
const jobBody = (await this.kubeClientBatch.readNamespacedJob(this.jobName, this.namespace)).body;
|
||||
const podBody = (await this.kubeClient.readNamespacedPod(this.podName, this.namespace)).body;
|
||||
|
||||
return (jobBody === null || jobBody.status?.active === 0) && podBody === null;
|
||||
},
|
||||
{
|
||||
@@ -170,7 +175,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
} catch {}
|
||||
}
|
||||
|
||||
async cleanupSharedResources(
|
||||
async cleanup(
|
||||
buildGuid: string,
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -191,6 +196,7 @@ class Kubernetes implements CloudRunnerProviderInterface {
|
||||
if (pod === undefined) {
|
||||
throw new Error("pod with job-name label doesn't exist");
|
||||
}
|
||||
|
||||
return pod;
|
||||
}
|
||||
}
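The Kubernetes cleanup path waits for the job and pod to disappear using async-wait-until, which repeatedly evaluates a predicate until it returns true or the timeout elapses. The basic usage pattern, with illustrative values matching the options used elsewhere in this diff:

import waitUntil from 'async-wait-until';

// Poll a condition every 15 seconds for at most 2 minutes (values are illustrative).
async function waitForCondition(check: () => Promise<boolean>): Promise<void> {
  await waitUntil(async () => await check(), {
    timeout: 120000,
    intervalBetweenAttempts: 15000,
  });
}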
|
||||
@@ -1,9 +1,9 @@
|
||||
import { V1EnvVar, V1EnvVarSource, V1SecretKeySelector } from '@kubernetes/client-node';
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import BuildParameters from '../../../build-parameters';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../../services/cloud-runner-build-command-process';
|
||||
import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerSecret from '../../services/cloud-runner-secret';
|
||||
import CloudRunner from '../../cloud-runner';
|
||||
|
||||
class KubernetesJobSpecFactory {
|
||||
static getJobSpec(
|
||||
@@ -103,13 +103,13 @@ class KubernetesJobSpecFactory {
|
||||
name: 'main',
|
||||
image,
|
||||
command: ['/bin/sh'],
|
||||
args: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(command, CloudRunnerState.buildParams)],
|
||||
args: ['-c', CloudRunnerBuildCommandProcessor.ProcessCommands(command, CloudRunner.buildParameters)],
|
||||
|
||||
workingDir: `${workingDirectory}`,
|
||||
resources: {
|
||||
requests: {
|
||||
memory: buildParameters.cloudRunnerMemory,
|
||||
cpu: buildParameters.cloudRunnerCpu,
|
||||
memory: buildParameters.cloudRunnerMemory || '750M',
|
||||
cpu: buildParameters.cloudRunnerCpu || '1',
|
||||
},
|
||||
},
|
||||
env: [
|
||||
@@ -117,6 +117,7 @@ class KubernetesJobSpecFactory {
|
||||
const environmentVariable = new V1EnvVar();
|
||||
environmentVariable.name = x.name;
|
||||
environmentVariable.value = x.value;
|
||||
|
||||
return environmentVariable;
|
||||
}),
|
||||
...secrets.map((x) => {
|
||||
@@ -127,6 +128,7 @@ class KubernetesJobSpecFactory {
|
||||
const environmentVariable = new V1EnvVar();
|
||||
environmentVariable.name = x.EnvironmentVariable;
|
||||
environmentVariable.valueFrom = secret;
|
||||
|
||||
return environmentVariable;
|
||||
}),
|
||||
],
|
||||
@@ -155,6 +157,7 @@ class KubernetesJobSpecFactory {
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
return job;
|
||||
}
|
||||
}
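Each build secret becomes an environment variable whose value is read from a Kubernetes Secret through a V1SecretKeySelector rather than being inlined in the job spec. A condensed sketch of that mapping (secret name and key are placeholders):

import { V1EnvVar, V1EnvVarSource, V1SecretKeySelector } from '@kubernetes/client-node';

// Illustrative: reference a key inside an existing Kubernetes Secret from an env var.
function environmentVariableFromSecret(name: string, secretName: string, secretKey: string): V1EnvVar {
  const selector = new V1SecretKeySelector();
  selector.name = secretName;
  selector.key = secretKey;

  const source = new V1EnvVarSource();
  source.secretKeyRef = selector;

  const environmentVariable = new V1EnvVar();
  environmentVariable.name = name;
  environmentVariable.valueFrom = source;

  return environmentVariable;
}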
|
||||
@@ -1,5 +1,5 @@
|
||||
import { CoreV1Api } from '@kubernetes/client-node';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import CloudRunnerSecret from '../../services/cloud-runner-secret';
|
||||
import * as k8s from '@kubernetes/client-node';
|
||||
const base64 = require('base-64');
|
||||
|
||||
@@ -21,6 +21,7 @@ class KubernetesSecret {
|
||||
for (const buildSecret of secrets) {
|
||||
secret.data[buildSecret.ParameterKey] = base64.encode(buildSecret.ParameterValue);
|
||||
}
|
||||
|
||||
return kubeClient.createNamespacedSecret(namespace, secret);
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,7 @@ class KubernetesServiceAccount {
|
||||
name: serviceAccountName,
|
||||
};
|
||||
serviceAccount.automountServiceAccountToken = false;
|
||||
|
||||
return kubeClient.createNamespacedServiceAccount(namespace, serviceAccount);
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,10 @@
|
||||
import waitUntil from 'async-wait-until';
|
||||
import * as core from '@actions/core';
|
||||
import * as k8s from '@kubernetes/client-node';
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import BuildParameters from '../../../build-parameters';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import YAML from 'yaml';
|
||||
import { IncomingMessage } from 'http';
|
||||
|
||||
class KubernetesStorage {
|
||||
public static async createPersistentVolumeClaim(
|
||||
@@ -15,6 +16,7 @@ class KubernetesStorage {
|
||||
if (buildParameters.kubeVolume) {
|
||||
CloudRunnerLogger.log(buildParameters.kubeVolume);
|
||||
pvcName = buildParameters.kubeVolume;
|
||||
|
||||
return;
|
||||
}
|
||||
const pvcList = (await kubeClient.listNamespacedPersistentVolumeClaim(namespace)).body.items.map(
|
||||
@@ -24,7 +26,10 @@ class KubernetesStorage {
|
||||
CloudRunnerLogger.log(JSON.stringify(pvcList, undefined, 4));
|
||||
if (pvcList.includes(pvcName)) {
|
||||
CloudRunnerLogger.log(`pvc ${pvcName} already exists`);
|
||||
core.setOutput('volume', pvcName);
|
||||
if (!buildParameters.isCliMode) {
|
||||
core.setOutput('volume', pvcName);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
CloudRunnerLogger.log(`Creating PVC ${pvcName} (does not exist)`);
|
||||
@@ -48,10 +53,10 @@ class KubernetesStorage {
|
||||
CloudRunnerLogger.log(`${await this.getPVCPhase(kubeClient, name, namespace)}`);
|
||||
await waitUntil(
|
||||
async () => {
|
||||
return (await this.getPVCPhase(kubeClient, name, namespace)) !== 'Pending';
|
||||
return (await this.getPVCPhase(kubeClient, name, namespace)) === 'Pending';
|
||||
},
|
||||
{
|
||||
timeout: 500000,
|
||||
timeout: 750000,
|
||||
intervalBetweenAttempts: 15000,
|
||||
},
|
||||
);
|
||||
@@ -83,7 +88,7 @@ class KubernetesStorage {
|
||||
};
|
||||
pvc.spec = {
|
||||
accessModes: ['ReadWriteOnce'],
|
||||
storageClassName: process.env.K8s_STORAGE_CLASS || 'standard',
|
||||
storageClassName: buildParameters.kubeStorageClass === '' ? 'standard' : buildParameters.kubeStorageClass,
|
||||
resources: {
|
||||
requests: {
|
||||
storage: buildParameters.kubeVolumeSize,
|
||||
@@ -94,11 +99,12 @@ class KubernetesStorage {
|
||||
YAML.parse(process.env.K8s_STORAGE_PVC_SPEC);
|
||||
}
|
||||
const result = await kubeClient.createNamespacedPersistentVolumeClaim(namespace, pvc);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static async handleResult(
|
||||
result: { response: import('http').IncomingMessage; body: k8s.V1PersistentVolumeClaim },
|
||||
result: { response: IncomingMessage; body: k8s.V1PersistentVolumeClaim },
|
||||
kubeClient: k8s.CoreV1Api,
|
||||
namespace: string,
|
||||
pvcName: string,
|
||||
@@ -1,10 +1,10 @@
|
||||
import { CoreV1Api, KubeConfig, Log } from '@kubernetes/client-node';
|
||||
import { Writable } from 'stream';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import * as core from '@actions/core';
|
||||
import { CloudRunnerStatics } from '../cloud-runner-statics';
|
||||
import { CloudRunnerStatics } from '../../cloud-runner-statics';
|
||||
import waitUntil from 'async-wait-until';
|
||||
import { Input } from '../..';
|
||||
import { FollowLogStreamService } from '../../services/follow-log-stream-service';
|
||||
|
||||
class KubernetesTaskRunner {
|
||||
static async runTask(
|
||||
@@ -14,20 +14,23 @@ class KubernetesTaskRunner {
|
||||
podName: string,
|
||||
containerName: string,
|
||||
namespace: string,
|
||||
logCallback: any,
|
||||
) {
|
||||
CloudRunnerLogger.log(`Streaming logs from pod: ${podName} container: ${containerName} namespace: ${namespace}`);
|
||||
const stream = new Writable();
|
||||
let output = '';
|
||||
let didStreamAnyLogs: boolean = false;
|
||||
let shouldReadLogs = true;
|
||||
let shouldCleanup = true;
|
||||
stream._write = (chunk, encoding, next) => {
|
||||
didStreamAnyLogs = true;
|
||||
let message = chunk.toString().trimRight(`\n`);
|
||||
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
|
||||
if (Input.cloudRunnerTests) {
|
||||
output += message;
|
||||
}
|
||||
logCallback(message);
|
||||
({ shouldReadLogs, shouldCleanup, output } = FollowLogStreamService.handleIteration(
|
||||
message,
|
||||
shouldReadLogs,
|
||||
shouldCleanup,
|
||||
output,
|
||||
));
|
||||
next();
|
||||
};
|
||||
const logOptions = {
|
||||
@@ -73,6 +76,7 @@ class KubernetesTaskRunner {
|
||||
throw error;
|
||||
}
|
||||
CloudRunnerLogger.log('end of log stream');
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
@@ -90,6 +94,7 @@ class KubernetesTaskRunner {
|
||||
}`,
|
||||
);
|
||||
if (success || phase !== 'Pending') return true;
|
||||
|
||||
return false;
|
||||
},
|
||||
{
|
||||
@@ -97,6 +102,7 @@ class KubernetesTaskRunner {
|
||||
intervalBetweenAttempts: 15000,
|
||||
},
|
||||
);
|
||||
|
||||
return success;
|
||||
}
|
||||
}
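The Kubernetes log follower feeds pod output through a Writable stream whose _write handler pushes every chunk into the shared FollowLogStreamService handling. A minimal, self-contained version of that stream wiring (the callback is a placeholder):

import { Writable } from 'stream';

// Illustrative: collect chunks from a stream into a single string and forward
// each chunk to a callback, as the log follower above does per message.
function createLogCollector(onMessage: (message: string) => void): { stream: Writable; getOutput: () => string } {
  let output = '';
  const stream = new Writable({
    write(chunk, _encoding, next) {
      const message = chunk.toString();
      output += message;
      onMessage(message);
      next();
    },
  });

  return { stream, getOutput: () => output };
}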
|
||||
49 src/model/cloud-runner/providers/local-docker/index.ts Normal file
@@ -0,0 +1,49 @@
|
||||
import BuildParameters from '../../../build-parameters';
|
||||
import { CloudRunnerSystem } from '../../services/cloud-runner-system';
|
||||
import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import { ProviderInterface } from '../provider-interface';
|
||||
import CloudRunnerSecret from '../../services/cloud-runner-secret';
|
||||
|
||||
class LocalDockerCloudRunner implements ProviderInterface {
|
||||
cleanup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
setup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
public runTask(
|
||||
commands: string,
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
image: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
mountdir: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
workingdir: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
environment: CloudRunnerEnvironmentVariable[],
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<string> {
|
||||
CloudRunnerLogger.log(buildGuid);
|
||||
CloudRunnerLogger.log(commands);
|
||||
|
||||
return CloudRunnerSystem.Run(commands, false, false);
|
||||
}
|
||||
}
|
||||
export default LocalDockerCloudRunner;
|
||||
49 src/model/cloud-runner/providers/local/index.ts Normal file
@@ -0,0 +1,49 @@
|
||||
import BuildParameters from '../../../build-parameters';
|
||||
import { CloudRunnerSystem } from '../../services/cloud-runner-system';
|
||||
import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import { ProviderInterface } from '../provider-interface';
|
||||
import CloudRunnerSecret from '../../services/cloud-runner-secret';
|
||||
|
||||
class LocalCloudRunner implements ProviderInterface {
|
||||
cleanup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
public setup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
public async runTask(
|
||||
buildGuid: string,
|
||||
image: string,
|
||||
commands: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
mountdir: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
workingdir: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
environment: CloudRunnerEnvironmentVariable[],
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<string> {
|
||||
CloudRunnerLogger.log(image);
|
||||
CloudRunnerLogger.log(buildGuid);
|
||||
CloudRunnerLogger.log(commands);
|
||||
|
||||
return await CloudRunnerSystem.Run(commands);
|
||||
}
|
||||
}
|
||||
export default LocalCloudRunner;
|
||||
@@ -1,9 +1,9 @@
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
|
||||
import CloudRunnerSecret from './cloud-runner-secret';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
|
||||
export interface CloudRunnerProviderInterface {
|
||||
cleanupSharedResources(
|
||||
export interface ProviderInterface {
|
||||
cleanup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -13,7 +13,7 @@ export interface CloudRunnerProviderInterface {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
);
|
||||
setupSharedResources(
|
||||
setup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
50 src/model/cloud-runner/providers/test/index.ts Normal file
@@ -0,0 +1,50 @@
|
||||
import BuildParameters from '../../../build-parameters';
|
||||
import CloudRunnerEnvironmentVariable from '../../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerLogger from '../../services/cloud-runner-logger';
|
||||
import { ProviderInterface } from '../provider-interface';
|
||||
import CloudRunnerSecret from '../../services/cloud-runner-secret';
|
||||
|
||||
class TestCloudRunner implements ProviderInterface {
|
||||
cleanup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
setup(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildGuid: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
buildParameters: BuildParameters,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
branchName: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
defaultSecretsArray: { ParameterKey: string; EnvironmentVariable: string; ParameterValue: string }[],
|
||||
) {}
|
||||
public async runTask(
|
||||
commands: string,
|
||||
buildGuid: string,
|
||||
image: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
mountdir: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
workingdir: string,
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
environment: CloudRunnerEnvironmentVariable[],
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
secrets: CloudRunnerSecret[],
|
||||
): Promise<string> {
|
||||
CloudRunnerLogger.log(image);
|
||||
CloudRunnerLogger.log(buildGuid);
|
||||
CloudRunnerLogger.log(commands);
|
||||
|
||||
return await new Promise((result) => {
|
||||
result(commands);
|
||||
});
|
||||
}
|
||||
}
|
||||
export default TestCloudRunner;
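With local, local-docker, test, Kubernetes and AWS implementations all conforming to the renamed ProviderInterface, the cloud runner can pick one from the build parameters. The actual selection logic is outside these hunks; a plausible shape, with names and the factory structure as assumptions:

// Illustrative only: pick a provider implementation by name.
type ProviderFactory = () => { runTask: (...arguments_: unknown[]) => Promise<string> };

function selectProvider(providerName: string, factories: Record<string, ProviderFactory>) {
  const factory = factories[providerName];
  if (!factory) {
    throw new Error(`Unknown cloud runner provider: ${providerName}`);
  }

  return factory();
}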
|
||||
63 src/model/cloud-runner/remote-client/caching.test.ts Normal file
@@ -0,0 +1,63 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import BuildParameters from '../../build-parameters';
|
||||
import { Cli } from '../../cli/cli';
|
||||
import Input from '../../input';
|
||||
import UnityVersioning from '../../unity-versioning';
|
||||
import CloudRunner from '../cloud-runner';
|
||||
import { CloudRunnerSystem } from '../services/cloud-runner-system';
|
||||
import { Caching } from './caching';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
describe('Cloud Runner Caching', () => {
|
||||
it('responds', () => {});
|
||||
});
|
||||
describe('Cloud Runner Caching', () => {
|
||||
if (process.platform === 'linux') {
|
||||
it('Simple caching works', async () => {
|
||||
Cli.options = {
|
||||
versioning: 'None',
|
||||
projectPath: 'test-project',
|
||||
unityVersion: UnityVersioning.read('test-project'),
|
||||
targetPlatform: 'StandaloneLinux64',
|
||||
cacheKey: `test-case-${uuidv4()}`,
|
||||
};
|
||||
Input.githubInputEnabled = false;
|
||||
const buildParameter = await BuildParameters.create();
|
||||
CloudRunner.buildParameters = buildParameter;
|
||||
|
||||
// Create test folder
|
||||
const testFolder = path.resolve(__dirname, Cli.options.cacheKey);
|
||||
fs.mkdirSync(testFolder);
|
||||
|
||||
// Create cache folder
|
||||
const cacheFolder = path.resolve(__dirname, `cache-${Cli.options.cacheKey}`);
|
||||
fs.mkdirSync(cacheFolder);
|
||||
|
||||
// Add test file to test folders
|
||||
fs.writeFileSync(path.resolve(testFolder, 'test.txt'), Cli.options.cacheKey);
|
||||
await Caching.PushToCache(cacheFolder, testFolder, `${Cli.options.cacheKey}`);
|
||||
|
||||
// Delete test folder
|
||||
fs.rmdirSync(testFolder, { recursive: true });
|
||||
await Caching.PullFromCache(
|
||||
cacheFolder.replace(/\\/g, `/`),
|
||||
testFolder.replace(/\\/g, `/`),
|
||||
`${Cli.options.cacheKey}`,
|
||||
);
|
||||
await CloudRunnerSystem.Run(`du -h ${__dirname}`);
|
||||
await CloudRunnerSystem.Run(`tree ${testFolder}`);
|
||||
await CloudRunnerSystem.Run(`tree ${cacheFolder}`);
|
||||
|
||||
// Compare validity to original hash
|
||||
expect(fs.readFileSync(path.resolve(testFolder, 'test.txt'), { encoding: 'utf8' }).toString()).toContain(
|
||||
Cli.options.cacheKey,
|
||||
);
|
||||
fs.rmdirSync(testFolder, { recursive: true });
|
||||
fs.rmdirSync(cacheFolder, { recursive: true });
|
||||
|
||||
Input.githubInputEnabled = true;
|
||||
delete Cli.options;
|
||||
}, 1000000);
|
||||
}
|
||||
});
|
||||
172 src/model/cloud-runner/remote-client/caching.ts Normal file
@@ -0,0 +1,172 @@
|
||||
import { assert } from 'console';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import CloudRunner from '../cloud-runner';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
|
||||
import { CloudRunnerSystem } from '../services/cloud-runner-system';
|
||||
import { LfsHashing } from '../services/lfs-hashing';
|
||||
import { RemoteClientLogger } from './remote-client-logger';
|
||||
import { Cli } from '../../cli/cli';
|
||||
import { CliFunction } from '../../cli/cli-functions-repository';
|
||||
// eslint-disable-next-line github/no-then
|
||||
const fileExists = async (fpath) => !!(await fs.promises.stat(fpath).catch(() => false));
|
||||
|
||||
export class Caching {
|
||||
@CliFunction(`cache-push`, `push to cache`)
|
||||
static async cachePush() {
|
||||
try {
|
||||
const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
|
||||
CloudRunner.buildParameters = buildParameter;
|
||||
await Caching.PushToCache(
|
||||
Cli.options['cachePushTo'],
|
||||
Cli.options['cachePushFrom'],
|
||||
Cli.options['artifactName'] || '',
|
||||
);
|
||||
} catch (error: any) {
|
||||
CloudRunnerLogger.log(`${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
@CliFunction(`cache-pull`, `pull from cache`)
|
||||
static async cachePull() {
|
||||
try {
|
||||
const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
|
||||
CloudRunner.buildParameters = buildParameter;
|
||||
await Caching.PullFromCache(
|
||||
Cli.options['cachePushFrom'],
|
||||
Cli.options['cachePushTo'],
|
||||
Cli.options['artifactName'] || '',
|
||||
);
|
||||
} catch (error: any) {
|
||||
CloudRunnerLogger.log(`${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
public static async PushToCache(cacheFolder: string, sourceFolder: string, cacheArtifactName: string) {
|
||||
cacheArtifactName = cacheArtifactName.replace(' ', '');
|
||||
const startPath = process.cwd();
|
||||
try {
|
||||
if (!(await fileExists(cacheFolder))) {
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${cacheFolder}`);
|
||||
}
|
||||
process.chdir(path.resolve(sourceFolder, '..'));
|
||||
|
||||
if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
|
||||
CloudRunnerLogger.log(
|
||||
`Hashed cache folder ${await LfsHashing.hashAllFiles(sourceFolder)} ${sourceFolder} ${path.basename(
|
||||
sourceFolder,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
// eslint-disable-next-line func-style
|
||||
const formatFunction = function (format: string) {
|
||||
const arguments_ = Array.prototype.slice.call(
|
||||
[path.resolve(sourceFolder, '..'), cacheFolder, cacheArtifactName],
|
||||
1,
|
||||
);
|
||||
|
||||
return format.replace(/{(\d+)}/g, function (match, number) {
|
||||
return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
|
||||
});
|
||||
};
|
||||
await CloudRunnerSystem.Run(`tar -cf ${cacheArtifactName}.tar ${path.basename(sourceFolder)}`);
|
||||
assert(await fileExists(`${cacheArtifactName}.tar`), 'cache archive exists');
|
||||
assert(await fileExists(path.basename(sourceFolder)), 'source folder exists');
|
||||
if (CloudRunner.buildParameters.cachePushOverrideCommand) {
|
||||
await CloudRunnerSystem.Run(formatFunction(CloudRunner.buildParameters.cachePushOverrideCommand));
|
||||
}
|
||||
await CloudRunnerSystem.Run(`mv ${cacheArtifactName}.tar ${cacheFolder}`);
|
||||
RemoteClientLogger.log(`moved cache entry ${cacheArtifactName} to ${cacheFolder}`);
|
||||
assert(
|
||||
await fileExists(`${path.join(cacheFolder, cacheArtifactName)}.tar`),
|
||||
'cache archive exists inside cache folder',
|
||||
);
|
||||
} catch (error) {
|
||||
process.chdir(`${startPath}`);
|
||||
throw error;
|
||||
}
|
||||
process.chdir(`${startPath}`);
|
||||
}
|
||||
public static async PullFromCache(cacheFolder: string, destinationFolder: string, cacheArtifactName: string = ``) {
|
||||
cacheArtifactName = cacheArtifactName.replace(' ', '');
|
||||
const startPath = process.cwd();
|
||||
RemoteClientLogger.log(`Caching for ${path.basename(destinationFolder)}`);
|
||||
try {
|
||||
if (!(await fileExists(cacheFolder))) {
|
||||
await fs.promises.mkdir(cacheFolder);
|
||||
}
|
||||
|
||||
if (!(await fileExists(destinationFolder))) {
|
||||
await fs.promises.mkdir(destinationFolder);
|
||||
}
|
||||
|
||||
const latestInBranch = await (await CloudRunnerSystem.Run(`ls -t "${cacheFolder}" | grep .tar$ | head -1`))
|
||||
.replace(/\n/g, ``)
|
||||
.replace('.tar', '');
|
||||
|
||||
process.chdir(cacheFolder);
|
||||
|
||||
const cacheSelection =
|
||||
cacheArtifactName !== `` && (await fileExists(`${cacheArtifactName}.tar`)) ? cacheArtifactName : latestInBranch;
|
||||
await CloudRunnerLogger.log(`cache key ${cacheArtifactName} selection ${cacheSelection}`);
|
||||
|
||||
// eslint-disable-next-line func-style
|
||||
const formatFunction = function (format: string) {
|
||||
const arguments_ = Array.prototype.slice.call(
|
||||
[path.resolve(destinationFolder, '..'), cacheFolder, cacheArtifactName],
|
||||
1,
|
||||
);
|
||||
|
||||
return format.replace(/{(\d+)}/g, function (match, number) {
|
||||
return typeof arguments_[number] != 'undefined' ? arguments_[number] : match;
|
||||
});
|
||||
};
|
||||
|
||||
if (CloudRunner.buildParameters.cachePullOverrideCommand) {
|
||||
await CloudRunnerSystem.Run(formatFunction(CloudRunner.buildParameters.cachePullOverrideCommand));
|
||||
}
|
||||
|
||||
if (await fileExists(`${cacheSelection}.tar`)) {
|
||||
const resultsFolder = `results${CloudRunner.buildParameters.buildGuid}`;
|
||||
await CloudRunnerSystem.Run(`mkdir -p ${resultsFolder}`);
|
||||
RemoteClientLogger.log(`cache item exists ${cacheFolder}/${cacheSelection}.tar`);
|
||||
const fullResultsFolder = path.join(cacheFolder, resultsFolder);
|
||||
await CloudRunnerSystem.Run(`tar -xf ${cacheSelection}.tar -C ${fullResultsFolder}`);
|
||||
RemoteClientLogger.log(`cache item extracted to ${fullResultsFolder}`);
|
||||
assert(await fileExists(fullResultsFolder), `cache extraction results folder exists`);
|
||||
const destinationParentFolder = path.resolve(destinationFolder, '..');
|
||||
|
||||
if (await fileExists(destinationFolder)) {
|
||||
await fs.promises.rmdir(destinationFolder, { recursive: true });
|
||||
}
|
||||
await CloudRunnerSystem.Run(
|
||||
`mv "${path.join(fullResultsFolder, path.basename(destinationFolder))}" "${destinationParentFolder}"`,
|
||||
);
|
||||
const contents = await fs.promises.readdir(
|
||||
path.join(destinationParentFolder, path.basename(destinationFolder)),
|
||||
);
|
||||
CloudRunnerLogger.log(
|
||||
`There is ${contents.length} files/dir in the cache pulled contents for ${path.basename(destinationFolder)}`,
|
||||
);
|
||||
} else {
|
||||
RemoteClientLogger.logWarning(`cache item ${cacheArtifactName} doesn't exist ${destinationFolder}`);
|
||||
if (cacheSelection !== ``) {
|
||||
RemoteClientLogger.logWarning(`cache item ${cacheArtifactName}.tar doesn't exist ${destinationFolder}`);
|
||||
throw new Error(`Failed to get cache item, but cache hit was found: ${cacheSelection}`);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
process.chdir(`${startPath}`);
|
||||
throw error;
|
||||
}
|
||||
process.chdir(`${startPath}`);
|
||||
}
|
||||
|
||||
public static async handleCachePurging() {
|
||||
if (process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined) {
|
||||
RemoteClientLogger.log(`purging ${CloudRunnerFolders.purgeRemoteCaching}`);
|
||||
fs.promises.rmdir(CloudRunnerFolders.cacheFolder, { recursive: true });
|
||||
}
|
||||
}
|
||||
}
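The cache push and pull override commands support positional placeholders ({0}, {1}, ...) that are substituted with folder and artifact values before the command runs; note that Array.prototype.slice.call(list, 1) drops the first value, so {0} resolves to the second entry in the list built above. A standalone sketch of just the substitution step (argument order here is illustrative):

// Illustrative: replace {0}, {1}, ... in a command template with positional values.
function formatCommand(template: string, values: string[]): string {
  return template.replace(/{(\d+)}/g, (match, index) =>
    typeof values[Number(index)] !== 'undefined' ? values[Number(index)] : match,
  );
}

// formatCommand('upload {0}/{1}.tar', ['cache', 'build-123']) -> 'upload cache/build-123.tar'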
|
||||
97 src/model/cloud-runner/remote-client/index.ts Normal file
@@ -0,0 +1,97 @@
import fs from 'fs';
import CloudRunner from '../cloud-runner';
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
import { Caching } from './caching';
import { LfsHashing } from '../services/lfs-hashing';
import { RemoteClientLogger } from './remote-client-logger';
import path from 'path';
import { assert } from 'console';
import CloudRunnerLogger from '../services/cloud-runner-logger';
import { CliFunction } from '../../cli/cli-functions-repository';
import { CloudRunnerSystem } from '../services/cloud-runner-system';

export class RemoteClient {
public static async bootstrapRepository() {
try {
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute}`);
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.repoPathAbsolute}`);
await CloudRunnerSystem.Run(`mkdir -p ${CloudRunnerFolders.cacheFolderFull}`);
process.chdir(CloudRunnerFolders.repoPathAbsolute);
await RemoteClient.cloneRepoWithoutLFSFiles();
await RemoteClient.sizeOfFolder('repo before lfs cache pull', CloudRunnerFolders.repoPathAbsolute);
const lfsHashes = await LfsHashing.createLFSHashFiles();
if (fs.existsSync(CloudRunnerFolders.libraryFolderAbsolute)) {
RemoteClientLogger.logWarning(`!Warning!: The Unity library was included in the git repository`);
}
await Caching.PullFromCache(
CloudRunnerFolders.lfsCacheFolderFull,
CloudRunnerFolders.lfsFolderAbsolute,
`${lfsHashes.lfsGuidSum}`,
);
await RemoteClient.sizeOfFolder('repo after lfs cache pull', CloudRunnerFolders.repoPathAbsolute);
await RemoteClient.pullLatestLFS();
await RemoteClient.sizeOfFolder('repo before lfs git pull', CloudRunnerFolders.repoPathAbsolute);
await Caching.PushToCache(
CloudRunnerFolders.lfsCacheFolderFull,
CloudRunnerFolders.lfsFolderAbsolute,
`${lfsHashes.lfsGuidSum}`,
);
await Caching.PullFromCache(CloudRunnerFolders.libraryCacheFolderFull, CloudRunnerFolders.libraryFolderAbsolute);
await RemoteClient.sizeOfFolder('repo after library cache pull', CloudRunnerFolders.repoPathAbsolute);
await Caching.handleCachePurging();
} catch (error) {
throw error;
}
}

private static async sizeOfFolder(message: string, folder: string) {
if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
CloudRunnerLogger.log(`Size of ${message}`);
await CloudRunnerSystem.Run(`du -sh ${folder}`);
}
}

private static async cloneRepoWithoutLFSFiles() {
try {
process.chdir(`${CloudRunnerFolders.repoPathAbsolute}`);
RemoteClientLogger.log(`Initializing source repository for cloning with caching of LFS files`);
await CloudRunnerSystem.Run(`git config --global advice.detachedHead false`);
RemoteClientLogger.log(`Cloning the repository being built:`);
await CloudRunnerSystem.Run(`git config --global filter.lfs.smudge "git-lfs smudge --skip -- %f"`);
await CloudRunnerSystem.Run(`git config --global filter.lfs.process "git-lfs filter-process --skip"`);
await CloudRunnerSystem.Run(
`git clone -q ${CloudRunnerFolders.targetBuildRepoUrl} ${path.resolve(
`..`,
path.basename(CloudRunnerFolders.repoPathAbsolute),
)}`,
);
await CloudRunnerSystem.Run(`git lfs install`);
assert(fs.existsSync(`.git`), 'git folder exists');
RemoteClientLogger.log(`${CloudRunner.buildParameters.branch}`);
await CloudRunnerSystem.Run(`git checkout ${CloudRunner.buildParameters.branch}`);
assert(fs.existsSync(path.join(`.git`, `lfs`)), 'LFS folder should not exist before caching');
RemoteClientLogger.log(`Checked out ${process.env.GITHUB_SHA}`);
} catch (error) {
throw error;
}
}

private static async pullLatestLFS() {
process.chdir(CloudRunnerFolders.repoPathAbsolute);
await CloudRunnerSystem.Run(`git config --global filter.lfs.smudge "git-lfs smudge -- %f"`);
await CloudRunnerSystem.Run(`git config --global filter.lfs.process "git-lfs filter-process"`);
await CloudRunnerSystem.Run(`git lfs pull`);
RemoteClientLogger.log(`pulled latest LFS files`);
assert(fs.existsSync(CloudRunnerFolders.lfsFolderAbsolute));
}

@CliFunction(`remote-cli`, `sets up a repository, usually before a game-ci build`)
static async runRemoteClientJob() {
const buildParameter = JSON.parse(process.env.BUILD_PARAMETERS || '{}');
RemoteClientLogger.log(`Build Params:
${JSON.stringify(buildParameter, undefined, 4)}
`);
CloudRunner.buildParameters = buildParameter;
await RemoteClient.bootstrapRepository();
}
}
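For orientation, here is a minimal sketch (not part of the diff) of driving the remote-cli entry point above directly. It assumes BUILD_PARAMETERS carries the serialized build parameters, as runRemoteClientJob expects, and that the import path matches the new file added in this diff; all concrete values are made up.

// Hypothetical driver: mirrors what `node dist/index.js -m remote-cli` ends up doing.
import { RemoteClient } from './src/model/cloud-runner/remote-client';

async function runRemoteCliLocally() {
  // Illustrative values only; in a real job these come from BuildParameters serialization.
  process.env.BUILD_PARAMETERS = JSON.stringify({
    branch: 'main',
    buildGuid: '123-windows64-abcd',
    cacheKey: 'main',
    projectPath: '.',
    cloudRunnerIntegrationTests: false,
  });
  // Clones the repo without LFS smudging, restores LFS/Library caches, then pulls LFS.
  await RemoteClient.runRemoteClientJob();
}

runRemoteCliLocally().catch((error) => {
  console.error(error);
  process.exit(1);
});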
@@ -1,4 +1,4 @@
|
||||
import CloudRunnerLogger from '../../../cloud-runner/services/cloud-runner-logger';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
|
||||
export class RemoteClientLogger {
|
||||
public static log(message: string) {
|
||||
@@ -1,25 +1,27 @@
|
||||
import { BuildParameters, Input } from '../..';
|
||||
import { BuildParameters } from '../..';
|
||||
import YAML from 'yaml';
|
||||
import CloudRunnerSecret from './cloud-runner-secret';
|
||||
import CloudRunner from '../cloud-runner';
|
||||
|
||||
export class CloudRunnerBuildCommandProcessor {
|
||||
public static ProcessCommands(commands: string, buildParameters: BuildParameters): string {
|
||||
const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`all`));
|
||||
const hooks = CloudRunnerBuildCommandProcessor.getHooks(buildParameters.customJobHooks).filter((x) =>
|
||||
x.step.includes(`all`),
|
||||
);
|
||||
|
||||
return `echo "---"
|
||||
echo "start cloud runner init"
|
||||
${Input.cloudRunnerTests ? '' : '#'} printenv
|
||||
echo "start cloud runner job"
|
||||
${CloudRunner.buildParameters.cloudRunnerIntegrationTests ? '' : '#'} printenv
|
||||
echo "start of cloud runner job"
|
||||
${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
|
||||
${commands}
|
||||
${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
|
||||
echo "end of cloud runner job
|
||||
---${buildParameters.logId}"
|
||||
`;
|
||||
echo "end of cloud runner job"
|
||||
echo "---${buildParameters.logId}"`;
|
||||
}
|
||||
|
||||
public static getHooks(): Hook[] {
|
||||
const experimentHooks = process.env.EXPERIMENTAL_HOOKS;
|
||||
public static getHooks(customJobHooks): Hook[] {
|
||||
const experimentHooks = customJobHooks;
|
||||
let output = new Array<Hook>();
|
||||
if (experimentHooks && experimentHooks !== '') {
|
||||
try {
|
||||
@@ -28,6 +30,7 @@ export class CloudRunnerBuildCommandProcessor {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return output.filter((x) => x.step !== undefined && x.hook !== undefined && x.hook.length > 0);
|
||||
}
|
||||
}
|
||||
|
||||
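The middle of getHooks is elided in this hunk, so the exact customJobHooks schema is not visible here; the filters do imply each hook carries step, hook, commands and optional secrets fields, presumably parsed with YAML.parse. A rough sketch of how a caller might exercise it, with an entirely made-up hook definition:

// Illustrative only: the field names follow the filters used above (step/hook/commands/secrets), not a documented schema.
import { CloudRunnerBuildCommandProcessor } from './src/model/cloud-runner/services/cloud-runner-build-command-process';

const customJobHooks = `
- name: print-versions
  step: [all]
  hook: [before]
  commands: node --version && git --version
`;

const hooks = CloudRunnerBuildCommandProcessor.getHooks(customJobHooks);
const beforeCommands = hooks.filter((x) => x.hook.includes('before')).map((x) => x.commands);
console.log(beforeCommands); // expected: ['node --version && git --version']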
73 src/model/cloud-runner/services/cloud-runner-folders.ts Normal file
@@ -0,0 +1,73 @@
|
||||
import path from 'path';
|
||||
import { CloudRunner } from '../..';
|
||||
|
||||
export class CloudRunnerFolders {
|
||||
public static readonly repositoryFolder = 'repo';
|
||||
|
||||
// Only the following paths that do not start a path.join with another "Full" suffixed property need to start with an absolute /
|
||||
|
||||
public static get uniqueCloudRunnerJobFolderAbsolute(): string {
|
||||
return path.join(`/`, CloudRunnerFolders.buildVolumeFolder, CloudRunner.buildParameters.buildGuid);
|
||||
}
|
||||
|
||||
public static get cacheFolderFull(): string {
|
||||
return path.join(
|
||||
'/',
|
||||
CloudRunnerFolders.buildVolumeFolder,
|
||||
CloudRunnerFolders.cacheFolder,
|
||||
CloudRunner.buildParameters.cacheKey,
|
||||
);
|
||||
}
|
||||
|
||||
public static get builderPathAbsolute(): string {
|
||||
return path.join(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute, `builder`);
|
||||
}
|
||||
|
||||
public static get repoPathAbsolute(): string {
|
||||
return path.join(CloudRunnerFolders.uniqueCloudRunnerJobFolderAbsolute, CloudRunnerFolders.repositoryFolder);
|
||||
}
|
||||
|
||||
public static get projectPathAbsolute(): string {
|
||||
return path.join(CloudRunnerFolders.repoPathAbsolute, CloudRunner.buildParameters.projectPath);
|
||||
}
|
||||
|
||||
public static get libraryFolderAbsolute(): string {
|
||||
return path.join(CloudRunnerFolders.projectPathAbsolute, `Library`);
|
||||
}
|
||||
|
||||
public static get projectBuildFolderAbsolute(): string {
|
||||
return path.join(CloudRunnerFolders.repoPathAbsolute, CloudRunner.buildParameters.buildPath);
|
||||
}
|
||||
|
||||
public static get lfsFolderAbsolute(): string {
|
||||
return path.join(CloudRunnerFolders.repoPathAbsolute, `.git`, `lfs`);
|
||||
}
|
||||
|
||||
public static get purgeRemoteCaching(): boolean {
|
||||
return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
|
||||
}
|
||||
|
||||
public static get lfsCacheFolderFull() {
|
||||
return path.join(CloudRunnerFolders.cacheFolderFull, `lfs`);
|
||||
}
|
||||
|
||||
public static get libraryCacheFolderFull() {
|
||||
return path.join(CloudRunnerFolders.cacheFolderFull, `Library`);
|
||||
}
|
||||
|
||||
public static get unityBuilderRepoUrl(): string {
|
||||
return `https://${CloudRunner.buildParameters.gitPrivateToken}@github.com/game-ci/unity-builder.git`;
|
||||
}
|
||||
|
||||
public static get targetBuildRepoUrl(): string {
|
||||
return `https://${CloudRunner.buildParameters.gitPrivateToken}@github.com/${CloudRunner.buildParameters.githubRepo}.git`;
|
||||
}
|
||||
|
||||
public static get buildVolumeFolder() {
|
||||
return 'data';
|
||||
}
|
||||
|
||||
public static get cacheFolder() {
|
||||
return 'cache';
|
||||
}
|
||||
}
|
||||
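To make the path getters above concrete, a small sketch that recomputes the layout by hand for one hypothetical job (the buildGuid and cacheKey values are made up):

import path from 'path';

// Rebuilding the same joins the getters perform, with illustrative parameters.
const buildVolumeFolder = 'data';
const buildGuid = '1-webgl-abcd';
const cacheKey = 'main';

const jobFolder = path.join('/', buildVolumeFolder, buildGuid);            // /data/1-webgl-abcd
const repoFolder = path.join(jobFolder, 'repo');                           // /data/1-webgl-abcd/repo
const cacheFolder = path.join('/', buildVolumeFolder, 'cache', cacheKey);  // /data/cache/main
console.log({ jobFolder, repoFolder, cacheFolder, lfsCache: path.join(cacheFolder, 'lfs') });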
@@ -2,8 +2,9 @@ import { customAlphabet } from 'nanoid';
|
||||
import CloudRunnerConstants from './cloud-runner-constants';
|
||||
|
||||
class CloudRunnerNamespace {
|
||||
static generateBuildName(runNumber: string | number, platform: string) {
|
||||
static generateGuid(runNumber: string | number, platform: string) {
|
||||
const nanoid = customAlphabet(CloudRunnerConstants.alphabet, 4);
|
||||
|
||||
return `${runNumber}-${platform.toLowerCase().replace('standalone', '')}-${nanoid()}`;
|
||||
}
|
||||
}
|
||||
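A quick illustration of the renamed generateGuid helper; the module path and default export are assumed, since the hunk header omits the file name:

import CloudRunnerNamespace from './src/model/cloud-runner/services/cloud-runner-namespace'; // path and default export assumed

// '42' is a hypothetical run number; the 4-character suffix is drawn from CloudRunnerConstants.alphabet (not shown here).
const guid = CloudRunnerNamespace.generateGuid(42, 'StandaloneWindows64');
console.log(guid); // e.g. '42-windows64-8x3k'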
@@ -0,0 +1,62 @@
|
||||
import Input from '../../input';
|
||||
import { GenericInputReader } from '../../input-readers/generic-input-reader';
|
||||
|
||||
const formatFunction = (value, arguments_) => {
|
||||
for (const element of arguments_) {
|
||||
value = value.replace(`{${element.key}}`, element.value);
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
class CloudRunnerQueryOverride {
|
||||
static queryOverrides: any;
|
||||
|
||||
public static query(key, alternativeKey) {
|
||||
if (CloudRunnerQueryOverride.queryOverrides && CloudRunnerQueryOverride.queryOverrides[key] !== undefined) {
|
||||
return CloudRunnerQueryOverride.queryOverrides[key];
|
||||
}
|
||||
if (
|
||||
CloudRunnerQueryOverride.queryOverrides &&
|
||||
alternativeKey &&
|
||||
CloudRunnerQueryOverride.queryOverrides[alternativeKey] !== undefined
|
||||
) {
|
||||
return CloudRunnerQueryOverride.queryOverrides[alternativeKey];
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
private static shouldUseOverride(query) {
|
||||
if (Input.readInputOverrideCommand() !== '') {
|
||||
if (Input.readInputFromOverrideList() !== '') {
|
||||
const doesInclude =
|
||||
Input.readInputFromOverrideList().split(',').includes(query) ||
|
||||
Input.readInputFromOverrideList().split(',').includes(Input.ToEnvVarFormat(query));
|
||||
|
||||
return doesInclude ? true : false;
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static async queryOverride(query) {
|
||||
if (!this.shouldUseOverride(query)) {
|
||||
throw new Error(`Should not be trying to run override query on ${query}`);
|
||||
}
|
||||
|
||||
return await GenericInputReader.Run(formatFunction(Input.readInputOverrideCommand(), [{ key: 0, value: query }]));
|
||||
}
|
||||
|
||||
public static async PopulateQueryOverrideInput() {
|
||||
const queries = Input.readInputFromOverrideList().split(',');
|
||||
CloudRunnerQueryOverride.queryOverrides = new Array();
|
||||
for (const element of queries) {
|
||||
if (CloudRunnerQueryOverride.shouldUseOverride(element)) {
|
||||
CloudRunnerQueryOverride.queryOverrides[element] = await CloudRunnerQueryOverride.queryOverride(element);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
export default CloudRunnerQueryOverride;
|
||||
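A sketch of how the override mechanism above might be wired up end to end. It assumes Input.ToEnvVarFormat maps readInputOverrideCommand to READ_INPUT_OVERRIDE_COMMAND, and the ssm command is only an example of a resolver:

import CloudRunnerQueryOverride from './src/model/cloud-runner/services/cloud-runner-query-override';

async function resolveSecretsViaOverrideCommand() {
  // Each listed input name is substituted into '{0}' (see formatFunction above) and run through GenericInputReader.
  process.env.READ_INPUT_OVERRIDE_COMMAND = `aws ssm get-parameter --name "{0}" --query "Parameter.Value" --output text`;
  process.env.READ_INPUT_FROM_OVERRIDE_LIST = 'UNITY_SERIAL,UNITY_EMAIL,UNITY_PASSWORD';

  // Note: GenericInputReader.Run short-circuits to '' when cloudRunnerCluster is 'local'.
  await CloudRunnerQueryOverride.PopulateQueryOverrideInput();

  return CloudRunnerQueryOverride.query('UNITY_SERIAL', undefined);
}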
46 src/model/cloud-runner/services/cloud-runner-system.ts Normal file
@@ -0,0 +1,46 @@
import { exec } from 'child_process';
import { RemoteClientLogger } from '../remote-client/remote-client-logger';

export class CloudRunnerSystem {
public static async Run(command: string, suppressError = false, suppressLogs = false) {
for (const element of command.split(`\n`)) {
if (!suppressLogs) {
RemoteClientLogger.log(element);
}
}

return await new Promise<string>((promise, throwError) => {
let output = '';
const child = exec(command, (error, stdout, stderr) => {
if (!suppressError && error) {
RemoteClientLogger.log(error.toString());
throwError(error);
}
if (stderr) {
const diagnosticOutput = `${stderr.toString()}`;
if (!suppressLogs) {
RemoteClientLogger.logCliDiagnostic(diagnosticOutput);
}
output += diagnosticOutput;
}
const outputChunk = `${stdout}`;
output += outputChunk;
});
child.on('close', (code) => {
if (!suppressLogs) {
RemoteClientLogger.log(`[${code}]`);
}
if (code !== 0 && !suppressError) {
throwError(output);
}
const outputLines = output.split(`\n`);
for (const element of outputLines) {
if (!suppressLogs) {
RemoteClientLogger.log(element);
}
}
promise(output);
});
});
}
}
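A usage sketch for the Run helper above, showing what the two optional flags change:

import { CloudRunnerSystem } from './src/model/cloud-runner/services/cloud-runner-system';

async function example() {
  // Default: logs each command line and the captured output, rejects on a non-zero exit code.
  const sizes = await CloudRunnerSystem.Run(`du -sh .`);

  // suppressError=true: a failing command resolves instead of rejecting (used for probing, e.g. `gh auth status`).
  // suppressLogs=true: nothing is echoed through RemoteClientLogger; only the returned string carries the output.
  const branch = await CloudRunnerSystem.Run(`git branch --show-current`, false, true);

  return { sizes, branch };
}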
22 src/model/cloud-runner/services/depdency-override-service.ts Normal file
@@ -0,0 +1,22 @@
import Input from '../../input';
import { CloudRunnerSystem } from './cloud-runner-system';

class DependencyOverrideService {
public static async CheckHealth() {
if (Input.checkDependencyHealthOverride) {
try {
await CloudRunnerSystem.Run(Input.checkDependencyHealthOverride);
} catch {
return false;
}
}

return true;
}
public static async TryStartDependencies() {
if (Input.startDependenciesOverride) {
await CloudRunnerSystem.Run(Input.startDependenciesOverride);
}
}
}
export default DependencyOverrideService;
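One possible way a caller might combine the two overrides above; both inputs are user-supplied shell commands and the checks are no-ops when they are unset (the import path keeps the file name exactly as committed):

import DependencyOverrideService from './src/model/cloud-runner/services/depdency-override-service';

async function ensureDependencies() {
  // CheckHealth resolves to true when no health-check command is configured or the command succeeds.
  if (await DependencyOverrideService.CheckHealth()) {
    return;
  }
  // startDependenciesOverride might, for example, boot a local registry or k8s stack; entirely user-defined.
  await DependencyOverrideService.TryStartDependencies();
  if (!(await DependencyOverrideService.CheckHealth())) {
    throw new Error('dependencies still unhealthy after running startDependenciesOverride');
  }
}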
34 src/model/cloud-runner/services/follow-log-stream-service.ts Normal file
@@ -0,0 +1,34 @@
import CloudRunnerLogger from './cloud-runner-logger';
import * as core from '@actions/core';
import CloudRunner from '../cloud-runner';
import { CloudRunnerStatics } from '../cloud-runner-statics';

export class FollowLogStreamService {
public static handleIteration(message, shouldReadLogs, shouldCleanup, output) {
if (message.includes(`---${CloudRunner.buildParameters.logId}`)) {
CloudRunnerLogger.log('End of log transmission received');
shouldReadLogs = false;
} else if (message.includes('Rebuilding Library because the asset database could not be found!')) {
core.warning('LIBRARY NOT FOUND!');
core.setOutput('library-found', 'false');
} else if (message.includes('Build succeeded')) {
core.setOutput('build-result', 'success');
} else if (message.includes('Build fail')) {
core.setOutput('build-result', 'failed');
core.setFailed('unity build failed');
core.error('BUILD FAILED!');
} else if (CloudRunner.buildParameters.cloudRunnerIntegrationTests && message.includes(': Listening for Jobs')) {
core.setOutput('cloud runner stop watching', 'true');
shouldReadLogs = false;
shouldCleanup = false;
core.warning('cloud runner stop watching');
}
message = `[${CloudRunnerStatics.logPrefix}] ${message}`;
if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
output += message;
}
CloudRunnerLogger.log(message);

return { shouldReadLogs, shouldCleanup, output };
}
}
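A minimal stand-in loop showing how a provider-specific log follower is expected to thread the three pieces of state through handleIteration; it assumes CloudRunner.buildParameters has already been set (as in runRemoteClientJob) and the log lines are made up:

import { FollowLogStreamService } from './src/model/cloud-runner/services/follow-log-stream-service';

// In the real flow the terminator line is `---${CloudRunner.buildParameters.logId}`.
const lines = ['Build succeeded', '---example-log-id'];

let shouldReadLogs = true;
let shouldCleanup = true;
let output = '';

for (const line of lines) {
  if (!shouldReadLogs) break;
  ({ shouldReadLogs, shouldCleanup, output } = FollowLogStreamService.handleIteration(
    line,
    shouldReadLogs,
    shouldCleanup,
    output,
  ));
}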
@@ -1,12 +1,12 @@
|
||||
import path from 'path';
|
||||
import { CloudRunnerState } from '../../../cloud-runner/state/cloud-runner-state';
|
||||
import { CloudRunnerFolders } from './cloud-runner-folders';
|
||||
import { CloudRunnerSystem } from './cloud-runner-system';
|
||||
import fs from 'fs';
|
||||
import { assert } from 'console';
|
||||
import { Input } from '../../..';
|
||||
import { RemoteClientLogger } from './remote-client-logger';
|
||||
import { Cli } from '../../cli/cli';
|
||||
import { CliFunction } from '../../cli/cli-functions-repository';
|
||||
|
||||
export class LFSHashing {
|
||||
export class LfsHashing {
|
||||
public static async createLFSHashFiles() {
|
||||
try {
|
||||
await CloudRunnerSystem.Run(`git lfs ls-files -l | cut -d ' ' -f1 | sort > .lfs-assets-guid`);
|
||||
@@ -15,16 +15,14 @@ export class LFSHashing {
|
||||
assert(fs.existsSync(`.lfs-assets-guid`));
|
||||
const lfsHashes = {
|
||||
lfsGuid: fs
|
||||
.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid`)}`, 'utf8')
|
||||
.readFileSync(`${path.join(CloudRunnerFolders.repoPathAbsolute, `.lfs-assets-guid`)}`, 'utf8')
|
||||
.replace(/\n/g, ``),
|
||||
lfsGuidSum: fs
|
||||
.readFileSync(`${path.join(CloudRunnerState.repoPathFull, `.lfs-assets-guid-sum`)}`, 'utf8')
|
||||
.readFileSync(`${path.join(CloudRunnerFolders.repoPathAbsolute, `.lfs-assets-guid-sum`)}`, 'utf8')
|
||||
.replace(' .lfs-assets-guid', '')
|
||||
.replace(/\n/g, ``),
|
||||
};
|
||||
if (Input.cloudRunnerTests) {
|
||||
RemoteClientLogger.log(lfsHashes.lfsGuid);
|
||||
RemoteClientLogger.log(lfsHashes.lfsGuidSum);
|
||||
}
|
||||
|
||||
return lfsHashes;
|
||||
} catch (error) {
|
||||
throw error;
|
||||
@@ -37,6 +35,13 @@ export class LFSHashing {
|
||||
.replace(/\n/g, '')
|
||||
.split(` `)[0];
|
||||
process.chdir(startPath);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@CliFunction(`hash`, `hash all folder contents`)
|
||||
static async hash() {
|
||||
const folder = Cli.options['cachePushFrom'];
|
||||
LfsHashing.hashAllFiles(folder);
|
||||
}
|
||||
}
|
||||
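A short sketch of using the renamed LfsHashing service as the LFS cache key, which is how bootstrapRepository consumes it; the exact checksum command behind .lfs-assets-guid-sum is not shown in this hunk, so it is only described loosely in the comments:

import { LfsHashing } from './src/model/cloud-runner/services/lfs-hashing';

// Must run inside the cloned repository. lfsGuidSum appears to be a checksum over the sorted
// list of LFS object ids, so unchanged LFS content maps to the same cache key.
async function lfsCacheKey() {
  const { lfsGuid, lfsGuidSum } = await LfsHashing.createLFSHashFiles();
  console.log(`lfs object id list: ${lfsGuid}`);
  console.log(`cache artifact name: ${lfsGuidSum}`); // used as the tar name for PullFromCache / PushToCache
  return lfsGuidSum;
}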
@@ -1,24 +1,24 @@
|
||||
import { Input } from '../..';
|
||||
import { CloudRunner, Input } from '../..';
|
||||
import ImageEnvironmentFactory from '../../image-environment-factory';
|
||||
import CloudRunnerEnvironmentVariable from './cloud-runner-environment-variable';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerBuildCommandProcessor } from './cloud-runner-build-command-process';
|
||||
import CloudRunnerSecret from './cloud-runner-secret';
|
||||
import CloudRunnerQueryOverride from './cloud-runner-query-override';
|
||||
|
||||
export class TaskParameterSerializer {
|
||||
public static readBuildEnvironmentVariables(): CloudRunnerEnvironmentVariable[] {
|
||||
TaskParameterSerializer.setupDefaultSecrets();
|
||||
return [
|
||||
{
|
||||
name: 'ContainerMemory',
|
||||
value: CloudRunnerState.buildParams.cloudRunnerMemory,
|
||||
value: CloudRunner.buildParameters.cloudRunnerMemory,
|
||||
},
|
||||
{
|
||||
name: 'ContainerCpu',
|
||||
value: CloudRunnerState.buildParams.cloudRunnerCpu,
|
||||
value: CloudRunner.buildParameters.cloudRunnerCpu,
|
||||
},
|
||||
{
|
||||
name: 'BUILD_TARGET',
|
||||
value: CloudRunnerState.buildParams.targetPlatform,
|
||||
value: CloudRunner.buildParameters.targetPlatform,
|
||||
},
|
||||
...TaskParameterSerializer.serializeBuildParamsAndInput,
|
||||
];
|
||||
@@ -27,7 +27,7 @@ export class TaskParameterSerializer {
|
||||
let array = new Array();
|
||||
array = TaskParameterSerializer.readBuildParameters(array);
|
||||
array = TaskParameterSerializer.readInput(array);
|
||||
const configurableHooks = CloudRunnerBuildCommandProcessor.getHooks();
|
||||
const configurableHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks);
|
||||
const secrets = configurableHooks.map((x) => x.secrets).filter((x) => x !== undefined && x.length > 0);
|
||||
if (secrets.length > 0) {
|
||||
// eslint-disable-next-line unicorn/no-array-reduce
|
||||
@@ -40,20 +40,23 @@ export class TaskParameterSerializer {
|
||||
array = array.map((x) => {
|
||||
x.name = Input.ToEnvVarFormat(x.name);
|
||||
x.value = `${x.value}`;
|
||||
|
||||
return x;
|
||||
});
|
||||
|
||||
return array;
|
||||
}
|
||||
|
||||
private static readBuildParameters(array: any[]) {
|
||||
const keys = Object.keys(CloudRunnerState.buildParams);
|
||||
const keys = Object.keys(CloudRunner.buildParameters);
|
||||
for (const element of keys) {
|
||||
array.push({
|
||||
name: element,
|
||||
value: CloudRunnerState.buildParams[element],
|
||||
value: CloudRunner.buildParameters[element],
|
||||
});
|
||||
}
|
||||
array.push({ name: 'buildParameters', value: JSON.stringify(CloudRunnerState.buildParams) });
|
||||
array.push({ name: 'buildParameters', value: JSON.stringify(CloudRunner.buildParameters) });
|
||||
|
||||
return array;
|
||||
}
|
||||
|
||||
@@ -67,19 +70,46 @@ export class TaskParameterSerializer {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
|
||||
private static setupDefaultSecrets() {
|
||||
if (CloudRunnerState.defaultSecrets === undefined)
|
||||
CloudRunnerState.defaultSecrets = ImageEnvironmentFactory.getEnvironmentVariables(
|
||||
CloudRunnerState.buildParams,
|
||||
).map((x) => {
|
||||
return {
|
||||
ParameterKey: x.name,
|
||||
EnvironmentVariable: x.name,
|
||||
ParameterValue: x.value,
|
||||
};
|
||||
public static readDefaultSecrets(): CloudRunnerSecret[] {
|
||||
let array = new Array();
|
||||
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_SERIAL');
|
||||
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_EMAIL');
|
||||
array = TaskParameterSerializer.tryAddInput(array, 'UNITY_PASSWORD');
|
||||
array.push(
|
||||
...ImageEnvironmentFactory.getEnvironmentVariables(CloudRunner.buildParameters)
|
||||
.filter((x) => array.every((y) => y.ParameterKey !== x.name))
|
||||
.map((x) => {
|
||||
return {
|
||||
ParameterKey: x.name,
|
||||
EnvironmentVariable: x.name,
|
||||
ParameterValue: x.value,
|
||||
};
|
||||
}),
|
||||
);
|
||||
|
||||
return array;
|
||||
}
|
||||
private static getValue(key) {
|
||||
return CloudRunnerQueryOverride.queryOverrides !== undefined &&
|
||||
CloudRunnerQueryOverride.queryOverrides[key] !== undefined
|
||||
? CloudRunnerQueryOverride.queryOverrides[key]
|
||||
: process.env[key];
|
||||
}
|
||||
private static tryAddInput(array, key): CloudRunnerSecret[] {
|
||||
const value = TaskParameterSerializer.getValue(key);
|
||||
if (value !== undefined && value !== '') {
|
||||
array.push({
|
||||
ParameterKey: key,
|
||||
EnvironmentVariable: key,
|
||||
ParameterValue: value,
|
||||
});
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
import path from 'path';
|
||||
import { BuildParameters } from '../..';
|
||||
import { CloudRunnerProviderInterface } from '../services/cloud-runner-provider-interface';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
|
||||
export class CloudRunnerState {
|
||||
public static CloudRunnerProviderPlatform: CloudRunnerProviderInterface;
|
||||
public static buildParams: BuildParameters;
|
||||
public static defaultSecrets: CloudRunnerSecret[];
|
||||
public static readonly repositoryFolder = 'repo';
|
||||
|
||||
// only the following paths that do not start a path.join with another "Full" suffixed property need to start with an absolute /
|
||||
|
||||
public static get buildPathFull(): string {
|
||||
return path.join(`/`, CloudRunnerState.buildVolumeFolder, CloudRunnerState.buildParams.buildGuid);
|
||||
}
|
||||
|
||||
public static get cacheFolderFull(): string {
|
||||
return path.join(
|
||||
'/',
|
||||
CloudRunnerState.buildVolumeFolder,
|
||||
CloudRunnerState.cacheFolder,
|
||||
CloudRunnerState.branchName,
|
||||
);
|
||||
}
|
||||
|
||||
static setup(buildParameters: BuildParameters) {
|
||||
CloudRunnerState.buildParams = buildParameters;
|
||||
}
|
||||
|
||||
public static get branchName(): string {
|
||||
return CloudRunnerState.buildParams.branch;
|
||||
}
|
||||
public static get builderPathFull(): string {
|
||||
return path.join(CloudRunnerState.buildPathFull, `builder`);
|
||||
}
|
||||
|
||||
public static get repoPathFull(): string {
|
||||
return path.join(CloudRunnerState.buildPathFull, CloudRunnerState.repositoryFolder);
|
||||
}
|
||||
|
||||
public static get projectPathFull(): string {
|
||||
return path.join(CloudRunnerState.repoPathFull, CloudRunnerState.buildParams.projectPath);
|
||||
}
|
||||
|
||||
public static get libraryFolderFull(): string {
|
||||
return path.join(CloudRunnerState.projectPathFull, `Library`);
|
||||
}
|
||||
|
||||
public static get lfsDirectoryFull(): string {
|
||||
return path.join(CloudRunnerState.repoPathFull, `.git`, `lfs`);
|
||||
}
|
||||
|
||||
public static get purgeRemoteCaching(): boolean {
|
||||
return process.env.PURGE_REMOTE_BUILDER_CACHE !== undefined;
|
||||
}
|
||||
|
||||
public static get lfsCacheFolderFull() {
|
||||
return path.join(CloudRunnerState.cacheFolderFull, `lfs`);
|
||||
}
|
||||
|
||||
public static get libraryCacheFolderFull() {
|
||||
return path.join(CloudRunnerState.cacheFolderFull, `Library`);
|
||||
}
|
||||
|
||||
public static get unityBuilderRepoUrl(): string {
|
||||
return `https://${CloudRunnerState.buildParams.githubToken}@github.com/game-ci/unity-builder.git`;
|
||||
}
|
||||
|
||||
public static get targetBuildRepoUrl(): string {
|
||||
return `https://${CloudRunnerState.buildParams.githubToken}@github.com/${CloudRunnerState.buildParams.githubRepo}.git`;
|
||||
}
|
||||
|
||||
public static get buildVolumeFolder() {
|
||||
return 'data';
|
||||
}
|
||||
|
||||
public static get cacheFolder() {
|
||||
return 'cache';
|
||||
}
|
||||
}
|
||||
@@ -1,77 +0,0 @@
|
||||
import path from 'path';
|
||||
import { Input } from '../..';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
|
||||
import { StepInterface } from './step-interface';
|
||||
|
||||
export class BuildStep implements StepInterface {
|
||||
async run(cloudRunnerStepState: CloudRunnerStepState) {
|
||||
return await BuildStep.BuildStep(
|
||||
cloudRunnerStepState.image,
|
||||
cloudRunnerStepState.environment,
|
||||
cloudRunnerStepState.secrets,
|
||||
);
|
||||
}
|
||||
|
||||
private static async BuildStep(
|
||||
image: string,
|
||||
environmentVariables: CloudRunnerEnvironmentVariable[],
|
||||
secrets: CloudRunnerSecret[],
|
||||
) {
|
||||
CloudRunnerLogger.logLine(` `);
|
||||
CloudRunnerLogger.logLine('Starting part 2/2 (build unity project)');
|
||||
const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`setup`));
|
||||
return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
|
||||
CloudRunnerState.buildParams.buildGuid,
|
||||
image,
|
||||
`${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
|
||||
export GITHUB_WORKSPACE="${CloudRunnerState.repoPathFull}"
|
||||
cp -r "${path
|
||||
.join(CloudRunnerState.builderPathFull, 'dist', 'default-build-script')
|
||||
.replace(/\\/g, `/`)}" "/UnityBuilderAction"
|
||||
cp -r "${path
|
||||
.join(CloudRunnerState.builderPathFull, 'dist', 'platforms', 'ubuntu', 'entrypoint.sh')
|
||||
.replace(/\\/g, `/`)}" "/entrypoint.sh"
|
||||
cp -r "${path
|
||||
.join(CloudRunnerState.builderPathFull, 'dist', 'platforms', 'ubuntu', 'steps')
|
||||
.replace(/\\/g, `/`)}" "/steps"
|
||||
chmod -R +x "/entrypoint.sh"
|
||||
chmod -R +x "/steps"
|
||||
/entrypoint.sh
|
||||
apt-get update
|
||||
apt-get install -y -q zip tree
|
||||
cd "${CloudRunnerState.libraryFolderFull.replace(/\\/g, `/`)}/.."
|
||||
zip -r "lib-${CloudRunnerState.buildParams.buildGuid}.zip" "Library"
|
||||
mv "lib-${CloudRunnerState.buildParams.buildGuid}.zip" "${CloudRunnerState.cacheFolderFull.replace(
|
||||
/\\/g,
|
||||
`/`,
|
||||
)}/Library"
|
||||
cd "${CloudRunnerState.repoPathFull.replace(/\\/g, `/`)}"
|
||||
${Input.cloudRunnerTests ? '' : '#'} tree -lh
|
||||
zip -r "build-${CloudRunnerState.buildParams.buildGuid}.zip" "build"
|
||||
${Input.cloudRunnerTests ? '' : '#'} tree -lh
|
||||
${Input.cloudRunnerTests ? '' : '#'} tree -lh "${CloudRunnerState.cacheFolderFull.replace(/\\/g, `/`)}"
|
||||
mv "build-${CloudRunnerState.buildParams.buildGuid}.zip" "${CloudRunnerState.cacheFolderFull.replace(
|
||||
/\\/g,
|
||||
`/`,
|
||||
)}"
|
||||
chmod +x ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)}
|
||||
node ${path
|
||||
.join(CloudRunnerState.builderPathFull, 'dist', `index.js`)
|
||||
.replace(/\\/g, `/`)} -m cache-push "Library" "lib-${
|
||||
CloudRunnerState.buildParams.buildGuid
|
||||
}.zip" "${CloudRunnerState.cacheFolderFull.replace(/\\/g, `/`)}/Library"
|
||||
${Input.cloudRunnerTests ? '' : '#'} tree -lh "${CloudRunnerState.cacheFolderFull}"
|
||||
${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
|
||||
`,
|
||||
`/${CloudRunnerState.buildVolumeFolder}`,
|
||||
`/${CloudRunnerState.projectPathFull}`,
|
||||
environmentVariables,
|
||||
secrets,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
import path from 'path';
|
||||
import { Input } from '../..';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
|
||||
import CloudRunnerEnvironmentVariable from '../services/cloud-runner-environment-variable';
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
|
||||
import { StepInterface } from './step-interface';
|
||||
|
||||
export class SetupStep implements StepInterface {
|
||||
async run(cloudRunnerStepState: CloudRunnerStepState) {
|
||||
try {
|
||||
return await SetupStep.downloadRepository(
|
||||
cloudRunnerStepState.image,
|
||||
cloudRunnerStepState.environment,
|
||||
cloudRunnerStepState.secrets,
|
||||
);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private static getCloudRunnerBranch() {
|
||||
return process.env.CLOUD_RUNNER_BRANCH?.includes('/')
|
||||
? process.env.CLOUD_RUNNER_BRANCH.split('/').reverse()[0]
|
||||
: process.env.CLOUD_RUNNER_BRANCH;
|
||||
}
|
||||
|
||||
private static async downloadRepository(
|
||||
image: string,
|
||||
environmentVariables: CloudRunnerEnvironmentVariable[],
|
||||
secrets: CloudRunnerSecret[],
|
||||
) {
|
||||
try {
|
||||
CloudRunnerLogger.log(` `);
|
||||
CloudRunnerLogger.logLine('Starting step 1/2 (setup game files from repository)');
|
||||
const hooks = CloudRunnerBuildCommandProcessor.getHooks().filter((x) => x.step.includes(`setup`));
|
||||
return await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
|
||||
CloudRunnerState.buildParams.buildGuid,
|
||||
image,
|
||||
`apk update -q
|
||||
apk add git-lfs jq tree zip unzip nodejs -q
|
||||
${hooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
|
||||
export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
mkdir -p ${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}
|
||||
git clone -q -b ${SetupStep.getCloudRunnerBranch()} ${
|
||||
CloudRunnerState.unityBuilderRepoUrl
|
||||
} "${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}"
|
||||
${Input.cloudRunnerTests ? '' : '#'} tree ${CloudRunnerState.builderPathFull.replace(/\\/g, `/`)}
|
||||
chmod +x ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)}
|
||||
node ${path.join(CloudRunnerState.builderPathFull, 'dist', `index.js`).replace(/\\/g, `/`)} -m remote-cli
|
||||
${hooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
|
||||
`,
|
||||
`/${CloudRunnerState.buildVolumeFolder}`,
|
||||
`/${CloudRunnerState.buildVolumeFolder}/`,
|
||||
environmentVariables,
|
||||
secrets,
|
||||
);
|
||||
} catch (error) {
|
||||
CloudRunnerLogger.logLine(`Failed download repository step 1/2`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
|
||||
|
||||
export interface StepInterface {
|
||||
run(
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
cloudRunnerStepState: CloudRunnerStepState,
|
||||
);
|
||||
}
|
||||
@@ -1,12 +1,12 @@
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
|
||||
import { BuildStep } from '../steps/build-step';
|
||||
import { SetupStep } from '../steps/setup-step';
|
||||
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
|
||||
import { CloudRunnerStepState } from '../cloud-runner-step-state';
|
||||
import { CustomWorkflow } from './custom-workflow';
|
||||
import { WorkflowInterface } from './workflow-interface';
|
||||
import * as core from '@actions/core';
|
||||
import { CloudRunnerBuildCommandProcessor } from '../services/cloud-runner-build-command-process';
|
||||
import path from 'path';
|
||||
import CloudRunner from '../cloud-runner';
|
||||
|
||||
export class BuildAutomationWorkflow implements WorkflowInterface {
|
||||
async run(cloudRunnerStepState: CloudRunnerStepState) {
|
||||
@@ -21,41 +21,36 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
|
||||
try {
|
||||
CloudRunnerLogger.log(`Cloud Runner is running standard build automation`);
|
||||
|
||||
core.startGroup('pre build steps');
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.startGroup('pre build steps');
|
||||
let output = '';
|
||||
if (CloudRunnerState.buildParams.preBuildSteps !== '') {
|
||||
output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.preBuildSteps);
|
||||
if (CloudRunner.buildParameters.preBuildSteps !== '') {
|
||||
output += await CustomWorkflow.runCustomJob(CloudRunner.buildParameters.preBuildSteps);
|
||||
}
|
||||
core.endGroup();
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
|
||||
CloudRunnerLogger.logWithTime('Configurable pre build step(s) time');
|
||||
|
||||
core.startGroup('setup');
|
||||
output += await new SetupStep().run(
|
||||
new CloudRunnerStepState(
|
||||
'alpine/git',
|
||||
TaskParameterSerializer.readBuildEnvironmentVariables(),
|
||||
CloudRunnerState.defaultSecrets,
|
||||
),
|
||||
);
|
||||
core.endGroup();
|
||||
CloudRunnerLogger.logWithTime('Download repository step time');
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.startGroup('build');
|
||||
CloudRunnerLogger.log(baseImage.toString());
|
||||
CloudRunnerLogger.logLine(` `);
|
||||
CloudRunnerLogger.logLine('Starting build automation job');
|
||||
|
||||
core.startGroup('build');
|
||||
output += await new BuildStep().run(
|
||||
new CloudRunnerStepState(
|
||||
baseImage,
|
||||
TaskParameterSerializer.readBuildEnvironmentVariables(),
|
||||
CloudRunnerState.defaultSecrets,
|
||||
),
|
||||
output += await CloudRunner.Provider.runTask(
|
||||
CloudRunner.buildParameters.buildGuid,
|
||||
baseImage.toString(),
|
||||
BuildAutomationWorkflow.BuildWorkflow,
|
||||
`/${CloudRunnerFolders.buildVolumeFolder}`,
|
||||
`/${CloudRunnerFolders.buildVolumeFolder}/`,
|
||||
CloudRunner.cloudRunnerEnvironmentVariables,
|
||||
CloudRunner.defaultSecrets,
|
||||
);
|
||||
core.endGroup();
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
|
||||
CloudRunnerLogger.logWithTime('Build time');
|
||||
|
||||
core.startGroup('post build steps');
|
||||
if (CloudRunnerState.buildParams.postBuildSteps !== '') {
|
||||
output += await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.postBuildSteps);
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.startGroup('post build steps');
|
||||
if (CloudRunner.buildParameters.postBuildSteps !== '') {
|
||||
output += await CustomWorkflow.runCustomJob(CloudRunner.buildParameters.postBuildSteps);
|
||||
}
|
||||
core.endGroup();
|
||||
if (!CloudRunner.buildParameters.isCliMode) core.endGroup();
|
||||
CloudRunnerLogger.logWithTime('Configurable post build step(s) time');
|
||||
|
||||
CloudRunnerLogger.log(`Cloud Runner finished running standard build automation`);
|
||||
@@ -65,4 +60,64 @@ export class BuildAutomationWorkflow implements WorkflowInterface {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private static get BuildWorkflow() {
|
||||
const setupHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks).filter(
|
||||
(x) => x.step.includes(`setup`),
|
||||
);
|
||||
const buildHooks = CloudRunnerBuildCommandProcessor.getHooks(CloudRunner.buildParameters.customJobHooks).filter(
|
||||
(x) => x.step.includes(`build`),
|
||||
);
|
||||
const builderPath = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', `index.js`).replace(/\\/g, `/`);
|
||||
|
||||
return `apt-get update > /dev/null
|
||||
apt-get install -y tar tree npm git-lfs jq git > /dev/null
|
||||
npm install -g n > /dev/null
|
||||
n stable > /dev/null
|
||||
${setupHooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
|
||||
export GITHUB_WORKSPACE="${CloudRunnerFolders.repoPathAbsolute.replace(/\\/g, `/`)}"
|
||||
${BuildAutomationWorkflow.setupCommands(builderPath)}
|
||||
${setupHooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}
|
||||
${buildHooks.filter((x) => x.hook.includes(`before`)).map((x) => x.commands) || ' '}
|
||||
${BuildAutomationWorkflow.BuildCommands(builderPath, CloudRunner.buildParameters.buildGuid)}
|
||||
${buildHooks.filter((x) => x.hook.includes(`after`)).map((x) => x.commands) || ' '}`;
|
||||
}
|
||||
|
||||
private static setupCommands(builderPath) {
|
||||
return `export GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
echo "game ci cloud runner clone"
|
||||
mkdir -p ${CloudRunnerFolders.builderPathAbsolute.replace(/\\/g, `/`)}
|
||||
git clone -q -b ${CloudRunner.buildParameters.cloudRunnerBranch} ${
|
||||
CloudRunnerFolders.unityBuilderRepoUrl
|
||||
} "${CloudRunnerFolders.builderPathAbsolute.replace(/\\/g, `/`)}"
|
||||
chmod +x ${builderPath}
|
||||
echo "game ci cloud runner bootstrap"
|
||||
node ${builderPath} -m remote-cli`;
|
||||
}
|
||||
|
||||
private static BuildCommands(builderPath, guid) {
|
||||
const linuxCacheFolder = CloudRunnerFolders.cacheFolderFull.replace(/\\/g, `/`);
|
||||
const distFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist');
|
||||
const ubuntuPlatformsFolder = path.join(CloudRunnerFolders.builderPathAbsolute, 'dist', 'platforms', 'ubuntu');
|
||||
|
||||
return `echo "game ci cloud runner init"
|
||||
mkdir -p ${`${CloudRunnerFolders.projectBuildFolderAbsolute}/build`.replace(/\\/g, `/`)}
|
||||
cd ${CloudRunnerFolders.projectPathAbsolute}
|
||||
cp -r "${path.join(distFolder, 'default-build-script').replace(/\\/g, `/`)}" "/UnityBuilderAction"
|
||||
cp -r "${path.join(ubuntuPlatformsFolder, 'entrypoint.sh').replace(/\\/g, `/`)}" "/entrypoint.sh"
|
||||
cp -r "${path.join(ubuntuPlatformsFolder, 'steps').replace(/\\/g, `/`)}" "/steps"
|
||||
chmod -R +x "/entrypoint.sh"
|
||||
chmod -R +x "/steps"
|
||||
echo "game ci cloud runner start"
|
||||
/entrypoint.sh
|
||||
echo "game ci cloud runner push library to cache"
|
||||
chmod +x ${builderPath}
|
||||
node ${builderPath} -m cache-push --cachePushFrom ${
|
||||
CloudRunnerFolders.libraryFolderAbsolute
|
||||
} --artifactName lib-${guid} --cachePushTo ${linuxCacheFolder}/Library
|
||||
echo "game ci cloud runner push build to cache"
|
||||
node ${builderPath} -m cache-push --cachePushFrom ${
|
||||
CloudRunnerFolders.projectBuildFolderAbsolute
|
||||
} --artifactName build-${guid} --cachePushTo ${`${linuxCacheFolder}/build`.replace(/\\/g, `/`)}`;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,44 +1,45 @@
|
||||
import CloudRunnerLogger from '../services/cloud-runner-logger';
|
||||
import CloudRunnerSecret from '../services/cloud-runner-secret';
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerFolders } from '../services/cloud-runner-folders';
|
||||
import YAML from 'yaml';
|
||||
import { Input } from '../..';
|
||||
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
|
||||
import { CloudRunner, Input } from '../..';
|
||||
|
||||
export class CustomWorkflow {
|
||||
public static async runCustomJob(buildSteps) {
|
||||
try {
|
||||
CloudRunnerLogger.log(`Cloud Runner is running in custom job mode`);
|
||||
if (Input.cloudRunnerTests) {
|
||||
if (CloudRunner.buildParameters.cloudRunnerIntegrationTests) {
|
||||
CloudRunnerLogger.log(`Parsing build steps: ${buildSteps}`);
|
||||
}
|
||||
try {
|
||||
buildSteps = YAML.parse(buildSteps);
|
||||
let output = '';
|
||||
for (const step of buildSteps) {
|
||||
const stepSecrets: CloudRunnerSecret[] = step.secrets.map((x) => {
|
||||
const secret: CloudRunnerSecret = {
|
||||
ParameterKey: x.name,
|
||||
EnvironmentVariable: Input.ToEnvVarFormat(x.name),
|
||||
ParameterValue: x.value,
|
||||
};
|
||||
return secret;
|
||||
});
|
||||
output += await CloudRunnerState.CloudRunnerProviderPlatform.runTask(
|
||||
CloudRunnerState.buildParams.buildGuid,
|
||||
step['image'],
|
||||
step['commands'],
|
||||
`/${CloudRunnerState.buildVolumeFolder}`,
|
||||
`/${CloudRunnerState.buildVolumeFolder}/`,
|
||||
TaskParameterSerializer.readBuildEnvironmentVariables(),
|
||||
[...CloudRunnerState.defaultSecrets, ...stepSecrets],
|
||||
);
|
||||
}
|
||||
return output;
|
||||
} catch (error) {
|
||||
CloudRunnerLogger.log(`failed to parse a custom job "${buildSteps}"`);
|
||||
throw error;
|
||||
}
|
||||
let output = '';
|
||||
for (const step of buildSteps) {
|
||||
const stepSecrets: CloudRunnerSecret[] = step.secrets.map((x) => {
|
||||
const secret: CloudRunnerSecret = {
|
||||
ParameterKey: x.name,
|
||||
EnvironmentVariable: Input.ToEnvVarFormat(x.name),
|
||||
ParameterValue: x.value,
|
||||
};
|
||||
|
||||
return secret;
|
||||
});
|
||||
output += await CloudRunner.Provider.runTask(
|
||||
CloudRunner.buildParameters.buildGuid,
|
||||
step['image'],
|
||||
step['commands'],
|
||||
`/${CloudRunnerFolders.buildVolumeFolder}`,
|
||||
`/${CloudRunnerFolders.buildVolumeFolder}/`,
|
||||
CloudRunner.cloudRunnerEnvironmentVariables,
|
||||
[...CloudRunner.defaultSecrets, ...stepSecrets],
|
||||
);
|
||||
}
|
||||
|
||||
return output;
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
import { CloudRunnerState } from '../state/cloud-runner-state';
|
||||
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
|
||||
import { CloudRunnerStepState } from '../cloud-runner-step-state';
|
||||
import { CustomWorkflow } from './custom-workflow';
|
||||
import { WorkflowInterface } from './workflow-interface';
|
||||
import { BuildAutomationWorkflow } from './build-automation-workflow';
|
||||
import { TaskParameterSerializer } from '../services/task-parameter-serializer';
|
||||
import { SetupStep } from '../steps/setup-step';
|
||||
import CloudRunner from '../cloud-runner';
|
||||
|
||||
export class WorkflowCompositionRoot implements WorkflowInterface {
|
||||
async run(cloudRunnerStepState: CloudRunnerStepState) {
|
||||
@@ -17,23 +15,12 @@ export class WorkflowCompositionRoot implements WorkflowInterface {
|
||||
|
||||
private static async runJob(baseImage: any) {
|
||||
try {
|
||||
if (CloudRunnerState.buildParams.customJob === `setup`) {
|
||||
return await new SetupStep().run(
|
||||
new CloudRunnerStepState(
|
||||
baseImage,
|
||||
TaskParameterSerializer.readBuildEnvironmentVariables(),
|
||||
CloudRunnerState.defaultSecrets,
|
||||
),
|
||||
);
|
||||
} else if (CloudRunnerState.buildParams.customJob !== '') {
|
||||
return await CustomWorkflow.runCustomJob(CloudRunnerState.buildParams.customJob);
|
||||
if (CloudRunner.buildParameters.customJob !== '') {
|
||||
return await CustomWorkflow.runCustomJob(CloudRunner.buildParameters.customJob);
|
||||
}
|
||||
|
||||
return await new BuildAutomationWorkflow().run(
|
||||
new CloudRunnerStepState(
|
||||
baseImage,
|
||||
TaskParameterSerializer.readBuildEnvironmentVariables(),
|
||||
CloudRunnerState.defaultSecrets,
|
||||
),
|
||||
new CloudRunnerStepState(baseImage, CloudRunner.cloudRunnerEnvironmentVariables, CloudRunner.defaultSecrets),
|
||||
);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { CloudRunnerStepState } from '../state/cloud-runner-step-state';
|
||||
import { CloudRunnerStepState } from '../cloud-runner-step-state';
|
||||
|
||||
export interface WorkflowInterface {
|
||||
run(
|
||||
|
||||
@@ -46,6 +46,7 @@ class Docker {
|
||||
|
||||
static getWindowsCommand(image: any, parameters: any): string {
|
||||
const { workspace, actionFolder, unitySerial, gitPrivateToken } = parameters;
|
||||
|
||||
return `docker run \
|
||||
--workdir /github/workspace \
|
||||
--rm \
|
||||
|
||||
@@ -21,6 +21,7 @@ class ImageEnvironmentFactory {
|
||||
|
||||
string += `--env ${p.name}="${p.value}" `;
|
||||
}
|
||||
|
||||
return string;
|
||||
}
|
||||
public static getEnvironmentVariables(parameters: BuildParameters) {
|
||||
@@ -45,6 +46,7 @@ class ImageEnvironmentFactory {
|
||||
{ name: 'ANDROID_KEYSTORE_PASS', value: parameters.androidKeystorePass },
|
||||
{ name: 'ANDROID_KEYALIAS_NAME', value: parameters.androidKeyaliasName },
|
||||
{ name: 'ANDROID_KEYALIAS_PASS', value: parameters.androidKeyaliasPass },
|
||||
{ name: 'ANDROID_TARGET_SDK_VERSION', value: parameters.androidTargetSdkVersion },
|
||||
{ name: 'ANDROID_SDK_MANAGER_PARAMETERS', value: parameters.androidSdkManagerParameters },
|
||||
{ name: 'CUSTOM_PARAMETERS', value: parameters.customParameters },
|
||||
{ name: 'CHOWN_FILES_TO', value: parameters.chownFilesTo },
|
||||
@@ -65,6 +67,7 @@ class ImageEnvironmentFactory {
|
||||
{ name: 'RUNNER_WORKSPACE', value: process.env.RUNNER_WORKSPACE },
|
||||
];
|
||||
if (parameters.sshAgent) environmentVariables.push({ name: 'SSH_AUTH_SOCK', value: '/ssh-agent' });
|
||||
|
||||
return environmentVariables;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import Platform from './platform';
|
||||
|
||||
import BuildParameters from './build-parameters';
|
||||
|
||||
class ImageTag {
|
||||
public repository: string;
|
||||
public name: string;
|
||||
public cloudRunnerBuilderPlatform!: string | undefined;
|
||||
public editorVersion: string;
|
||||
public targetPlatform: any;
|
||||
public builderPlatform: string;
|
||||
@@ -12,7 +14,7 @@ class ImageTag {
|
||||
public imagePlatformPrefix: string;
|
||||
|
||||
constructor(imageProperties: Partial<BuildParameters>) {
|
||||
const { editorVersion = '2019.2.11f1', targetPlatform, customImage } = imageProperties;
|
||||
const { editorVersion = '2019.2.11f1', targetPlatform, customImage, cloudRunnerBuilderPlatform } = imageProperties;
|
||||
|
||||
if (!ImageTag.versionPattern.test(editorVersion)) {
|
||||
throw new Error(`Invalid version "${editorVersion}".`);
|
||||
@@ -27,9 +29,13 @@ class ImageTag {
|
||||
this.name = 'editor';
|
||||
this.editorVersion = editorVersion;
|
||||
this.targetPlatform = targetPlatform;
|
||||
this.cloudRunnerBuilderPlatform = cloudRunnerBuilderPlatform;
|
||||
const isCloudRunnerLocal = cloudRunnerBuilderPlatform === 'local' || cloudRunnerBuilderPlatform === undefined;
|
||||
this.builderPlatform = ImageTag.getTargetPlatformToTargetPlatformSuffixMap(targetPlatform, editorVersion);
|
||||
this.imagePlatformPrefix = ImageTag.getImagePlatformPrefixes(process.platform);
|
||||
this.imageRollingVersion = 1; // will automatically roll to the latest non-breaking version.
|
||||
this.imagePlatformPrefix = ImageTag.getImagePlatformPrefixes(
|
||||
isCloudRunnerLocal ? process.platform : cloudRunnerBuilderPlatform,
|
||||
);
|
||||
this.imageRollingVersion = 1; // Will automatically roll to the latest non-breaking version.
|
||||
}
|
||||
|
||||
static get versionPattern() {
|
||||
@@ -69,6 +75,7 @@ class ImageTag {
|
||||
ImageTag.targetPlatformSuffixes;
|
||||
|
||||
const [major, minor] = version.split('.').map((digit) => Number(digit));
|
||||
|
||||
// @see: https://docs.unity3d.com/ScriptReference/BuildTarget.html
|
||||
switch (platform) {
|
||||
case Platform.types.StandaloneOSX:
|
||||
@@ -85,12 +92,14 @@ class ImageTag {
|
||||
If you are trying to build for windows-mono, please use a Linux based OS.`);
|
||||
}
|
||||
}
|
||||
|
||||
return windows;
|
||||
case Platform.types.StandaloneLinux64: {
|
||||
// Unity versions before 2019.3 do not support il2cpp
|
||||
if (major >= 2020 || (major === 2019 && minor >= 3)) {
|
||||
return linuxIl2cpp;
|
||||
}
|
||||
|
||||
return linux;
|
||||
}
|
||||
case Platform.types.iOS:
|
||||
@@ -103,6 +112,7 @@ class ImageTag {
|
||||
if (process.platform !== 'win32') {
|
||||
throw new Error(`WSAPlayer can only be built on a windows base OS`);
|
||||
}
|
||||
|
||||
return wsaPlayer;
|
||||
case Platform.types.PS4:
|
||||
return windows;
|
||||
@@ -112,9 +122,11 @@ class ImageTag {
|
||||
if (process.platform !== 'win32') {
|
||||
throw new Error(`tvOS can only be built on a windows base OS`);
|
||||
}
|
||||
|
||||
return tvos;
|
||||
case Platform.types.Switch:
|
||||
return windows;
|
||||
|
||||
// Unsupported
|
||||
case Platform.types.Lumin:
|
||||
return windows;
|
||||
@@ -155,5 +167,4 @@ class ImageTag {
|
||||
return `${image}:${tag}`; // '0' here represents the docker repo version
|
||||
}
|
||||
}
|
||||
|
||||
export default ImageTag;
|
||||
|
||||
12 src/model/input-readers/generic-input-reader.ts Normal file
@@ -0,0 +1,12 @@
import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
import Input from '../input';

export class GenericInputReader {
public static async Run(command) {
if (Input.cloudRunnerCluster === 'local') {
return '';
}

return await CloudRunnerSystem.Run(command, false, true);
}
}
@@ -1,22 +1,31 @@
|
||||
import { assert } from 'console';
|
||||
import System from '../system';
|
||||
import fs from 'fs';
|
||||
import { CloudRunnerSystem } from '../cli/remote-client/remote-client-services/cloud-runner-system';
|
||||
import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
|
||||
import CloudRunnerLogger from '../cloud-runner/services/cloud-runner-logger';
|
||||
import Input from '../input';
|
||||
|
||||
export class GitRepoReader {
|
||||
static GetSha() {
|
||||
return '';
|
||||
}
|
||||
|
||||
public static async GetRemote() {
|
||||
return (await CloudRunnerSystem.Run(`git remote -v`))
|
||||
.split(' ')[1]
|
||||
.split('https://github.com/')[1]
|
||||
.split('.git')[0];
|
||||
if (Input.cloudRunnerCluster === 'local') {
|
||||
return '';
|
||||
}
|
||||
assert(fs.existsSync(`.git`));
|
||||
const value = (await CloudRunnerSystem.Run(`git remote -v`, false, true)).replace(/ /g, ``);
|
||||
CloudRunnerLogger.log(`value ${value}`);
|
||||
assert(value.includes('github.com'));
|
||||
|
||||
return value.split('github.com/')[1].split('.git')[0];
|
||||
}
|
||||
|
||||
public static async GetBranch() {
|
||||
if (Input.cloudRunnerCluster === 'local') {
|
||||
return '';
|
||||
}
|
||||
assert(fs.existsSync(`.git`));
|
||||
return (await System.run(`git branch`, [], {}, false)).split('*')[1].split(`\n`)[0].replace(/ /g, ``);
|
||||
|
||||
return (await CloudRunnerSystem.Run(`git branch --show-current`, false, true))
|
||||
.split('\n')[0]
|
||||
.replace(/ /g, ``)
|
||||
.replace('/head', '');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,19 +1,25 @@
|
||||
import { CloudRunnerSystem } from '../cli/remote-client/remote-client-services/cloud-runner-system';
|
||||
import { CloudRunnerSystem } from '../cloud-runner/services/cloud-runner-system';
|
||||
import * as core from '@actions/core';
|
||||
import Input from '../input';
|
||||
|
||||
export class GithubCliReader {
|
||||
static async GetGitHubAuthToken() {
|
||||
if (Input.cloudRunnerCluster === 'local') {
|
||||
return '';
|
||||
}
|
||||
try {
|
||||
const authStatus = await CloudRunnerSystem.Run(`gh auth status`, true);
|
||||
const authStatus = await CloudRunnerSystem.Run(`gh auth status`, true, true);
|
||||
if (authStatus.includes('You are not logged') || authStatus === '') {
|
||||
return '';
|
||||
}
|
||||
return (await CloudRunnerSystem.Run(`gh auth status -t`))
|
||||
|
||||
return (await CloudRunnerSystem.Run(`gh auth status -t`, false, true))
|
||||
.split(`Token: `)[1]
|
||||
.replace(/ /g, '')
|
||||
.replace(/\n/g, '');
|
||||
} catch (error: any) {
|
||||
core.info(error || 'Failed to get github auth token from gh cli');
|
||||
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import YAML from 'yaml';
|
||||
import Input from '../input';
|
||||
|
||||
export function ReadLicense() {
|
||||
if (Input.cloudRunnerCluster === 'local') {
|
||||
return '';
|
||||
}
|
||||
const pipelineFile = path.join(__dirname, `.github`, `workflows`, `cloud-runner-k8s-pipeline.yml`);
|
||||
|
||||
return fs.existsSync(pipelineFile) ? YAML.parse(fs.readFileSync(pipelineFile, 'utf8')).env.UNITY_LICENSE : '';
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { GitRepoReader } from './input-readers/git-repo';
|
||||
import { GithubCliReader } from './input-readers/github-cli';
|
||||
import { Cli } from './cli/cli';
|
||||
import CloudRunnerQueryOverride from './cloud-runner/services/cloud-runner-query-override';
|
||||
import Platform from './platform';
|
||||
|
||||
const core = require('@actions/core');
|
||||
@@ -14,64 +14,70 @@ const core = require('@actions/core');
|
||||
* Todo: rename to UserInput and remove anything that is not direct input from the user / ci workflow
|
||||
*/
|
||||
class Input {
|
||||
public static cliOptions;
|
||||
public static githubInputEnabled: boolean = true;
|
||||
|
||||
// also enabled debug logging for cloud runner
|
||||
static get cloudRunnerTests(): boolean {
|
||||
return Input.getInput(`cloudRunnerTests`) || Input.getInput(`CloudRunnerTests`) || false;
|
||||
}
|
||||
public static getInput(query) {
|
||||
if (Input.githubInputEnabled) {
|
||||
const coreInput = core.getInput(query);
|
||||
if (coreInput && coreInput !== '') {
|
||||
return coreInput;
|
||||
}
|
||||
}
|
||||
const alternativeQuery = Input.ToEnvVarFormat(query);
|
||||
|
||||
private static getInput(query) {
|
||||
const coreInput = core.getInput(query);
|
||||
if (Input.githubInputEnabled && coreInput && coreInput !== '') {
|
||||
return coreInput;
|
||||
// Query input sources
|
||||
if (Cli.query(query, alternativeQuery)) {
|
||||
return Cli.query(query, alternativeQuery);
|
||||
}
|
||||
|
||||
return Input.cliOptions !== undefined && Input.cliOptions[query] !== undefined
|
||||
? Input.cliOptions[query]
|
||||
: process.env[query] !== undefined
|
||||
? process.env[query]
|
||||
: process.env[Input.ToEnvVarFormat(query)]
|
||||
? process.env[Input.ToEnvVarFormat(query)]
|
||||
: '';
|
||||
if (CloudRunnerQueryOverride.query(query, alternativeQuery)) {
|
||||
return CloudRunnerQueryOverride.query(query, alternativeQuery);
|
||||
}
|
||||
|
||||
if (process.env[query] !== undefined) {
|
||||
return process.env[query];
|
||||
}
|
||||
|
||||
if (alternativeQuery !== query && process.env[alternativeQuery] !== undefined) {
|
||||
return process.env[alternativeQuery];
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
static get region(): string {
return Input.getInput('region') || 'eu-west-2';
}

static async githubRepo() {
return (
Input.getInput('GITHUB_REPOSITORY') ||
Input.getInput('GITHUB_REPO') ||
// todo - move this to some class specific for determining additional information
(await GitRepoReader.GetRemote()) ||
'game-ci/unity-builder'
);
static get githubRepo() {
return Input.getInput('GITHUB_REPOSITORY') || Input.getInput('GITHUB_REPO') || undefined;
}

static async branch() {
if (await GitRepoReader.GetBranch()) {
// todo - move this to some class specific for determining additional information
return await GitRepoReader.GetBranch();
} else if (Input.getInput(`GITHUB_REF`)) {
return Input.getInput(`GITHUB_REF`).replace('refs/', '').replace(`head/`, '');
static get branch() {
if (Input.getInput(`GITHUB_REF`)) {
return Input.getInput(`GITHUB_REF`).replace('refs/', '').replace(`head/`, '').replace(`heads/`, '');
} else if (Input.getInput('branch')) {
return Input.getInput('branch');
} else {
return 'main';
return '';
}
}
static get cloudRunnerBuilderPlatform() {
const input = Input.getInput('cloudRunnerBuilderPlatform');
if (input) {
return input;
}
if (Input.cloudRunnerCluster !== 'local') {
return 'linux';
}

return;
}

static get gitSha() {
if (Input.getInput(`GITHUB_SHA`)) {
return Input.getInput(`GITHUB_SHA`);
} else if (Input.getInput(`GitSHA`)) {
return Input.getInput(`GitSHA`);
} else if (GitRepoReader.GetSha()) {
// todo - move this to some class specific for determining additional information
return GitRepoReader.GetSha();
}
}

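The reworked getInput above replaces the old nested ternary with an explicit chain of sources: the GitHub Action input (only while githubInputEnabled), then Cli.query, then CloudRunnerQueryOverride.query, then process.env under the raw key, then process.env under the env-var-formatted key, and finally undefined. A minimal standalone TypeScript sketch of that precedence, with a generic source list standing in for the real readers (illustrative only, not the project's API):

// Each source receives the raw key plus its ENV_VAR-formatted alternative and may return a value.
type InputSource = (key: string, alternativeKey: string) => string | undefined;

function resolveInput(key: string, toEnvVarFormat: (s: string) => string, sources: InputSource[]) {
  const alternativeKey = toEnvVarFormat(key);

  for (const source of sources) {
    const value = source(key, alternativeKey);

    // Mirror the diff: empty strings count as "not provided" and the next source is tried.
    if (value !== undefined && value !== '') {
      return value;
    }
  }

  return undefined;
}
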
@@ -88,7 +94,7 @@ class Input {
}

static get customImage() {
return Input.getInput('customImage');
return Input.getInput('customImage') || '';
}

static get projectPath() {
@@ -112,7 +118,7 @@ class Input {
}

static get buildMethod() {
return Input.getInput('buildMethod') || ''; // processed in docker file
return Input.getInput('buildMethod') || ''; // Processed in docker file
}

static get customParameters() {
@@ -165,8 +171,36 @@ class Input {
return Input.getInput('sshAgent') || '';
}

static async gitPrivateToken() {
return Input.getInput('gitPrivateToken') || (await Input.githubToken());
static get gitPrivateToken() {
return core.getInput('gitPrivateToken') || false;
}

static get customJob() {
return Input.getInput('customJob') || '';
}

static customJobHooks() {
return Input.getInput('customJobHooks') || '';
}

static cachePushOverrideCommand() {
return Input.getInput('cachePushOverrideCommand') || '';
}

static cachePullOverrideCommand() {
return Input.getInput('cachePullOverrideCommand') || '';
}

static readInputFromOverrideList() {
return Input.getInput('readInputFromOverrideList') || '';
}

static readInputOverrideCommand() {
return Input.getInput('readInputOverrideCommand') || '';
}

static get cloudRunnerBranch() {
return Input.getInput('cloudRunnerBranch') || 'cloud-runner-develop';
}

static get chownFilesTo() {
@@ -187,29 +221,24 @@ class Input {
return Input.getInput('preBuildSteps') || '';
}

static get customJob() {
return Input.getInput('customJob') || '';
}

static get awsBaseStackName() {
return Input.getInput('awsBaseStackName') || 'game-ci';
}

static get cloudRunnerCluster() {
if (Cli.isCliMode) {
return Input.getInput('cloudRunnerCluster') || 'aws';
}

return Input.getInput('cloudRunnerCluster') || 'local';
}

static get cloudRunnerCpu() {
return Input.getInput('cloudRunnerCpu') || '1.0';
return Input.getInput('cloudRunnerCpu');
}

static get cloudRunnerMemory() {
return Input.getInput('cloudRunnerMemory') || '750M';
}

static async githubToken() {
// Todo - move GitHubCLI out of the simple input class. It is in fact not input from the user.
return Input.getInput('githubToken') || (await GithubCliReader.GetGitHubAuthToken()) || '';
return Input.getInput('cloudRunnerMemory');
}

static get kubeConfig() {
@@ -224,7 +253,31 @@ class Input {
return Input.getInput('kubeVolumeSize') || '5Gi';
}

static get kubeStorageClass(): string {
return Input.getInput('kubeStorageClass') || '';
}

static get checkDependencyHealthOverride(): string {
return Input.getInput('checkDependencyHealthOverride') || '';
}

static get startDependenciesOverride(): string {
return Input.getInput('startDependenciesOverride') || '';
}

static get cacheKey(): string {
return Input.getInput('cacheKey') || Input.branch;
}

static get cloudRunnerTests(): boolean {
return Input.getInput(`cloudRunnerTests`) || false;
}

public static ToEnvVarFormat(input: string) {
if (input.toUpperCase() === input) {
return input;
}

return input
.replace(/([A-Z])/g, ' $1')
.trim()

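ToEnvVarFormat is cut off by the hunk boundary above: the visible part returns an already upper-cased key unchanged and otherwise splits camelCase on capital letters. The remainder of the chain is not shown here, but presumably it upper-cases and underscore-joins the pieces. A hedged sketch of that assumed behaviour:

// Assumption: the tail of the chain (not visible in the hunk) upper-cases and joins with underscores.
function toEnvVarFormat(input: string): string {
  if (input.toUpperCase() === input) {
    return input;
  }

  return input
    .replace(/([A-Z])/g, ' $1') // 'cloudRunnerCluster' -> 'cloud Runner Cluster'
    .trim()
    .toUpperCase()
    .replace(/ /g, '_'); // -> 'CLOUD_RUNNER_CLUSTER'
}
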
@@ -1,5 +1,5 @@
import { BuildParameters } from '.';
import { SetupWindows, SetupMac } from './platform-setup/';
import { SetupMac, SetupWindows } from './platform-setup/';
import ValidateWindows from './platform-validation/validate-windows';

class PlatformSetup {
@@ -12,7 +12,8 @@ class PlatformSetup {
case 'darwin':
await SetupMac.setup(buildParameters, actionFolder);
break;
//Add other baseOS's here

// Add other baseOS's here
}
}
}

@@ -13,8 +13,9 @@ class SetupWindows {
if (!fs.existsSync('c:/regkeys')) {
fs.mkdirSync('c:/regkeys');
}

// These all need the Windows 10 SDK
switch (targetPlatform) {
//These all need the Windows 10 SDK
case 'StandaloneWindows':
case 'StandaloneWindows64':
case 'WSAPlayer':

@@ -32,7 +32,7 @@ class ValidateWindows {
}

private static checkForWin10SDK() {
//Check for Windows 10 SDK on runner
// Check for Windows 10 SDK on runner
const windows10SDKPathExists = fs.existsSync('C:/Program Files (x86)/Windows Kits');
if (!windows10SDKPathExists) {
throw new Error(`Windows 10 SDK not found in default location. Make sure
@@ -42,7 +42,7 @@ class ValidateWindows {
}

private static checkForVisualStudio() {
//Note: When upgrading to Server 2022, we will need to move to just "program files" since VS will be 64-bit
// Note: When upgrading to Server 2022, we will need to move to just "program files" since VS will be 64-bit
const visualStudioInstallPathExists = fs.existsSync('C:/Program Files (x86)/Microsoft Visual Studio');
const visualStudioDataPathExists = fs.existsSync('C:/ProgramData/Microsoft/VisualStudio');


@@ -17,12 +17,14 @@ class Platform {
XboxOne: 'XboxOne',
tvOS: 'tvOS',
Switch: 'Switch',

// Unsupported
Lumin: 'Lumin',
BJM: 'BJM',
Stadia: 'Stadia',
Facebook: 'Facebook',
NoTarget: 'NoTarget',

// Test specific
Test: 'Test',
};

@@ -34,6 +34,7 @@ describe('System', () => {
it('outputs info', async () => {
execSpy.mockImplementationOnce(async (input, _, options) => {
options?.listeners?.stdout?.(Buffer.from(input, 'utf8'));

return 0;
});


@@ -10,6 +10,7 @@ export default class UnityVersioning {
if (unityVersion === 'auto') {
return UnityVersioning.read(projectPath);
}

return unityVersion;
}

@@ -18,6 +19,7 @@ export default class UnityVersioning {
if (!fs.existsSync(filePath)) {
throw new Error(`Project settings file not found at "${filePath}". Have you correctly set the projectPath?`);
}

return UnityVersioning.parse(fs.readFileSync(filePath, 'utf8'));
}

@@ -26,6 +28,7 @@ export default class UnityVersioning {
if (!matches || matches.length === 0) {
throw new Error(`Failed to parse version from "${projectVersionTxt}".`);
}

return matches[0];
}
}

@@ -2,6 +2,7 @@ import * as core from '@actions/core';
import NotImplementedException from './error/not-implemented-exception';
import System from './system';
import Versioning from './versioning';
import { validVersionTagInputs, invalidVersionTagInputs } from './__data__/versions';

afterEach(() => {
jest.restoreAllMocks();
@@ -34,6 +35,26 @@ describe('Versioning', () => {
});
});

describe('grepCompatibleInputVersionRegex', () => {
// eslint-disable-next-line unicorn/consistent-function-scoping
const matchInputUsingGrep = async (input) => {
const output = await System.run('sh', undefined, {
input: Buffer.from(`echo '${input}' | grep -E '${Versioning.grepCompatibleInputVersionRegex}'`),
silent: true,
});

return output.trim();
};

it.concurrent.each(validVersionTagInputs)(`accepts valid tag input '%s'`, async (input) => {
expect(await matchInputUsingGrep(input)).toStrictEqual(input);
});

it.concurrent.each(invalidVersionTagInputs)(`rejects non-version tag input '%s'`, async (input) => {
await expect(async () => matchInputUsingGrep(input)).rejects.toThrowError(/^Failed to run/);
});
});

describe('branch', () => {
it('returns headRef when set', () => {
const headReference = jest.spyOn(Versioning, 'headRef', 'get').mockReturnValue('feature-branch-1');
@@ -110,6 +131,7 @@ describe('Versioning', () => {

expect(logDiffSpy).toHaveBeenCalledTimes(1);
expect(gitSpy).toHaveBeenCalledTimes(1);

// Todo - this no longer works since typescript
// const issuedCommand = System.run.mock.calls[0][2].input.toString();
// expect(issuedCommand.indexOf('diff')).toBeGreaterThan(-1);
@@ -137,6 +159,7 @@ describe('Versioning', () => {

test.each(['v0', 'v0.1', 'v0.1.2', 'v0.1-2', 'v0.1-2-g'])('does not like %s', (description) => {
expect(Versioning.descriptionRegex1.test(description)).toBeFalsy();

// Also, never expect without the v to work for any of these cases.
expect(Versioning.descriptionRegex1.test(description?.slice(1))).toBeFalsy();
});

@@ -17,6 +17,10 @@ export default class Versioning {
return { None: 'None', Semantic: 'Semantic', Tag: 'Tag', Custom: 'Custom' };
}

static get grepCompatibleInputVersionRegex() {
return '^v?([0-9]+\\.)*[0-9]+.*';
}

/**
* Get the branch name of the (related) branch
*/
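
The new grepCompatibleInputVersionRegex is deliberately written so the same pattern string works for both grep -E in the shell-based tests and JavaScript's RegExp: an optional leading "v", any number of dot-separated digit groups, a final digit group, and an arbitrary suffix. A quick illustration of what it accepts, checked here with RegExp for convenience:

// Same pattern string as the getter above, compiled as a JavaScript regular expression.
const versionPattern = new RegExp('^v?([0-9]+\\.)*[0-9]+.*');

console.log(versionPattern.test('v1.2.3')); // true
console.log(versionPattern.test('0.17')); // true
console.log(versionPattern.test('v2')); // true
console.log(versionPattern.test('latest')); // false
console.log(versionPattern.test('feature-branch')); // false
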
@@ -130,6 +134,7 @@ export default class Versioning {
if (!(await this.hasAnyVersionTags())) {
const version = `0.0.${await this.getTotalNumberOfCommits()}`;
core.info(`Generated version ${version} (no version tags found).`);

return version;
}

@@ -148,6 +153,7 @@ export default class Versioning {

const version = `0.0.${await this.getTotalNumberOfCommits()}`;
core.info(`Generated version ${version} (semantic version couldn't be determined).`);

return version;
}

@@ -203,6 +209,7 @@ export default class Versioning {
core.warning(
`Failed to parse git describe output or version can not be determined through: "${description}".`,
);

return false;
}
}
@@ -269,18 +276,20 @@ export default class Versioning {
}

/**
* Whether or not the repository has any version tags yet.
* Whether the current tree has any version tags yet.
*
* Note: Currently this is run in all OSes, so the syntax must be cross-platform.
*/
static async hasAnyVersionTags() {
const numberOfCommitsAsString = await System.run('sh', undefined, {
input: Buffer.from('git tag --list --merged HEAD | grep v[0-9]* | wc -l'),
const numberOfTagsAsString = await System.run('sh', undefined, {
input: Buffer.from(`git tag --list --merged HEAD | grep -E '${this.grepCompatibleInputVersionRegex}' | wc -l`),
cwd: this.projectPath,
silent: false,
});

const numberOfCommits = Number.parseInt(numberOfCommitsAsString, 10);
const numberOfTags = Number.parseInt(numberOfTagsAsString, 10);

return numberOfCommits !== 0;
return numberOfTags !== 0;
}

/**

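The hasAnyVersionTags rewrite above also fixes a counting bug: the old pipeline grepped with v[0-9]*, and because [0-9]* may match zero digits that pattern matches any tag containing a "v" (for example "preview" or "dev-build"), so unrelated tags were counted as version tags. The anchored grep -E pattern only counts tags that actually look like versions. A small sketch of the difference, filtering a hypothetical tag list in TypeScript instead of through sh:

// Hypothetical tag list; the action reads the real one from `git tag --list --merged HEAD`.
const tags = ['v1.0.0', 'v2.3.1', 'preview', 'dev-build'];

// Old behaviour: digits optional, so any tag containing "v" matches.
const oldCount = tags.filter((tag) => /v[0-9]*/.test(tag)).length; // 4

// New behaviour: anchored version pattern, only real version tags match.
const newCount = tags.filter((tag) => /^v?([0-9]+\.)*[0-9]+.*/.test(tag)).length; // 2

console.log({ oldCount, newCount });
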
@@ -9,5 +9,5 @@
"noImplicitAny": false /* Re-enable after fixing compatibility */ /* Raise error on expressions and declarations with an implied 'any' type. */,
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
"exclude": ["node_modules", "dist"]
}

38
yarn.lock
@@ -2,12 +2,13 @@
# yarn lockfile v1


"@actions/core@^1.6.0":
version "1.6.0"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.6.0.tgz#0568e47039bfb6a9170393a73f3b7eb3b22462cb"
integrity sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==
"@actions/core@^1.9.1":
version "1.9.1"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.9.1.tgz#97c0201b1f9856df4f7c3a375cdcdb0c2a2f750b"
integrity sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==
dependencies:
"@actions/http-client" "^1.0.11"
"@actions/http-client" "^2.0.1"
uuid "^8.3.2"

"@actions/exec@^1.1.0":
version "1.1.0"
@@ -33,6 +34,13 @@
dependencies:
tunnel "0.0.6"

"@actions/http-client@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-2.0.1.tgz#873f4ca98fe32f6839462a6f046332677322f99c"
integrity sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==
dependencies:
tunnel "^0.0.6"

"@actions/io@^1.0.1":
version "1.1.0"
resolved "https://registry.npmjs.org/@actions/io/-/io-1.1.0.tgz"
@@ -1157,7 +1165,7 @@
dependencies:
"@types/node" "*"

"@types/node@*", "@types/node@^17.0.21":
"@types/node@*":
version "17.0.21"
resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.21.tgz#864b987c0c68d07b4345845c3e63b75edd143644"
integrity sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==
@@ -1167,6 +1175,11 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.60.tgz#35f3d6213daed95da7f0f73e75bcc6980e90597b"
integrity sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==

"@types/node@^17.0.23":
version "17.0.23"
resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.23.tgz#3b41a6e643589ac6442bdbd7a4a3ded62f33f7da"
integrity sha512-UxDxWn7dl97rKVeVS61vErvw086aCYhDLyvRQZ5Rk65rZKepaFdm53GeqXaKBuOhED4e9uWq34IC3TdSdJJ2Gw==

"@types/normalize-package-data@^2.4.0":
version "2.4.0"
resolved "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz"
@@ -3651,9 +3664,9 @@ jmespath@0.16.0:
integrity sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==

jose@^1.27.1:
version "1.28.1"
resolved "https://registry.npmjs.org/jose/-/jose-1.28.1.tgz"
integrity sha512-6JK28rFu5ENp/yxMwM+iN7YeaInnY9B9Bggjkz5fuwLiJhbVrl2O4SJr65bdNBPl9y27fdC3Mymh+FVCvozLIg==
version "1.28.2"
resolved "https://registry.yarnpkg.com/jose/-/jose-1.28.2.tgz#97f4aa608d0020ae5c1051a2a33247b957401e5a"
integrity sha512-wWy51U2MXxYi3g8zk2lsQ8M6O1lartpkxuq1TYexzPKYLgHLZkCjklaATP36I5BUoWjF2sInB9U1Qf18fBZxNA==
dependencies:
"@panva/asn1.js" "^1.0.0"

@@ -5239,7 +5252,7 @@ tunnel-agent@^0.6.0:
dependencies:
safe-buffer "^5.0.1"

tunnel@0.0.6:
tunnel@0.0.6, tunnel@^0.0.6:
version "0.0.6"
resolved "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz"
integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==
@@ -5366,6 +5379,11 @@ uuid@^3.3.2:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==

uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==

v8-compile-cache@^2.0.3:
version "2.3.0"
resolved "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz"