Compare commits
31 Commits
v2.3.1...users/eric
| Author | SHA1 | Date |
|---|---|---|
|  | 096b500552 |  |
|  | 59bb2dd7cd |  |
|  | 093dbebc2e |  |
|  | d79ea53307 |  |
|  | 0fa906a067 |  |
|  | 0b63af4c8c |  |
|  | 31d9a4bd37 |  |
|  | 154a05918b |  |
|  | 7a2b445a4b |  |
|  | eed20d30d5 |  |
|  | 4a1fa615de |  |
|  | 19fb09ae8f |  |
|  | bb56c8569a |  |
|  | ba329ee889 |  |
|  | a039094e93 |  |
|  | 64fcc0c59a |  |
|  | 35bb830cfd |  |
|  | 54a7542872 |  |
|  | 8ade6aebfa |  |
|  | bf32513e49 |  |
|  | 675d935214 |  |
|  | e7d8850882 |  |
|  | 1475d13f7a |  |
|  | 34b9c46c61 |  |
|  | 255d69d4c5 |  |
|  | c124b3fb75 |  |
|  | afff79a5a2 |  |
|  | 306dc1c898 |  |
|  | d415b27760 |  |
|  | 4af80cb867 |  |
|  | ad6dd29a96 |  |
.github/workflows/test.yml (vendored) · 243 changed lines
@@ -6,202 +6,79 @@ on:
     branches:
       - master
       - releases/*
+      - users/ericsciple/*

 jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/setup-node@v1
-        with:
-          node-version: 12.x
-      - uses: actions/checkout@v2
-      - run: npm ci
-      - run: npm run build
-      - run: npm run format-check
-      - run: npm run lint
-      - run: npm test
-      - name: Verify no unstaged changes
-        run: __test__/verify-no-unstaged-changes.sh
+      - uses: actions/checkout@v2-beta
+      # - run: npm ci
+      # - run: npm run build
+      # - run: npm run format-check
+      # - run: npm run lint
+      # - run: npm run pack
+      # - run: npm run gendocs
+      # - name: Verify no unstaged changes
+      #   run: __test__/verify-no-unstaged-changes.sh

-  test:
-    strategy:
-      matrix:
-        runs-on: [ubuntu-latest, macos-latest, windows-latest]
-    runs-on: ${{ matrix.runs-on }}
+  # test:
+  #   strategy:
+  #     matrix:
+  #       runs-on: [ubuntu-latest, macos-latest, windows-latest]
+  #   runs-on: ${{ matrix.runs-on }}

-    steps:
-      # Clone this repo
-      - name: Checkout
-        uses: actions/checkout@v2
+  #   steps:
+  #     # Clone this repo
+  #     - name: Checkout
+  #       uses: actions/checkout@v1 # todo: switch to V2

       # Basic checkout
-      - name: Checkout basic
+      - name: Basic checkout
         uses: ./
         with:
           ref: test-data/v2/basic
           path: basic
-      - name: Verify basic
-        shell: bash
-        run: __test__/verify-basic.sh
+      # - name: Verify basic
+      #   shell: bash
+      #   run: __test__/verify-basic.sh

-      # Clean
-      - name: Modify work tree
-        shell: bash
-        run: __test__/modify-work-tree.sh
-      - name: Checkout clean
-        uses: ./
-        with:
-          ref: test-data/v2/basic
-          path: basic
-      - name: Verify clean
-        shell: bash
-        run: __test__/verify-clean.sh
+      # # Clean
+      # - name: Modify work tree
+      #   shell: bash
+      #   run: __test__/modify-work-tree.sh
+      # - name: Clean checkout
+      #   uses: ./
+      #   with:
+      #     ref: test-data/v2/basic
+      #     path: basic
+      # - name: Verify clean
+      #   shell: bash
+      #   run: __test__/verify-clean.sh

-      # Side by side
-      - name: Checkout side by side 1
-        uses: ./
-        with:
-          ref: test-data/v2/side-by-side-1
-          path: side-by-side-1
-      - name: Checkout side by side 2
-        uses: ./
-        with:
-          ref: test-data/v2/side-by-side-2
-          path: side-by-side-2
-      - name: Verify side by side
-        shell: bash
-        run: __test__/verify-side-by-side.sh
+      # # Side by side
+      # - name: Side by side checkout 1
+      #   uses: ./
+      #   with:
+      #     ref: test-data/v2/side-by-side-1
+      #     path: side-by-side-1
+      # - name: Side by side checkout 2
+      #   uses: ./
+      #   with:
+      #     ref: test-data/v2/side-by-side-2
+      #     path: side-by-side-2
+      # - name: Verify side by side
+      #   shell: bash
+      #   run: __test__/verify-side-by-side.sh

-      # LFS
-      - name: Checkout LFS
-        uses: ./
-        with:
-          repository: actions/checkout # hardcoded, otherwise doesn't work from a fork
-          ref: test-data/v2/lfs
-          path: lfs
-          lfs: true
-      - name: Verify LFS
-        shell: bash
-        run: __test__/verify-lfs.sh
+      # # LFS
+      # - name: LFS checkout
+      #   uses: ./
+      #   with:
+      #     repository: actions/checkout # hardcoded, otherwise doesn't work from a fork
+      #     ref: test-data/v2/lfs
+      #     path: lfs
+      #     lfs: true
+      # - name: Verify LFS
+      #   shell: bash
+      #   run: __test__/verify-lfs.sh

-      # Submodules false
-      - name: Checkout submodules false
-        uses: ./
-        with:
-          ref: test-data/v2/submodule-ssh-url
-          path: submodules-false
-      - name: Verify submodules false
-        run: __test__/verify-submodules-false.sh
-
-      # Submodules one level
-      - name: Checkout submodules true
-        uses: ./
-        with:
-          ref: test-data/v2/submodule-ssh-url
-          path: submodules-true
-          submodules: true
-      - name: Verify submodules true
-        run: __test__/verify-submodules-true.sh
-
-      # Submodules recursive
-      - name: Checkout submodules recursive
-        uses: ./
-        with:
-          ref: test-data/v2/submodule-ssh-url
-          path: submodules-recursive
-          submodules: recursive
-      - name: Verify submodules recursive
-        run: __test__/verify-submodules-recursive.sh
-
-      # Basic checkout using REST API
-      - name: Remove basic
-        if: runner.os != 'windows'
-        run: rm -rf basic
-      - name: Remove basic (Windows)
-        if: runner.os == 'windows'
-        shell: cmd
-        run: rmdir /s /q basic
-      - name: Override git version
-        if: runner.os != 'windows'
-        run: __test__/override-git-version.sh
-      - name: Override git version (Windows)
-        if: runner.os == 'windows'
-        run: __test__\\override-git-version.cmd
-      - name: Checkout basic using REST API
-        uses: ./
-        with:
-          ref: test-data/v2/basic
-          path: basic
-      - name: Verify basic
-        run: __test__/verify-basic.sh --archive
-
-  test-proxy:
-    runs-on: ubuntu-latest
-    container:
-      image: alpine/git:latest
-      options: --dns 127.0.0.1
-    services:
-      squid-proxy:
-        image: datadog/squid:latest
-        ports:
-          - 3128:3128
-    env:
-      https_proxy: http://squid-proxy:3128
-    steps:
-      # Clone this repo
-      - name: Checkout
-        uses: actions/checkout@v2
-
-      # Basic checkout using git
-      - name: Checkout basic
-        uses: ./
-        with:
-          ref: test-data/v2/basic
-          path: basic
-      - name: Verify basic
-        run: __test__/verify-basic.sh
-
-      # Basic checkout using REST API
-      - name: Remove basic
-        run: rm -rf basic
-      - name: Override git version
-        run: __test__/override-git-version.sh
-      - name: Basic checkout using REST API
-        uses: ./
-        with:
-          ref: test-data/v2/basic
-          path: basic
-      - name: Verify basic
-        run: __test__/verify-basic.sh --archive
-
-  test-bypass-proxy:
-    runs-on: ubuntu-latest
-    env:
-      https_proxy: http://no-such-proxy:3128
-      no_proxy: api.github.com,github.com
-    steps:
-      # Clone this repo
-      - name: Checkout
-        uses: actions/checkout@v2
-
-      # Basic checkout using git
-      - name: Checkout basic
-        uses: ./
-        with:
-          ref: test-data/v2/basic
-          path: basic
-      - name: Verify basic
-        run: __test__/verify-basic.sh
-      - name: Remove basic
-        run: rm -rf basic
-
-      # Basic checkout using REST API
-      - name: Override git version
-        run: __test__/override-git-version.sh
-      - name: Checkout basic using REST API
-        uses: ./
-        with:
-          ref: test-data/v2/basic
-          path: basic
-      - name: Verify basic
-        run: __test__/verify-basic.sh --archive
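Each job on the v2.3.1 side of this workflow repeats one pattern: check out the action's own repository, run the local action (`uses: ./`) against a test-data ref, then run a verification script. A minimal sketch of that pattern, reusing the `test-data/v2/basic` ref, `basic` path, and `__test__/verify-basic.sh` script named above (not a literal excerpt of the workflow):

```yaml
steps:
  # Check out this repository so that `uses: ./` resolves to the local action code
  - uses: actions/checkout@v2

  # Exercise the local action against a known test ref, placing it in a subdirectory
  - name: Checkout basic
    uses: ./
    with:
      ref: test-data/v2/basic
      path: basic

  # Assert on the resulting layout and persisted credentials
  - name: Verify basic
    shell: bash
    run: __test__/verify-basic.sh
```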
.gitignore (vendored) · 1 changed line
@@ -1,3 +1,2 @@
-__test__/_temp
 lib/
 node_modules/
CHANGELOG.md · 35 changed lines
@@ -1,40 +1,5 @@
 # Changelog

-## v2.3.1
-
-- [Fix default branch resolution for .wiki and when using SSH](https://github.com/actions/checkout/pull/284)
-
-## v2.3.0
-
-- [Fallback to the default branch](https://github.com/actions/checkout/pull/278)
-
-## v2.2.0
-
-- [Fetch all history for all tags and branches when fetch-depth=0](https://github.com/actions/checkout/pull/258)
-
-## v2.1.1
-
-- Changes to support GHES ([here](https://github.com/actions/checkout/pull/236) and [here](https://github.com/actions/checkout/pull/248))
-
-## v2.1.0
-
-- [Group output](https://github.com/actions/checkout/pull/191)
-- [Changes to support GHES alpha release](https://github.com/actions/checkout/pull/199)
-- [Persist core.sshCommand for submodules](https://github.com/actions/checkout/pull/184)
-- [Add support ssh](https://github.com/actions/checkout/pull/163)
-- [Convert submodule SSH URL to HTTPS, when not using SSH](https://github.com/actions/checkout/pull/179)
-- [Add submodule support](https://github.com/actions/checkout/pull/157)
-- [Follow proxy settings](https://github.com/actions/checkout/pull/144)
-- [Fix ref for pr closed event when a pr is merged](https://github.com/actions/checkout/pull/141)
-- [Fix issue checking detached when git less than 2.22](https://github.com/actions/checkout/pull/128)
-
-## v2.0.0
-
-- [Do not pass cred on command line](https://github.com/actions/checkout/pull/108)
-- [Add input persist-credentials](https://github.com/actions/checkout/pull/107)
-- [Fallback to REST API to download repo](https://github.com/actions/checkout/pull/104)
-
 ## v2 (beta)

 - Improved fetch performance
README.md · 173 changed lines
@@ -2,31 +2,30 @@
 <a href="https://github.com/actions/checkout"><img alt="GitHub Actions status" src="https://github.com/actions/checkout/workflows/test-local/badge.svg"></a>
 </p>

-# Checkout V2
+# Checkout V2 beta

 This action checks-out your repository under `$GITHUB_WORKSPACE`, so your workflow can access it.

-Only a single commit is fetched by default, for the ref/SHA that triggered the workflow. Set `fetch-depth: 0` to fetch all history for all branches and tags. Refer [here](https://help.github.com/en/articles/events-that-trigger-workflows) to learn which commit `$GITHUB_SHA` points to for different events.
+By default, the repository that triggered the workflow is checked-out, for the ref/SHA that triggered the event.

-The auth token is persisted in the local git config. This enables your scripts to run authenticated git commands. The token is removed during post-job cleanup. Set `persist-credentials: false` to opt-out.
+Refer [here](https://help.github.com/en/articles/events-that-trigger-workflows) to learn which commit `$GITHUB_SHA` points to for different events.

-When Git 2.18 or higher is not in your PATH, falls back to the REST API to download the files.

 # What's new

-- Improved performance
-  - Fetches only a single commit by default
+- Improved fetch performance
+  - The default behavior now fetches only the SHA being checked-out
 - Script authenticated git commands
-  - Auth token persisted in the local git config
-- Supports SSH
+  - Persists `with.token` in the local git config
+  - Enables your scripts to run authenticated git commands
+  - Post-job cleanup removes the token
+  - Coming soon: Opt out by setting `with.persist-credentials` to `false`
 - Creates a local branch
   - No longer detached HEAD when checking out a branch
+  - A local branch is created with the corresponding upstream branch set
 - Improved layout
-  - The input `path` is always relative to $GITHUB_WORKSPACE
-  - Aligns better with container actions, where $GITHUB_WORKSPACE gets mapped in
-- Fallback to REST API download
-  - When Git 2.18 or higher is not in the PATH, the REST API will be used to download the files
-  - When using a job container, the container's PATH is used
+  - `with.path` is always relative to `github.workspace`
+  - Aligns better with container actions, where `github.workspace` gets mapped in
+- Removed input `submodules`

 Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous versions.

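The v2.3.1 text above notes that the persisted auth token can be disabled. A minimal sketch of opting out (the step name is illustrative):

```yaml
- name: Checkout without persisted credentials
  uses: actions/checkout@v2
  with:
    # Skip writing the token into the local git config
    persist-credentials: false
```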
@@ -34,7 +33,7 @@ Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous

 <!-- start usage -->
 ```yaml
-- uses: actions/checkout@v2
+- uses: actions/checkout@v2-beta
   with:
     # Repository name with owner. For example, actions/checkout
     # Default: ${{ github.repository }}
@@ -42,46 +41,13 @@ Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous

     # The branch, tag or SHA to checkout. When checking out the repository that
     # triggered a workflow, this defaults to the reference or SHA for that event.
-    # Otherwise, uses the default branch.
+    # Otherwise, defaults to `master`.
     ref: ''

-    # Personal access token (PAT) used to fetch the repository. The PAT is configured
-    # with the local git config, which enables your scripts to run authenticated git
-    # commands. The post-job step removes the PAT.
-    #
-    # We recommend using a service account with the least permissions necessary. Also
-    # when generating a new PAT, select the least scopes necessary.
-    #
-    # [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)
-    #
+    # Access token for clone repository
     # Default: ${{ github.token }}
     token: ''

-    # SSH key used to fetch the repository. The SSH key is configured with the local
-    # git config, which enables your scripts to run authenticated git commands. The
-    # post-job step removes the SSH key.
-    #
-    # We recommend using a service account with the least permissions necessary.
-    #
-    # [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)
-    ssh-key: ''
-
-    # Known hosts in addition to the user and global host key database. The public SSH
-    # keys for a host may be obtained using the utility `ssh-keyscan`. For example,
-    # `ssh-keyscan github.com`. The public key for github.com is always implicitly
-    # added.
-    ssh-known-hosts: ''
-
-    # Whether to perform strict host key checking. When true, adds the options
-    # `StrictHostKeyChecking=yes` and `CheckHostIP=no` to the SSH command line. Use
-    # the input `ssh-known-hosts` to configure additional hosts.
-    # Default: true
-    ssh-strict: ''
-
-    # Whether to configure the token or SSH key with the local git config
-    # Default: true
-    persist-credentials: ''
-
     # Relative path under $GITHUB_WORKSPACE to place the repository
     path: ''

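The `ssh-key`, `ssh-known-hosts`, and related inputs documented on the v2.3.1 side above can be combined to fetch over SSH. This is a sketch only; the secret names `SSH_PRIVATE_KEY` and `SSH_KNOWN_HOSTS` are assumptions, not anything defined by the action:

```yaml
- uses: actions/checkout@v2
  with:
    # SSH key stored as an encrypted secret (secret name is an assumption)
    ssh-key: ${{ secrets.SSH_PRIVATE_KEY }}
    # Extra host keys beyond the implicit github.com entry, e.g. output of `ssh-keyscan`
    ssh-known-hosts: ${{ secrets.SSH_KNOWN_HOSTS }}
```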
@@ -96,121 +62,34 @@ Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous
     # Whether to download Git-LFS files
     # Default: false
     lfs: ''

-    # Whether to checkout submodules: `true` to checkout submodules or `recursive` to
-    # recursively checkout submodules.
-    #
-    # When the `ssh-key` input is not provided, SSH URLs beginning with
-    # `git@github.com:` are converted to HTTPS.
-    #
-    # Default: false
-    submodules: ''
 ```
 <!-- end usage -->

-# Scenarios
-
-- [Fetch all history for all tags and branches](#Fetch-all-history-for-all-tags-and-branches)
-- [Checkout a different branch](#Checkout-a-different-branch)
-- [Checkout HEAD^](#Checkout-HEAD)
-- [Checkout multiple repos (side by side)](#Checkout-multiple-repos-side-by-side)
-- [Checkout multiple repos (nested)](#Checkout-multiple-repos-nested)
-- [Checkout multiple repos (private)](#Checkout-multiple-repos-private)
-- [Checkout pull request HEAD commit instead of merge commit](#Checkout-pull-request-HEAD-commit-instead-of-merge-commit)
-- [Checkout pull request on closed event](#Checkout-pull-request-on-closed-event)
-
-## Fetch all history for all tags and branches
-
-```yaml
-- uses: actions/checkout@v2
-  with:
-    fetch-depth: 0
-```
-
 ## Checkout a different branch

 ```yaml
-- uses: actions/checkout@v2
+- uses: actions/checkout@v2-beta
   with:
-    ref: my-branch
+    ref: some-branch
 ```

-## Checkout HEAD^
+## Checkout a different, private repository

 ```yaml
-- uses: actions/checkout@v2
+- uses: actions/checkout@v2-beta
   with:
-    fetch-depth: 2
-- run: git checkout HEAD^
-```
-
-## Checkout multiple repos (side by side)
-
-```yaml
-- name: Checkout
-  uses: actions/checkout@v2
-  with:
-    path: main
-
-- name: Checkout tools repo
-  uses: actions/checkout@v2
-  with:
-    repository: my-org/my-tools
-    path: my-tools
-```
-
-## Checkout multiple repos (nested)
-
-```yaml
-- name: Checkout
-  uses: actions/checkout@v2
-
-- name: Checkout tools repo
-  uses: actions/checkout@v2
-  with:
-    repository: my-org/my-tools
-    path: my-tools
-```
-
-## Checkout multiple repos (private)
-
-```yaml
-- name: Checkout
-  uses: actions/checkout@v2
-  with:
-    path: main
-
-- name: Checkout private tools
-  uses: actions/checkout@v2
-  with:
-    repository: my-org/my-private-tools
+    repository: myAccount/myRepository
+    ref: refs/heads/master
     token: ${{ secrets.GitHub_PAT }} # `GitHub_PAT` is a secret that contains your PAT
-    path: my-tools
 ```
+> - `${{ github.token }}` is scoped to the current repository, so if you want to checkout another repository that is private you will need to provide your own [PAT](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line).

-> - `${{ github.token }}` is scoped to the current repository, so if you want to checkout a different repository that is private you will need to provide your own [PAT](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line).
+## Checkout the HEAD commit of a PR, rather than the merge commit

-## Checkout pull request HEAD commit instead of merge commit
-
 ```yaml
-- uses: actions/checkout@v2
+- uses: actions/checkout@v2-beta
   with:
-    ref: ${{ github.event.pull_request.head.sha }}
+    ref: ${{ github.event.after }}
 ```
-
-## Checkout pull request on closed event
-
-```yaml
-on:
-  pull_request:
-    branches: [master]
-    types: [opened, synchronize, closed]
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v2
-```

 # License
File diff suppressed because it is too large
__test__/input-helper.test.ts
@@ -1,50 +1,47 @@
 import * as assert from 'assert'
-import * as core from '@actions/core'
-import * as fsHelper from '../lib/fs-helper'
-import * as github from '@actions/github'
-import * as inputHelper from '../lib/input-helper'
 import * as path from 'path'
-import {IGitSourceSettings} from '../lib/git-source-settings'
+import {ISourceSettings} from '../lib/git-source-provider'

 const originalGitHubWorkspace = process.env['GITHUB_WORKSPACE']
 const gitHubWorkspace = path.resolve('/checkout-tests/workspace')

-// Inputs for mock @actions/core
-let inputs = {} as any
+// Late bind
+let inputHelper: any

-// Shallow clone original @actions/github context
-let originalContext = {...github.context}
+// Mock @actions/core
+let inputs = {} as any
+const mockCore = jest.genMockFromModule('@actions/core') as any
+mockCore.getInput = (name: string) => {
+  return inputs[name]
+}
+
+// Mock @actions/github
+const mockGitHub = jest.genMockFromModule('@actions/github') as any
+mockGitHub.context = {
+  repo: {
+    owner: 'some-owner',
+    repo: 'some-repo'
+  },
+  ref: 'refs/heads/some-ref',
+  sha: '1234567890123456789012345678901234567890'
+}
+
+// Mock ./fs-helper
+const mockFSHelper = jest.genMockFromModule('../lib/fs-helper') as any
+mockFSHelper.directoryExistsSync = (path: string) => path == gitHubWorkspace

 describe('input-helper tests', () => {
   beforeAll(() => {
-    // Mock getInput
-    jest.spyOn(core, 'getInput').mockImplementation((name: string) => {
-      return inputs[name]
-    })
-
-    // Mock error/warning/info/debug
-    jest.spyOn(core, 'error').mockImplementation(jest.fn())
-    jest.spyOn(core, 'warning').mockImplementation(jest.fn())
-    jest.spyOn(core, 'info').mockImplementation(jest.fn())
-    jest.spyOn(core, 'debug').mockImplementation(jest.fn())
-
-    // Mock github context
-    jest.spyOn(github.context, 'repo', 'get').mockImplementation(() => {
-      return {
-        owner: 'some-owner',
-        repo: 'some-repo'
-      }
-    })
-    github.context.ref = 'refs/heads/some-ref'
-    github.context.sha = '1234567890123456789012345678901234567890'
-
-    // Mock ./fs-helper directoryExistsSync()
-    jest
-      .spyOn(fsHelper, 'directoryExistsSync')
-      .mockImplementation((path: string) => path == gitHubWorkspace)
-
     // GitHub workspace
     process.env['GITHUB_WORKSPACE'] = gitHubWorkspace
+
+    // Mocks
+    jest.setMock('@actions/core', mockCore)
+    jest.setMock('@actions/github', mockGitHub)
+    jest.setMock('../lib/fs-helper', mockFSHelper)
+
+    // Now import
+    inputHelper = require('../lib/input-helper')
   })

   beforeEach(() => {
@@ -53,24 +50,20 @@ describe('input-helper tests', () => {
   })

   afterAll(() => {
-    // Restore GitHub workspace
+    // Reset GitHub workspace
     delete process.env['GITHUB_WORKSPACE']
     if (originalGitHubWorkspace) {
       process.env['GITHUB_WORKSPACE'] = originalGitHubWorkspace
     }

-    // Restore @actions/github context
-    github.context.ref = originalContext.ref
-    github.context.sha = originalContext.sha
-
-    // Restore
-    jest.restoreAllMocks()
+    // Reset modules
+    jest.resetModules()
   })

   it('sets defaults', () => {
-    const settings: IGitSourceSettings = inputHelper.getInputs()
+    const settings: ISourceSettings = inputHelper.getInputs()
     expect(settings).toBeTruthy()
-    expect(settings.authToken).toBeFalsy()
+    expect(settings.accessToken).toBeFalsy()
     expect(settings.clean).toBe(true)
     expect(settings.commit).toBeTruthy()
     expect(settings.commit).toBe('1234567890123456789012345678901234567890')
@@ -82,19 +75,6 @@ describe('input-helper tests', () => {
     expect(settings.repositoryPath).toBe(gitHubWorkspace)
   })

-  it('qualifies ref', () => {
-    let originalRef = github.context.ref
-    try {
-      github.context.ref = 'some-unqualified-ref'
-      const settings: IGitSourceSettings = inputHelper.getInputs()
-      expect(settings).toBeTruthy()
-      expect(settings.commit).toBe('1234567890123456789012345678901234567890')
-      expect(settings.ref).toBe('refs/heads/some-unqualified-ref')
-    } finally {
-      github.context.ref = originalRef
-    }
-  })
-
   it('requires qualified repo', () => {
     inputs.repository = 'some-unqualified-repo'
     assert.throws(() => {
@@ -104,23 +84,37 @@ describe('input-helper tests', () => {

   it('roots path', () => {
     inputs.path = 'some-directory/some-subdirectory'
-    const settings: IGitSourceSettings = inputHelper.getInputs()
+    const settings: ISourceSettings = inputHelper.getInputs()
     expect(settings.repositoryPath).toBe(
       path.join(gitHubWorkspace, 'some-directory', 'some-subdirectory')
     )
   })

+  it('sets correct default ref/sha for other repo', () => {
+    inputs.repository = 'some-owner/some-other-repo'
+    const settings: ISourceSettings = inputHelper.getInputs()
+    expect(settings.ref).toBe('refs/heads/master')
+    expect(settings.commit).toBeFalsy()
+  })
+
   it('sets ref to empty when explicit sha', () => {
     inputs.ref = '1111111111222222222233333333334444444444'
-    const settings: IGitSourceSettings = inputHelper.getInputs()
+    const settings: ISourceSettings = inputHelper.getInputs()
     expect(settings.ref).toBeFalsy()
     expect(settings.commit).toBe('1111111111222222222233333333334444444444')
   })

   it('sets sha to empty when explicit ref', () => {
     inputs.ref = 'refs/heads/some-other-ref'
-    const settings: IGitSourceSettings = inputHelper.getInputs()
+    const settings: ISourceSettings = inputHelper.getInputs()
     expect(settings.ref).toBe('refs/heads/some-other-ref')
     expect(settings.commit).toBeFalsy()
   })
+
+  it('gives good error message for submodules input', () => {
+    inputs.submodules = 'true'
+    assert.throws(() => {
+      inputHelper.getInputs()
+    }, /The input 'submodules' is not supported/)
+  })
 })
__test__/override-git-version.cmd (removed)
@@ -1,6 +0,0 @@
-
-mkdir override-git-version
-cd override-git-version
-echo @echo override git version 1.2.3 > git.cmd
-echo ::add-path::%CD%
-cd ..
__test__/override-git-version.sh (removed)
@@ -1,9 +0,0 @@
-#!/bin/sh
-
-mkdir override-git-version
-cd override-git-version
-echo "#!/bin/sh" > git
-echo "echo override git version 1.2.3" >> git
-chmod +x git
-echo "::add-path::$(pwd)"
-cd ..
__test__/retry-helper.test.ts (removed)
@@ -1,87 +0,0 @@
-import * as core from '@actions/core'
-import {RetryHelper} from '../lib/retry-helper'
-
-let info: string[]
-let retryHelper: any
-
-describe('retry-helper tests', () => {
-  beforeAll(() => {
-    // Mock @actions/core info()
-    jest.spyOn(core, 'info').mockImplementation((message: string) => {
-      info.push(message)
-    })
-
-    retryHelper = new RetryHelper(3, 0, 0)
-  })
-
-  beforeEach(() => {
-    // Reset info
-    info = []
-  })
-
-  afterAll(() => {
-    // Restore
-    jest.restoreAllMocks()
-  })
-
-  it('first attempt succeeds', async () => {
-    const actual = await retryHelper.execute(async () => {
-      return 'some result'
-    })
-    expect(actual).toBe('some result')
-    expect(info).toHaveLength(0)
-  })
-
-  it('second attempt succeeds', async () => {
-    let attempts = 0
-    const actual = await retryHelper.execute(() => {
-      if (++attempts == 1) {
-        throw new Error('some error')
-      }
-
-      return Promise.resolve('some result')
-    })
-    expect(attempts).toBe(2)
-    expect(actual).toBe('some result')
-    expect(info).toHaveLength(2)
-    expect(info[0]).toBe('some error')
-    expect(info[1]).toMatch(/Waiting .+ seconds before trying again/)
-  })
-
-  it('third attempt succeeds', async () => {
-    let attempts = 0
-    const actual = await retryHelper.execute(() => {
-      if (++attempts < 3) {
-        throw new Error(`some error ${attempts}`)
-      }
-
-      return Promise.resolve('some result')
-    })
-    expect(attempts).toBe(3)
-    expect(actual).toBe('some result')
-    expect(info).toHaveLength(4)
-    expect(info[0]).toBe('some error 1')
-    expect(info[1]).toMatch(/Waiting .+ seconds before trying again/)
-    expect(info[2]).toBe('some error 2')
-    expect(info[3]).toMatch(/Waiting .+ seconds before trying again/)
-  })
-
-  it('all attempts fail succeeds', async () => {
-    let attempts = 0
-    let error: Error = (null as unknown) as Error
-    try {
-      await retryHelper.execute(() => {
-        throw new Error(`some error ${++attempts}`)
-      })
-    } catch (err) {
-      error = err
-    }
-    expect(error.message).toBe('some error 3')
-    expect(attempts).toBe(3)
-    expect(info).toHaveLength(4)
-    expect(info[0]).toBe('some error 1')
-    expect(info[1]).toMatch(/Waiting .+ seconds before trying again/)
-    expect(info[2]).toBe('some error 2')
-    expect(info[3]).toMatch(/Waiting .+ seconds before trying again/)
-  })
-})
__test__/verify-basic.sh
@@ -1,24 +1,10 @@
-#!/bin/sh
+#!/bin/bash

 if [ ! -f "./basic/basic-file.txt" ]; then
     echo "Expected basic file does not exist"
     exit 1
 fi

-if [ "$1" = "--archive" ]; then
-    # Verify no .git folder
-    if [ -d "./basic/.git" ]; then
-        echo "Did not expect ./basic/.git folder to exist"
-        exit 1
-    fi
-else
-    # Verify .git folder
-    if [ ! -d "./basic/.git" ]; then
-        echo "Expected ./basic/.git folder to exist"
-        exit 1
-    fi
-
 # Verify auth token
 cd basic
-git fetch --no-tags --depth=1 origin +refs/heads/master:refs/remotes/origin/master
-fi
+git fetch
__test__/verify-no-unstaged-changes.sh
@@ -12,6 +12,6 @@ if [[ "$(git status --porcelain)" != "" ]]; then
     echo ----------------------------------------
     echo Troubleshooting
     echo ----------------------------------------
-    echo "::error::Unstaged changes detected. Locally try running: git clean -ffdx && npm ci && npm run format && npm run build"
+    echo "::error::Unstaged changes detected. Locally try running: git clean -ffdx && npm ci && npm run all"
     exit 1
 fi
__test__/verify-submodules-false.sh (removed)
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-if [ ! -f "./submodules-false/regular-file.txt" ]; then
-    echo "Expected regular file does not exist"
-    exit 1
-fi
-
-if [ -f "./submodules-false/submodule-level-1/submodule-file.txt" ]; then
-    echo "Unexpected submodule file exists"
-    exit 1
-fi
__test__/verify-submodules-not-checked-out.sh (new executable file, 11 lines)
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+if [ ! -f "./submodules-not-checked-out/regular-file.txt" ]; then
+    echo "Expected regular file does not exist"
+    exit 1
+fi
+
+if [ -f "./submodules-not-checked-out/submodule-level-1/submodule-file.txt" ]; then
+    echo "Unexpected submodule file exists"
+    exit 1
+fi
__test__/verify-submodules-recursive.sh (removed)
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-if [ ! -f "./submodules-recursive/regular-file.txt" ]; then
-    echo "Expected regular file does not exist"
-    exit 1
-fi
-
-if [ ! -f "./submodules-recursive/submodule-level-1/submodule-file.txt" ]; then
-    echo "Expected submodule file does not exist"
-    exit 1
-fi
-
-if [ ! -f "./submodules-recursive/submodule-level-1/submodule-level-2/nested-submodule-file.txt" ]; then
-    echo "Expected nested submodule file does not exists"
-    exit 1
-fi
-
-echo "Testing persisted credential"
-pushd ./submodules-recursive/submodule-level-1/submodule-level-2
-git config --local --name-only --get-regexp http.+extraheader && git fetch
-if [ "$?" != "0" ]; then
-    echo "Failed to validate persisted credential"
-    popd
-    exit 1
-fi
-popd
__test__/verify-submodules-true.sh (removed)
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-if [ ! -f "./submodules-true/regular-file.txt" ]; then
-    echo "Expected regular file does not exist"
-    exit 1
-fi
-
-if [ ! -f "./submodules-true/submodule-level-1/submodule-file.txt" ]; then
-    echo "Expected submodule file does not exist"
-    exit 1
-fi
-
-if [ -f "./submodules-true/submodule-level-1/submodule-level-2/nested-submodule-file.txt" ]; then
-    echo "Unexpected nested submodule file exists"
-    exit 1
-fi
-
-echo "Testing persisted credential"
-pushd ./submodules-true/submodule-level-1
-git config --local --name-only --get-regexp http.+extraheader && git fetch
-if [ "$?" != "0" ]; then
-    echo "Failed to validate persisted credential"
-    popd
-    exit 1
-fi
-popd
action.yml · 53 changed lines
@@ -6,48 +6,12 @@ inputs:
     default: ${{ github.repository }}
   ref:
     description: >
-      The branch, tag or SHA to checkout. When checking out the repository that
-      triggered a workflow, this defaults to the reference or SHA for that
-      event. Otherwise, uses the default branch.
+      The branch, tag or SHA to checkout. When checking out the repository
+      that triggered a workflow, this defaults to the reference or SHA for
+      that event. Otherwise, defaults to `master`.
   token:
-    description: >
-      Personal access token (PAT) used to fetch the repository. The PAT is configured
-      with the local git config, which enables your scripts to run authenticated git
-      commands. The post-job step removes the PAT.
-
-
-      We recommend using a service account with the least permissions necessary.
-      Also when generating a new PAT, select the least scopes necessary.
-
-
-      [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)
+    description: 'Access token for clone repository'
     default: ${{ github.token }}
-  ssh-key:
-    description: >
-      SSH key used to fetch the repository. The SSH key is configured with the local
-      git config, which enables your scripts to run authenticated git commands.
-      The post-job step removes the SSH key.
-
-
-      We recommend using a service account with the least permissions necessary.
-
-
-      [Learn more about creating and using
-      encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)
-  ssh-known-hosts:
-    description: >
-      Known hosts in addition to the user and global host key database. The public
-      SSH keys for a host may be obtained using the utility `ssh-keyscan`. For example,
-      `ssh-keyscan github.com`. The public key for github.com is always implicitly added.
-  ssh-strict:
-    description: >
-      Whether to perform strict host key checking. When true, adds the options `StrictHostKeyChecking=yes`
-      and `CheckHostIP=no` to the SSH command line. Use the input `ssh-known-hosts` to
-      configure additional hosts.
-    default: true
-  persist-credentials:
-    description: 'Whether to configure the token or SSH key with the local git config'
-    default: true
   path:
     description: 'Relative path under $GITHUB_WORKSPACE to place the repository'
   clean:
@@ -59,15 +23,6 @@ inputs:
   lfs:
     description: 'Whether to download Git-LFS files'
     default: false
-  submodules:
-    description: >
-      Whether to checkout submodules: `true` to checkout submodules or `recursive` to
-      recursively checkout submodules.
-
-
-      When the `ssh-key` input is not provided, SSH URLs beginning with `git@github.com:` are
-      converted to HTTPS.
-    default: false
 runs:
   using: node12
   main: dist/index.js
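The `submodules` input missing on the right-hand side is described by the v2.3.1 `action.yml` above; a minimal usage sketch:

```yaml
- uses: actions/checkout@v2
  with:
    # `true` checks out submodules one level deep; `recursive` also descends into nested submodules.
    # When no `ssh-key` is provided, submodule URLs of the form git@github.com:... are converted to HTTPS.
    submodules: recursive
```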
@@ -1,290 +0,0 @@
-# ADR 0153: Checkout v2
-
-**Date**: 2019-10-21
-
-**Status**: Accepted
-
-## Context
-
-This ADR details the behavior for `actions/checkout@v2`.
-
-The new action will be written in typescript. We are moving away from runner-plugin actions.
-
-We want to take this opportunity to make behavioral changes, from v1. This document is scoped to those differences.
-
-## Decision
-
-### Inputs
-
-```yaml
-  repository:
-    description: 'Repository name with owner. For example, actions/checkout'
-    default: ${{ github.repository }}
-  ref:
-    description: >
-      The branch, tag or SHA to checkout. When checking out the repository that
-      triggered a workflow, this defaults to the reference or SHA for that
-      event. Otherwise, defaults to `master`.
-  token:
-    description: >
-      Personal access token (PAT) used to fetch the repository. The PAT is configured
-      with the local git config, which enables your scripts to run authenticated git
-      commands. The post-job step removes the PAT.
-
-
-      We recommend using a service account with the least permissions necessary.
-      Also when generating a new PAT, select the least scopes necessary.
-
-
-      [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)
-    default: ${{ github.token }}
-  ssh-key:
-    description: >
-      SSH key used to fetch the repository. The SSH key is configured with the local
-      git config, which enables your scripts to run authenticated git commands.
-      The post-job step removes the SSH key.
-
-
-      We recommend using a service account with the least permissions necessary.
-
-
-      [Learn more about creating and using
-      encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets)
-  ssh-known-hosts:
-    description: >
-      Known hosts in addition to the user and global host key database. The public
-      SSH keys for a host may be obtained using the utility `ssh-keyscan`. For example,
-      `ssh-keyscan github.com`. The public key for github.com is always implicitly added.
-  ssh-strict:
-    description: >
-      Whether to perform strict host key checking. When true, adds the options `StrictHostKeyChecking=yes`
-      and `CheckHostIP=no` to the SSH command line. Use the input `ssh-known-hosts` to
-      configure additional hosts.
-    default: true
-  persist-credentials:
-    description: 'Whether to configure the token or SSH key with the local git config'
-    default: true
-  path:
-    description: 'Relative path under $GITHUB_WORKSPACE to place the repository'
-  clean:
-    description: 'Whether to execute `git clean -ffdx && git reset --hard HEAD` before fetching'
-    default: true
-  fetch-depth:
-    description: 'Number of commits to fetch. 0 indicates all history for all tags and branches.'
-    default: 1
-  lfs:
-    description: 'Whether to download Git-LFS files'
-    default: false
-  submodules:
-    description: >
-      Whether to checkout submodules: `true` to checkout submodules or `recursive` to
-      recursively checkout submodules.
-
-
-      When the `ssh-key` input is not provided, SSH URLs beginning with `git@github.com:` are
-      converted to HTTPS.
-    default: false
-```
-
-Note:
-- SSH support is new
-- `persist-credentials` is new
-- `path` behavior is different (refer [below](#path) for details)
-
-### Fallback to GitHub API
-
-When a sufficient version of git is not in the PATH, fallback to the [web API](https://developer.github.com/v3/repos/contents/#get-archive-link) to download a tarball/zipball.
-
-Note:
-- LFS files are not included in the archive. Therefore fail if LFS is set to true.
-- Submodules are also not included in the archive.
-
-### Persist credentials
-
-The credentials will be persisted on disk. This will allow users to script authenticated git commands, like `git fetch`.
-
-A post script will remove the credentials (cleanup for self-hosted).
-
-Users may opt-out by specifying `persist-credentials: false`
-
-Note:
-- Users scripting `git commit` may need to set the username and email. The service does not provide any reasonable default value. Users can add `git config user.name <NAME>` and `git config user.email <EMAIL>`. We will document this guidance.
-
-#### PAT
-
-When using the `${{github.token}}` or a PAT, the token will be persisted in the local git config. The config key `http.https://github.com/.extraheader` enables an auth header to be specified on all authenticated commands `AUTHORIZATION: basic <BASE64_U:P>`.
-
-Note:
-- The auth header is scoped to all of github `http.https://github.com/.extraheader`
-- Additional public remotes also just work.
-- If users want to authenticate to an additional private remote, they should provide the `token` input.
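The persisted credentials described above are what let later steps script authenticated git commands. A hedged sketch of such a follow-up step, including the `git config user.name`/`user.email` guidance from the note (the branch name, identity values, and commit message are illustrative):

```yaml
- uses: actions/checkout@v2
- name: Commit and push a change
  run: |
    # user.name/user.email have no default on the runner; set them before committing
    git config user.name "github-actions"
    git config user.email "github-actions@users.noreply.github.com"
    git checkout -b example-branch            # branch name is illustrative
    git commit --allow-empty -m "Example authenticated commit"
    git push origin example-branch            # works because checkout persisted the token
```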
-#### SSH key
-
-The SSH key will be written to disk under the `$RUNNER_TEMP` directory. The SSH key will
-be removed by the action's post-job hook. Additionally, RUNNER_TEMP is cleared by the
-runner between jobs.
-
-The SSH key must be written with strict file permissions. The SSH client requires the file
-to be read/write for the user, and not accessible by others.
-
-The user host key database (`~/.ssh/known_hosts`) will be copied to a unique file under
-`$RUNNER_TEMP`. And values from the input `ssh-known-hosts` will be added to the file.
-
-The SSH command will be overridden for the local git config:
-
-```sh
-git config core.sshCommand 'ssh -i "$RUNNER_TEMP/path-to-ssh-key" -o StrictHostKeyChecking=yes -o CheckHostIP=no -o "UserKnownHostsFile=$RUNNER_TEMP/path-to-known-hosts"'
-```
-
-When the input `ssh-strict` is set to `false`, the options `CheckHostIP` and `StrictHostKeyChecking` will not be overridden.
-
-Note:
-- When `ssh-strict` is set to `true` (default), the SSH option `CheckHostIP` can safely be disabled.
-  Strict host checking verifies the server's public key. Therefore, IP verification is unnecessary
-  and noisy. For example:
-  > Warning: Permanently added the RSA host key for IP address '140.82.113.4' to the list of known hosts.
-- Since GIT_SSH_COMMAND overrides core.sshCommand, temporarily set the env var when fetching the repo. When creds
-  are persisted, core.sshCommand is leveraged to avoid multiple checkout steps stomping over each other.
-- Modify actions/runner to mount RUNNER_TEMP to enable scripting authenticated git commands from a container action.
-- Refer [here](https://linux.die.net/man/5/ssh_config) for SSH config details.
-
-### Fetch behavior
-
-Fetch only the SHA being built and set depth=1. This significantly reduces the fetch time for large repos.
-
-If a SHA isn't available (e.g. multi repo), then fetch only the specified ref with depth=1.
-
-The input `fetch-depth` can be used to control the depth.
-
-Note:
-- Fetching a single commit is supported by Git wire protocol version 2. The git client uses protocol version 0 by default. The desired protocol version can be overridden in the git config or on the fetch command line invocation (`-c protocol.version=2`). We will override on the fetch command line, for transparency.
-- Git client version 2.18+ (released June 2018) is required for wire protocol version 2.
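Per the fetch-behavior section above, the default depth of a single commit can be widened through the `fetch-depth` input when a job needs more history. A minimal sketch:

```yaml
- uses: actions/checkout@v2
  with:
    # 0 fetches all history for all tags and branches; any positive value limits the depth
    fetch-depth: 0
```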
### Checkout behavior
|
|
||||||
|
|
||||||
For CI, checkout will create a local ref with the upstream set. This allows users to script git as they normally would.
|
|
||||||
|
|
||||||
For PR, continue to checkout detached head. The PR branch is special - the branch and merge commit are created by the server. It doesn't match a users' local workflow.
|
|
||||||
|
|
||||||
Note:
|
|
||||||
- Consider deleting all local refs during cleanup if that helps avoid collisions. More testing required.
|
|
||||||
|
|
||||||
### Path
|
|
||||||
|
|
||||||
For the mainline scenario, the disk-layout behavior remains the same.
|
|
||||||
|
|
||||||
Remember, given the repo `johndoe/foo`, the mainline disk layout looks like:
|
|
||||||
|
|
||||||
```
|
|
||||||
GITHUB_WORKSPACE=/home/runner/work/foo/foo
|
|
||||||
RUNNER_WORKSPACE=/home/runner/work/foo
|
|
||||||
```
|
|
||||||
|
|
||||||
V2 introduces a new contraint on the checkout path. The location must now be under `github.workspace`. Whereas the checkout@v1 constraint was one level up, under `runner.workspace`.
|
|
||||||
|
|
||||||
V2 no longer changes `github.workspace` to follow wherever the self repo is checked-out.
|
|
||||||
|
|
||||||
These behavioral changes align better with container actions. The [documented filesystem contract](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/virtual-environments-for-github-hosted-runners#docker-container-filesystem) is:
|
|
||||||
|
|
||||||
- `/github/home`
|
|
||||||
- `/github/workspace` - Note: GitHub Actions must be run by the default Docker user (root). Ensure your Dockerfile does not set the USER instruction, otherwise you will not be able to access `GITHUB_WORKSPACE`.
|
|
||||||
- `/github/workflow`
|
|
||||||
|
|
||||||
Note:
|
|
||||||
- The tracking config will not be updated to reflect the path of the workflow repo.
|
|
||||||
- Any existing workflow repo will not be moved when the checkout path changes. In fact some customers want to checkout the workflow repo twice, side by side against different branches.
|
|
||||||
- Actions that need to operate only against the root of the self repo, should expose a `path` input.
|
|
||||||
|
|
||||||
#### Default value for `path` input
|
|
||||||
|
|
||||||
The `path` input will default to `./` which is rooted against `github.workspace`.
|
|
||||||
|
|
||||||
This default fits the mainline scenario well: single checkout
|
|
||||||
|
|
||||||
For multi-checkout, users must specify the `path` input for at least one of the repositories.
|
|
||||||
|
|
||||||
Note:
|
|
||||||
- An alternative is for the self repo to default to `./` and other repos default to `<REPO_NAME>`. However nested layout is an atypical git layout and therefore is not a good default. Users should supply the path info.
|
|
||||||
|
|
||||||
#### Example - Nested layout
|
|
||||||
|
|
||||||
The following example checks-out two repositories and creates a nested layout.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# Self repo - Checkout to $GITHUB_WORKSPACE
|
|
||||||
- uses: checkout@v2
|
|
||||||
|
|
||||||
# Other repo - Checkout to $GITHUB_WORKSPACE/myscripts
|
|
||||||
- uses: checkout@v2
|
|
||||||
with:
|
|
||||||
repository: myorg/myscripts
|
|
||||||
path: myscripts
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Example - Side by side layout
|
|
||||||
|
|
||||||
The following example checks-out two repositories and creates a side-by-side layout.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# Self repo - Checkout to $GITHUB_WORKSPACE/foo
|
|
||||||
- uses: checkout@v2
|
|
||||||
with:
|
|
||||||
path: foo
|
|
||||||
|
|
||||||
# Other repo - Checkout to $GITHUB_WORKSPACE/myscripts
|
|
||||||
- uses: checkout@v2
|
|
||||||
with:
|
|
||||||
repository: myorg/myscripts
|
|
||||||
path: myscripts
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Path impact to problem matchers
|
|
||||||
|
|
||||||
Problem matchers associate the source files with annotations.
|
|
||||||
|
|
||||||
Today the runner verifies the source file is under the `github.workspace`. Otherwise the source file property is dropped.
|
|
||||||
|
|
||||||
Multi-checkout complicates the matter. However even today submodules may cause this heuristic to be inaccurate.
|
|
||||||
|
|
||||||
A better solution is:
|
|
||||||
|
|
||||||
Given a source file path, walk up the directories until the first `.git/config` is found. Check if it matches the self repo (`url = https://github.com/OWNER/REPO`). If not, drop the source file path.
|
|
||||||
|
|
||||||
### Submodules
|
|
||||||
|
|
||||||
With both PAT and SSH key support, we should be able to provide frictionless support for
|
|
||||||
submodules scenarios: recursive, non-recursive, relative submodule paths.
|
|
||||||
|
|
||||||
When fetching submodules, follow the `fetch-depth` settings.
|
|
||||||
|
|
||||||
Also when fetching submodules, if the `ssh-key` input is not provided then convert SSH URLs to HTTPS: `-c url."https://github.com/".insteadOf "git@github.com:"`
|
|
||||||
|
|
||||||
Credentials will be persisted in the submodules local git config too.
|
|
||||||
|
|
||||||
### Port to typescript

The checkout action should be a typescript action on the GitHub graph, for the following reasons:

- Enables customers to fork the checkout repo and modify it
- Serves as an example for customers
- Demystifies the checkout action manifest
- Simplifies the runner
- Reduces the amount of runner code to port (if we ever do)

Note:

- This means job-container images will need git in the PATH, for checkout.
### Branching strategy and release tags

- Create a servicing branch for V1: `releases/v1`
- Merge the changes into `master`
- Release using a new tag `preview`
- When stable, release using a new tag `v2`

## Consequences

- Update the checkout action and readme
- Update samples to consume `actions/checkout@v2`
- Job containers now require git in the PATH for checkout; otherwise fall back to the REST API
- Minimum git version 2.18 (a version-check sketch follows below)
- Update problem matcher logic regarding source file verification (runner)
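The git 2.18 floor could be enforced along these lines. This is a sketch only; the helper name, version regex, and error wording are assumptions, not the action's actual code.

```typescript
import * as exec from '@actions/exec'

/**
 * Sketch of enforcing the minimum git version (2.18).
 */
export async function ensureMinimumGitVersion(): Promise<void> {
  let output = ''
  await exec.exec('git', ['version'], {
    listeners: {stdout: (data: Buffer) => (output += data.toString())}
  })
  // Expected output looks like: "git version 2.18.0"
  const match = output.match(/\d+\.\d+(\.\d+)?/)
  if (!match) {
    throw new Error(`Unable to determine the git version from '${output.trim()}'`)
  }
  const [major, minor] = match[0].split('.').map(Number)
  if (major < 2 || (major === 2 && minor < 18)) {
    throw new Error(`The minimum required git version is 2.18. Found ${match[0]}.`)
  }
}
```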
16566 dist/index.js (vendored)
File diff suppressed because one or more lines are too long

155 package-lock.json (generated)
The package-lock.json changes, condensed (the v2.3.1 side of the compare is listed first, the users/eric side second):

- package version: 2.0.2 vs 2.0.0
- "@actions/github": 2.2.0 vs 2.0.0; only the v2.3.1 side requires "@actions/http-client" ^1.0.3, and it requires "@octokit/rest" ^16.43.1 vs ^16.15.0
- "@actions/http-client" 1.0.8 (with nested "tunnel" 0.0.6): only on the v2.3.1 side
- "@octokit/auth-token" 2.4.0: only on the v2.3.1 side
- "@octokit/endpoint": 6.0.1 vs 5.5.1 ("@octokit/types" ^2.11.1 vs ^2.0.0, "universal-user-agent" ^5.0.0 vs ^4.0.0)
- "@octokit/plugin-paginate-rest" 1.1.2, "@octokit/plugin-request-log" 1.0.0, "@octokit/plugin-rest-endpoint-methods" 2.4.0: only on the v2.3.1 side
- "@octokit/request": 5.4.2 vs 5.3.1 ("@octokit/endpoint" ^6.0.1 vs ^5.5.0, "@octokit/request-error" ^2.0.0 vs ^1.0.1, "universal-user-agent" ^5.0.0 vs ^4.0.0)
- "@octokit/request-error": 2.0.0 vs 1.2.0
- "@octokit/rest": 16.43.1 vs 16.35.0; only the v2.3.1 side requires "@octokit/auth-token", the plugin packages above, and a nested "@octokit/request-error" 1.2.1
- "@octokit/types": 2.14.0 vs 2.0.2
- "@types/uuid" 3.4.6 (dev): only on the v2.3.1 side
- "universal-user-agent": 4.0.1 vs 4.0.0; "windows-release": 3.3.0 vs 3.2.0
19 package.json
@@ -1,14 +1,17 @@
 {
   "name": "checkout",
-  "version": "2.0.2",
+  "version": "2.0.0",
   "description": "checkout action",
   "main": "lib/main.js",
   "scripts": {
-    "build": "tsc && ncc build && node lib/misc/generate-docs.js",
-    "format": "prettier --write '**/*.ts'",
-    "format-check": "prettier --check '**/*.ts'",
+    "build": "tsc",
+    "format": "prettier --write **/*.ts",
+    "format-check": "prettier --check **/*.ts",
     "lint": "eslint src/**/*.ts",
-    "test": "jest"
+    "pack": "ncc build",
+    "gendocs": "node lib/misc/generate-docs.js",
+    "test": "jest",
+    "all": "npm run build && npm run format && npm run lint && npm run pack && npm run gendocs && npm test"
   },
   "repository": {
     "type": "git",
@@ -28,15 +31,13 @@
   "dependencies": {
     "@actions/core": "^1.1.3",
     "@actions/exec": "^1.0.1",
-    "@actions/github": "^2.2.0",
+    "@actions/github": "^2.0.0",
     "@actions/io": "^1.0.1",
-    "@actions/tool-cache": "^1.1.2",
-    "uuid": "^3.3.3"
+    "@actions/tool-cache": "^1.1.2"
   },
   "devDependencies": {
     "@types/jest": "^24.0.23",
     "@types/node": "^12.7.12",
-    "@types/uuid": "^3.4.6",
     "@typescript-eslint/parser": "^2.8.0",
     "@zeit/ncc": "^0.20.5",
     "eslint": "^5.16.0",
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,117 +0,0 @@
-import * as assert from 'assert'
-import * as core from '@actions/core'
-import * as fs from 'fs'
-import * as fsHelper from './fs-helper'
-import * as io from '@actions/io'
-import * as path from 'path'
-import {IGitCommandManager} from './git-command-manager'
-
-export async function prepareExistingDirectory(
-  git: IGitCommandManager | undefined,
-  repositoryPath: string,
-  repositoryUrl: string,
-  clean: boolean,
-  ref: string
-): Promise<void> {
-  assert.ok(repositoryPath, 'Expected repositoryPath to be defined')
-  assert.ok(repositoryUrl, 'Expected repositoryUrl to be defined')
-
-  // Indicates whether to delete the directory contents
-  let remove = false
-
-  // Check whether using git or REST API
-  if (!git) {
-    remove = true
-  }
-  // Fetch URL does not match
-  else if (
-    !fsHelper.directoryExistsSync(path.join(repositoryPath, '.git')) ||
-    repositoryUrl !== (await git.tryGetFetchUrl())
-  ) {
-    remove = true
-  } else {
-    // Delete any index.lock and shallow.lock left by a previously canceled run or crashed git process
-    const lockPaths = [
-      path.join(repositoryPath, '.git', 'index.lock'),
-      path.join(repositoryPath, '.git', 'shallow.lock')
-    ]
-    for (const lockPath of lockPaths) {
-      try {
-        await io.rmRF(lockPath)
-      } catch (error) {
-        core.debug(`Unable to delete '${lockPath}'. ${error.message}`)
-      }
-    }
-
-    try {
-      core.startGroup('Removing previously created refs, to avoid conflicts')
-      // Checkout detached HEAD
-      if (!(await git.isDetached())) {
-        await git.checkoutDetach()
-      }
-
-      // Remove all refs/heads/*
-      let branches = await git.branchList(false)
-      for (const branch of branches) {
-        await git.branchDelete(false, branch)
-      }
-
-      // Remove any conflicting refs/remotes/origin/*
-      // Example 1: Consider ref is refs/heads/foo and previously fetched refs/remotes/origin/foo/bar
-      // Example 2: Consider ref is refs/heads/foo/bar and previously fetched refs/remotes/origin/foo
-      if (ref) {
-        ref = ref.startsWith('refs/') ? ref : `refs/heads/${ref}`
-        if (ref.startsWith('refs/heads/')) {
-          const upperName1 = ref.toUpperCase().substr('REFS/HEADS/'.length)
-          const upperName1Slash = `${upperName1}/`
-          branches = await git.branchList(true)
-          for (const branch of branches) {
-            const upperName2 = branch.substr('origin/'.length).toUpperCase()
-            const upperName2Slash = `${upperName2}/`
-            if (
-              upperName1.startsWith(upperName2Slash) ||
-              upperName2.startsWith(upperName1Slash)
-            ) {
-              await git.branchDelete(true, branch)
-            }
-          }
-        }
-      }
-      core.endGroup()
-
-      // Clean
-      if (clean) {
-        core.startGroup('Cleaning the repository')
-        if (!(await git.tryClean())) {
-          core.debug(
-            `The clean command failed. This might be caused by: 1) path too long, 2) permission issue, or 3) file in use. For futher investigation, manually run 'git clean -ffdx' on the directory '${repositoryPath}'.`
-          )
-          remove = true
-        } else if (!(await git.tryReset())) {
-          remove = true
-        }
-        core.endGroup()
-
-        if (remove) {
-          core.warning(
-            `Unable to clean or reset the repository. The repository will be recreated instead.`
-          )
-        }
-      }
-    } catch (error) {
-      core.warning(
-        `Unable to prepare the existing repository. The repository will be recreated instead.`
-      )
-      remove = true
-    }
-  }
-
-  if (remove) {
-    // Delete the contents of the directory. Don't delete the directory itself
-    // since it might be the current working directory.
-    core.info(`Deleting the contents of '${repositoryPath}'`)
-    for (const file of await fs.promises.readdir(repositoryPath)) {
-      await io.rmRF(path.join(repositoryPath, file))
-    }
-  }
-}
File diff suppressed because it is too large
@@ -1,76 +0,0 @@
-export interface IGitSourceSettings {
-  /**
-   * The location on disk where the repository will be placed
-   */
-  repositoryPath: string
-
-  /**
-   * The repository owner
-   */
-  repositoryOwner: string
-
-  /**
-   * The repository name
-   */
-  repositoryName: string
-
-  /**
-   * The ref to fetch
-   */
-  ref: string
-
-  /**
-   * The commit to checkout
-   */
-  commit: string
-
-  /**
-   * Indicates whether to clean the repository
-   */
-  clean: boolean
-
-  /**
-   * The depth when fetching
-   */
-  fetchDepth: number
-
-  /**
-   * Indicates whether to fetch LFS objects
-   */
-  lfs: boolean
-
-  /**
-   * Indicates whether to checkout submodules
-   */
-  submodules: boolean
-
-  /**
-   * Indicates whether to recursively checkout submodules
-   */
-  nestedSubmodules: boolean
-
-  /**
-   * The auth token to use when fetching the repository
-   */
-  authToken: string
-
-  /**
-   * The SSH key to configure
-   */
-  sshKey: string
-
-  /**
-   * Additional SSH known hosts
-   */
-  sshKnownHosts: string
-
-  /**
-   * Indicates whether the server must be a known host
-   */
-  sshStrict: boolean
-
-  /**
-   * Indicates whether to persist the credentials on disk to enable scripting authenticated git commands
-   */
-  persistCredentials: boolean
-}
@@ -1,138 +1,204 @@ (condensed summary of the REST API download helper's changes; the v2.3.1 side is described first, the users/eric side second)

- Imports: the v2.3.1 side imports `uuid/v4` and `{Octokit} from '@octokit/rest'`; the users/eric side instead imports `@actions/exec` (with `ExecOptions`), `https`, `./ref-helper`, `{IncomingMessage} from 'http'`, `{RequestOptions, ReposGetArchiveLinkParams} from '@octokit/rest'`, and `{WriteStream} from 'fs'`.
- `downloadRepository`: the first parameter is `authToken` on the v2.3.1 side and `accessToken` on the users/eric side.
- When neither `ref` nor `commit` is provided, the v2.3.1 side resolves the default branch via `getDefaultBranch`; the users/eric side instead computes an archive path `checkout.tar.gz` under `RUNNER_TEMP` and removes any pre-existing file.
- The v2.3.1 side writes the downloaded archive to a uuid-named file under `repositoryPath`, extracts into a uuid-named directory there, copies files on Windows (to avoid Windows Defender file locks) and moves them elsewhere, then removes the archive and extract directory. The users/eric side writes and extracts under `$RUNNER_TEMP/checkout`, always moves the files, and finishes by running `find .` in the repository path via `exec.exec`.
- The users/eric side carries large commented-out experiments: downloading via an archive URL (`getArchiveUrl`, `downloadFile` over `https.get`, and `getFileClosedPromise`) with `console.log` diagnostics of the REST response.
- `downloadArchive`: the v2.3.1 side types its params as `Octokit.ReposGetArchiveLinkParams`, passes `ref: commit || ref`, and includes the response data in its error message; the users/eric side uses `ReposGetArchiveLinkParams`, passes `refHelper.getDownloadRef(ref, commit)`, logs the response status/headers/data, and reports only the status on error.
- Only the v2.3.1 side defines the exported `getDefaultBranch` helper (queries `octokit.repos.get`, falls back to `master` for `*.wiki` repos on 404, and prefixes the result with `refs/heads/`).
@@ -2,10 +2,10 @@ import * as core from '@actions/core'
 import * as fsHelper from './fs-helper'
 import * as github from '@actions/github'
 import * as path from 'path'
-import {IGitSourceSettings} from './git-source-settings'
+import {ISourceSettings} from './git-source-provider'
 
-export function getInputs(): IGitSourceSettings {
-  const result = ({} as unknown) as IGitSourceSettings
+export function getInputs(): ISourceSettings {
+  const result = ({} as unknown) as ISourceSettings
 
   // GitHub workspace
   let githubWorkspacePath = process.env['GITHUB_WORKSPACE']
@@ -61,12 +61,10 @@ export function getInputs(): IGitSourceSettings {
     if (isWorkflowRepository) {
       result.ref = github.context.ref
       result.commit = github.context.sha
-
-      // Some events have an unqualifed ref. For example when a PR is merged (pull_request closed event),
-      // the ref is unqualifed like "master" instead of "refs/heads/master".
-      if (result.commit && result.ref && !result.ref.startsWith('refs/')) {
-        result.ref = `refs/heads/${result.ref}`
-      }
     }
+
+    if (!result.ref && !result.commit) {
+      result.ref = 'refs/heads/master'
+    }
   }
   // SHA?
@@ -81,6 +79,13 @@
   result.clean = (core.getInput('clean') || 'true').toUpperCase() === 'TRUE'
   core.debug(`clean = ${result.clean}`)
 
+  // Submodules
+  if (core.getInput('submodules')) {
+    throw new Error(
+      "The input 'submodules' is not supported in actions/checkout@v2"
+    )
+  }
+
   // Fetch depth
   result.fetchDepth = Math.floor(Number(core.getInput('fetch-depth') || '1'))
   if (isNaN(result.fetchDepth) || result.fetchDepth < 0) {
@@ -92,31 +97,8 @@
   result.lfs = (core.getInput('lfs') || 'false').toUpperCase() === 'TRUE'
   core.debug(`lfs = ${result.lfs}`)
 
-  // Submodules
-  result.submodules = false
-  result.nestedSubmodules = false
-  const submodulesString = (core.getInput('submodules') || '').toUpperCase()
-  if (submodulesString == 'RECURSIVE') {
-    result.submodules = true
-    result.nestedSubmodules = true
-  } else if (submodulesString == 'TRUE') {
-    result.submodules = true
-  }
-  core.debug(`submodules = ${result.submodules}`)
-  core.debug(`recursive submodules = ${result.nestedSubmodules}`)
-
-  // Auth token
-  result.authToken = core.getInput('token', {required: true})
-
-  // SSH
-  result.sshKey = core.getInput('ssh-key')
-  result.sshKnownHosts = core.getInput('ssh-known-hosts')
-  result.sshStrict =
-    (core.getInput('ssh-strict') || 'true').toUpperCase() === 'TRUE'
-
-  // Persist credentials
-  result.persistCredentials =
-    (core.getInput('persist-credentials') || 'false').toUpperCase() === 'TRUE'
+  // Access token
+  result.accessToken = core.getInput('token')
 
   return result
 }
@@ -3,7 +3,8 @@ import * as coreCommand from '@actions/core/lib/command'
 import * as gitSourceProvider from './git-source-provider'
 import * as inputHelper from './input-helper'
 import * as path from 'path'
-import * as stateHelper from './state-helper'
+
+const cleanupRepositoryPath = process.env['STATE_repositoryPath'] as string
 
 async function run(): Promise<void> {
   try {
@@ -30,14 +31,14 @@ async function run(): Promise<void> {
 
 async function cleanup(): Promise<void> {
   try {
-    await gitSourceProvider.cleanup(stateHelper.RepositoryPath)
+    await gitSourceProvider.cleanup(cleanupRepositoryPath)
   } catch (error) {
     core.warning(error.message)
   }
 }
 
 // Main
-if (!stateHelper.IsPost) {
+if (!cleanupRepositoryPath) {
   run()
 }
 // Post
@@ -59,52 +59,28 @@ function updateUsage(
 
     // Constrain the width of the description
     const width = 80
-    let description = (input.description as string)
-      .trimRight()
-      .replace(/\r\n/g, '\n') // Convert CR to LF
-      .replace(/ +/g, ' ') // Squash consecutive spaces
-      .replace(/ \n/g, '\n') // Squash space followed by newline
+    let description = input.description as string
     while (description) {
       // Longer than width? Find a space to break apart
       let segment: string = description
       if (description.length > width) {
         segment = description.substr(0, width + 1)
-        while (!segment.endsWith(' ') && !segment.endsWith('\n') && segment) {
+        while (!segment.endsWith(' ')) {
           segment = segment.substr(0, segment.length - 1)
         }
-
-        // Trimmed too much?
-        if (segment.length < width * 0.67) {
-          segment = description
-        }
       } else {
         segment = description
       }
 
-      // Check for newline
-      const newlineIndex = segment.indexOf('\n')
-      if (newlineIndex >= 0) {
-        segment = segment.substr(0, newlineIndex + 1)
-      }
-
-      // Append segment
-      newReadme.push(`    # ${segment}`.trimRight())
-
-      // Remaining
-      description = description.substr(segment.length)
+      description = description.substr(segment.length) // Remaining
+      segment = segment.trimRight() // Trim the trailing space
+      newReadme.push(`    # ${segment}`)
     }
 
+    // Input and default
     if (input.default !== undefined) {
-      // Append blank line if description had paragraphs
-      if ((input.description as string).trimRight().match(/\n[ ]*\r?\n/)) {
-        newReadme.push(`    #`)
-      }
-
-      // Default
       newReadme.push(`    # Default: ${input.default}`)
     }
 
-    // Input name
     newReadme.push(`    ${key}: ''`)
 
     firstInput = false
@@ -120,7 +96,7 @@ function updateUsage(
 }
 
 updateUsage(
-  'actions/checkout@v2',
+  'actions/checkout@v2-beta',
   path.join(__dirname, '..', '..', 'action.yml'),
   path.join(__dirname, '..', '..', 'README.md')
 )
Some files were not shown because too many files have changed in this diff.