Compare commits: v2.0.0...users/eric (31 commits)
Commits:

- 096b500552
- 59bb2dd7cd
- 093dbebc2e
- d79ea53307
- 0fa906a067
- 0b63af4c8c
- 31d9a4bd37
- 154a05918b
- 7a2b445a4b
- eed20d30d5
- 4a1fa615de
- 19fb09ae8f
- bb56c8569a
- ba329ee889
- a039094e93
- 64fcc0c59a
- 35bb830cfd
- 54a7542872
- 8ade6aebfa
- bf32513e49
- 675d935214
- e7d8850882
- 1475d13f7a
- 34b9c46c61
- 255d69d4c5
- c124b3fb75
- afff79a5a2
- 306dc1c898
- d415b27760
- 4af80cb867
- ad6dd29a96
.github/workflows/test.yml (135 lines changed, vendored)
@@ -6,32 +6,32 @@ on:
     branches:
       - master
       - releases/*
+      - users/ericsciple/*
 
 jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v1 # todo: switch to v2
-      - run: npm ci
-      - run: npm run build
-      - run: npm run format-check
-      - run: npm run lint
-      - run: npm run pack
-      - run: npm run gendocs
-      - run: npm test
-      - name: Verify no unstaged changes
-        run: __test__/verify-no-unstaged-changes.sh
+      - uses: actions/checkout@v2-beta
+      # - run: npm ci
+      # - run: npm run build
+      # - run: npm run format-check
+      # - run: npm run lint
+      # - run: npm run pack
+      # - run: npm run gendocs
+      # - name: Verify no unstaged changes
+      #   run: __test__/verify-no-unstaged-changes.sh
 
-  test:
-    strategy:
-      matrix:
-        runs-on: [ubuntu-latest, macos-latest, windows-latest]
-    runs-on: ${{ matrix.runs-on }}
+  # test:
+  #   strategy:
+  #     matrix:
+  #       runs-on: [ubuntu-latest, macos-latest, windows-latest]
+  #   runs-on: ${{ matrix.runs-on }}
 
-    steps:
-      # Clone this repo
-      - name: Checkout
-        uses: actions/checkout@v2
+  #   steps:
+  #     # Clone this repo
+  #     - name: Checkout
+  #       uses: actions/checkout@v1 # todo: switch to V2
 
       # Basic checkout
       - name: Basic checkout
@@ -39,63 +39,46 @@ jobs:
         with:
           ref: test-data/v2/basic
           path: basic
-      - name: Verify basic
-        shell: bash
-        run: __test__/verify-basic.sh
+      # - name: Verify basic
+      #   shell: bash
+      #   run: __test__/verify-basic.sh
 
-      # Clean
-      - name: Modify work tree
-        shell: bash
-        run: __test__/modify-work-tree.sh
-      - name: Clean checkout
-        uses: ./
-        with:
-          ref: test-data/v2/basic
-          path: basic
-      - name: Verify clean
-        shell: bash
-        run: __test__/verify-clean.sh
+      # # Clean
+      # - name: Modify work tree
+      #   shell: bash
+      #   run: __test__/modify-work-tree.sh
+      # - name: Clean checkout
+      #   uses: ./
+      #   with:
+      #     ref: test-data/v2/basic
+      #     path: basic
+      # - name: Verify clean
+      #   shell: bash
+      #   run: __test__/verify-clean.sh
 
-      # Side by side
-      - name: Side by side checkout 1
-        uses: ./
-        with:
-          ref: test-data/v2/side-by-side-1
-          path: side-by-side-1
-      - name: Side by side checkout 2
-        uses: ./
-        with:
-          ref: test-data/v2/side-by-side-2
-          path: side-by-side-2
-      - name: Verify side by side
-        shell: bash
-        run: __test__/verify-side-by-side.sh
+      # # Side by side
+      # - name: Side by side checkout 1
+      #   uses: ./
+      #   with:
+      #     ref: test-data/v2/side-by-side-1
+      #     path: side-by-side-1
+      # - name: Side by side checkout 2
+      #   uses: ./
+      #   with:
+      #     ref: test-data/v2/side-by-side-2
+      #     path: side-by-side-2
+      # - name: Verify side by side
+      #   shell: bash
+      #   run: __test__/verify-side-by-side.sh
 
-      # LFS
-      - name: LFS checkout
-        uses: ./
-        with:
-          repository: actions/checkout # hardcoded, otherwise doesn't work from a fork
-          ref: test-data/v2/lfs
-          path: lfs
-          lfs: true
-      - name: Verify LFS
-        shell: bash
-        run: __test__/verify-lfs.sh
+      # # LFS
+      # - name: LFS checkout
+      #   uses: ./
+      #   with:
+      #     repository: actions/checkout # hardcoded, otherwise doesn't work from a fork
+      #     ref: test-data/v2/lfs
+      #     path: lfs
+      #     lfs: true
+      # - name: Verify LFS
+      #   shell: bash
+      #   run: __test__/verify-lfs.sh
 
-  test-job-container:
-    runs-on: ubuntu-latest
-    container: alpine:latest
-    steps:
-      # Clone this repo
-      - name: Checkout
-        uses: actions/checkout@v2
-
-      # Basic checkout
-      - name: Basic checkout
-        uses: ./
-        with:
-          ref: test-data/v2/basic
-          path: basic
-      - name: Verify basic
-        run: __test__/verify-basic.sh --archive
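The side-by-side steps removed above check out two refs of the same repository into separate directories under `$GITHUB_WORKSPACE`. A minimal sketch of that pattern using the published tag instead of `uses: ./` (the ref and path values are illustrative):

```yaml
steps:
  - uses: actions/checkout@v2-beta
    with:
      ref: some-branch       # illustrative branch name
      path: side-by-side-1   # relative to $GITHUB_WORKSPACE
  - uses: actions/checkout@v2-beta
    with:
      ref: some-other-branch # illustrative branch name
      path: side-by-side-2
```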
README.md (32 lines changed)
@@ -2,7 +2,7 @@
 <a href="https://github.com/actions/checkout"><img alt="GitHub Actions status" src="https://github.com/actions/checkout/workflows/test-local/badge.svg"></a>
 </p>
 
-# Checkout V2
+# Checkout V2 beta
 
 This action checks-out your repository under `$GITHUB_WORKSPACE`, so your workflow can access it.
 
@@ -13,20 +13,18 @@ Refer [here](https://help.github.com/en/articles/events-that-trigger-workflows)
 # What's new
 
 - Improved fetch performance
-  - The default behavior now fetches only the commit being checked-out
+  - The default behavior now fetches only the SHA being checked-out
 - Script authenticated git commands
-  - Persists the input `token` in the local git config
+  - Persists `with.token` in the local git config
   - Enables your scripts to run authenticated git commands
   - Post-job cleanup removes the token
-  - Opt out by setting the input `persist-credentials: false`
+  - Coming soon: Opt out by setting `with.persist-credentials` to `false`
 - Creates a local branch
   - No longer detached HEAD when checking out a branch
   - A local branch is created with the corresponding upstream branch set
 - Improved layout
-  - The input `path` is always relative to $GITHUB_WORKSPACE
-  - Aligns better with container actions, where $GITHUB_WORKSPACE gets mapped in
-- Fallback to REST API download
-  - When Git 2.18 or higher is not in the PATH, the REST API will be used to download the files
+  - `with.path` is always relative to `github.workspace`
+  - Aligns better with container actions, where `github.workspace` gets mapped in
 - Removed input `submodules`
 
 Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous versions.
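The "Script authenticated git commands" bullets in the hunk above say the token is persisted in the local git config, so later steps can talk to origin without extra setup. A minimal sketch of such a follow-up step (the committer identity, file name, and commit message are illustrative):

```yaml
steps:
  - uses: actions/checkout@v2-beta
  - run: |
      git config user.name github-actions
      git config user.email github-actions@github.com
      git add generated-file.txt
      git commit -m "Update generated file"
      git push   # authenticates via the token persisted by checkout
```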
@@ -35,7 +33,7 @@ Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous
 
 <!-- start usage -->
 ```yaml
-- uses: actions/checkout@v2
+- uses: actions/checkout@v2-beta
   with:
     # Repository name with owner. For example, actions/checkout
     # Default: ${{ github.repository }}
@@ -46,16 +44,10 @@ Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous
     # Otherwise, defaults to `master`.
     ref: ''
 
-    # Auth token used to fetch the repository. The token is stored in the local git
-    # config, which enables your scripts to run authenticated git commands. The
-    # post-job step removes the token from the git config.
+    # Access token for clone repository
     # Default: ${{ github.token }}
     token: ''
 
-    # Whether to persist the token in the git config
-    # Default: true
-    persist-credentials: ''
-
     # Relative path under $GITHUB_WORKSPACE to place the repository
     path: ''
 
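The left side of the hunk above documents the v2.0.0 `persist-credentials` input that the right side drops (the "What's new" list marks it "Coming soon"). A minimal sketch of opting out, per the v2.0.0 usage text:

```yaml
- uses: actions/checkout@v2
  with:
    # Do not leave the auth token in the local git config after checkout
    persist-credentials: false
```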
@@ -76,7 +68,7 @@ Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous
 ## Checkout a different branch
 
 ```yaml
-- uses: actions/checkout@v2
+- uses: actions/checkout@v2-beta
   with:
     ref: some-branch
 ```
@@ -84,7 +76,7 @@ Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous
 ## Checkout a different, private repository
 
 ```yaml
-- uses: actions/checkout@v2
+- uses: actions/checkout@v2-beta
   with:
     repository: myAccount/myRepository
     ref: refs/heads/master
@@ -95,9 +87,9 @@ Refer [here](https://github.com/actions/checkout/blob/v1/README.md) for previous
 ## Checkout the HEAD commit of a PR, rather than the merge commit
 
 ```yaml
-- uses: actions/checkout@v2
+- uses: actions/checkout@v2-beta
   with:
-    ref: ${{ github.event.pull_request.head.sha }}
+    ref: ${{ github.event.after }}
 ```
 
 # License
__test__/input-helper.test.ts

@@ -63,7 +63,7 @@ describe('input-helper tests', () => {
   it('sets defaults', () => {
     const settings: ISourceSettings = inputHelper.getInputs()
     expect(settings).toBeTruthy()
-    expect(settings.authToken).toBeFalsy()
+    expect(settings.accessToken).toBeFalsy()
     expect(settings.clean).toBe(true)
     expect(settings.commit).toBeTruthy()
     expect(settings.commit).toBe('1234567890123456789012345678901234567890')
__test__/retry-helper.test.ts (file removed)

@@ -1,88 +0,0 @@
-const mockCore = jest.genMockFromModule('@actions/core') as any
-mockCore.info = (message: string) => {
-  info.push(message)
-}
-let info: string[]
-let retryHelper: any
-
-describe('retry-helper tests', () => {
-  beforeAll(() => {
-    // Mocks
-    jest.setMock('@actions/core', mockCore)
-
-    // Now import
-    const retryHelperModule = require('../lib/retry-helper')
-    retryHelper = new retryHelperModule.RetryHelper(3, 0, 0)
-  })
-
-  beforeEach(() => {
-    // Reset info
-    info = []
-  })
-
-  afterAll(() => {
-    // Reset modules
-    jest.resetModules()
-  })
-
-  it('first attempt succeeds', async () => {
-    const actual = await retryHelper.execute(async () => {
-      return 'some result'
-    })
-    expect(actual).toBe('some result')
-    expect(info).toHaveLength(0)
-  })
-
-  it('second attempt succeeds', async () => {
-    let attempts = 0
-    const actual = await retryHelper.execute(() => {
-      if (++attempts == 1) {
-        throw new Error('some error')
-      }
-
-      return Promise.resolve('some result')
-    })
-    expect(attempts).toBe(2)
-    expect(actual).toBe('some result')
-    expect(info).toHaveLength(2)
-    expect(info[0]).toBe('some error')
-    expect(info[1]).toMatch(/Waiting .+ seconds before trying again/)
-  })
-
-  it('third attempt succeeds', async () => {
-    let attempts = 0
-    const actual = await retryHelper.execute(() => {
-      if (++attempts < 3) {
-        throw new Error(`some error ${attempts}`)
-      }
-
-      return Promise.resolve('some result')
-    })
-    expect(attempts).toBe(3)
-    expect(actual).toBe('some result')
-    expect(info).toHaveLength(4)
-    expect(info[0]).toBe('some error 1')
-    expect(info[1]).toMatch(/Waiting .+ seconds before trying again/)
-    expect(info[2]).toBe('some error 2')
-    expect(info[3]).toMatch(/Waiting .+ seconds before trying again/)
-  })
-
-  it('all attempts fail succeeds', async () => {
-    let attempts = 0
-    let error: Error = (null as unknown) as Error
-    try {
-      await retryHelper.execute(() => {
-        throw new Error(`some error ${++attempts}`)
-      })
-    } catch (err) {
-      error = err
-    }
-    expect(error.message).toBe('some error 3')
-    expect(attempts).toBe(3)
-    expect(info).toHaveLength(4)
-    expect(info[0]).toBe('some error 1')
-    expect(info[1]).toMatch(/Waiting .+ seconds before trying again/)
-    expect(info[2]).toBe('some error 2')
-    expect(info[3]).toMatch(/Waiting .+ seconds before trying again/)
-  })
-})
__test__/verify-basic.sh

@@ -1,24 +1,10 @@
-#!/bin/sh
+#!/bin/bash
 
 if [ ! -f "./basic/basic-file.txt" ]; then
     echo "Expected basic file does not exist"
     exit 1
 fi
 
-if [ "$1" = "--archive" ]; then
-    # Verify no .git folder
-    if [ -d "./basic/.git" ]; then
-        echo "Did not expect ./basic/.git folder to exist"
-        exit 1
-    fi
-else
-    # Verify .git folder
-    if [ ! -d "./basic/.git" ]; then
-        echo "Expected ./basic/.git folder to exist"
-        exit 1
-    fi
-
-    # Verify auth token
-    cd basic
-    git fetch --no-tags --depth=1 origin +refs/heads/master:refs/remotes/origin/master
-fi
+# Verify auth token
+cd basic
+git fetch
action.yml (14 lines changed)
@@ -6,18 +6,12 @@ inputs:
     default: ${{ github.repository }}
   ref:
     description: >
-      The branch, tag or SHA to checkout. When checking out the repository that
-      triggered a workflow, this defaults to the reference or SHA for that
-      event. Otherwise, defaults to `master`.
+      The branch, tag or SHA to checkout. When checking out the repository
+      that triggered a workflow, this defaults to the reference or SHA for
+      that event. Otherwise, defaults to `master`.
   token:
-    description: >
-      Auth token used to fetch the repository. The token is stored in the local
-      git config, which enables your scripts to run authenticated git commands.
-      The post-job step removes the token from the git config.
+    description: 'Access token for clone repository'
     default: ${{ github.token }}
-  persist-credentials:
-    description: 'Whether to persist the token in the git config'
-    default: true
   path:
     description: 'Relative path under $GITHUB_WORKSPACE to place the repository'
   clean:
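The inputs above map directly onto a step's `with:` block. A sketch using only the inputs visible in this hunk, with illustrative values (the commented defaults come from the action.yml above):

```yaml
- uses: actions/checkout@v2-beta
  with:
    repository: actions/checkout  # default: ${{ github.repository }}
    ref: some-branch              # default: ref/SHA of the triggering event, else master
    token: ${{ github.token }}    # default: ${{ github.token }}
    path: my-checkout             # relative path under $GITHUB_WORKSPACE
```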
dist/index.js (372 lines changed, vendored)
@@ -2597,44 +2597,6 @@ function paginatePlugin(octokit) {
 }
 
 
-/***/ }),
-
-/***/ 153:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const coreCommand = __importStar(__webpack_require__(431));
-/**
- * Indicates whether the POST action is running
- */
-exports.IsPost = !!process.env['STATE_isPost'];
-/**
- * The repository path for the POST action. The value is empty during the MAIN action.
- */
-exports.RepositoryPath = process.env['STATE_repositoryPath'] || '';
-/**
- * Save the repository path so the POST action can retrieve the value.
- */
-function setRepositoryPath(repositoryPath) {
-    coreCommand.issueCommand('save-state', { name: 'repositoryPath' }, repositoryPath);
-}
-exports.setRepositoryPath = setRepositoryPath;
-// Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic.
-// This is necessary since we don't have a separate entry point.
-if (!exports.IsPost) {
-    coreCommand.issueCommand('save-state', { name: 'isPost' }, 'true');
-}
-
-
 /***/ }),
 
 /***/ 168:
@@ -2789,7 +2751,7 @@ const coreCommand = __importStar(__webpack_require__(431));
 const gitSourceProvider = __importStar(__webpack_require__(293));
 const inputHelper = __importStar(__webpack_require__(821));
 const path = __importStar(__webpack_require__(622));
-const stateHelper = __importStar(__webpack_require__(153));
+const cleanupRepositoryPath = process.env['STATE_repositoryPath'];
 function run() {
     return __awaiter(this, void 0, void 0, function* () {
         try {
@@ -2813,7 +2775,7 @@ function run() {
 function cleanup() {
     return __awaiter(this, void 0, void 0, function* () {
         try {
-            yield gitSourceProvider.cleanup(stateHelper.RepositoryPath);
+            yield gitSourceProvider.cleanup(cleanupRepositoryPath);
         }
         catch (error) {
             core.warning(error.message);
@@ -2821,7 +2783,7 @@ function cleanup() {
     });
 }
 // Main
-if (!stateHelper.IsPost) {
+if (!cleanupRepositoryPath) {
     run();
 }
 // Post
@@ -2861,6 +2823,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+function getDownloadRef(ref, commit) {
+    if (commit) {
+        return commit;
+    }
+    // todo fix this to work with refs/pull etc
+    return ref;
+}
+exports.getDownloadRef = getDownloadRef;
 function getCheckoutInfo(git, ref, commit) {
     return __awaiter(this, void 0, void 0, function* () {
         if (!git) {
@@ -4838,7 +4808,7 @@ class GitCommandManager {
     }
     config(configKey, configValue) {
         return __awaiter(this, void 0, void 0, function* () {
-            yield this.execGit(['config', '--local', configKey, configValue]);
+            yield this.execGit(['config', configKey, configValue]);
         });
     }
     configExists(configKey) {
@@ -4846,7 +4816,7 @@ class GitCommandManager {
             const pattern = configKey.replace(/[^a-zA-Z0-9_]/g, x => {
                 return `\\${x}`;
             });
-            const output = yield this.execGit(['config', '--local', '--name-only', '--get-regexp', pattern], true);
+            const output = yield this.execGit(['config', '--name-only', '--get-regexp', pattern], true);
             return output.exitCode === 0;
         });
     }
@@ -4932,19 +4902,19 @@ class GitCommandManager {
     }
     tryConfigUnset(configKey) {
         return __awaiter(this, void 0, void 0, function* () {
-            const output = yield this.execGit(['config', '--local', '--unset-all', configKey], true);
+            const output = yield this.execGit(['config', '--unset-all', configKey], true);
             return output.exitCode === 0;
         });
     }
     tryDisableAutomaticGarbageCollection() {
         return __awaiter(this, void 0, void 0, function* () {
-            const output = yield this.execGit(['config', '--local', 'gc.auto', '0'], true);
+            const output = yield this.execGit(['config', 'gc.auto', '0'], true);
             return output.exitCode === 0;
         });
    }
     tryGetFetchUrl() {
         return __awaiter(this, void 0, void 0, function* () {
-            const output = yield this.execGit(['config', '--local', '--get', 'remote.origin.url'], true);
+            const output = yield this.execGit(['config', '--get', 'remote.origin.url'], true);
             if (output.exitCode !== 0) {
                 return '';
             }
@@ -5087,20 +5057,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(__webpack_require__(470));
+const coreCommand = __importStar(__webpack_require__(431));
 const fs = __importStar(__webpack_require__(747));
 const fsHelper = __importStar(__webpack_require__(618));
 const gitCommandManager = __importStar(__webpack_require__(289));
-const githubApiHelper = __importStar(__webpack_require__(464));
 const io = __importStar(__webpack_require__(1));
 const path = __importStar(__webpack_require__(622));
 const refHelper = __importStar(__webpack_require__(227));
-const stateHelper = __importStar(__webpack_require__(153));
+const githubApiHelper = __importStar(__webpack_require__(464));
 const authConfigKey = `http.https://github.com/.extraheader`;
 function getSource(settings) {
     return __awaiter(this, void 0, void 0, function* () {
         // Repository URL
         core.info(`Syncing repository: ${settings.repositoryOwner}/${settings.repositoryName}`);
         const repositoryUrl = `https://github.com/${encodeURIComponent(settings.repositoryOwner)}/${encodeURIComponent(settings.repositoryName)}`;
+        // Set intra-task state for cleanup
+        coreCommand.issueCommand('save-state', { name: 'repositoryPath' }, settings.repositoryPath);
         // Remove conflicting file path
         if (fsHelper.fileExistsSync(settings.repositoryPath)) {
             yield io.rmRF(settings.repositoryPath);
@@ -5112,20 +5084,28 @@ function getSource(settings) {
             yield io.mkdirP(settings.repositoryPath);
         }
         // Git command manager
-        const git = yield getGitCommandManager(settings);
+        core.info(`Working directory is '${settings.repositoryPath}'`);
+        let git = null;
+        try {
+            git = yield gitCommandManager.CreateCommandManager(settings.repositoryPath, settings.lfs);
+        }
+        catch (err) {
+            // Git is required for LFS
+            if (settings.lfs) {
+                throw err;
+            }
+            // Otherwise fallback to REST API
+        }
         // Prepare existing directory, otherwise recreate
         if (isExisting) {
             yield prepareExistingDirectory(git, settings.repositoryPath, repositoryUrl, settings.clean);
         }
-        if (!git) {
-            // Downloading using REST API
-            core.info(`The repository will be downloaded using the GitHub REST API`);
-            core.info(`To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH`);
-            yield githubApiHelper.downloadRepository(settings.authToken, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.repositoryPath);
+        if (!git || `${1}` == '1') {
+            core.info(`Downloading the repository files using the GitHub REST API`);
+            core.info(`To create a local repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH`);
+            yield githubApiHelper.downloadRepository(settings.accessToken, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.repositoryPath);
         }
         else {
-            // Save state for POST action
-            stateHelper.setRepositoryPath(settings.repositoryPath);
             // Initialize the repository
             if (!fsHelper.directoryExistsSync(path.join(settings.repositoryPath, '.git'))) {
                 yield git.init();
@@ -5137,9 +5117,11 @@ function getSource(settings) {
             }
             // Remove possible previous extraheader
             yield removeGitConfig(git, authConfigKey);
-            try {
-                // Config auth token
-                yield configureAuthToken(git, settings.authToken);
+            // Add extraheader (auth)
+            const base64Credentials = Buffer.from(`x-access-token:${settings.accessToken}`, 'utf8').toString('base64');
+            core.setSecret(base64Credentials);
+            const authConfigValue = `AUTHORIZATION: basic ${base64Credentials}`;
+            yield git.config(authConfigKey, authConfigValue);
             // LFS install
             if (settings.lfs) {
                 yield git.lfsInstall();
@@ -5160,12 +5142,6 @@ function getSource(settings) {
             // Dump some info about the checked out commit
             yield git.log1();
         }
-        finally {
-            if (!settings.persistCredentials) {
-                yield removeGitConfig(git, authConfigKey);
-            }
-        }
-        }
     });
 }
 exports.getSource = getSource;
@@ -5182,23 +5158,6 @@ function cleanup(repositoryPath) {
     });
 }
 exports.cleanup = cleanup;
-function getGitCommandManager(settings) {
-    return __awaiter(this, void 0, void 0, function* () {
-        core.info(`Working directory is '${settings.repositoryPath}'`);
-        let git = null;
-        try {
-            return yield gitCommandManager.CreateCommandManager(settings.repositoryPath, settings.lfs);
-        }
-        catch (err) {
-            // Git is required for LFS
-            if (settings.lfs) {
-                throw err;
-            }
-            // Otherwise fallback to REST API
-            return null;
-        }
-    });
-}
 function prepareExistingDirectory(git, repositoryPath, repositoryUrl, clean) {
     return __awaiter(this, void 0, void 0, function* () {
         let remove = false;
@@ -5269,34 +5228,23 @@ function prepareExistingDirectory(git, repositoryPath, repositoryUrl, clean) {
         }
     });
 }
-function configureAuthToken(git, authToken) {
-    return __awaiter(this, void 0, void 0, function* () {
-        // Configure a placeholder value. This approach avoids the credential being captured
-        // by process creation audit events, which are commonly logged. For more information,
-        // refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
-        const placeholder = `AUTHORIZATION: basic ***`;
-        yield git.config(authConfigKey, placeholder);
-        // Determine the basic credential value
-        const basicCredential = Buffer.from(`x-access-token:${authToken}`, 'utf8').toString('base64');
-        core.setSecret(basicCredential);
-        // Replace the value in the config file
-        const configPath = path.join(git.getWorkingDirectory(), '.git', 'config');
-        let content = (yield fs.promises.readFile(configPath)).toString();
-        const placeholderIndex = content.indexOf(placeholder);
-        if (placeholderIndex < 0 ||
-            placeholderIndex != content.lastIndexOf(placeholder)) {
-            throw new Error('Unable to replace auth placeholder in .git/config');
-        }
-        content = content.replace(placeholder, `AUTHORIZATION: basic ${basicCredential}`);
-        yield fs.promises.writeFile(configPath, content);
-    });
-}
 function removeGitConfig(git, configKey) {
     return __awaiter(this, void 0, void 0, function* () {
         if ((yield git.configExists(configKey)) &&
             !(yield git.tryConfigUnset(configKey))) {
             // Load the config contents
-            core.warning(`Failed to remove '${configKey}' from the git config`);
+            core.warning(`Failed to remove '${configKey}' from the git config. Attempting to remove the config value by editing the file directly.`);
+            const configPath = path.join(git.getWorkingDirectory(), '.git', 'config');
+            fsHelper.fileExistsSync(configPath);
+            let contents = fs.readFileSync(configPath).toString() || '';
+            // Filter - only includes lines that do not contain the config key
+            const upperConfigKey = configKey.toUpperCase();
+            const split = contents
+                .split('\n')
+                .filter(x => !x.toUpperCase().includes(upperConfigKey));
+            contents = split.join('\n');
+            // Rewrite the config file
+            fs.writeFileSync(configPath, contents);
         }
     });
 }
@@ -8404,36 +8352,68 @@ var __importStar = (this && this.__importStar) || function (mod) {
     result["default"] = mod;
     return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 const assert = __importStar(__webpack_require__(357));
 const core = __importStar(__webpack_require__(470));
+const exec = __importStar(__webpack_require__(986));
 const fs = __importStar(__webpack_require__(747));
 const github = __importStar(__webpack_require__(469));
 const io = __importStar(__webpack_require__(1));
 const path = __importStar(__webpack_require__(622));
+const refHelper = __importStar(__webpack_require__(227));
 const retryHelper = __importStar(__webpack_require__(587));
 const toolCache = __importStar(__webpack_require__(533));
-const v4_1 = __importDefault(__webpack_require__(826));
 const IS_WINDOWS = process.platform === 'win32';
-function downloadRepository(authToken, owner, repo, ref, commit, repositoryPath) {
+function downloadRepository(accessToken, owner, repo, ref, commit, repositoryPath) {
     return __awaiter(this, void 0, void 0, function* () {
+        // Determine archive path
+        const runnerTemp = process.env['RUNNER_TEMP'];
+        assert.ok(runnerTemp, 'RUNNER_TEMP not defined');
+        const archivePath = path.join(runnerTemp, 'checkout.tar.gz');
+        // Ensure file does not exist
+        core.debug(`Ensuring archive file does not exist: ${archivePath}`);
+        yield io.rmRF(archivePath);
         // Download the archive
         let archiveData = yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
-            core.info('Downloading the archive');
-            return yield downloadArchive(authToken, owner, repo, ref, commit);
+            core.info('Downloading the archive using the REST API');
+            return yield downloadArchive(accessToken, owner, repo, ref, commit);
         }));
         // Write archive to disk
         core.info('Writing archive to disk');
-        const uniqueId = v4_1.default();
-        const archivePath = path.join(repositoryPath, `${uniqueId}.tar.gz`);
         yield fs.promises.writeFile(archivePath, archiveData);
         archiveData = Buffer.from(''); // Free memory
+        // // Get the archive URL using the REST API
+        // await retryHelper.execute(async () => {
+        //   // Prepare the archive stream
+        //   core.debug(`Preparing the archive stream: ${archivePath}`)
+        //   await io.rmRF(archivePath)
+        //   const fileStream = fs.createWriteStream(archivePath)
+        //   const fileStreamClosed = getFileClosedPromise(fileStream)
+        //   try {
+        //     // Get the archive URL
+        //     core.info('Getting archive URL')
+        //     const archiveUrl = await getArchiveUrl(
+        //       accessToken,
+        //       owner,
+        //       repo,
+        //       ref,
+        //       commit
+        //     )
+        //     // Download the archive
+        //     core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
+        //     await downloadFile(archiveUrl, fileStream)
+        //   } finally {
+        //     fileStream.end()
+        //     await fileStreamClosed
+        //   }
+        // })
+        // await fs.promises.writeFile(archivePath, raw)
+        // // await exec.exec(`ls -la "${archiveFile}"`, [], {
+        // //   cwd: repositoryPath
+        // // } as ExecOptions)
         // Extract archive
-        core.info('Extracting the archive');
-        const extractPath = path.join(repositoryPath, uniqueId);
+        const extractPath = path.join(runnerTemp, `checkout`);
+        yield io.rmRF(extractPath);
         yield io.mkdirP(extractPath);
         if (IS_WINDOWS) {
             yield toolCache.extractZip(archivePath, extractPath);
@@ -8441,45 +8421,114 @@ function downloadRepository(authToken, owner, repo, ref, commit, repositoryPath)
         else {
             yield toolCache.extractTar(archivePath, extractPath);
         }
-        io.rmRF(archivePath);
-        // Determine the path of the repository content. The archive contains
-        // a top-level folder and the repository content is inside.
+        // await exec.exec(`tar -xzf "${archiveFile}"`, [], {
+        //   cwd: extractPath
+        // } as ExecOptions)
+        // Determine the real directory to copy (ignore extra dir at root of the archive)
         const archiveFileNames = yield fs.promises.readdir(extractPath);
         assert.ok(archiveFileNames.length == 1, 'Expected exactly one directory inside archive');
-        const archiveVersion = archiveFileNames[0]; // The top-level folder name includes the short SHA
-        core.info(`Resolved version ${archiveVersion}`);
-        const tempRepositoryPath = path.join(extractPath, archiveVersion);
+        const extraDirectoryName = archiveFileNames[0];
+        core.info(`Resolved ${extraDirectoryName}`); // contains the short SHA
+        const tempRepositoryPath = path.join(extractPath, extraDirectoryName);
         // Move the files
         for (const fileName of yield fs.promises.readdir(tempRepositoryPath)) {
             const sourcePath = path.join(tempRepositoryPath, fileName);
             const targetPath = path.join(repositoryPath, fileName);
-            if (IS_WINDOWS) {
-                yield io.cp(sourcePath, targetPath, { recursive: true }); // Copy on Windows (Windows Defender may have a lock)
-            }
-            else {
-                yield io.mv(sourcePath, targetPath);
-            }
+            yield io.mv(sourcePath, targetPath);
         }
-        io.rmRF(extractPath);
+        yield exec.exec(`find .`, [], {
+            cwd: repositoryPath
+        });
     });
 }
 exports.downloadRepository = downloadRepository;
-function downloadArchive(authToken, owner, repo, ref, commit) {
+function downloadArchive(accessToken, owner, repo, ref, commit) {
     return __awaiter(this, void 0, void 0, function* () {
-        const octokit = new github.GitHub(authToken);
+        const octokit = new github.GitHub(accessToken);
         const params = {
             owner: owner,
             repo: repo,
             archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
-            ref: commit || ref
+            ref: refHelper.getDownloadRef(ref, commit)
         };
         const response = yield octokit.repos.getArchiveLink(params);
+        console.log('GOT THE RESPONSE');
+        console.log(`status=${response.status}`);
+        console.log(`headers=${JSON.stringify(response.headers)}`);
+        console.log(`data=${JSON.stringify(response.data)}`);
         if (response.status != 200) {
-            throw new Error(`Unexpected response from GitHub API. Status: ${response.status}, Data: ${response.data}`);
+            throw new Error(`Unexpected response from GitHub API. Status: '${response.status}'`);
         }
         return Buffer.from(response.data); // response.data is ArrayBuffer
     });
 }
+// async function getArchiveUrl(
+//   accessToken: string,
+//   owner: string,
+//   repo: string,
+//   ref: string,
+//   commit: string
+// ): Promise<string> {
+//   const octokit = new github.GitHub(accessToken)
+//   const params: RequestOptions & ReposGetArchiveLinkParams = {
+//     method: 'HEAD',
+//     owner: owner,
+//     repo: repo,
+//     archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
+//     ref: refHelper.getDownloadRef(ref, commit)
+//   }
+//   const response = await octokit.repos.getArchiveLink(params)
+//   console.log('GOT THE RESPONSE')
+//   console.log(`status=${response.status}`)
+//   console.log(`headers=${JSON.stringify(response.headers)}`)
+//   console.log(`data=${JSON.stringify(response.data)}`)
+//   if (response.status != 200) {
+//     throw new Error(
+//       `Unexpected response from GitHub API. Status: '${response.status}'`
+//     )
+//   }
+//   console.log('GETTING THE LOCATION')
+//   const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
+//   assert.ok(
+//     archiveUrl,
+//     `Expected GitHub API response to contain 'Location' header`
+//   )
+//   return archiveUrl
+// }
+// function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
+//   return new Promise((resolve, reject) => {
+//     try {
+//       https.get(url, (response: IncomingMessage) => {
+//         if (response.statusCode != 200) {
+//           reject(`Request failed with status '${response.statusCode}'`)
+//           response.resume() // Consume response data to free up memory
+//           return
+//         }
+//         response.on('data', chunk => {
+//           fileStream.write(chunk)
+//         })
+//         response.on('end', () => {
+//           resolve()
+//         })
+//         response.on('error', err => {
+//           reject(err)
+//         })
+//       })
+//     } catch (err) {
+//       reject(err)
+//     }
+//   })
+// }
+// function getFileClosedPromise(stream: WriteStream): Promise<void> {
+//   return new Promise((resolve, reject) => {
+//     stream.on('error', err => {
+//       reject(err)
+//     })
+//     stream.on('finish', () => {
+//       resolve()
+//     })
+//   })
+// }
 
 
 /***/ }),
@@ -9814,22 +9863,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(__webpack_require__(470));
-const defaultMaxAttempts = 3;
-const defaultMinSeconds = 10;
-const defaultMaxSeconds = 20;
-class RetryHelper {
-    constructor(maxAttempts = defaultMaxAttempts, minSeconds = defaultMinSeconds, maxSeconds = defaultMaxSeconds) {
-        this.maxAttempts = maxAttempts;
-        this.minSeconds = Math.floor(minSeconds);
-        this.maxSeconds = Math.floor(maxSeconds);
-        if (this.minSeconds > this.maxSeconds) {
-            throw new Error('min seconds should be less than or equal to max seconds');
-        }
-    }
-    execute(action) {
+const maxAttempts = 3;
+const minSeconds = 10;
+const maxSeconds = 20;
+function execute(action) {
     return __awaiter(this, void 0, void 0, function* () {
         let attempt = 1;
-        while (attempt < this.maxAttempts) {
+        while (attempt < maxAttempts) {
             // Try
             try {
                 return yield action();
@@ -9838,33 +9878,26 @@ class RetryHelper {
                 core.info(err.message);
             }
             // Sleep
-            const seconds = this.getSleepAmount();
-            core.info(`Waiting ${seconds} seconds before trying again`);
-            yield this.sleep(seconds);
+            const seconds = getRandomIntInclusive(minSeconds, maxSeconds);
+            core.info(`Waiting ${seconds} before trying again`);
+            yield sleep(seconds * 1000);
             attempt++;
         }
         // Last attempt
         return yield action();
     });
 }
-    getSleepAmount() {
-        return (Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
-            this.minSeconds);
-    }
-    sleep(seconds) {
-        return __awaiter(this, void 0, void 0, function* () {
-            return new Promise(resolve => setTimeout(resolve, seconds * 1000));
-        });
-    }
-}
-exports.RetryHelper = RetryHelper;
-function execute(action) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const retryHelper = new RetryHelper();
-        return yield retryHelper.execute(action);
-    });
-}
 exports.execute = execute;
+function getRandomIntInclusive(minimum, maximum) {
+    minimum = Math.floor(minimum);
+    maximum = Math.floor(maximum);
+    return Math.floor(Math.random() * (maximum - minimum + 1)) + minimum;
+}
+function sleep(milliseconds) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return new Promise(resolve => setTimeout(resolve, milliseconds));
+    });
+}
 
 
 /***/ }),
@@ -12779,11 +12812,8 @@ function getInputs() {
     // LFS
     result.lfs = (core.getInput('lfs') || 'false').toUpperCase() === 'TRUE';
     core.debug(`lfs = ${result.lfs}`);
-    // Auth token
-    result.authToken = core.getInput('token');
-    // Persist credentials
-    result.persistCredentials =
-        (core.getInput('persist-credentials') || 'false').toUpperCase() === 'TRUE';
+    // Access token
+    result.accessToken = core.getInput('token');
     return result;
 }
 exports.getInputs = getInputs;
package-lock.json (9 lines changed, generated)
@@ -767,15 +767,6 @@
       "integrity": "sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw==",
       "dev": true
     },
-    "@types/uuid": {
-      "version": "3.4.6",
-      "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.6.tgz",
-      "integrity": "sha512-cCdlC/1kGEZdEglzOieLDYBxHsvEOIg7kp/2FYyVR9Pxakq+Qf/inL3RKQ+PA8gOlI/NnL+fXmQH12nwcGzsHw==",
-      "dev": true,
-      "requires": {
-        "@types/node": "*"
-      }
-    },
     "@types/yargs": {
       "version": "13.0.3",
       "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-13.0.3.tgz",
package.json

@@ -33,13 +33,11 @@
     "@actions/exec": "^1.0.1",
     "@actions/github": "^2.0.0",
     "@actions/io": "^1.0.1",
-    "@actions/tool-cache": "^1.1.2",
-    "uuid": "^3.3.3"
+    "@actions/tool-cache": "^1.1.2"
   },
   "devDependencies": {
     "@types/jest": "^24.0.23",
     "@types/node": "^12.7.12",
-    "@types/uuid": "^3.4.6",
     "@typescript-eslint/parser": "^2.8.0",
     "@zeit/ncc": "^0.20.5",
     "eslint": "^5.16.0",
src/git-command-manager.ts

@@ -116,7 +116,7 @@ class GitCommandManager {
   }
 
   async config(configKey: string, configValue: string): Promise<void> {
-    await this.execGit(['config', '--local', configKey, configValue])
+    await this.execGit(['config', configKey, configValue])
   }
 
   async configExists(configKey: string): Promise<boolean> {
@@ -124,7 +124,7 @@ class GitCommandManager {
       return `\\${x}`
     })
     const output = await this.execGit(
-      ['config', '--local', '--name-only', '--get-regexp', pattern],
+      ['config', '--name-only', '--get-regexp', pattern],
       true
     )
     return output.exitCode === 0
@@ -211,23 +211,20 @@ class GitCommandManager {
 
   async tryConfigUnset(configKey: string): Promise<boolean> {
     const output = await this.execGit(
-      ['config', '--local', '--unset-all', configKey],
+      ['config', '--unset-all', configKey],
       true
     )
     return output.exitCode === 0
   }
 
   async tryDisableAutomaticGarbageCollection(): Promise<boolean> {
-    const output = await this.execGit(
-      ['config', '--local', 'gc.auto', '0'],
-      true
-    )
+    const output = await this.execGit(['config', 'gc.auto', '0'], true)
     return output.exitCode === 0
   }
 
   async tryGetFetchUrl(): Promise<string> {
     const output = await this.execGit(
-      ['config', '--local', '--get', 'remote.origin.url'],
+      ['config', '--get', 'remote.origin.url'],
       true
     )
 
@@ -1,12 +1,12 @@
 import * as core from '@actions/core'
+import * as coreCommand from '@actions/core/lib/command'
 import * as fs from 'fs'
 import * as fsHelper from './fs-helper'
 import * as gitCommandManager from './git-command-manager'
-import * as githubApiHelper from './github-api-helper'
 import * as io from '@actions/io'
 import * as path from 'path'
 import * as refHelper from './ref-helper'
-import * as stateHelper from './state-helper'
+import * as githubApiHelper from './github-api-helper'
 import {IGitCommandManager} from './git-command-manager'

 const authConfigKey = `http.https://github.com/.extraheader`
@@ -20,8 +20,7 @@ export interface ISourceSettings {
 clean: boolean
 fetchDepth: number
 lfs: boolean
-authToken: string
+accessToken: string
-persistCredentials: boolean
 }

 export async function getSource(settings: ISourceSettings): Promise<void> {
@@ -33,6 +32,13 @@ export async function getSource(settings: ISourceSettings): Promise<void> {
 settings.repositoryOwner
 )}/${encodeURIComponent(settings.repositoryName)}

+// Set intra-task state for cleanup
+coreCommand.issueCommand(
+'save-state',
+{name: 'repositoryPath'},
+settings.repositoryPath
+)
+
 // Remove conflicting file path
 if (fsHelper.fileExistsSync(settings.repositoryPath)) {
 await io.rmRF(settings.repositoryPath)
@@ -46,7 +52,21 @@ export async function getSource(settings: ISourceSettings): Promise<void> {
 }

 // Git command manager
-const git = await getGitCommandManager(settings)
+core.info(`Working directory is '${settings.repositoryPath}'`)
+let git = (null as unknown) as IGitCommandManager
+try {
+git = await gitCommandManager.CreateCommandManager(
+settings.repositoryPath,
+settings.lfs
+)
+} catch (err) {
+// Git is required for LFS
+if (settings.lfs) {
+throw err
+}
+
+// Otherwise fallback to REST API
+}

 // Prepare existing directory, otherwise recreate
 if (isExisting) {
@@ -58,14 +78,13 @@ export async function getSource(settings: ISourceSettings): Promise<void> {
 )
 }

-if (!git) {
+if (!git || `${1}` == '1') {
-// Downloading using REST API
+core.info(`Downloading the repository files using the GitHub REST API`)
-core.info(`The repository will be downloaded using the GitHub REST API`)
 core.info(
-`To create a local Git repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH`
+`To create a local repository instead, add Git ${gitCommandManager.MinimumGitVersion} or higher to the PATH`
 )
 await githubApiHelper.downloadRepository(
-settings.authToken,
+settings.accessToken,
 settings.repositoryOwner,
 settings.repositoryName,
 settings.ref,
@@ -73,9 +92,6 @@ export async function getSource(settings: ISourceSettings): Promise<void> {
 settings.repositoryPath
 )
 } else {
-// Save state for POST action
-stateHelper.setRepositoryPath(settings.repositoryPath)
-
 // Initialize the repository
 if (
 !fsHelper.directoryExistsSync(path.join(settings.repositoryPath, '.git'))
@@ -94,9 +110,14 @@ export async function getSource(settings: ISourceSettings): Promise<void> {
 // Remove possible previous extraheader
 await removeGitConfig(git, authConfigKey)

-try {
+// Add extraheader (auth)
-// Config auth token
+const base64Credentials = Buffer.from(
-await configureAuthToken(git, settings.authToken)
+`x-access-token:${settings.accessToken}`,
+'utf8'
+).toString('base64')
+core.setSecret(base64Credentials)
+const authConfigValue = `AUTHORIZATION: basic ${base64Credentials}`
+await git.config(authConfigKey, authConfigValue)

 // LFS install
 if (settings.lfs) {
@@ -126,11 +147,6 @@ export async function getSource(settings: ISourceSettings): Promise<void> {

 // Dump some info about the checked out commit
 await git.log1()
-} finally {
-if (!settings.persistCredentials) {
-await removeGitConfig(git, authConfigKey)
-}
-}
 }
 }

@@ -149,27 +165,6 @@ export async function cleanup(repositoryPath: string): Promise<void> {
 await removeGitConfig(git, authConfigKey)
 }

-async function getGitCommandManager(
-settings: ISourceSettings
-): Promise<IGitCommandManager> {
-core.info(`Working directory is '${settings.repositoryPath}'`)
-let git = (null as unknown) as IGitCommandManager
-try {
-return await gitCommandManager.CreateCommandManager(
-settings.repositoryPath,
-settings.lfs
-)
-} catch (err) {
-// Git is required for LFS
-if (settings.lfs) {
-throw err
-}
-
-// Otherwise fallback to REST API
-return (null as unknown) as IGitCommandManager
-}
-}
-
 async function prepareExistingDirectory(
 git: IGitCommandManager,
 repositoryPath: string,
@@ -255,40 +250,6 @@ async function prepareExistingDirectory(
 }
 }

-async function configureAuthToken(
-git: IGitCommandManager,
-authToken: string
-): Promise<void> {
-// Configure a placeholder value. This approach avoids the credential being captured
-// by process creation audit events, which are commonly logged. For more information,
-// refer to https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/manage/component-updates/command-line-process-auditing
-const placeholder = `AUTHORIZATION: basic ***`
-await git.config(authConfigKey, placeholder)
-
-// Determine the basic credential value
-const basicCredential = Buffer.from(
-`x-access-token:${authToken}`,
-'utf8'
-).toString('base64')
-core.setSecret(basicCredential)
-
-// Replace the value in the config file
-const configPath = path.join(git.getWorkingDirectory(), '.git', 'config')
-let content = (await fs.promises.readFile(configPath)).toString()
-const placeholderIndex = content.indexOf(placeholder)
-if (
-placeholderIndex < 0 ||
-placeholderIndex != content.lastIndexOf(placeholder)
-) {
-throw new Error('Unable to replace auth placeholder in .git/config')
-}
-content = content.replace(
-placeholder,
-`AUTHORIZATION: basic ${basicCredential}`
-)
-await fs.promises.writeFile(configPath, content)
-}
-
 async function removeGitConfig(
 git: IGitCommandManager,
 configKey: string
@@ -298,6 +259,21 @@ async function removeGitConfig(
 !(await git.tryConfigUnset(configKey))
 ) {
 // Load the config contents
-core.warning(`Failed to remove '${configKey}' from the git config`)
+core.warning(
+`Failed to remove '${configKey}' from the git config. Attempting to remove the config value by editing the file directly.`
+)
+const configPath = path.join(git.getWorkingDirectory(), '.git', 'config')
+fsHelper.fileExistsSync(configPath)
+let contents = fs.readFileSync(configPath).toString() || ''
+
+// Filter - only includes lines that do not contain the config key
+const upperConfigKey = configKey.toUpperCase()
+const split = contents
+.split('\n')
+.filter(x => !x.toUpperCase().includes(upperConfigKey))
+contents = split.join('\n')
+
+// Rewrite the config file
+fs.writeFileSync(configPath, contents)
 }
 }
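
Both sides of the hunks above end up storing the same kind of value under `http.https://github.com/.extraheader`; the newer code just writes a `***` placeholder via `git config` first and swaps the real value directly inside `.git/config`, so the credential never appears on a command line. A minimal sketch of the shared credential encoding, assuming a standalone helper name (`buildAuthConfigValue` is illustrative, not part of the action):

```typescript
import * as core from '@actions/core'

// Sketch only: how a workflow token becomes the extraheader value.
function buildAuthConfigValue(token: string): string {
  // GitHub accepts "x-access-token:<token>" as basic-auth credentials
  const basicCredential = Buffer.from(`x-access-token:${token}`, 'utf8').toString('base64')
  core.setSecret(basicCredential) // mask the encoded value in workflow logs
  return `AUTHORIZATION: basic ${basicCredential}`
}
```

The resulting string is what gets written to the `http.https://github.com/.extraheader` config key so that HTTPS fetches authenticate with the workflow token.
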
@@ -1,92 +1,204 @@
 import * as assert from 'assert'
 import * as core from '@actions/core'
+import * as exec from '@actions/exec'
 import * as fs from 'fs'
 import * as github from '@actions/github'
+import * as https from 'https'
 import * as io from '@actions/io'
 import * as path from 'path'
+import * as refHelper from './ref-helper'
 import * as retryHelper from './retry-helper'
 import * as toolCache from '@actions/tool-cache'
-import {default as uuid} from 'uuid/v4'
+import {ExecOptions} from '@actions/exec/lib/interfaces'
-import {ReposGetArchiveLinkParams} from '@octokit/rest'
+import {IncomingMessage} from 'http'
+import {RequestOptions, ReposGetArchiveLinkParams} from '@octokit/rest'
+import {WriteStream} from 'fs'

 const IS_WINDOWS = process.platform === 'win32'

 export async function downloadRepository(
-authToken: string,
+accessToken: string,
 owner: string,
 repo: string,
 ref: string,
 commit: string,
 repositoryPath: string
 ): Promise<void> {
+// Determine archive path
+const runnerTemp = process.env['RUNNER_TEMP'] as string
+assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
+const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
+
+// Ensure file does not exist
+core.debug(`Ensuring archive file does not exist: ${archivePath}`)
+await io.rmRF(archivePath)
+
 // Download the archive
 let archiveData = await retryHelper.execute(async () => {
-core.info('Downloading the archive')
+core.info('Downloading the archive using the REST API')
-return await downloadArchive(authToken, owner, repo, ref, commit)
+return await downloadArchive(accessToken, owner, repo, ref, commit)
 })

 // Write archive to disk
 core.info('Writing archive to disk')
-const uniqueId = uuid()
-const archivePath = path.join(repositoryPath, `${uniqueId}.tar.gz`)
 await fs.promises.writeFile(archivePath, archiveData)
 archiveData = Buffer.from('') // Free memory

+// // Get the archive URL using the REST API
+// await retryHelper.execute(async () => {
+// // Prepare the archive stream
+// core.debug(`Preparing the archive stream: ${archivePath}`)
+// await io.rmRF(archivePath)
+// const fileStream = fs.createWriteStream(archivePath)
+// const fileStreamClosed = getFileClosedPromise(fileStream)
+
+// try {
+// // Get the archive URL
+// core.info('Getting archive URL')
+// const archiveUrl = await getArchiveUrl(
+// accessToken,
+// owner,
+// repo,
+// ref,
+// commit
+// )
+
+// // Download the archive
+// core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
+// await downloadFile(archiveUrl, fileStream)
+// } finally {
+// fileStream.end()
+// await fileStreamClosed
+// }
+// })
+
 // Extract archive
-core.info('Extracting the archive')
+const extractPath = path.join(runnerTemp, `checkout`)
-const extractPath = path.join(repositoryPath, uniqueId)
+await io.rmRF(extractPath)
 await io.mkdirP(extractPath)
 if (IS_WINDOWS) {
 await toolCache.extractZip(archivePath, extractPath)
 } else {
 await toolCache.extractTar(archivePath, extractPath)
 }
-io.rmRF(archivePath)

-// Determine the path of the repository content. The archive contains
+// Determine the real directory to copy (ignore extra dir at root of the archive)
-// a top-level folder and the repository content is inside.
 const archiveFileNames = await fs.promises.readdir(extractPath)
 assert.ok(
 archiveFileNames.length == 1,
 'Expected exactly one directory inside archive'
 )
-const archiveVersion = archiveFileNames[0] // The top-level folder name includes the short SHA
+const extraDirectoryName = archiveFileNames[0]
-core.info(`Resolved version ${archiveVersion}`)
+core.info(`Resolved ${extraDirectoryName}`) // contains the short SHA
-const tempRepositoryPath = path.join(extractPath, archiveVersion)
+const tempRepositoryPath = path.join(extractPath, extraDirectoryName)

 // Move the files
 for (const fileName of await fs.promises.readdir(tempRepositoryPath)) {
 const sourcePath = path.join(tempRepositoryPath, fileName)
 const targetPath = path.join(repositoryPath, fileName)
-if (IS_WINDOWS) {
-await io.cp(sourcePath, targetPath, {recursive: true}) // Copy on Windows (Windows Defender may have a lock)
-} else {
 await io.mv(sourcePath, targetPath)
 }
-}
-io.rmRF(extractPath)
+await exec.exec(`find .`, [], {
+cwd: repositoryPath
+} as ExecOptions)
 }

 async function downloadArchive(
-authToken: string,
+accessToken: string,
 owner: string,
 repo: string,
 ref: string,
 commit: string
 ): Promise<Buffer> {
-const octokit = new github.GitHub(authToken)
+const octokit = new github.GitHub(accessToken)
 const params: ReposGetArchiveLinkParams = {
 owner: owner,
 repo: repo,
 archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
-ref: commit || ref
+ref: refHelper.getDownloadRef(ref, commit)
 }
 const response = await octokit.repos.getArchiveLink(params)
+console.log('GOT THE RESPONSE')
+console.log(`status=${response.status}`)
+console.log(`headers=${JSON.stringify(response.headers)}`)
+console.log(`data=${JSON.stringify(response.data)}`)
 if (response.status != 200) {
 throw new Error(
-`Unexpected response from GitHub API. Status: ${response.status}, Data: ${response.data}`
+`Unexpected response from GitHub API. Status: '${response.status}'`
 )
 }

 return Buffer.from(response.data) // response.data is ArrayBuffer
 }

+// async function getArchiveUrl(
+// accessToken: string,
+// owner: string,
+// repo: string,
+// ref: string,
+// commit: string
+// ): Promise<string> {
+// const octokit = new github.GitHub(accessToken)
+// const params: RequestOptions & ReposGetArchiveLinkParams = {
+// method: 'HEAD',
+// owner: owner,
+// repo: repo,
+// archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
+// ref: refHelper.getDownloadRef(ref, commit)
+// }
+// const response = await octokit.repos.getArchiveLink(params)
+// console.log('GOT THE RESPONSE')
+// console.log(`status=${response.status}`)
+// console.log(`headers=${JSON.stringify(response.headers)}`)
+// console.log(`data=${JSON.stringify(response.data)}`)
+// if (response.status != 200) {
+// throw new Error(
+// `Unexpected response from GitHub API. Status: '${response.status}'`
+// )
+// }
+// console.log('GETTING THE LOCATION')
+// const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
+// assert.ok(
+// archiveUrl,
+// `Expected GitHub API response to contain 'Location' header`
+// )
+// return archiveUrl
+// }
+
+// function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
+// return new Promise((resolve, reject) => {
+// try {
+// https.get(url, (response: IncomingMessage) => {
+// if (response.statusCode != 200) {
+// reject(`Request failed with status '${response.statusCode}'`)
+// response.resume() // Consume response data to free up memory
+// return
+// }
+
+// response.on('data', chunk => {
+// fileStream.write(chunk)
+// })
+// response.on('end', () => {
+// resolve()
+// })
+// response.on('error', err => {
+// reject(err)
+// })
+// })
+// } catch (err) {
+// reject(err)
+// }
+// })
+// }
+
+// function getFileClosedPromise(stream: WriteStream): Promise<void> {
+// return new Promise((resolve, reject) => {
+// stream.on('error', err => {
+// reject(err)
+// })
+// stream.on('finish', () => {
+// resolve()
+// })
+// })
+// }
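
For the REST API fallback shown above, the heart of `downloadRepository` is the archive handling: pick zipball or tarball by platform, extract it, and locate the single top-level directory GitHub places inside the archive. A rough sketch of just that step, under assumed parameter names (not the action's exact code):

```typescript
import * as fs from 'fs'
import * as path from 'path'
import * as toolCache from '@actions/tool-cache'

// Sketch only: extract a downloaded archive and return the directory that
// actually holds the repository content (GitHub nests it one level deep).
async function extractArchive(archivePath: string, extractPath: string): Promise<string> {
  const isWindows = process.platform === 'win32'
  if (isWindows) {
    await toolCache.extractZip(archivePath, extractPath) // zipball on Windows
  } else {
    await toolCache.extractTar(archivePath, extractPath) // tarball elsewhere
  }
  const entries = await fs.promises.readdir(extractPath)
  if (entries.length !== 1) {
    throw new Error('Expected exactly one directory inside archive')
  }
  return path.join(extractPath, entries[0]) // top-level folder named <repo>-<short-sha>
}
```
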
@@ -97,12 +97,8 @@ export function getInputs(): ISourceSettings {
 result.lfs = (core.getInput('lfs') || 'false').toUpperCase() === 'TRUE'
 core.debug(`lfs = ${result.lfs}`)

-// Auth token
+// Access token
-result.authToken = core.getInput('token')
+result.accessToken = core.getInput('token')

-// Persist credentials
-result.persistCredentials =
-(core.getInput('persist-credentials') || 'false').toUpperCase() === 'TRUE'
-
 return result
 }
@@ -3,7 +3,8 @@ import * as coreCommand from '@actions/core/lib/command'
 import * as gitSourceProvider from './git-source-provider'
 import * as inputHelper from './input-helper'
 import * as path from 'path'
-import * as stateHelper from './state-helper'
+
+const cleanupRepositoryPath = process.env['STATE_repositoryPath'] as string

 async function run(): Promise<void> {
 try {
@@ -30,14 +31,14 @@ async function run(): Promise<void> {

 async function cleanup(): Promise<void> {
 try {
-await gitSourceProvider.cleanup(stateHelper.RepositoryPath)
+await gitSourceProvider.cleanup(cleanupRepositoryPath)
 } catch (error) {
 core.warning(error.message)
 }
 }

 // Main
-if (!stateHelper.IsPost) {
+if (!cleanupRepositoryPath) {
 run()
 }
 // Post
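
The `main.ts` hunk swaps `stateHelper.IsPost` for a direct read of `STATE_repositoryPath`, but both approaches rely on the runner's save-state mechanism: a value saved with the `save-state` command during the main phase comes back as a `STATE_<name>` environment variable in the post phase. A small sketch of that round trip (illustrative wiring, not the action's exact code):

```typescript
import * as coreCommand from '@actions/core/lib/command'

// The runner sets STATE_isPost only after the main phase has saved it.
const isPost = !!process.env['STATE_isPost']

if (!isPost) {
  // Main phase: persist a marker the runner echoes back as STATE_isPost
  coreCommand.issueCommand('save-state', {name: 'isPost'}, 'true')
  // ... run the checkout logic here
} else {
  // Post phase: the marker is present, so run cleanup instead
}
```
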
@@ -96,7 +96,7 @@ function updateUsage(
 }

 updateUsage(
-'actions/checkout@v2',
+'actions/checkout@v2-beta',
 path.join(__dirname, '..', '..', 'action.yml'),
 path.join(__dirname, '..', '..', 'README.md')
 )
@@ -5,6 +5,15 @@ export interface ICheckoutInfo {
 startPoint: string
 }

+export function getDownloadRef(ref: string, commit: string): string {
+if (commit) {
+return commit
+}
+
+// todo fix this to work with refs/pull etc
+return ref
+}
+
 export async function getCheckoutInfo(
 git: IGitCommandManager,
 ref: string,
@@ -1,30 +1,12 @@
 import * as core from '@actions/core'

-const defaultMaxAttempts = 3
+const maxAttempts = 3
-const defaultMinSeconds = 10
+const minSeconds = 10
-const defaultMaxSeconds = 20
+const maxSeconds = 20

-export class RetryHelper {
+export async function execute<T>(action: () => Promise<T>): Promise<T> {
-private maxAttempts: number
-private minSeconds: number
-private maxSeconds: number
-
-constructor(
-maxAttempts: number = defaultMaxAttempts,
-minSeconds: number = defaultMinSeconds,
-maxSeconds: number = defaultMaxSeconds
-) {
-this.maxAttempts = maxAttempts
-this.minSeconds = Math.floor(minSeconds)
-this.maxSeconds = Math.floor(maxSeconds)
-if (this.minSeconds > this.maxSeconds) {
-throw new Error('min seconds should be less than or equal to max seconds')
-}
-}
-
-async execute<T>(action: () => Promise<T>): Promise<T> {
 let attempt = 1
-while (attempt < this.maxAttempts) {
+while (attempt < maxAttempts) {
 // Try
 try {
 return await action()
@@ -33,9 +15,9 @@ export class RetryHelper {
 }

 // Sleep
-const seconds = this.getSleepAmount()
+const seconds = getRandomIntInclusive(minSeconds, maxSeconds)
-core.info(`Waiting ${seconds} seconds before trying again`)
+core.info(`Waiting ${seconds} before trying again`)
-await this.sleep(seconds)
+await sleep(seconds * 1000)
 attempt++
 }

@@ -43,19 +25,12 @@ export class RetryHelper {
 return await action()
 }

-private getSleepAmount(): number {
+function getRandomIntInclusive(minimum: number, maximum: number): number {
-return (
+minimum = Math.floor(minimum)
-Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) +
+maximum = Math.floor(maximum)
-this.minSeconds
+return Math.floor(Math.random() * (maximum - minimum + 1)) + minimum
-)
 }

-private async sleep(seconds: number): Promise<void> {
+async function sleep(milliseconds): Promise<void> {
-return new Promise(resolve => setTimeout(resolve, seconds * 1000))
+return new Promise(resolve => setTimeout(resolve, milliseconds))
-}
-}
-
-export async function execute<T>(action: () => Promise<T>): Promise<T> {
-const retryHelper = new RetryHelper()
-return await retryHelper.execute(action)
 }
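
Whether written as the `RetryHelper` class on the left or the free functions on the right, the module's public surface is `execute<T>(action)`, which reruns the action with a random 10-20 second pause between attempts. A hedged usage sketch (the `downloadSomething` helper below is a placeholder, not part of the module):

```typescript
import * as retryHelper from './retry-helper'

// Wrap a flaky async call so transient failures are retried automatically.
async function fetchWithRetries(): Promise<Buffer> {
  return await retryHelper.execute(async () => {
    return await downloadSomething()
  })
}

// Placeholder for illustration only
async function downloadSomething(): Promise<Buffer> {
  return Buffer.from('')
}
```
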
@@ -1,30 +0,0 @@
-import * as core from '@actions/core'
-import * as coreCommand from '@actions/core/lib/command'
-
-/**
-* Indicates whether the POST action is running
-*/
-export const IsPost = !!process.env['STATE_isPost']
-
-/**
-* The repository path for the POST action. The value is empty during the MAIN action.
-*/
-export const RepositoryPath =
-(process.env['STATE_repositoryPath'] as string) || ''
-
-/**
-* Save the repository path so the POST action can retrieve the value.
-*/
-export function setRepositoryPath(repositoryPath: string) {
-coreCommand.issueCommand(
-'save-state',
-{name: 'repositoryPath'},
-repositoryPath
-)
-}
-
-// Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic.
-// This is necessary since we don't have a separate entry point.
-if (!IsPost) {
-coreCommand.issueCommand('save-state', {name: 'isPost'}, 'true')
-}