diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml
index ec093ea..90f7350 100644
--- a/.github/workflows/workflow.yml
+++ b/.github/workflows/workflow.yml
@@ -11,6 +11,7 @@ jobs:
build:
runs-on: ${{ matrix.operating-system }}
strategy:
+ fail-fast: false
matrix:
operating-system: [ubuntu-latest, windows-latest, macOS-latest]
steps:
@@ -19,7 +20,7 @@ jobs:
- name: Set Node.js 12
uses: actions/setup-node@v1
with:
- version: 12.x
+ node-version: 12.x
- run: npm ci
- run: npm run build
- run: npm run format-check
@@ -31,27 +32,50 @@ jobs:
test:
runs-on: ${{ matrix.operating-system }}
strategy:
+ fail-fast: false
matrix:
operating-system: [ubuntu-latest, windows-latest, macOS-latest]
steps:
- name: Checkout
uses: actions/checkout@v2
- - name: Clear tool cache
- run: mv "${{ runner.tool_cache }}" "${{ runner.tool_cache }}.old"
- - name: Setup dotnet 3.0.100
+ - name: Clear tool cache (macOS)
+ if: runner.os == 'macos'
+ run: |
+ echo $PATH
+ dotnet --info
+ rm -rf "/Users/runner/.dotnet"
+ - name: Clear tool cache (Ubuntu)
+ if: runner.os == 'linux'
+ run: |
+ echo $PATH
+ dotnet --info
+ rm -rf "/usr/share/dotnet"
+ - name: Clear tool cache (Windows)
+ if: runner.os == 'windows'
+ run: |
+ echo $env:PATH
+ dotnet --info
+ Remove-Item $env:LocalAppData\Microsoft\dotnet/* -Recurse -Force -ErrorAction SilentlyContinue
+ Remove-Item "$env:ProgramFiles\dotnet/*" -Recurse -Force -ErrorAction SilentlyContinue
+ # Side-by-side install of 2.2 and 3.1 used for the test project
+ - name: Setup dotnet 2.2.402
uses: ./
with:
- dotnet-version: 3.0.100
+ dotnet-version: 2.2.402
+ - name: Setup dotnet 3.1.201
+ uses: ./
+ with:
+ dotnet-version: 3.1.201
        # We are including this variable to force the generation of the nuget config file and verify that it is created in the correct place
source-url: https://api.nuget.org/v3/index.json
env:
NUGET_AUTH_TOKEN: NOTATOKEN
- name: Verify dotnet
if: runner.os != 'windows'
- run: __tests__/verify-dotnet.sh 3.0.100
+ run: __tests__/verify-dotnet.sh 3.1.201 2.2.402
- name: Verify dotnet (Windows)
if: runner.os == 'windows'
- run: __tests__/verify-dotnet.ps1 3.0.100
+ run: __tests__/verify-dotnet.ps1 3.1.201
test-proxy:
runs-on: ubuntu-latest
@@ -65,37 +89,42 @@ jobs:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
+ http_proxy: http://squid-proxy:3128
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Clear tool cache
- run: rm -rf $RUNNER_TOOL_CACHE/*
- - name: Setup dotnet 3.0.100
+ run: rm -rf "/usr/share/dotnet"
+ - name: Install curl
+ run: |
+ apt update
+ apt -y install curl
+ - name: Setup dotnet 3.1.201
uses: ./
with:
- dotnet-version: 3.0.100
+ dotnet-version: 3.1.201
source-url: https://api.nuget.org/v3/index.json
env:
NUGET_AUTH_TOKEN: NOTATOKEN
- name: Verify dotnet
- run: __tests__/verify-dotnet.sh 3.0.100
+ run: __tests__/verify-dotnet.sh 3.1.201
test-bypass-proxy:
runs-on: ubuntu-latest
env:
https_proxy: http://no-such-proxy:3128
- no_proxy: github.com,dotnetcli.blob.core.windows.net,download.visualstudio.microsoft.com,api.nuget.org
+ no_proxy: github.com,dotnetcli.blob.core.windows.net,download.visualstudio.microsoft.com,api.nuget.org,dotnetcli.azureedge.net
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Clear tool cache
- run: mv "${{ runner.tool_cache }}" "${{ runner.tool_cache }}.old"
- - name: Setup dotnet 3.0.100
+ run: rm -rf "/usr/share/dotnet"
+ - name: Setup dotnet 3.1.201
uses: ./
with:
- dotnet-version: 3.0.100
+ dotnet-version: 3.1.201
source-url: https://api.nuget.org/v3/index.json
env:
NUGET_AUTH_TOKEN: NOTATOKEN
- name: Verify dotnet
- run: __tests__/verify-dotnet.sh 3.0.100
+ run: __tests__/verify-dotnet.sh 3.1.201
diff --git a/.gitignore b/.gitignore
index 2d64bd9..b339e2c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -95,4 +95,5 @@ typings/
# DynamoDB Local files
.dynamodb/
-.vscode/*
\ No newline at end of file
+# Ignore .vscode files
+.vscode/
\ No newline at end of file
diff --git a/__tests__/authutil.test.ts b/__tests__/authutil.test.ts
index cf0b52a..f2e82d8 100644
--- a/__tests__/authutil.test.ts
+++ b/__tests__/authutil.test.ts
@@ -81,7 +81,11 @@ describe('authutil tests', () => {
beforeEach(async () => {
await io.rmRF(fakeSourcesDirForTesting);
await io.mkdirP(fakeSourcesDirForTesting);
- }, 100000);
+ }, 30000);
+
+ afterAll(async () => {
+ await io.rmRF(fakeSourcesDirForTesting);
+ }, 30000);
beforeEach(() => {
if (fs.existsSync(nugetConfigFile)) {
diff --git a/__tests__/installer.test.ts b/__tests__/installer.test.ts
index bafd74c..fe299e6 100644
--- a/__tests__/installer.test.ts
+++ b/__tests__/installer.test.ts
@@ -73,6 +73,9 @@ describe('version tests', () => {
describe('installer tests', () => {
beforeAll(async () => {
+ process.env.RUNNER_TOOL_CACHE = toolDir;
+ process.env.DOTNET_INSTALL_DIR = toolDir;
+ process.env.RUNNER_TEMP = tempDir;
await io.rmRF(toolDir);
await io.rmRF(tempDir);
});
@@ -84,23 +87,21 @@ describe('installer tests', () => {
} catch {
console.log('Failed to remove test directories');
}
- }, 100000);
+ }, 30000);
it('Resolving a normal generic version works', async () => {
const dotnetInstaller = new installer.DotnetCoreInstaller('3.1.x');
- let versInfo = await dotnetInstaller.resolveInfos(
- ['win-x64'],
+ let versInfo = await dotnetInstaller.resolveVersion(
new installer.DotNetVersionInfo('3.1.x')
);
- expect(versInfo.resolvedVersion.startsWith('3.1.'));
+ expect(versInfo.startsWith('3.1.'));
}, 100000);
it('Resolving a nonexistent generic version fails', async () => {
const dotnetInstaller = new installer.DotnetCoreInstaller('999.1.x');
try {
- await dotnetInstaller.resolveInfos(
- ['win-x64'],
+ await dotnetInstaller.resolveVersion(
new installer.DotNetVersionInfo('999.1.x')
);
fail();
@@ -111,53 +112,47 @@ describe('installer tests', () => {
it('Resolving a exact stable version works', async () => {
const dotnetInstaller = new installer.DotnetCoreInstaller('3.1.201');
- let versInfo = await dotnetInstaller.resolveInfos(
- ['win-x64'],
+ let versInfo = await dotnetInstaller.resolveVersion(
new installer.DotNetVersionInfo('3.1.201')
);
- expect(versInfo.resolvedVersion).toBe('3.1.201');
+ expect(versInfo).toBe('3.1.201');
}, 100000);
it('Resolving a exact preview version works', async () => {
const dotnetInstaller = new installer.DotnetCoreInstaller(
- '5.0.0-preview.4'
+ '5.0.0-preview.6'
);
- let versInfo = await dotnetInstaller.resolveInfos(
- ['win-x64'],
- new installer.DotNetVersionInfo('5.0.0-preview.4')
+ let versInfo = await dotnetInstaller.resolveVersion(
+ new installer.DotNetVersionInfo('5.0.0-preview.6')
);
- expect(versInfo.resolvedVersion).toBe('5.0.0-preview.4');
+ expect(versInfo).toBe('5.0.0-preview.6');
}, 100000);
it('Acquires version of dotnet if no matching version is installed', async () => {
- await getDotnet('2.2.205');
- const dotnetDir = path.join(toolDir, 'dncs', '2.2.205', os.arch());
-
- expect(fs.existsSync(`${dotnetDir}.complete`)).toBe(true);
+ await getDotnet('3.1.201');
+ expect(fs.existsSync(path.join(toolDir, 'sdk', '3.1.201'))).toBe(true);
if (IS_WINDOWS) {
- expect(fs.existsSync(path.join(dotnetDir, 'dotnet.exe'))).toBe(true);
+ expect(fs.existsSync(path.join(toolDir, 'dotnet.exe'))).toBe(true);
} else {
- expect(fs.existsSync(path.join(dotnetDir, 'dotnet'))).toBe(true);
+ expect(fs.existsSync(path.join(toolDir, 'dotnet'))).toBe(true);
}
}, 400000); //This needs some time to download on "slower" internet connections
- it('Acquires version of dotnet if no matching version is installed', async () => {
- const dotnetDir = path.join(toolDir, 'dncs', '2.2.105', os.arch());
-
+ it('Acquires version of dotnet from global.json if no matching version is installed', async () => {
const globalJsonPath = path.join(process.cwd(), 'global.json');
- const jsonContents = `{${os.EOL}"sdk": {${os.EOL}"version": "2.2.105"${os.EOL}}${os.EOL}}`;
+ const jsonContents = `{${os.EOL}"sdk": {${os.EOL}"version": "3.1.201"${os.EOL}}${os.EOL}}`;
if (!fs.existsSync(globalJsonPath)) {
fs.writeFileSync(globalJsonPath, jsonContents);
}
await setup.run();
- expect(fs.existsSync(`${dotnetDir}.complete`)).toBe(true);
+ expect(fs.existsSync(path.join(toolDir, 'sdk', '3.1.201'))).toBe(true);
if (IS_WINDOWS) {
- expect(fs.existsSync(path.join(dotnetDir, 'dotnet.exe'))).toBe(true);
+ expect(fs.existsSync(path.join(toolDir, 'dotnet.exe'))).toBe(true);
} else {
- expect(fs.existsSync(path.join(dotnetDir, 'dotnet'))).toBe(true);
+ expect(fs.existsSync(path.join(toolDir, 'dotnet'))).toBe(true);
}
fs.unlinkSync(globalJsonPath);
}, 100000);
@@ -170,30 +165,7 @@ describe('installer tests', () => {
thrown = true;
}
expect(thrown).toBe(true);
- }, 100000);
-
- it('Uses version of dotnet installed in cache', async () => {
- const dotnetDir: string = path.join(toolDir, 'dncs', '250.0.0', os.arch());
- await io.mkdirP(dotnetDir);
- fs.writeFileSync(`${dotnetDir}.complete`, 'hello');
- // This will throw if it doesn't find it in the cache (because no such version exists)
- await getDotnet('250.0.0');
- return;
- });
-
- it('Doesnt use version of dotnet that was only partially installed in cache', async () => {
- const dotnetDir: string = path.join(toolDir, 'dncs', '251.0.0', os.arch());
- await io.mkdirP(dotnetDir);
- let thrown = false;
- try {
- // This will throw if it doesn't find it in the cache (because no such version exists)
- await getDotnet('251.0.0');
- } catch {
- thrown = true;
- }
- expect(thrown).toBe(true);
- return;
- });
+ }, 30000);
it('Uses an up to date bash download script', async () => {
const httpCallbackClient = new hc.HttpClient('setup-dotnet-test', [], {
@@ -213,7 +185,7 @@ describe('installer tests', () => {
expect(normalizeFileContents(currentContents)).toBe(
normalizeFileContents(upToDateContents)
);
- }, 100000);
+ }, 30000);
it('Uses an up to date powershell download script', async () => {
var httpCallbackClient = new hc.HttpClient('setup-dotnet-test', [], {
@@ -233,7 +205,7 @@ describe('installer tests', () => {
expect(normalizeFileContents(currentContents)).toBe(
normalizeFileContents(upToDateContents)
);
- }, 100000);
+ }, 30000);
});
function normalizeFileContents(contents: string): string {
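
The installer tests above now call the simplified `resolveVersion(versionInfo)` API, which returns the resolved version as a plain string, instead of the removed `resolveInfos(osSuffixes, versionInfo)` that returned a `ResolvedVersionInfo` with download URLs. A minimal sketch of the call pattern the tests exercise — the class and method names come from the tests themselves, while the import path and wrapper function are assumptions:

```typescript
// Assumed import path; the tests import the same module.
import * as installer from '../src/installer';

// Resolve a floating channel spec (e.g. '3.1.x') to a concrete SDK version.
// resolveVersion now returns the version string directly rather than a
// ResolvedVersionInfo with download URLs.
async function latestSdkFor(channel: string): Promise<string> {
  const dotnetInstaller = new installer.DotnetCoreInstaller(channel);
  return dotnetInstaller.resolveVersion(
    new installer.DotNetVersionInfo(channel)
  );
}
```
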
diff --git a/__tests__/sample-csproj/Program.cs b/__tests__/sample-csproj/Program.cs
index 807ab56..f14c939 100644
--- a/__tests__/sample-csproj/Program.cs
+++ b/__tests__/sample-csproj/Program.cs
@@ -1,14 +1,15 @@
-using System;
-using Newtonsoft.Json;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System;
namespace sample_csproj
{
- class Program
+ [TestClass]
+ public class Program
{
- static void Main(string[] args)
+ [TestMethod]
+ public void TestMethod1()
{
- var json = JsonConvert.SerializeObject(new[] {"Hello", "World!" });
- Console.WriteLine(json);
+ Console.WriteLine("Hello, World!");
}
}
}
diff --git a/__tests__/sample-csproj/runtimeconfig.template.json b/__tests__/sample-csproj/runtimeconfig.template.json
deleted file mode 100644
index 18708fd..0000000
--- a/__tests__/sample-csproj/runtimeconfig.template.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "runtimeOptions": {
- "configProperties": {
- "System.Globalization.Invariant": true
- }
- }
-}
\ No newline at end of file
diff --git a/__tests__/sample-csproj/sample.csproj b/__tests__/sample-csproj/sample.csproj
index 1aeea5c..50fb5e4 100644
--- a/__tests__/sample-csproj/sample.csproj
+++ b/__tests__/sample-csproj/sample.csproj
@@ -1,13 +1,18 @@
-    <OutputType>Exe</OutputType>
-    <TargetFramework>netcoreapp3.0</TargetFramework>
+    <TargetFrameworks>netcoreapp3.1;netcoreapp2.2</TargetFrameworks>
     <RootNamespace>sample_csproj</RootNamespace>
+
+    <IsPackable>false</IsPackable>
-
+
+
+
+
+
diff --git a/__tests__/setup-dotnet.test.ts b/__tests__/setup-dotnet.test.ts
new file mode 100644
index 0000000..d7be6e4
--- /dev/null
+++ b/__tests__/setup-dotnet.test.ts
@@ -0,0 +1,48 @@
+import io = require('@actions/io');
+import fs = require('fs');
+import os = require('os');
+import path = require('path');
+
+const toolDir = path.join(__dirname, 'runner', 'tools2');
+const tempDir = path.join(__dirname, 'runner', 'temp2');
+
+import * as setup from '../src/setup-dotnet';
+
+const IS_WINDOWS = process.platform === 'win32';
+
+describe('setup-dotnet tests', () => {
+ beforeAll(async () => {
+ process.env.RUNNER_TOOL_CACHE = toolDir;
+ process.env.DOTNET_INSTALL_DIR = toolDir;
+ process.env.RUNNER_TEMP = tempDir;
+ await io.rmRF(toolDir);
+ await io.rmRF(tempDir);
+ });
+
+ afterAll(async () => {
+ try {
+ await io.rmRF(path.join(process.cwd(), 'global.json'));
+ await io.rmRF(toolDir);
+ await io.rmRF(tempDir);
+ } catch {
+ console.log('Failed to remove test directories');
+ }
+ }, 30000);
+
+ it('Acquires version of dotnet if no matching version is installed', async () => {
+ const globalJsonPath = path.join(process.cwd(), 'global.json');
+ const jsonContents = `{${os.EOL}"sdk": {${os.EOL}"version": "3.1.201"${os.EOL}}${os.EOL}}`;
+ if (!fs.existsSync(globalJsonPath)) {
+ fs.writeFileSync(globalJsonPath, jsonContents);
+ }
+ await setup.run();
+
+ expect(fs.existsSync(path.join(toolDir, 'sdk', '3.1.201'))).toBe(true);
+ if (IS_WINDOWS) {
+ expect(fs.existsSync(path.join(toolDir, 'dotnet.exe'))).toBe(true);
+ } else {
+ expect(fs.existsSync(path.join(toolDir, 'dotnet'))).toBe(true);
+ }
+ fs.unlinkSync(globalJsonPath);
+ }, 100000);
+});
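
The new `setup-dotnet.test.ts` suite runs `setup.run()` end to end: it pins the SDK through a `global.json` with an `sdk.version` field and redirects the install location with `DOTNET_INSTALL_DIR`, `RUNNER_TOOL_CACHE`, and `RUNNER_TEMP`. The sketch below only illustrates the `global.json` shape the test writes; the helper name and parsing are hypothetical, not code from this diff:

```typescript
import * as fs from 'fs';
import * as path from 'path';

// global.json written by the test:
//   { "sdk": { "version": "3.1.201" } }
// Hypothetical helper: the action's real global.json handling may differ.
function sdkVersionFromGlobalJson(cwd: string): string | undefined {
  const globalJsonPath = path.join(cwd, 'global.json');
  if (!fs.existsSync(globalJsonPath)) {
    return undefined;
  }
  const globalJson = JSON.parse(fs.readFileSync(globalJsonPath, 'utf8'));
  return globalJson.sdk?.version;
}
```
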
diff --git a/__tests__/verify-dotnet.ps1 b/__tests__/verify-dotnet.ps1
index 3bf68ca..6af4297 100755
--- a/__tests__/verify-dotnet.ps1
+++ b/__tests__/verify-dotnet.ps1
@@ -13,13 +13,23 @@ Write-Host "Found '$dotnet'"
$version = & $dotnet --version | Out-String | ForEach-Object { $_.Trim() }
Write-Host "Version $version"
-# if ($version -ne $args[0])
-# {
-# Write-Host "PATH='$env:path'"
-# Write-Host "gcm dotnet:"
-# gcm dotnet | fl
-# throw "Unexpected version"
-# }
+if ($version -ne $args[0])
+{
+ Write-Host "PATH='$env:path'"
+ throw "Unexpected version"
+}
+
+if ($args[1])
+{
+ # SDKs are listed on multiple lines with the path afterwards in square brackets
+ $version = & $dotnet --list-sdks | ForEach-Object { $_.SubString(0, $_.IndexOf('[')).Trim() }
+ Write-Host "Version $version"
+ if (-not ($version -contains $args[1]))
+ {
+ Write-Host "PATH='$env:path'"
+ throw "Unexpected version"
+ }
+}
Write-Host "Building sample csproj"
& $dotnet build __tests__/sample-csproj/ --no-cache
@@ -29,9 +39,20 @@ if ($LASTEXITCODE -ne 0)
}
Write-Host "Testing compiled app"
-$sample_output = "$(__tests__/sample-csproj/bin/Debug/netcoreapp3.0/sample.exe)".Trim()
+$sample_output = "$(dotnet test __tests__/sample-csproj/ --no-build)"
Write-Host "Sample output: $sample_output"
-if ($sample_output -notlike "*Hello*World*")
+# For side-by-side installs we run the tests twice; for a single install they run once
+if ($args[1])
{
- throw "Unexpected output"
+ if ($sample_output -notlike "*Test Run Successful.*Test Run Successful.*")
+ {
+ throw "Unexpected output"
+ }
+}
+else
+{
+ if ($sample_output -notlike "*Test Run Successful.*")
+ {
+ throw "Unexpected output"
+ }
}
diff --git a/__tests__/verify-dotnet.sh b/__tests__/verify-dotnet.sh
index 21d02b1..098d076 100755
--- a/__tests__/verify-dotnet.sh
+++ b/__tests__/verify-dotnet.sh
@@ -15,13 +15,30 @@ if [ -z "$(echo $dotnet_version | grep $1)" ]; then
exit 1
fi
+if [ -n "$2" ]; then
+ dotnet_version="$(dotnet --list-sdks)"
+ echo "Found dotnet version '$dotnet_version'"
+ if [ -z "$(echo $dotnet_version | grep $2)" ]; then
+ echo "Unexpected version"
+ exit 1
+ fi
+fi
+
echo "Building sample csproj"
dotnet build __tests__/sample-csproj/ --no-cache || exit 1
echo "Testing compiled app"
-sample_output="$(__tests__/sample-csproj/bin/Debug/netcoreapp3.0/sample)"
+sample_output=$(dotnet test __tests__/sample-csproj/ --no-build)
echo "Sample output: $sample_output"
-if [ -z "$(echo $sample_output | grep Hello)" ]; then
- echo "Unexpected output"
- exit 1
+# For side-by-side installs we run the tests twice; for a single install they run once
+if [ -n "$2" ]; then
+ if [ -z "$(echo $sample_output | grep "Test Run Successful.*Test Run Successful.")" ]; then
+ echo "Unexpected output"
+ exit 1
+ fi
+else
+ if [ -z "$(echo $sample_output | grep "Test Run Successful.")" ]; then
+ echo "Unexpected output"
+ exit 1
+ fi
fi
\ No newline at end of file
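
Both verify scripts now take an optional second version argument: they compare `dotnet --version` against the first argument, look for the second in `dotnet --list-sdks` when it is supplied, and expect one "Test Run Successful." per installed SDK in the `dotnet test` output. For illustration only, the same `--list-sdks` check expressed in TypeScript (the scripts in this diff are PowerShell and bash; this helper is not part of the repo):

```typescript
// `dotnet --list-sdks` prints one SDK per line, with its path in brackets, e.g.
//   2.2.402 [C:\Program Files\dotnet\sdk]
//   3.1.201 [C:\Program Files\dotnet\sdk]
// Hypothetical helper mirroring the check in verify-dotnet.ps1/.sh.
function hasSdk(listSdksOutput: string, expected: string): boolean {
  return listSdksOutput
    .split(/\r?\n/)
    .filter(line => line.includes('['))
    .map(line => line.substring(0, line.indexOf('[')).trim())
    .includes(expected);
}
```
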
diff --git a/dist/index.js b/dist/index.js
index 831f219..dfd3eee 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -3876,293 +3876,6 @@ module.exports = uniq;
module.exports = require("child_process");
-/***/ }),
-
-/***/ 139:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-// Unique ID creation requires a high quality random # generator. In node.js
-// this is pretty straight-forward - we use the crypto API.
-
-var crypto = __webpack_require__(417);
-
-module.exports = function nodeRNG() {
- return crypto.randomBytes(16);
-};
-
-
-/***/ }),
-
-/***/ 141:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-
-var net = __webpack_require__(631);
-var tls = __webpack_require__(16);
-var http = __webpack_require__(605);
-var https = __webpack_require__(211);
-var events = __webpack_require__(614);
-var assert = __webpack_require__(357);
-var util = __webpack_require__(669);
-
-
-exports.httpOverHttp = httpOverHttp;
-exports.httpsOverHttp = httpsOverHttp;
-exports.httpOverHttps = httpOverHttps;
-exports.httpsOverHttps = httpsOverHttps;
-
-
-function httpOverHttp(options) {
- var agent = new TunnelingAgent(options);
- agent.request = http.request;
- return agent;
-}
-
-function httpsOverHttp(options) {
- var agent = new TunnelingAgent(options);
- agent.request = http.request;
- agent.createSocket = createSecureSocket;
- agent.defaultPort = 443;
- return agent;
-}
-
-function httpOverHttps(options) {
- var agent = new TunnelingAgent(options);
- agent.request = https.request;
- return agent;
-}
-
-function httpsOverHttps(options) {
- var agent = new TunnelingAgent(options);
- agent.request = https.request;
- agent.createSocket = createSecureSocket;
- agent.defaultPort = 443;
- return agent;
-}
-
-
-function TunnelingAgent(options) {
- var self = this;
- self.options = options || {};
- self.proxyOptions = self.options.proxy || {};
- self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
- self.requests = [];
- self.sockets = [];
-
- self.on('free', function onFree(socket, host, port, localAddress) {
- var options = toOptions(host, port, localAddress);
- for (var i = 0, len = self.requests.length; i < len; ++i) {
- var pending = self.requests[i];
- if (pending.host === options.host && pending.port === options.port) {
- // Detect the request to connect same origin server,
- // reuse the connection.
- self.requests.splice(i, 1);
- pending.request.onSocket(socket);
- return;
- }
- }
- socket.destroy();
- self.removeSocket(socket);
- });
-}
-util.inherits(TunnelingAgent, events.EventEmitter);
-
-TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
- var self = this;
- var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
-
- if (self.sockets.length >= this.maxSockets) {
- // We are over limit so we'll add it to the queue.
- self.requests.push(options);
- return;
- }
-
- // If we are under maxSockets create a new one.
- self.createSocket(options, function(socket) {
- socket.on('free', onFree);
- socket.on('close', onCloseOrRemove);
- socket.on('agentRemove', onCloseOrRemove);
- req.onSocket(socket);
-
- function onFree() {
- self.emit('free', socket, options);
- }
-
- function onCloseOrRemove(err) {
- self.removeSocket(socket);
- socket.removeListener('free', onFree);
- socket.removeListener('close', onCloseOrRemove);
- socket.removeListener('agentRemove', onCloseOrRemove);
- }
- });
-};
-
-TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
- var self = this;
- var placeholder = {};
- self.sockets.push(placeholder);
-
- var connectOptions = mergeOptions({}, self.proxyOptions, {
- method: 'CONNECT',
- path: options.host + ':' + options.port,
- agent: false,
- headers: {
- host: options.host + ':' + options.port
- }
- });
- if (options.localAddress) {
- connectOptions.localAddress = options.localAddress;
- }
- if (connectOptions.proxyAuth) {
- connectOptions.headers = connectOptions.headers || {};
- connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
- new Buffer(connectOptions.proxyAuth).toString('base64');
- }
-
- debug('making CONNECT request');
- var connectReq = self.request(connectOptions);
- connectReq.useChunkedEncodingByDefault = false; // for v0.6
- connectReq.once('response', onResponse); // for v0.6
- connectReq.once('upgrade', onUpgrade); // for v0.6
- connectReq.once('connect', onConnect); // for v0.7 or later
- connectReq.once('error', onError);
- connectReq.end();
-
- function onResponse(res) {
- // Very hacky. This is necessary to avoid http-parser leaks.
- res.upgrade = true;
- }
-
- function onUpgrade(res, socket, head) {
- // Hacky.
- process.nextTick(function() {
- onConnect(res, socket, head);
- });
- }
-
- function onConnect(res, socket, head) {
- connectReq.removeAllListeners();
- socket.removeAllListeners();
-
- if (res.statusCode !== 200) {
- debug('tunneling socket could not be established, statusCode=%d',
- res.statusCode);
- socket.destroy();
- var error = new Error('tunneling socket could not be established, ' +
- 'statusCode=' + res.statusCode);
- error.code = 'ECONNRESET';
- options.request.emit('error', error);
- self.removeSocket(placeholder);
- return;
- }
- if (head.length > 0) {
- debug('got illegal response body from proxy');
- socket.destroy();
- var error = new Error('got illegal response body from proxy');
- error.code = 'ECONNRESET';
- options.request.emit('error', error);
- self.removeSocket(placeholder);
- return;
- }
- debug('tunneling connection has established');
- self.sockets[self.sockets.indexOf(placeholder)] = socket;
- return cb(socket);
- }
-
- function onError(cause) {
- connectReq.removeAllListeners();
-
- debug('tunneling socket could not be established, cause=%s\n',
- cause.message, cause.stack);
- var error = new Error('tunneling socket could not be established, ' +
- 'cause=' + cause.message);
- error.code = 'ECONNRESET';
- options.request.emit('error', error);
- self.removeSocket(placeholder);
- }
-};
-
-TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
- var pos = this.sockets.indexOf(socket)
- if (pos === -1) {
- return;
- }
- this.sockets.splice(pos, 1);
-
- var pending = this.requests.shift();
- if (pending) {
- // If we have pending requests and a socket gets closed a new one
- // needs to be created to take over in the pool for the one that closed.
- this.createSocket(pending, function(socket) {
- pending.request.onSocket(socket);
- });
- }
-};
-
-function createSecureSocket(options, cb) {
- var self = this;
- TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
- var hostHeader = options.request.getHeader('host');
- var tlsOptions = mergeOptions({}, self.options, {
- socket: socket,
- servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
- });
-
- // 0 is dummy port for v0.6
- var secureSocket = tls.connect(0, tlsOptions);
- self.sockets[self.sockets.indexOf(socket)] = secureSocket;
- cb(secureSocket);
- });
-}
-
-
-function toOptions(host, port, localAddress) {
- if (typeof host === 'string') { // since v0.10
- return {
- host: host,
- port: port,
- localAddress: localAddress
- };
- }
- return host; // for v0.11 or later
-}
-
-function mergeOptions(target) {
- for (var i = 1, len = arguments.length; i < len; ++i) {
- var overrides = arguments[i];
- if (typeof overrides === 'object') {
- var keys = Object.keys(overrides);
- for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
- var k = keys[j];
- if (overrides[k] !== undefined) {
- target[k] = overrides[k];
- }
- }
- }
- }
- return target;
-}
-
-
-var debug;
-if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
- debug = function() {
- var args = Array.prototype.slice.call(arguments);
- if (typeof args[0] === 'string') {
- args[0] = 'TUNNEL: ' + args[0];
- } else {
- args.unshift('TUNNEL:');
- }
- console.error.apply(console, args);
- }
-} else {
- debug = function() {};
-}
-exports.debug = debug; // for test
-
-
/***/ }),
/***/ 143:
@@ -5144,6 +4857,52 @@ module.exports = require("https");
module.exports = {"name":"@octokit/rest","version":"16.28.9","publishConfig":{"access":"public"},"description":"GitHub REST API client for Node.js","keywords":["octokit","github","rest","api-client"],"author":"Gregor Martynus (https://github.com/gr2m)","contributors":[{"name":"Mike de Boer","email":"info@mikedeboer.nl"},{"name":"Fabian Jakobs","email":"fabian@c9.io"},{"name":"Joe Gallo","email":"joe@brassafrax.com"},{"name":"Gregor Martynus","url":"https://github.com/gr2m"}],"repository":"https://github.com/octokit/rest.js","dependencies":{"@octokit/request":"^5.0.0","@octokit/request-error":"^1.0.2","atob-lite":"^2.0.0","before-after-hook":"^2.0.0","btoa-lite":"^1.0.0","deprecation":"^2.0.0","lodash.get":"^4.4.2","lodash.set":"^4.3.2","lodash.uniq":"^4.5.0","octokit-pagination-methods":"^1.1.0","once":"^1.4.0","universal-user-agent":"^4.0.0"},"devDependencies":{"@gimenete/type-writer":"^0.1.3","@octokit/fixtures-server":"^5.0.1","@octokit/routes":"20.9.2","@types/node":"^12.0.0","bundlesize":"^0.18.0","chai":"^4.1.2","compression-webpack-plugin":"^3.0.0","coveralls":"^3.0.0","glob":"^7.1.2","http-proxy-agent":"^2.1.0","lodash.camelcase":"^4.3.0","lodash.merge":"^4.6.1","lodash.upperfirst":"^4.3.1","mkdirp":"^0.5.1","mocha":"^6.0.0","mustache":"^3.0.0","nock":"^10.0.0","npm-run-all":"^4.1.2","nyc":"^14.0.0","prettier":"^1.14.2","proxy":"^0.2.4","semantic-release":"^15.0.0","sinon":"^7.2.4","sinon-chai":"^3.0.0","sort-keys":"^4.0.0","standard":"^14.0.2","string-to-arraybuffer":"^1.0.0","string-to-jsdoc-comment":"^1.0.0","typescript":"^3.3.1","webpack":"^4.0.0","webpack-bundle-analyzer":"^3.0.0","webpack-cli":"^3.0.0"},"types":"index.d.ts","scripts":{"coverage":"nyc report --reporter=html && open coverage/index.html","pretest":"standard","test":"nyc mocha test/mocha-node-setup.js \"test/*/**/*-test.js\"","test:browser":"cypress run --browser chrome","test:memory":"mocha test/memory-test","build":"npm-run-all build:*","build:ts":"node scripts/generate-types","prebuild:browser":"mkdirp dist/","build:browser":"npm-run-all build:browser:*","build:browser:development":"webpack --mode development --entry . --output-library=Octokit --output=./dist/octokit-rest.js --profile --json > dist/bundle-stats.json","build:browser:production":"webpack --mode production --entry . --plugin=compression-webpack-plugin --output-library=Octokit --output-path=./dist --output-filename=octokit-rest.min.js --devtool source-map","generate-bundle-report":"webpack-bundle-analyzer dist/bundle-stats.json --mode=static --no-open --report dist/bundle-report.html","generate-routes":"node scripts/generate-routes","prevalidate:ts":"npm run -s build:ts","validate:ts":"tsc --target es6 --noImplicitAny index.d.ts","postvalidate:ts":"tsc --noEmit --target es6 test/typescript-validate.ts","start-fixtures-server":"octokit-fixtures-server"},"license":"MIT","files":["index.js","index.d.ts","lib","plugins"],"nyc":{"ignore":["test"]},"release":{"publish":["@semantic-release/npm",{"path":"@semantic-release/github","assets":["dist/*","!dist/*.map.gz"]}]},"standard":{"globals":["describe","before","beforeEach","afterEach","after","it","expect","cy"],"ignore":["/docs"]},"bundlesize":[{"path":"./dist/octokit-rest.min.js.gz","maxSize":"33 kB"}],"_resolved":"https://registry.npmjs.org/@octokit/rest/-/rest-16.28.9.tgz","_integrity":"sha512-IKGnX+Tvzt7XHhs8f4ajqxyJvYAMNX5nWfoJm4CQj8LZToMiaJgutf5KxxpxoC3y5w7JTJpW5rnWnF4TsIvCLA==","_from":"@octokit/rest@16.28.9"};
+/***/ }),
+
+/***/ 216:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Generated by CoffeeScript 2.4.1
+(function() {
+ var NodeType, XMLDummy, XMLNode;
+
+ XMLNode = __webpack_require__(733);
+
+ NodeType = __webpack_require__(683);
+
+ // Represents a raw node
+ module.exports = XMLDummy = class XMLDummy extends XMLNode {
+ // Initializes a new instance of `XMLDummy`
+
+ // `XMLDummy` is a special node representing a node with
+ // a null value. Dummy nodes are created while recursively
+ // building the XML tree. Simply skipping null values doesn't
+ // work because that would break the recursive chain.
+ constructor(parent) {
+ super(parent);
+ this.type = NodeType.Dummy;
+ }
+
+ // Creates and returns a deep clone of `this`
+ clone() {
+ return Object.create(this);
+ }
+
+ // Converts the XML fragment to string
+
+ // `options.pretty` pretty prints the result
+ // `options.indent` indentation for pretty print
+ // `options.offset` how many indentations to add to every line for pretty print
+ // `options.newline` newline sequence for pretty print
+ toString(options) {
+ return '';
+ }
+
+ };
+
+}).call(this);
+
+
/***/ }),
/***/ 248:
@@ -7542,6 +7301,14 @@ isStream.transform = function (stream) {
};
+/***/ }),
+
+/***/ 335:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = __webpack_require__(850);
+
+
/***/ }),
/***/ 336:
@@ -8495,17 +8262,9 @@ function Octokit (plugins, options) {
/***/ }),
/***/ 413:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-module.exports = __webpack_require__(141);
-
-
-/***/ }),
-
-/***/ 417:
/***/ (function(module) {
-module.exports = require("crypto");
+module.exports = require("stream");
/***/ }),
@@ -8537,7 +8296,7 @@ module.exports = require("crypto");
XMLProcessingInstruction = __webpack_require__(491);
- XMLDummy = __webpack_require__(956);
+ XMLDummy = __webpack_require__(216);
XMLDTDAttList = __webpack_require__(801);
@@ -9330,7 +9089,7 @@ Object.defineProperty(exports, '__esModule', { value: true });
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
-var Stream = _interopDefault(__webpack_require__(794));
+var Stream = _interopDefault(__webpack_require__(413));
var http = _interopDefault(__webpack_require__(605));
var Url = _interopDefault(__webpack_require__(835));
var https = _interopDefault(__webpack_require__(211));
@@ -12707,488 +12466,6 @@ const factory = __webpack_require__(47)
module.exports = factory()
-/***/ }),
-
-/***/ 533:
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const io = __importStar(__webpack_require__(1));
-const fs = __importStar(__webpack_require__(747));
-const os = __importStar(__webpack_require__(87));
-const path = __importStar(__webpack_require__(622));
-const httpm = __importStar(__webpack_require__(539));
-const semver = __importStar(__webpack_require__(280));
-const v4_1 = __importDefault(__webpack_require__(826));
-const exec_1 = __webpack_require__(986);
-const assert_1 = __webpack_require__(357);
-class HTTPError extends Error {
- constructor(httpStatusCode) {
- super(`Unexpected HTTP response: ${httpStatusCode}`);
- this.httpStatusCode = httpStatusCode;
- Object.setPrototypeOf(this, new.target.prototype);
- }
-}
-exports.HTTPError = HTTPError;
-const IS_WINDOWS = process.platform === 'win32';
-const userAgent = 'actions/tool-cache';
-// On load grab temp directory and cache directory and remove them from env (currently don't want to expose this)
-let tempDirectory = process.env['RUNNER_TEMP'] || '';
-let cacheRoot = process.env['RUNNER_TOOL_CACHE'] || '';
-// If directories not found, place them in common temp locations
-if (!tempDirectory || !cacheRoot) {
- let baseLocation;
- if (IS_WINDOWS) {
- // On windows use the USERPROFILE env variable
- baseLocation = process.env['USERPROFILE'] || 'C:\\';
- }
- else {
- if (process.platform === 'darwin') {
- baseLocation = '/Users';
- }
- else {
- baseLocation = '/home';
- }
- }
- if (!tempDirectory) {
- tempDirectory = path.join(baseLocation, 'actions', 'temp');
- }
- if (!cacheRoot) {
- cacheRoot = path.join(baseLocation, 'actions', 'cache');
- }
-}
-/**
- * Download a tool from an url and stream it into a file
- *
- * @param url url of tool to download
- * @param dest path to download tool
- * @returns path to downloaded tool
- */
-function downloadTool(url, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- // Wrap in a promise so that we can resolve from within stream callbacks
- return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
- try {
- const http = new httpm.HttpClient(userAgent, [], {
- allowRetries: true,
- maxRetries: 3
- });
- dest = dest || path.join(tempDirectory, v4_1.default());
- yield io.mkdirP(path.dirname(dest));
- core.debug(`Downloading ${url}`);
- core.debug(`Downloading ${dest}`);
- if (fs.existsSync(dest)) {
- throw new Error(`Destination file path ${dest} already exists`);
- }
- const response = yield http.get(url);
- if (response.message.statusCode !== 200) {
- const err = new HTTPError(response.message.statusCode);
- core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
- throw err;
- }
- const file = fs.createWriteStream(dest);
- file.on('open', () => __awaiter(this, void 0, void 0, function* () {
- try {
- const stream = response.message.pipe(file);
- stream.on('close', () => {
- core.debug('download complete');
- resolve(dest);
- });
- }
- catch (err) {
- core.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
- reject(err);
- }
- }));
- file.on('error', err => {
- file.end();
- reject(err);
- });
- }
- catch (err) {
- reject(err);
- }
- }));
- });
-}
-exports.downloadTool = downloadTool;
-/**
- * Extract a .7z file
- *
- * @param file path to the .7z file
- * @param dest destination directory. Optional.
- * @param _7zPath path to 7zr.exe. Optional, for long path support. Most .7z archives do not have this
- * problem. If your .7z archive contains very long paths, you can pass the path to 7zr.exe which will
- * gracefully handle long paths. By default 7zdec.exe is used because it is a very small program and is
- * bundled with the tool lib. However it does not support long paths. 7zr.exe is the reduced command line
- * interface, it is smaller than the full command line interface, and it does support long paths. At the
- * time of this writing, it is freely available from the LZMA SDK that is available on the 7zip website.
- * Be sure to check the current license agreement. If 7zr.exe is bundled with your action, then the path
- * to 7zr.exe can be pass to this function.
- * @returns path to the destination directory
- */
-function extract7z(file, dest, _7zPath) {
- return __awaiter(this, void 0, void 0, function* () {
- assert_1.ok(IS_WINDOWS, 'extract7z() not supported on current OS');
- assert_1.ok(file, 'parameter "file" is required');
- dest = yield _createExtractFolder(dest);
- const originalCwd = process.cwd();
- process.chdir(dest);
- if (_7zPath) {
- try {
- const args = [
- 'x',
- '-bb1',
- '-bd',
- '-sccUTF-8',
- file
- ];
- const options = {
- silent: true
- };
- yield exec_1.exec(`"${_7zPath}"`, args, options);
- }
- finally {
- process.chdir(originalCwd);
- }
- }
- else {
- const escapedScript = path
- .join(__dirname, '..', 'scripts', 'Invoke-7zdec.ps1')
- .replace(/'/g, "''")
- .replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
- const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`;
- const args = [
- '-NoLogo',
- '-Sta',
- '-NoProfile',
- '-NonInteractive',
- '-ExecutionPolicy',
- 'Unrestricted',
- '-Command',
- command
- ];
- const options = {
- silent: true
- };
- try {
- const powershellPath = yield io.which('powershell', true);
- yield exec_1.exec(`"${powershellPath}"`, args, options);
- }
- finally {
- process.chdir(originalCwd);
- }
- }
- return dest;
- });
-}
-exports.extract7z = extract7z;
-/**
- * Extract a compressed tar archive
- *
- * @param file path to the tar
- * @param dest destination directory. Optional.
- * @param flags flags for the tar command to use for extraction. Defaults to 'xz' (extracting gzipped tars). Optional.
- * @returns path to the destination directory
- */
-function extractTar(file, dest, flags = 'xz') {
- return __awaiter(this, void 0, void 0, function* () {
- if (!file) {
- throw new Error("parameter 'file' is required");
- }
- // Create dest
- dest = yield _createExtractFolder(dest);
- // Determine whether GNU tar
- let versionOutput = '';
- yield exec_1.exec('tar --version', [], {
- ignoreReturnCode: true,
- listeners: {
- stdout: (data) => (versionOutput += data.toString()),
- stderr: (data) => (versionOutput += data.toString())
- }
- });
- const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
- // Initialize args
- const args = [flags];
- let destArg = dest;
- let fileArg = file;
- if (IS_WINDOWS && isGnuTar) {
- args.push('--force-local');
- destArg = dest.replace(/\\/g, '/');
- // Technically only the dest needs to have `/` but for aesthetic consistency
- // convert slashes in the file arg too.
- fileArg = file.replace(/\\/g, '/');
- }
- if (isGnuTar) {
- // Suppress warnings when using GNU tar to extract archives created by BSD tar
- args.push('--warning=no-unknown-keyword');
- }
- args.push('-C', destArg, '-f', fileArg);
- yield exec_1.exec(`tar`, args);
- return dest;
- });
-}
-exports.extractTar = extractTar;
-/**
- * Extract a zip
- *
- * @param file path to the zip
- * @param dest destination directory. Optional.
- * @returns path to the destination directory
- */
-function extractZip(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- if (!file) {
- throw new Error("parameter 'file' is required");
- }
- dest = yield _createExtractFolder(dest);
- if (IS_WINDOWS) {
- yield extractZipWin(file, dest);
- }
- else {
- yield extractZipNix(file, dest);
- }
- return dest;
- });
-}
-exports.extractZip = extractZip;
-function extractZipWin(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- // build the powershell command
- const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
- const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, '');
- const command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
- // run powershell
- const powershellPath = yield io.which('powershell');
- const args = [
- '-NoLogo',
- '-Sta',
- '-NoProfile',
- '-NonInteractive',
- '-ExecutionPolicy',
- 'Unrestricted',
- '-Command',
- command
- ];
- yield exec_1.exec(`"${powershellPath}"`, args);
- });
-}
-function extractZipNix(file, dest) {
- return __awaiter(this, void 0, void 0, function* () {
- const unzipPath = yield io.which('unzip');
- yield exec_1.exec(`"${unzipPath}"`, [file], { cwd: dest });
- });
-}
-/**
- * Caches a directory and installs it into the tool cacheDir
- *
- * @param sourceDir the directory to cache into tools
- * @param tool tool name
- * @param version version of the tool. semver format
- * @param arch architecture of the tool. Optional. Defaults to machine architecture
- */
-function cacheDir(sourceDir, tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- version = semver.clean(version) || version;
- arch = arch || os.arch();
- core.debug(`Caching tool ${tool} ${version} ${arch}`);
- core.debug(`source dir: ${sourceDir}`);
- if (!fs.statSync(sourceDir).isDirectory()) {
- throw new Error('sourceDir is not a directory');
- }
- // Create the tool dir
- const destPath = yield _createToolPath(tool, version, arch);
- // copy each child item. do not move. move can fail on Windows
- // due to anti-virus software having an open handle on a file.
- for (const itemName of fs.readdirSync(sourceDir)) {
- const s = path.join(sourceDir, itemName);
- yield io.cp(s, destPath, { recursive: true });
- }
- // write .complete
- _completeToolPath(tool, version, arch);
- return destPath;
- });
-}
-exports.cacheDir = cacheDir;
-/**
- * Caches a downloaded file (GUID) and installs it
- * into the tool cache with a given targetName
- *
- * @param sourceFile the file to cache into tools. Typically a result of downloadTool which is a guid.
- * @param targetFile the name of the file name in the tools directory
- * @param tool tool name
- * @param version version of the tool. semver format
- * @param arch architecture of the tool. Optional. Defaults to machine architecture
- */
-function cacheFile(sourceFile, targetFile, tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- version = semver.clean(version) || version;
- arch = arch || os.arch();
- core.debug(`Caching tool ${tool} ${version} ${arch}`);
- core.debug(`source file: ${sourceFile}`);
- if (!fs.statSync(sourceFile).isFile()) {
- throw new Error('sourceFile is not a file');
- }
- // create the tool dir
- const destFolder = yield _createToolPath(tool, version, arch);
- // copy instead of move. move can fail on Windows due to
- // anti-virus software having an open handle on a file.
- const destPath = path.join(destFolder, targetFile);
- core.debug(`destination file ${destPath}`);
- yield io.cp(sourceFile, destPath);
- // write .complete
- _completeToolPath(tool, version, arch);
- return destFolder;
- });
-}
-exports.cacheFile = cacheFile;
-/**
- * Finds the path to a tool version in the local installed tool cache
- *
- * @param toolName name of the tool
- * @param versionSpec version of the tool
- * @param arch optional arch. defaults to arch of computer
- */
-function find(toolName, versionSpec, arch) {
- if (!toolName) {
- throw new Error('toolName parameter is required');
- }
- if (!versionSpec) {
- throw new Error('versionSpec parameter is required');
- }
- arch = arch || os.arch();
- // attempt to resolve an explicit version
- if (!_isExplicitVersion(versionSpec)) {
- const localVersions = findAllVersions(toolName, arch);
- const match = _evaluateVersions(localVersions, versionSpec);
- versionSpec = match;
- }
- // check for the explicit version in the cache
- let toolPath = '';
- if (versionSpec) {
- versionSpec = semver.clean(versionSpec) || '';
- const cachePath = path.join(cacheRoot, toolName, versionSpec, arch);
- core.debug(`checking cache: ${cachePath}`);
- if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) {
- core.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
- toolPath = cachePath;
- }
- else {
- core.debug('not found');
- }
- }
- return toolPath;
-}
-exports.find = find;
-/**
- * Finds the paths to all versions of a tool that are installed in the local tool cache
- *
- * @param toolName name of the tool
- * @param arch optional arch. defaults to arch of computer
- */
-function findAllVersions(toolName, arch) {
- const versions = [];
- arch = arch || os.arch();
- const toolPath = path.join(cacheRoot, toolName);
- if (fs.existsSync(toolPath)) {
- const children = fs.readdirSync(toolPath);
- for (const child of children) {
- if (_isExplicitVersion(child)) {
- const fullPath = path.join(toolPath, child, arch || '');
- if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) {
- versions.push(child);
- }
- }
- }
- }
- return versions;
-}
-exports.findAllVersions = findAllVersions;
-function _createExtractFolder(dest) {
- return __awaiter(this, void 0, void 0, function* () {
- if (!dest) {
- // create a temp dir
- dest = path.join(tempDirectory, v4_1.default());
- }
- yield io.mkdirP(dest);
- return dest;
- });
-}
-function _createToolPath(tool, version, arch) {
- return __awaiter(this, void 0, void 0, function* () {
- const folderPath = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
- core.debug(`destination ${folderPath}`);
- const markerPath = `${folderPath}.complete`;
- yield io.rmRF(folderPath);
- yield io.rmRF(markerPath);
- yield io.mkdirP(folderPath);
- return folderPath;
- });
-}
-function _completeToolPath(tool, version, arch) {
- const folderPath = path.join(cacheRoot, tool, semver.clean(version) || version, arch || '');
- const markerPath = `${folderPath}.complete`;
- fs.writeFileSync(markerPath, '');
- core.debug('finished caching tool');
-}
-function _isExplicitVersion(versionSpec) {
- const c = semver.clean(versionSpec) || '';
- core.debug(`isExplicit: ${c}`);
- const valid = semver.valid(c) != null;
- core.debug(`explicit? ${valid}`);
- return valid;
-}
-function _evaluateVersions(versions, versionSpec) {
- let version = '';
- core.debug(`evaluating ${versions.length} versions`);
- versions = versions.sort((a, b) => {
- if (semver.gt(a, b)) {
- return 1;
- }
- return -1;
- });
- for (let i = versions.length - 1; i >= 0; i--) {
- const potential = versions[i];
- const satisfied = semver.satisfies(potential, versionSpec);
- if (satisfied) {
- version = potential;
- break;
- }
- }
- if (version) {
- core.debug(`matched: ${version}`);
- }
- else {
- core.debug('match not found');
- }
- return version;
-}
-//# sourceMappingURL=tool-cache.js.map
-
/***/ }),
/***/ 536:
@@ -13626,7 +12903,7 @@ class HttpClient {
if (useProxy) {
// If using proxy, need tunnel
if (!tunnel) {
- tunnel = __webpack_require__(413);
+ tunnel = __webpack_require__(335);
}
const agentOptions = {
maxSockets: maxSockets,
@@ -14870,7 +14147,7 @@ const CORE_PLUGINS = [
__webpack_require__(586),
__webpack_require__(430),
- __webpack_require__(850) // deprecated: remove in v17
+ __webpack_require__(956) // deprecated: remove in v17
]
module.exports = Octokit.plugin(CORE_PLUGINS)
@@ -15860,37 +15137,6 @@ module.exports = {"activity":{"checkStarringRepo":{"method":"GET","params":{"own
}).call(this);
-/***/ }),
-
-/***/ 722:
-/***/ (function(module) {
-
-/**
- * Convert array of 16 byte values to UUID string format of the form:
- * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
- */
-var byteToHex = [];
-for (var i = 0; i < 256; ++i) {
- byteToHex[i] = (i + 0x100).toString(16).substr(1);
-}
-
-function bytesToUuid(buf, offset) {
- var i = offset || 0;
- var bth = byteToHex;
- // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
- return ([bth[buf[i++]], bth[buf[i++]],
- bth[buf[i++]], bth[buf[i++]], '-',
- bth[buf[i++]], bth[buf[i++]], '-',
- bth[buf[i++]], bth[buf[i++]], '-',
- bth[buf[i++]], bth[buf[i++]], '-',
- bth[buf[i++]], bth[buf[i++]],
- bth[buf[i++]], bth[buf[i++]],
- bth[buf[i++]], bth[buf[i++]]]).join('');
-}
-
-module.exports = bytesToUuid;
-
-
/***/ }),
/***/ 724:
@@ -16044,7 +15290,7 @@ exports.convertToJson = convertToJson;
XMLRaw = __webpack_require__(660);
XMLText = __webpack_require__(708);
XMLProcessingInstruction = __webpack_require__(491);
- XMLDummy = __webpack_require__(956);
+ XMLDummy = __webpack_require__(216);
NodeType = __webpack_require__(683);
XMLNodeList = __webpack_require__(300);
XMLNamedNodeMap = __webpack_require__(451);
@@ -17394,39 +16640,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
Object.defineProperty(exports, "__esModule", { value: true });
exports.DotnetCoreInstaller = exports.DotNetVersionInfo = void 0;
// Load tempDirectory before it gets wiped by tool-cache
-let tempDirectory = process.env['RUNNER_TEMPDIRECTORY'] || '';
const core = __importStar(__webpack_require__(470));
const exec = __importStar(__webpack_require__(986));
const io = __importStar(__webpack_require__(1));
-const tc = __importStar(__webpack_require__(533));
const hc = __webpack_require__(539);
const fs_1 = __webpack_require__(747);
-const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
const semver = __importStar(__webpack_require__(280));
const IS_WINDOWS = process.platform === 'win32';
-if (!tempDirectory) {
- let baseLocation;
- if (IS_WINDOWS) {
- // On windows use the USERPROFILE env variable
- baseLocation = process.env['USERPROFILE'] || 'C:\\';
- }
- else {
- if (process.platform === 'darwin') {
- baseLocation = '/Users';
- }
- else {
- baseLocation = '/home';
- }
- }
- tempDirectory = path.join(baseLocation, 'actions', 'temp');
-}
/**
* Represents the inputted version information
*/
class DotNetVersionInfo {
constructor(version) {
this.isExactVersionSet = false;
+ this.inputVersion = version;
// Check for exact match
if (semver.valid(semver.clean(version) || '') != null) {
this.fullversion = semver.clean(version);
@@ -17472,77 +16700,47 @@ class DotNetVersionInfo {
}
}
exports.DotNetVersionInfo = DotNetVersionInfo;
-/**
- * Represents a resolved version from the Web-Api
- */
-class ResolvedVersionInfo {
- constructor(downloadUrls, resolvedVersion) {
- if (downloadUrls.length === 0) {
- throw 'DownloadUrls can not be empty';
- }
- if (!resolvedVersion) {
- throw 'Resolved version is invalid';
- }
- this.downloadUrls = downloadUrls;
- this.resolvedVersion = resolvedVersion;
- }
-}
class DotnetCoreInstaller {
constructor(version) {
- this.versionInfo = new DotNetVersionInfo(version);
- this.cachedToolName = 'dncs';
- this.arch = 'x64';
+ this.version = version;
}
installDotnet() {
return __awaiter(this, void 0, void 0, function* () {
- // Check cache
- let toolPath = '';
- let osSuffixes = yield this.detectMachineOS();
- let parts = osSuffixes[0].split('-');
- if (parts.length > 1) {
- this.arch = parts[1];
- }
- // If version is not generic -> look up cache
- if (this.versionInfo.isExactVersion())
- toolPath = this.getLocalTool(this.versionInfo.version());
- if (!toolPath) {
- // download, extract, cache
- console.log('Getting a download url', this.versionInfo.version());
- let resolvedVersionInfo = yield this.resolveInfos(osSuffixes, this.versionInfo);
- //Check if cache exists for resolved version
- toolPath = this.getLocalTool(resolvedVersionInfo.resolvedVersion);
- if (!toolPath) {
- //If not exists install it
- toolPath = yield this.downloadAndInstall(resolvedVersionInfo);
- }
- else {
- console.log('Using cached tool');
- }
- }
- else {
- console.log('Using cached tool');
- }
- // Need to set this so that .NET Core global tools find the right locations.
- core.exportVariable('DOTNET_ROOT', toolPath);
- // Prepend the tools path. instructs the agent to prepend for future tasks
- core.addPath(toolPath);
- });
- }
- getLocalTool(version) {
- console.log('Checking tool cache', version);
- return tc.find(this.cachedToolName, version, this.arch);
- }
- detectMachineOS() {
- return __awaiter(this, void 0, void 0, function* () {
- let osSuffix = [];
let output = '';
let resultCode = 0;
+ let calculatedVersion = yield this.resolveVersion(new DotNetVersionInfo(this.version));
+ var envVariables = {};
+ for (let key in process.env) {
+ if (process.env[key]) {
+ let value = process.env[key];
+ envVariables[key] = value;
+ }
+ }
if (IS_WINDOWS) {
let escapedScript = path
- .join(__dirname, '..', 'externals', 'get-os-platform.ps1')
+ .join(__dirname, '..', 'externals', 'install-dotnet.ps1')
.replace(/'/g, "''");
let command = `& '${escapedScript}'`;
+ if (calculatedVersion) {
+ command += ` -Version ${calculatedVersion}`;
+ }
+ if (process.env['https_proxy'] != null) {
+ command += ` -ProxyAddress ${process.env['https_proxy']}`;
+ }
+ // This is not currently an option
+ if (process.env['no_proxy'] != null) {
+ command += ` -ProxyBypassList ${process.env['no_proxy']}`;
+ }
+ // process.env must be explicitly passed in for DOTNET_INSTALL_DIR to be used
const powershellPath = yield io.which('powershell', true);
+ var options = {
+ listeners: {
+ stdout: (data) => {
+ output += data.toString();
+ }
+ },
+ env: envVariables
+ };
resultCode = yield exec.exec(`"${powershellPath}"`, [
'-NoLogo',
'-Sta',
@@ -17552,78 +16750,54 @@ class DotnetCoreInstaller {
'Unrestricted',
'-Command',
command
- ], {
- listeners: {
- stdout: (data) => {
- output += data.toString();
- }
- }
- });
+ ], options);
}
else {
- let scriptPath = path.join(__dirname, '..', 'externals', 'get-os-distro.sh');
- fs_1.chmodSync(scriptPath, '777');
- const toolPath = yield io.which(scriptPath, true);
- resultCode = yield exec.exec(`"${toolPath}"`, [], {
+ let escapedScript = path
+ .join(__dirname, '..', 'externals', 'install-dotnet.sh')
+ .replace(/'/g, "''");
+ fs_1.chmodSync(escapedScript, '777');
+ const scriptPath = yield io.which(escapedScript, true);
+ let scriptArguments = [];
+ if (this.version) {
+ scriptArguments.push('--version', this.version);
+ }
+ // process.env must be explicitly passed in for DOTNET_INSTALL_DIR to be used
+ resultCode = yield exec.exec(`"${scriptPath}"`, scriptArguments, {
listeners: {
stdout: (data) => {
output += data.toString();
}
- }
+ },
+ env: envVariables
});
}
+ if (process.env['DOTNET_INSTALL_DIR']) {
+ core.addPath(process.env['DOTNET_INSTALL_DIR']);
+ }
+ else {
+ if (IS_WINDOWS) {
+ // This is the default set in install-dotnet.ps1
+ core.addPath(path.join(process.env['LocalAppData'] + '', 'Microsoft', 'dotnet'));
+ core.exportVariable('DOTNET_ROOT', path.join(process.env['LocalAppData'] + '', 'Microsoft', 'dotnet'));
+ }
+ else {
+ // This is the default set in install-dotnet.sh
+ core.addPath(path.join(process.env['HOME'] + '', '.dotnet'));
+ }
+ }
+ console.log(process.env['PATH']);
if (resultCode != 0) {
- throw `Failed to detect os with result code ${resultCode}. Output: ${output}`;
+ throw `Failed to install dotnet ${resultCode}. ${output}`;
}
- let index;
- if ((index = output.indexOf('Primary:')) >= 0) {
- let primary = output.substr(index + 'Primary:'.length).split(os.EOL)[0];
- osSuffix.push(primary);
- }
- if ((index = output.indexOf('Legacy:')) >= 0) {
- let legacy = output.substr(index + 'Legacy:'.length).split(os.EOL)[0];
- osSuffix.push(legacy);
- }
- if (osSuffix.length == 0) {
- throw 'Could not detect platform';
- }
- return osSuffix;
});
}
- downloadAndInstall(resolvedVersionInfo) {
- return __awaiter(this, void 0, void 0, function* () {
- let downloaded = false;
- let downloadPath = '';
- for (const url of resolvedVersionInfo.downloadUrls) {
- try {
- downloadPath = yield tc.downloadTool(url);
- downloaded = true;
- break;
- }
- catch (error) {
- console.log('Could not Download', url, JSON.stringify(error));
- }
- }
- if (!downloaded) {
- throw 'Failed to download package';
- }
- // extract
- console.log('Extracting Package', downloadPath);
- let extPath = IS_WINDOWS
- ? yield tc.extractZip(downloadPath)
- : yield tc.extractTar(downloadPath);
- // cache tool
- console.log('Caching tool');
- let cachedDir = yield tc.cacheDir(extPath, this.cachedToolName, resolvedVersionInfo.resolvedVersion, this.arch);
- console.log('Successfully installed', resolvedVersionInfo.resolvedVersion);
- return cachedDir;
- });
- }
- // OsSuffixes - The suffix which is a part of the file name ex- linux-x64, windows-x86
- // Type - SDK / Runtime
// versionInfo - versionInfo of the SDK/Runtime
- resolveInfos(osSuffixes, versionInfo) {
+ resolveVersion(versionInfo) {
return __awaiter(this, void 0, void 0, function* () {
+ if (versionInfo.isExactVersion()) {
+ return versionInfo.version();
+ }
const httpClient = new hc.HttpClient('actions/setup-dotnet', [], {
allowRetries: true,
maxRetries: 3
@@ -17637,48 +16811,15 @@ class DotnetCoreInstaller {
semver.satisfies(releaseInfo['sdk']['version-display'], versionInfo.version()));
});
// Exclude versions that are newer than the latest if using not exact
- if (!versionInfo.isExactVersion()) {
- let latestSdk = releasesResult['latest-sdk'];
- releasesInfo = releasesInfo.filter((releaseInfo) => semver.lte(releaseInfo['sdk']['version'], latestSdk));
- }
+ let latestSdk = releasesResult['latest-sdk'];
+ releasesInfo = releasesInfo.filter((releaseInfo) => semver.lte(releaseInfo['sdk']['version'], latestSdk));
// Sort for latest version
releasesInfo = releasesInfo.sort((a, b) => semver.rcompare(a['sdk']['version'], b['sdk']['version']));
- let downloadedVersion = '';
- let downloadUrls = [];
- if (releasesInfo.length != 0) {
- let release = releasesInfo[0];
- downloadedVersion = release['sdk']['version'];
- let files = release['sdk']['files'];
- files = files.filter((file) => {
- if (file['rid'] == osSuffixes[0] || file['rid'] == osSuffixes[1]) {
- return (file['url'].endsWith('.zip') || file['url'].endsWith('.tar.gz'));
- }
- });
- if (files.length > 0) {
- files.forEach((file) => {
- downloadUrls.push(file['url']);
- });
- }
- else {
- throw `The specified version's download links are not correctly formed in the supported versions document => ${releasesJsonUrl}`;
- }
+ if (releasesInfo.length == 0) {
+                throw `Could not find dotnet core version. Please ensure that the specified version ${versionInfo.inputVersion} is valid.`;
}
- else {
- console.log(`Could not fetch download information for version ${versionInfo.version()}`);
- if (versionInfo.isExactVersion()) {
- console.log('Using fallback');
- downloadUrls = yield this.getFallbackDownloadUrls(versionInfo.version());
- downloadedVersion = versionInfo.version();
- }
- else {
- console.log('Unable to use fallback, version is generic!');
- }
- }
- if (downloadUrls.length == 0) {
- throw `Could not construct download URL. Please ensure that specified version ${versionInfo.version()}/${downloadedVersion} is valid.`;
- }
- core.debug(`Got download urls ${downloadUrls}`);
- return new ResolvedVersionInfo(downloadUrls, downloadedVersion);
+ let release = releasesInfo[0];
+ return release['sdk']['version'];
});
}
getReleasesJsonUrl(httpClient, versionParts) {
@@ -17700,85 +16841,6 @@ class DotnetCoreInstaller {
return releasesInfo[0]['releases.json'];
});
}
- getFallbackDownloadUrls(version) {
- return __awaiter(this, void 0, void 0, function* () {
- let primaryUrlSearchString;
- let legacyUrlSearchString;
- let output = '';
- let resultCode = 0;
- if (IS_WINDOWS) {
- let escapedScript = path
- .join(__dirname, '..', 'externals', 'install-dotnet.ps1')
- .replace(/'/g, "''");
- let command = `& '${escapedScript}' -Version ${version} -DryRun`;
- const powershellPath = yield io.which('powershell', true);
- resultCode = yield exec.exec(`"${powershellPath}"`, [
- '-NoLogo',
- '-Sta',
- '-NoProfile',
- '-NonInteractive',
- '-ExecutionPolicy',
- 'Unrestricted',
- '-Command',
- command
- ], {
- listeners: {
- stdout: (data) => {
- output += data.toString();
- }
- }
- });
- primaryUrlSearchString = 'dotnet-install: Primary named payload URL: ';
- legacyUrlSearchString = 'dotnet-install: Legacy named payload URL: ';
- }
- else {
- let escapedScript = path
- .join(__dirname, '..', 'externals', 'install-dotnet.sh')
- .replace(/'/g, "''");
- fs_1.chmodSync(escapedScript, '777');
- const scriptPath = yield io.which(escapedScript, true);
- resultCode = yield exec.exec(`"${scriptPath}"`, ['--version', version, '--dry-run'], {
- listeners: {
- stdout: (data) => {
- output += data.toString();
- }
- }
- });
- primaryUrlSearchString = 'dotnet-install: Primary named payload URL: ';
- legacyUrlSearchString = 'dotnet-install: Legacy named payload URL: ';
- }
- if (resultCode != 0) {
- throw `Failed to get download urls with result code ${resultCode}. ${output}`;
- }
- let primaryUrl = '';
- let legacyUrl = '';
- if (!!output && output.length > 0) {
- let lines = output.split(os.EOL);
- // Fallback to \n if initial split doesn't work (not consistent across versions)
- if (lines.length === 1) {
- lines = output.split('\n');
- }
- if (!!lines && lines.length > 0) {
- lines.forEach((line) => {
- if (!line) {
- return;
- }
- var primarySearchStringIndex = line.indexOf(primaryUrlSearchString);
- if (primarySearchStringIndex > -1) {
- primaryUrl = line.substring(primarySearchStringIndex + primaryUrlSearchString.length);
- return;
- }
- var legacySearchStringIndex = line.indexOf(legacyUrlSearchString);
- if (legacySearchStringIndex > -1) {
- legacyUrl = line.substring(legacySearchStringIndex + legacyUrlSearchString.length);
- return;
- }
- });
- }
- }
- return [primaryUrl, legacyUrl];
- });
- }
}
exports.DotnetCoreInstaller = DotnetCoreInstaller;
const DotNetCoreIndexUrl = 'https://dotnetcli.blob.core.windows.net/dotnet/release-metadata/releases-index.json';
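
DotNetCoreIndexUrl above is the .NET release-metadata index consumed by getReleasesJsonUrl (shown only in part here and in src/installer.ts below): the channel's releases.json URL is looked up before resolveVersion filters SDK versions. A rough, self-contained sketch of that lookup, assuming the index exposes 'channel-version' and 'releases.json' fields (the helper name and signature are illustrative, not the action's):

```ts
import hc = require('@actions/http-client');

const indexUrl =
  'https://dotnetcli.blob.core.windows.net/dotnet/release-metadata/releases-index.json';

// Sketch: find the releases.json URL for a channel such as '3.1'.
// Assumes the documented releases-index.json shape.
async function lookupReleasesJson(channel: string): Promise<string> {
  const httpClient = new hc.HttpClient('actions/setup-dotnet', [], {
    allowRetries: true,
    maxRetries: 3
  });
  const response = await httpClient.get(indexUrl);
  const index: any[] = JSON.parse(await response.readBody())['releases-index'];
  const matches = index.filter(entry => entry['channel-version'] === channel);
  return matches[0]['releases.json'];
}
```
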
@@ -18051,13 +17113,6 @@ function getFirstPage (octokit, link, headers) {
}
-/***/ }),
-
-/***/ 794:
-/***/ (function(module) {
-
-module.exports = require("stream");
-
/***/ }),
/***/ 796:
@@ -18714,42 +17769,6 @@ function sync (path, options) {
}
-/***/ }),
-
-/***/ 826:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var rng = __webpack_require__(139);
-var bytesToUuid = __webpack_require__(722);
-
-function v4(options, buf, offset) {
- var i = buf && offset || 0;
-
- if (typeof(options) == 'string') {
- buf = options === 'binary' ? new Array(16) : null;
- options = null;
- }
- options = options || {};
-
- var rnds = options.random || (options.rng || rng)();
-
- // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
- rnds[6] = (rnds[6] & 0x0f) | 0x40;
- rnds[8] = (rnds[8] & 0x3f) | 0x80;
-
- // Copy bytes to buffer, if provided
- if (buf) {
- for (var ii = 0; ii < 16; ++ii) {
- buf[i + ii] = rnds[ii];
- }
- }
-
- return buf || bytesToUuid(rnds);
-}
-
-module.exports = v4;
-
-
/***/ }),
/***/ 835:
@@ -18760,21 +17779,274 @@ module.exports = require("url");
/***/ }),
/***/ 850:
-/***/ (function(module, __unusedexports, __webpack_require__) {
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
-module.exports = paginationMethodsPlugin
+"use strict";
-function paginationMethodsPlugin (octokit) {
- octokit.getFirstPage = __webpack_require__(777).bind(null, octokit)
- octokit.getLastPage = __webpack_require__(649).bind(null, octokit)
- octokit.getNextPage = __webpack_require__(550).bind(null, octokit)
- octokit.getPreviousPage = __webpack_require__(563).bind(null, octokit)
- octokit.hasFirstPage = __webpack_require__(536)
- octokit.hasLastPage = __webpack_require__(336)
- octokit.hasNextPage = __webpack_require__(929)
- octokit.hasPreviousPage = __webpack_require__(558)
+
+var net = __webpack_require__(631);
+var tls = __webpack_require__(16);
+var http = __webpack_require__(605);
+var https = __webpack_require__(211);
+var events = __webpack_require__(614);
+var assert = __webpack_require__(357);
+var util = __webpack_require__(669);
+
+
+exports.httpOverHttp = httpOverHttp;
+exports.httpsOverHttp = httpsOverHttp;
+exports.httpOverHttps = httpOverHttps;
+exports.httpsOverHttps = httpsOverHttps;
+
+
+function httpOverHttp(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = http.request;
+ return agent;
}
+function httpsOverHttp(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = http.request;
+ agent.createSocket = createSecureSocket;
+ agent.defaultPort = 443;
+ return agent;
+}
+
+function httpOverHttps(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = https.request;
+ return agent;
+}
+
+function httpsOverHttps(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = https.request;
+ agent.createSocket = createSecureSocket;
+ agent.defaultPort = 443;
+ return agent;
+}
+
+
+function TunnelingAgent(options) {
+ var self = this;
+ self.options = options || {};
+ self.proxyOptions = self.options.proxy || {};
+ self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
+ self.requests = [];
+ self.sockets = [];
+
+ self.on('free', function onFree(socket, host, port, localAddress) {
+ var options = toOptions(host, port, localAddress);
+ for (var i = 0, len = self.requests.length; i < len; ++i) {
+ var pending = self.requests[i];
+ if (pending.host === options.host && pending.port === options.port) {
+ // Detect the request to connect same origin server,
+ // reuse the connection.
+ self.requests.splice(i, 1);
+ pending.request.onSocket(socket);
+ return;
+ }
+ }
+ socket.destroy();
+ self.removeSocket(socket);
+ });
+}
+util.inherits(TunnelingAgent, events.EventEmitter);
+
+TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
+ var self = this;
+ var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
+
+ if (self.sockets.length >= this.maxSockets) {
+ // We are over limit so we'll add it to the queue.
+ self.requests.push(options);
+ return;
+ }
+
+ // If we are under maxSockets create a new one.
+ self.createSocket(options, function(socket) {
+ socket.on('free', onFree);
+ socket.on('close', onCloseOrRemove);
+ socket.on('agentRemove', onCloseOrRemove);
+ req.onSocket(socket);
+
+ function onFree() {
+ self.emit('free', socket, options);
+ }
+
+ function onCloseOrRemove(err) {
+ self.removeSocket(socket);
+ socket.removeListener('free', onFree);
+ socket.removeListener('close', onCloseOrRemove);
+ socket.removeListener('agentRemove', onCloseOrRemove);
+ }
+ });
+};
+
+TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
+ var self = this;
+ var placeholder = {};
+ self.sockets.push(placeholder);
+
+ var connectOptions = mergeOptions({}, self.proxyOptions, {
+ method: 'CONNECT',
+ path: options.host + ':' + options.port,
+ agent: false,
+ headers: {
+ host: options.host + ':' + options.port
+ }
+ });
+ if (options.localAddress) {
+ connectOptions.localAddress = options.localAddress;
+ }
+ if (connectOptions.proxyAuth) {
+ connectOptions.headers = connectOptions.headers || {};
+ connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
+ new Buffer(connectOptions.proxyAuth).toString('base64');
+ }
+
+ debug('making CONNECT request');
+ var connectReq = self.request(connectOptions);
+ connectReq.useChunkedEncodingByDefault = false; // for v0.6
+ connectReq.once('response', onResponse); // for v0.6
+ connectReq.once('upgrade', onUpgrade); // for v0.6
+ connectReq.once('connect', onConnect); // for v0.7 or later
+ connectReq.once('error', onError);
+ connectReq.end();
+
+ function onResponse(res) {
+ // Very hacky. This is necessary to avoid http-parser leaks.
+ res.upgrade = true;
+ }
+
+ function onUpgrade(res, socket, head) {
+ // Hacky.
+ process.nextTick(function() {
+ onConnect(res, socket, head);
+ });
+ }
+
+ function onConnect(res, socket, head) {
+ connectReq.removeAllListeners();
+ socket.removeAllListeners();
+
+ if (res.statusCode !== 200) {
+ debug('tunneling socket could not be established, statusCode=%d',
+ res.statusCode);
+ socket.destroy();
+ var error = new Error('tunneling socket could not be established, ' +
+ 'statusCode=' + res.statusCode);
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ return;
+ }
+ if (head.length > 0) {
+ debug('got illegal response body from proxy');
+ socket.destroy();
+ var error = new Error('got illegal response body from proxy');
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ return;
+ }
+ debug('tunneling connection has established');
+ self.sockets[self.sockets.indexOf(placeholder)] = socket;
+ return cb(socket);
+ }
+
+ function onError(cause) {
+ connectReq.removeAllListeners();
+
+ debug('tunneling socket could not be established, cause=%s\n',
+ cause.message, cause.stack);
+ var error = new Error('tunneling socket could not be established, ' +
+ 'cause=' + cause.message);
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ }
+};
+
+TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
+ var pos = this.sockets.indexOf(socket)
+ if (pos === -1) {
+ return;
+ }
+ this.sockets.splice(pos, 1);
+
+ var pending = this.requests.shift();
+ if (pending) {
+ // If we have pending requests and a socket gets closed a new one
+ // needs to be created to take over in the pool for the one that closed.
+ this.createSocket(pending, function(socket) {
+ pending.request.onSocket(socket);
+ });
+ }
+};
+
+function createSecureSocket(options, cb) {
+ var self = this;
+ TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
+ var hostHeader = options.request.getHeader('host');
+ var tlsOptions = mergeOptions({}, self.options, {
+ socket: socket,
+ servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
+ });
+
+ // 0 is dummy port for v0.6
+ var secureSocket = tls.connect(0, tlsOptions);
+ self.sockets[self.sockets.indexOf(socket)] = secureSocket;
+ cb(secureSocket);
+ });
+}
+
+
+function toOptions(host, port, localAddress) {
+ if (typeof host === 'string') { // since v0.10
+ return {
+ host: host,
+ port: port,
+ localAddress: localAddress
+ };
+ }
+ return host; // for v0.11 or later
+}
+
+function mergeOptions(target) {
+ for (var i = 1, len = arguments.length; i < len; ++i) {
+ var overrides = arguments[i];
+ if (typeof overrides === 'object') {
+ var keys = Object.keys(overrides);
+ for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
+ var k = keys[j];
+ if (overrides[k] !== undefined) {
+ target[k] = overrides[k];
+ }
+ }
+ }
+ }
+ return target;
+}
+
+
+var debug;
+if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
+ debug = function() {
+ var args = Array.prototype.slice.call(arguments);
+ if (typeof args[0] === 'string') {
+ args[0] = 'TUNNEL: ' + args[0];
+ } else {
+ args.unshift('TUNNEL:');
+ }
+ console.error.apply(console, args);
+ }
+} else {
+ debug = function() {};
+}
+exports.debug = debug; // for test
+
/***/ }),
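
The block added above is the vendored tunnel package that @actions/http-client relies on for the proxy scenarios exercised in the workflow. A hedged usage sketch (proxy host and port are placeholders): httpsOverHttp sends a CONNECT request to the proxy, then negotiates TLS over the tunneled socket.

```ts
import * as https from 'https';
// The vendored module above corresponds to the published 'tunnel' package.
import * as tunnel from 'tunnel';

// Placeholder proxy; in the workflow it comes from the https_proxy variable.
const agent = tunnel.httpsOverHttp({
  proxy: {host: 'squid-proxy', port: 3128}
});

// The agent issues CONNECT api.nuget.org:443 to the proxy, then upgrades to TLS.
https.get({host: 'api.nuget.org', path: '/v3/index.json', agent}, res => {
  console.log(res.statusCode);
});
```
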
@@ -21765,45 +21037,18 @@ module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd,
/***/ 956:
/***/ (function(module, __unusedexports, __webpack_require__) {
-// Generated by CoffeeScript 2.4.1
-(function() {
- var NodeType, XMLDummy, XMLNode;
+module.exports = paginationMethodsPlugin
- XMLNode = __webpack_require__(733);
-
- NodeType = __webpack_require__(683);
-
- // Represents a raw node
- module.exports = XMLDummy = class XMLDummy extends XMLNode {
- // Initializes a new instance of `XMLDummy`
-
- // `XMLDummy` is a special node representing a node with
- // a null value. Dummy nodes are created while recursively
- // building the XML tree. Simply skipping null values doesn't
- // work because that would break the recursive chain.
- constructor(parent) {
- super(parent);
- this.type = NodeType.Dummy;
- }
-
- // Creates and returns a deep clone of `this`
- clone() {
- return Object.create(this);
- }
-
- // Converts the XML fragment to string
-
- // `options.pretty` pretty prints the result
- // `options.indent` indentation for pretty print
- // `options.offset` how many indentations to add to every line for pretty print
- // `options.newline` newline sequence for pretty print
- toString(options) {
- return '';
- }
-
- };
-
-}).call(this);
+function paginationMethodsPlugin (octokit) {
+ octokit.getFirstPage = __webpack_require__(777).bind(null, octokit)
+ octokit.getLastPage = __webpack_require__(649).bind(null, octokit)
+ octokit.getNextPage = __webpack_require__(550).bind(null, octokit)
+ octokit.getPreviousPage = __webpack_require__(563).bind(null, octokit)
+ octokit.hasFirstPage = __webpack_require__(536)
+ octokit.hasLastPage = __webpack_require__(336)
+ octokit.hasNextPage = __webpack_require__(929)
+ octokit.hasPreviousPage = __webpack_require__(558)
+}
/***/ }),
@@ -21965,7 +21210,7 @@ exports.convert2nimn = convert2nimn;
"use strict";
-const {PassThrough} = __webpack_require__(794);
+const {PassThrough} = __webpack_require__(413);
module.exports = options => {
options = Object.assign({}, options);
diff --git a/externals/get-os-distro.sh b/externals/get-os-distro.sh
deleted file mode 100755
index 029dc6e..0000000
--- a/externals/get-os-distro.sh
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env bash
-# Copyright (c) .NET Foundation and contributors. All rights reserved.
-# Licensed under the MIT license. See LICENSE file in the project root for full license information.
-#
-
-# Stop script on NZEC
-set -e
-# Stop script if unbound variable found (use ${var:-} if intentional)
-set -u
-# By default cmd1 | cmd2 returns exit code of cmd2 regardless of cmd1 success
-# This is causing it to fail
-set -o pipefail
-
-# Use in the the functions: eval $invocation
-invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
-
-# standard output may be used as a return value in the functions
-# we need a way to write text on the screen in the functions so that
-# it won't interfere with the return value.
-# Exposing stream 3 as a pipe to standard output of the script itself
-exec 3>&1
-
-say_err() {
- printf "%b\n" "get-os-distro: Error: $1" >&2
-}
-
-# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets,
-# then and only then should the Linux distribution appear in this list.
-# Adding a Linux distribution to this list does not imply distribution-specific support.
-get_legacy_os_name_from_platform() {
-
- platform="$1"
- case "$platform" in
- "centos.7")
- echo "centos"
- return 0
- ;;
- "debian.8")
- echo "debian"
- return 0
- ;;
- "fedora.23")
- echo "fedora.23"
- return 0
- ;;
- "fedora.27")
- echo "fedora.27"
- return 0
- ;;
- "fedora.24")
- echo "fedora.24"
- return 0
- ;;
- "opensuse.13.2")
- echo "opensuse.13.2"
- return 0
- ;;
- "opensuse.42.1")
- echo "opensuse.42.1"
- return 0
- ;;
- "opensuse.42.3")
- echo "opensuse.42.3"
- return 0
- ;;
- "rhel.7"*)
- echo "rhel"
- return 0
- ;;
- "ubuntu.14.04")
- echo "ubuntu"
- return 0
- ;;
- "ubuntu.16.04")
- echo "ubuntu.16.04"
- return 0
- ;;
- "ubuntu.16.10")
- echo "ubuntu.16.10"
- return 0
- ;;
- "ubuntu.18.04")
- echo "ubuntu.18.04"
- return 0
- ;;
- "alpine.3.4.3")
- echo "alpine"
- return 0
- ;;
- esac
- return 1
-}
-
-get_linux_platform_name() {
-
- if [ -e /etc/os-release ]; then
- . /etc/os-release
- echo "$ID.$VERSION_ID"
- return 0
- elif [ -e /etc/redhat-release ]; then
-    local redhatRelease=$(</etc/redhat-release)
-  fi
-}
-
-get_machine_architecture() {
-
-  if command -v uname > /dev/null; then
-    CPUName=$(uname -m)
- case $CPUName in
- armv7l)
- echo "arm"
- return 0
- ;;
- aarch64)
- echo "arm64"
- return 0
- ;;
- esac
- fi
-
- # Always default to 'x64'
- echo "x64"
- return 0
-}
-
-osName=$(get_current_os_name || echo "")
-legacyOsName=$(get_legacy_os_name || echo "")
-arch=$(get_machine_architecture || echo "")
-
-primaryName="$osName-$arch"
-legacyName="$legacyOsName"
-
-echo "Primary:$primaryName"
-echo "Legacy:$legacyName"
-
-if [ -z "$osName" ] && [ -z "$legacyOsName" ];then
- exit 1
-fi
\ No newline at end of file
diff --git a/externals/get-os-platform.ps1 b/externals/get-os-platform.ps1
deleted file mode 100644
index 4ac8d07..0000000
--- a/externals/get-os-platform.ps1
+++ /dev/null
@@ -1,18 +0,0 @@
-function Get-Machine-Architecture()
-{
- # possible values: AMD64, IA64, x86
- return $ENV:PROCESSOR_ARCHITECTURE
-}
-
-function Get-CLIArchitecture-From-Architecture([string]$Architecture)
-{
- switch ($Architecture.ToLower())
- {
- { ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" }
- { $_ -eq "x86" } { return "x86" }
- default { throw "Architecture not supported. If you think this is a bug, please report it at https://github.com/dotnet/cli/issues" }
- }
-}
-
-$CLIArchitecture = Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture)
-Write-Output "Primary:win-$CLIArchitecture"
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 9a71bce..e2c85b0 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -32,6 +32,13 @@
"integrity": "sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==",
"requires": {
"tunnel": "0.0.6"
+ },
+ "dependencies": {
+ "tunnel": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
+ }
}
},
"@actions/io": {
@@ -39,19 +46,6 @@
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.2.tgz",
"integrity": "sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg=="
},
- "@actions/tool-cache": {
- "version": "1.3.1",
- "resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-1.3.1.tgz",
- "integrity": "sha512-sKoEJv0/c7WzjPEq2PO12Sc8QdEp58XIBHMm3c4lUn/iZWgLz9HBeCuFGpLQjDvXJNfLZ4g+WD+rMjgOmpH4Ag==",
- "requires": {
- "@actions/core": "^1.2.0",
- "@actions/exec": "^1.0.0",
- "@actions/http-client": "^1.0.3",
- "@actions/io": "^1.0.1",
- "semver": "^6.1.0",
- "uuid": "^3.3.2"
- }
- },
"@babel/code-frame": {
"version": "7.8.3",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz",
@@ -478,6 +472,14 @@
"jest-message-util": "^26.0.1",
"jest-util": "^26.0.1",
"slash": "^3.0.0"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"@jest/core": {
@@ -513,6 +515,14 @@
"rimraf": "^3.0.0",
"slash": "^3.0.0",
"strip-ansi": "^6.0.0"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"@jest/environment": {
@@ -581,6 +591,14 @@
"string-length": "^4.0.1",
"terminal-link": "^2.0.0",
"v8-to-istanbul": "^4.1.3"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"@jest/source-map": {
@@ -640,6 +658,14 @@
"slash": "^3.0.0",
"source-map": "^0.6.1",
"write-file-atomic": "^3.0.0"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"@jest/types": {
@@ -964,9 +990,9 @@
"dev": true
},
"@types/semver": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/@types/semver/-/semver-6.0.1.tgz",
- "integrity": "sha512-ffCdcrEE5h8DqVxinQjo+2d1q+FV5z7iNtPofw3JsrltSoSVlOGaW0rY8XxtO9XukdTn8TaCGWmk2VFGhI70mg==",
+ "version": "6.2.1",
+ "resolved": "https://registry.npmjs.org/@types/semver/-/semver-6.2.1.tgz",
+ "integrity": "sha512-+beqKQOh9PYxuHvijhVl+tIHvT6tuwOrE9m14zd+MT2A38KoKZhh7pYJ0SNleLtwDsiIxHDsIk9bv01oOxvSvA==",
"dev": true
},
"@types/stack-utils": {
@@ -1176,6 +1202,14 @@
"chalk": "^4.0.0",
"graceful-fs": "^4.2.4",
"slash": "^3.0.0"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"babel-plugin-istanbul": {
@@ -2304,6 +2338,14 @@
"please-upgrade-node": "^3.2.0",
"slash": "^3.0.0",
"which-pm-runs": "^1.0.0"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"iconv-lite": {
@@ -2938,6 +2980,14 @@
"micromatch": "^4.0.2",
"slash": "^3.0.0",
"stack-utils": "^2.0.2"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"jest-mock": {
@@ -2975,6 +3025,14 @@
"read-pkg-up": "^7.0.1",
"resolve": "^1.17.0",
"slash": "^3.0.0"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"jest-resolve-dependencies": {
@@ -3047,6 +3105,14 @@
"slash": "^3.0.0",
"strip-bom": "^4.0.0",
"yargs": "^15.3.1"
+ },
+ "dependencies": {
+ "slash": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
+ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
+ "dev": true
+ }
}
},
"jest-serializer": {
@@ -4345,12 +4411,6 @@
"integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==",
"dev": true
},
- "slash": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
- "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
- "dev": true
- },
"snapdragon": {
"version": "0.8.2",
"resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz",
@@ -4823,7 +4883,8 @@
"tunnel": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
- "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
+ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
+ "dev": true
},
"tunnel-agent": {
"version": "0.6.0",
@@ -4960,7 +5021,8 @@
"uuid": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
- "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
+ "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==",
+ "dev": true
},
"v8-to-istanbul": {
"version": "4.1.4",
diff --git a/package.json b/package.json
index dcba7e4..e5a436e 100644
--- a/package.json
+++ b/package.json
@@ -35,7 +35,6 @@
"@actions/github": "^1.1.0",
"@actions/http-client": "^1.0.8",
"@actions/io": "^1.0.2",
- "@actions/tool-cache": "^1.3.1",
"fast-xml-parser": "^3.15.1",
"semver": "^6.3.0",
"xmlbuilder": "^13.0.2"
@@ -52,5 +51,8 @@
"ts-jest": "^26.0.0",
"typescript": "^3.9.3",
"wget-improved": "^3.2.1"
+ },
+ "jest": {
+ "testEnvironment": "node"
}
}
diff --git a/src/installer.ts b/src/installer.ts
index 8d9e6cc..66032fc 100644
--- a/src/installer.ts
+++ b/src/installer.ts
@@ -1,40 +1,26 @@
// Load tempDirectory before it gets wiped by tool-cache
-let tempDirectory = process.env['RUNNER_TEMPDIRECTORY'] || '';
import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as io from '@actions/io';
-import * as tc from '@actions/tool-cache';
import hc = require('@actions/http-client');
import {chmodSync} from 'fs';
-import * as os from 'os';
import * as path from 'path';
+import {ExecOptions} from '@actions/exec/lib/interfaces';
import * as semver from 'semver';
const IS_WINDOWS = process.platform === 'win32';
-if (!tempDirectory) {
- let baseLocation;
- if (IS_WINDOWS) {
- // On windows use the USERPROFILE env variable
- baseLocation = process.env['USERPROFILE'] || 'C:\\';
- } else {
- if (process.platform === 'darwin') {
- baseLocation = '/Users';
- } else {
- baseLocation = '/home';
- }
- }
- tempDirectory = path.join(baseLocation, 'actions', 'temp');
-}
-
/**
* Represents the inputted version information
*/
export class DotNetVersionInfo {
+ public inputVersion: string;
private fullversion: string;
private isExactVersionSet: boolean = false;
constructor(version: string) {
+ this.inputVersion = version;
+
// Check for exact match
if (semver.valid(semver.clean(version) || '') != null) {
this.fullversion = semver.clean(version) as string;
@@ -89,91 +75,54 @@ export class DotNetVersionInfo {
}
}
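
The only change to DotNetVersionInfo is keeping the raw inputVersion for the error message in resolveVersion; the exact-vs-range split itself is unchanged. A small illustration, assuming (as the constructor above does) that anything semver can clean to a full version counts as exact:

```ts
import * as semver from 'semver';

// Illustration of the check the constructor performs: semver.valid(semver.clean(v) || '').
function isExact(version: string): boolean {
  return semver.valid(semver.clean(version) || '') != null;
}

console.log(isExact('3.1.201')); // true  -> used verbatim, no releases.json lookup
console.log(isExact('3.1.x'));   // false -> resolved against releases.json
console.log(isExact('3.1'));     // false -> handled by the constructor's range branch (not shown here)
```
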
-/**
- * Represents a resolved version from the Web-Api
- */
-class ResolvedVersionInfo {
- downloadUrls: string[];
- resolvedVersion: string;
-
- constructor(downloadUrls: string[], resolvedVersion: string) {
- if (downloadUrls.length === 0) {
- throw 'DownloadUrls can not be empty';
- }
-
- if (!resolvedVersion) {
- throw 'Resolved version is invalid';
- }
-
- this.downloadUrls = downloadUrls;
- this.resolvedVersion = resolvedVersion;
- }
-}
-
export class DotnetCoreInstaller {
constructor(version: string) {
- this.versionInfo = new DotNetVersionInfo(version);
- this.cachedToolName = 'dncs';
- this.arch = 'x64';
+ this.version = version;
}
public async installDotnet() {
- // Check cache
- let toolPath: string = '';
- let osSuffixes = await this.detectMachineOS();
- let parts = osSuffixes[0].split('-');
- if (parts.length > 1) {
- this.arch = parts[1];
- }
-
- // If version is not generic -> look up cache
- if (this.versionInfo.isExactVersion())
- toolPath = this.getLocalTool(this.versionInfo.version());
-
- if (!toolPath) {
- // download, extract, cache
- console.log('Getting a download url', this.versionInfo.version());
- let resolvedVersionInfo = await this.resolveInfos(
- osSuffixes,
- this.versionInfo
- );
-
- //Check if cache exists for resolved version
- toolPath = this.getLocalTool(resolvedVersionInfo.resolvedVersion);
- if (!toolPath) {
- //If not exists install it
- toolPath = await this.downloadAndInstall(resolvedVersionInfo);
- } else {
- console.log('Using cached tool');
- }
- } else {
- console.log('Using cached tool');
- }
-
- // Need to set this so that .NET Core global tools find the right locations.
- core.exportVariable('DOTNET_ROOT', toolPath);
-
- // Prepend the tools path. instructs the agent to prepend for future tasks
- core.addPath(toolPath);
- }
-
- private getLocalTool(version: string): string {
- console.log('Checking tool cache', version);
- return tc.find(this.cachedToolName, version, this.arch);
- }
-
-  private async detectMachineOS(): Promise<string[]> {
- let osSuffix: string[] = [];
let output = '';
-
let resultCode = 0;
+
+ let calculatedVersion = await this.resolveVersion(
+ new DotNetVersionInfo(this.version)
+ );
+
+ var envVariables: {[key: string]: string} = {};
+ for (let key in process.env) {
+ if (process.env[key]) {
+ let value: any = process.env[key];
+ envVariables[key] = value;
+ }
+ }
if (IS_WINDOWS) {
let escapedScript = path
- .join(__dirname, '..', 'externals', 'get-os-platform.ps1')
+ .join(__dirname, '..', 'externals', 'install-dotnet.ps1')
.replace(/'/g, "''");
let command = `& '${escapedScript}'`;
+ if (calculatedVersion) {
+ command += ` -Version ${calculatedVersion}`;
+ }
+ if (process.env['https_proxy'] != null) {
+ command += ` -ProxyAddress ${process.env['https_proxy']}`;
+ }
+ // This is not currently an option
+ if (process.env['no_proxy'] != null) {
+ command += ` -ProxyBypassList ${process.env['no_proxy']}`;
+ }
+ // process.env must be explicitly passed in for DOTNET_INSTALL_DIR to be used
const powershellPath = await io.which('powershell', true);
+
+ var options: ExecOptions = {
+ listeners: {
+ stdout: (data: Buffer) => {
+ output += data.toString();
+ }
+ },
+ env: envVariables
+ };
+
resultCode = await exec.exec(
`"${powershellPath}"`,
[
@@ -186,98 +135,63 @@ export class DotnetCoreInstaller {
'-Command',
command
],
- {
- listeners: {
- stdout: (data: Buffer) => {
- output += data.toString();
- }
- }
- }
+ options
);
} else {
- let scriptPath = path.join(
- __dirname,
- '..',
- 'externals',
- 'get-os-distro.sh'
- );
- chmodSync(scriptPath, '777');
+ let escapedScript = path
+ .join(__dirname, '..', 'externals', 'install-dotnet.sh')
+ .replace(/'/g, "''");
+ chmodSync(escapedScript, '777');
- const toolPath = await io.which(scriptPath, true);
- resultCode = await exec.exec(`"${toolPath}"`, [], {
+ const scriptPath = await io.which(escapedScript, true);
+
+ let scriptArguments: string[] = [];
+ if (this.version) {
+ scriptArguments.push('--version', this.version);
+ }
+
+ // process.env must be explicitly passed in for DOTNET_INSTALL_DIR to be used
+ resultCode = await exec.exec(`"${scriptPath}"`, scriptArguments, {
listeners: {
stdout: (data: Buffer) => {
output += data.toString();
}
- }
+ },
+ env: envVariables
});
}
- if (resultCode != 0) {
- throw `Failed to detect os with result code ${resultCode}. Output: ${output}`;
- }
-
- let index;
- if ((index = output.indexOf('Primary:')) >= 0) {
- let primary = output.substr(index + 'Primary:'.length).split(os.EOL)[0];
- osSuffix.push(primary);
- }
-
- if ((index = output.indexOf('Legacy:')) >= 0) {
- let legacy = output.substr(index + 'Legacy:'.length).split(os.EOL)[0];
- osSuffix.push(legacy);
- }
-
- if (osSuffix.length == 0) {
- throw 'Could not detect platform';
- }
-
- return osSuffix;
- }
-
- private async downloadAndInstall(resolvedVersionInfo: ResolvedVersionInfo) {
- let downloaded = false;
- let downloadPath = '';
- for (const url of resolvedVersionInfo.downloadUrls) {
- try {
- downloadPath = await tc.downloadTool(url);
- downloaded = true;
- break;
- } catch (error) {
- console.log('Could not Download', url, JSON.stringify(error));
+ if (process.env['DOTNET_INSTALL_DIR']) {
+ core.addPath(process.env['DOTNET_INSTALL_DIR']);
+ } else {
+ if (IS_WINDOWS) {
+ // This is the default set in install-dotnet.ps1
+ core.addPath(
+ path.join(process.env['LocalAppData'] + '', 'Microsoft', 'dotnet')
+ );
+ core.exportVariable(
+ 'DOTNET_ROOT',
+ path.join(process.env['LocalAppData'] + '', 'Microsoft', 'dotnet')
+ );
+ } else {
+ // This is the default set in install-dotnet.sh
+ core.addPath(path.join(process.env['HOME'] + '', '.dotnet'));
}
}
- if (!downloaded) {
- throw 'Failed to download package';
+ console.log(process.env['PATH']);
+
+ if (resultCode != 0) {
+      throw `Failed to install dotnet with result code ${resultCode}. ${output}`;
}
-
- // extract
- console.log('Extracting Package', downloadPath);
- let extPath: string = IS_WINDOWS
- ? await tc.extractZip(downloadPath)
- : await tc.extractTar(downloadPath);
-
- // cache tool
- console.log('Caching tool');
- let cachedDir = await tc.cacheDir(
- extPath,
- this.cachedToolName,
- resolvedVersionInfo.resolvedVersion,
- this.arch
- );
-
- console.log('Successfully installed', resolvedVersionInfo.resolvedVersion);
- return cachedDir;
}
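
installDotnet now defers all downloading to the bundled install scripts: it forwards the resolved version, the proxy settings on Windows (-ProxyAddress / -ProxyBypassList), and an explicit copy of process.env so DOTNET_INSTALL_DIR reaches the script. A condensed sketch of the non-Windows call under those assumptions (the helper name is illustrative):

```ts
import * as exec from '@actions/exec';
import {ExecOptions} from '@actions/exec/lib/interfaces';

// Sketch: run externals/install-dotnet.sh with an explicit environment copy,
// as the method above does; scriptPath and version are assumed to be precomputed.
async function runInstallScript(scriptPath: string, version: string): Promise<number> {
  const env: {[key: string]: string} = {};
  for (const key of Object.keys(process.env)) {
    const value = process.env[key];
    if (value) env[key] = value; // exec wants string values, so skip undefined entries
  }

  const options: ExecOptions = {env};
  return exec.exec(`"${scriptPath}"`, ['--version', version], options);
}
```
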
- // OsSuffixes - The suffix which is a part of the file name ex- linux-x64, windows-x86
- // Type - SDK / Runtime
// versionInfo - versionInfo of the SDK/Runtime
- async resolveInfos(
- osSuffixes: string[],
- versionInfo: DotNetVersionInfo
-  ): Promise<ResolvedVersionInfo> {
+  async resolveVersion(versionInfo: DotNetVersionInfo): Promise<string> {
+ if (versionInfo.isExactVersion()) {
+ return versionInfo.version();
+ }
+
const httpClient = new hc.HttpClient('actions/setup-dotnet', [], {
allowRetries: true,
maxRetries: 3
@@ -305,67 +219,23 @@ export class DotnetCoreInstaller {
});
// Exclude versions that are newer than the latest if using not exact
- if (!versionInfo.isExactVersion()) {
- let latestSdk: string = releasesResult['latest-sdk'];
+ let latestSdk: string = releasesResult['latest-sdk'];
- releasesInfo = releasesInfo.filter((releaseInfo: any) =>
- semver.lte(releaseInfo['sdk']['version'], latestSdk)
- );
- }
+ releasesInfo = releasesInfo.filter((releaseInfo: any) =>
+ semver.lte(releaseInfo['sdk']['version'], latestSdk)
+ );
// Sort for latest version
releasesInfo = releasesInfo.sort((a, b) =>
semver.rcompare(a['sdk']['version'], b['sdk']['version'])
);
- let downloadedVersion: string = '';
- let downloadUrls: string[] = [];
-
- if (releasesInfo.length != 0) {
- let release = releasesInfo[0];
-
- downloadedVersion = release['sdk']['version'];
-
- let files: any[] = release['sdk']['files'];
- files = files.filter((file: any) => {
- if (file['rid'] == osSuffixes[0] || file['rid'] == osSuffixes[1]) {
- return (
- file['url'].endsWith('.zip') || file['url'].endsWith('.tar.gz')
- );
- }
- });
-
- if (files.length > 0) {
- files.forEach((file: any) => {
- downloadUrls.push(file['url']);
- });
- } else {
- throw `The specified version's download links are not correctly formed in the supported versions document => ${releasesJsonUrl}`;
- }
- } else {
- console.log(
- `Could not fetch download information for version ${versionInfo.version()}`
- );
-
- if (versionInfo.isExactVersion()) {
- console.log('Using fallback');
-
- downloadUrls = await this.getFallbackDownloadUrls(
- versionInfo.version()
- );
- downloadedVersion = versionInfo.version();
- } else {
- console.log('Unable to use fallback, version is generic!');
- }
+ if (releasesInfo.length == 0) {
+      throw `Could not find dotnet core version. Please ensure that the specified version ${versionInfo.inputVersion} is valid.`;
}
- if (downloadUrls.length == 0) {
- throw `Could not construct download URL. Please ensure that specified version ${versionInfo.version()}/${downloadedVersion} is valid.`;
- }
-
- core.debug(`Got download urls ${downloadUrls}`);
-
- return new ResolvedVersionInfo(downloadUrls, downloadedVersion);
+ let release = releasesInfo[0];
+ return release['sdk']['version'];
}
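
In effect, resolveVersion short-circuits exact versions and otherwise returns the newest SDK in releases.json that satisfies the range without exceeding latest-sdk. A self-contained rendering of that selection step; the release objects below are made-up stand-ins for releases.json entries:

```ts
import * as semver from 'semver';

// Hypothetical releases.json excerpt, shaped like the entries filtered above.
const releases = [
  {sdk: {version: '3.1.100', 'version-display': '3.1.100'}},
  {sdk: {version: '3.1.201', 'version-display': '3.1.201'}},
  {sdk: {version: '3.1.300', 'version-display': '3.1.300'}}
];
const latestSdk = '3.1.201'; // stand-in for releasesResult['latest-sdk']
const range = '3.1.x';       // stand-in for versionInfo.version()

const candidates = releases
  .filter(r => semver.satisfies(r.sdk['version-display'], range))
  .filter(r => semver.lte(r.sdk.version, latestSdk))
  .sort((a, b) => semver.rcompare(a.sdk.version, b.sdk.version));

console.log(candidates[0].sdk.version); // '3.1.201'
```
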
private async getReleasesJsonUrl(
@@ -391,108 +261,7 @@ export class DotnetCoreInstaller {
return releasesInfo[0]['releases.json'];
}
-  private async getFallbackDownloadUrls(version: string): Promise<string[]> {
- let primaryUrlSearchString: string;
- let legacyUrlSearchString: string;
- let output = '';
- let resultCode = 0;
-
- if (IS_WINDOWS) {
- let escapedScript = path
- .join(__dirname, '..', 'externals', 'install-dotnet.ps1')
- .replace(/'/g, "''");
- let command = `& '${escapedScript}' -Version ${version} -DryRun`;
-
- const powershellPath = await io.which('powershell', true);
- resultCode = await exec.exec(
- `"${powershellPath}"`,
- [
- '-NoLogo',
- '-Sta',
- '-NoProfile',
- '-NonInteractive',
- '-ExecutionPolicy',
- 'Unrestricted',
- '-Command',
- command
- ],
- {
- listeners: {
- stdout: (data: Buffer) => {
- output += data.toString();
- }
- }
- }
- );
-
- primaryUrlSearchString = 'dotnet-install: Primary named payload URL: ';
- legacyUrlSearchString = 'dotnet-install: Legacy named payload URL: ';
- } else {
- let escapedScript = path
- .join(__dirname, '..', 'externals', 'install-dotnet.sh')
- .replace(/'/g, "''");
- chmodSync(escapedScript, '777');
-
- const scriptPath = await io.which(escapedScript, true);
- resultCode = await exec.exec(
- `"${scriptPath}"`,
- ['--version', version, '--dry-run'],
- {
- listeners: {
- stdout: (data: Buffer) => {
- output += data.toString();
- }
- }
- }
- );
-
- primaryUrlSearchString = 'dotnet-install: Primary named payload URL: ';
- legacyUrlSearchString = 'dotnet-install: Legacy named payload URL: ';
- }
-
- if (resultCode != 0) {
- throw `Failed to get download urls with result code ${resultCode}. ${output}`;
- }
-
- let primaryUrl: string = '';
- let legacyUrl: string = '';
- if (!!output && output.length > 0) {
- let lines: string[] = output.split(os.EOL);
-
- // Fallback to \n if initial split doesn't work (not consistent across versions)
- if (lines.length === 1) {
- lines = output.split('\n');
- }
- if (!!lines && lines.length > 0) {
- lines.forEach((line: string) => {
- if (!line) {
- return;
- }
- var primarySearchStringIndex = line.indexOf(primaryUrlSearchString);
- if (primarySearchStringIndex > -1) {
- primaryUrl = line.substring(
- primarySearchStringIndex + primaryUrlSearchString.length
- );
- return;
- }
-
- var legacySearchStringIndex = line.indexOf(legacyUrlSearchString);
- if (legacySearchStringIndex > -1) {
- legacyUrl = line.substring(
- legacySearchStringIndex + legacyUrlSearchString.length
- );
- return;
- }
- });
- }
- }
-
- return [primaryUrl, legacyUrl];
- }
-
- private versionInfo: DotNetVersionInfo;
- private cachedToolName: string;
- private arch: string;
+ private version: string;
}
const DotNetCoreIndexUrl: string =