8314 lines
260 KiB
JavaScript
8314 lines
260 KiB
JavaScript
|
module.exports =
/******/ (function(modules, runtime) { // webpackBootstrap
/******/   // `modules` is the id -> factory map passed in below; `runtime` is
/******/   // accepted but unused by this (older) webpack runtime template.
/******/   "use strict";
/******/   // The module cache: id -> { i, l, exports } for every module that
/******/   // has been required at least once.
/******/   var installedModules = {};
/******/
/******/   // The require function: webpack's stand-in for CommonJS require().
/******/   function __webpack_require__(moduleId) {
/******/
/******/     // Check if module is in cache; if so, return the memoized exports.
/******/     if(installedModules[moduleId]) {
/******/       return installedModules[moduleId].exports;
/******/     }
/******/     // Create a new module (and put it into the cache).
/******/     // i = module id, l = "loaded" flag, exports = public surface.
/******/     var module = installedModules[moduleId] = {
/******/       i: moduleId,
/******/       l: false,
/******/       exports: {}
/******/     };
/******/
/******/     // Execute the module function with `this` bound to its exports.
/******/     modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/     // Flag the module as loaded
/******/     module.l = true;
/******/
/******/     // Return the exports of the module
/******/     return module.exports;
/******/   }
/******/
/******/
/******/   // Base directory used to resolve bundled asset/native files
/******/   // (presumably the @zeit/ncc "asset base" convention — confirm against
/******/   // the bundler that produced this file).
/******/   __webpack_require__.ab = __dirname + "/";
/******/
/******/   // the startup function
/******/   function startup() {
/******/     // Load entry module and return exports.
/******/     // NOTE(review): module 385 is not visible in this chunk.
/******/     return __webpack_require__(385);
/******/   };
/******/
/******/   // run startup
/******/   return startup();
/******/ })
/************************************************************************/
/******/ ({
|
||
|
|
||
|
/***/ 11:
/***/ (function(module) {

// Vendored "wrappy" module.
//
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module.exports = wrappy
|
||
|
// Wrap a wrapper-maker `fn` so that own properties survive the wrapping:
// properties on `fn` are copied onto the returned wrapper, and properties
// on the callback (last argument) are copied onto whatever `fn` returns.
// Calling wrappy(fn, cb) is shorthand for wrappy(fn)(cb).
function wrappy (fn, cb) {
  // Two-argument form: wrap `fn`, then immediately apply it to `cb`.
  if (fn && cb) {
    return wrappy(fn)(cb)
  }

  if (typeof fn !== 'function') {
    throw new TypeError('need wrapper function')
  }

  // Preserve any decoration present on the wrapper-maker itself.
  Object.keys(fn).forEach(function (key) {
    wrapper[key] = fn[key]
  })

  return wrapper

  function wrapper () {
    var args = Array.prototype.slice.call(arguments)
    var ret = fn.apply(this, args)
    var lastArg = args[args.length - 1]
    // Carry decorations from the callback over to the returned function.
    if (typeof ret === 'function' && ret !== lastArg) {
      Object.keys(lastArg).forEach(function (key) {
        ret[key] = lastArg[key]
      })
    }
    return ret
  }
}
|
||
|
|
||
|
|
||
|
/***/ }),

/***/ 16:
/***/ (function(module) {

// Shim module: re-exports Node's built-in "tls" module.
module.exports = require("tls");

/***/ }),
|
||
|
|
||
|
/***/ 49:
/***/ (function(module, __unusedexports, __webpack_require__) {

// Vendored "once" module: ensure a callback runs at most one time.
// Exported through wrappy (module 11) so decorations on wrapped
// callbacks are preserved.
var wrappy = __webpack_require__(11)
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)

// Opt-in prototype extension: calling once.proto() installs
// Function.prototype.once / .onceStrict. It is itself wrapped in
// once() so repeated calls install the properties only the first time.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  })
})
|
||
|
|
||
|
// Return a function that invokes `fn` at most once.
// The first call's return value is cached on f.value and returned by
// every later call; f.called records whether the first call happened.
function once (fn) {
  function f () {
    if (!f.called) {
      f.called = true
      f.value = fn.apply(this, arguments)
    }
    return f.value
  }
  f.called = false
  return f
}
|
||
|
|
||
|
// Like once(), but a second invocation throws instead of returning the
// cached value. The error message embeds the wrapped function's name
// (or a generic placeholder) and is precomputed on f.onceError.
function onceStrict (fn) {
  var name = fn.name || 'Function wrapped with `once`'
  function f () {
    if (f.called) {
      throw new Error(f.onceError)
    }
    f.called = true
    f.value = fn.apply(this, arguments)
    return f.value
  }
  f.onceError = name + " shouldn't be called more than once"
  f.called = false
  return f
}
|
||
|
|
||
|
|
||
|
/***/ }),

/***/ 87:
/***/ (function(module) {

// Shim module: re-exports Node's built-in "os" module.
module.exports = require("os");

/***/ }),
|
||
|
|
||
|
/***/ 93:
/***/ (function(module, __unusedexports, __webpack_require__) {

// Vendored "minimatch" glob-matching module.
module.exports = minimatch
minimatch.Minimatch = Minimatch

// Fall back to a '/'-only path shim if the path module can't be loaded
// (module 622 is presumably Node's built-in "path" — confirm against the
// bundle's module map).
var path = { sep: '/' }
try {
  path = __webpack_require__(622)
} catch (er) {}

// Sentinel object marking a "**" path segment in a compiled pattern set.
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
// brace-expansion implementation (module 306).
var expand = __webpack_require__(306)

// Regexp open/close fragments for each extglob pattern-list type.
var plTypes = {
  '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
  '?': { open: '(?:', close: ')?' },
  '+': { open: '(?:', close: ')+' },
  '*': { open: '(?:', close: ')*' },
  '@': { open: '(?:', close: ')' }
}

// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'

// * => any number of characters
var star = qmark + '*?'

// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'

// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'

// characters that need to be escaped in RegExp.
var reSpecials = charSet('().*{}+?[]^$\\!')
|
||
|
|
||
|
// "abc" -> { a:true, b:true, c:true }
// Builds a membership table keyed by each UTF-16 code unit of `s`.
function charSet (s) {
  var set = {}
  for (var i = 0; i < s.length; i++) {
    set[s.charAt(i)] = true
  }
  return set
}
|
||
|
|
||
|
// normalizes slashes: splits on runs of one or more '/'.
var slashSplit = /\/+/

// minimatch.filter(pattern, options) -> predicate suitable for
// Array.prototype.filter; each element is matched against `pattern`.
minimatch.filter = filter
function filter (pattern, options) {
  options = options || {}
  return function (p, i, list) {
    return minimatch(p, pattern, options)
  }
}
|
||
|
|
||
|
// Shallow-merge two option objects into a fresh object.
// Keys from `a` win over keys from `b`; neither input is mutated and
// either may be null/undefined.
function ext (a, b) {
  var t = {}
  var base = b || {}
  var override = a || {}
  Object.keys(base).forEach(function (key) {
    t[key] = base[key]
  })
  Object.keys(override).forEach(function (key) {
    t[key] = override[key]
  })
  return t
}
|
||
|
|
||
|
// Return a minimatch clone pre-loaded with `def` as default options.
// With no (or empty) defaults the original function is returned as-is.
minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return minimatch

  var orig = minimatch

  var m = function minimatch (p, pattern, options) {
    // Fix: call `orig` directly. The previous code called
    // `orig.minimatch(...)`, but no `minimatch` property is ever assigned
    // on the exported function in this module, so any match through a
    // defaulted matcher threw "orig.minimatch is not a function".
    return orig(p, pattern, ext(def, options))
  }

  // Minimatch class with the same defaults applied.
  m.Minimatch = function Minimatch (pattern, options) {
    return new orig.Minimatch(pattern, ext(def, options))
  }

  return m
}
|
||
|
|
||
|
// Class-level counterpart of minimatch.defaults(): returns a Minimatch
// constructor with `def` merged into every instance's options.
Minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) {
    return Minimatch
  }
  return minimatch.defaults(def).Minimatch
}
|
||
|
|
||
|
// Test whether path `p` matches glob `pattern` under `options`.
// Throws TypeError unless `pattern` is a string.
function minimatch (p, pattern, options) {
  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  options = options || {}

  // shortcut: comment patterns ("#...") match nothing unless nocomment.
  if (!options.nocomment && pattern.charAt(0) === '#') return false

  // an all-whitespace pattern only matches the empty string.
  if (pattern.trim() === '') {
    return p === ''
  }

  return new Minimatch(pattern, options).match(p)
}
|
||
|
|
||
|
// Compiled-glob object. Normalizes the pattern, initializes state, and
// immediately compiles it via make(). Works with or without `new`.
function Minimatch (pattern, options) {
  // allow calling without `new`.
  if (!(this instanceof Minimatch)) return new Minimatch(pattern, options)

  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  options = options || {}

  // windows support: trim, then use '/' instead of the platform separator.
  var normalized = pattern.trim()
  if (path.sep !== '/') {
    normalized = normalized.split(path.sep).join('/')
  }

  this.options = options
  this.pattern = normalized
  this.set = []
  this.regexp = null
  this.negate = false
  this.comment = false
  this.empty = false

  // make the set of regexps etc.
  this.make()
}
|
||
|
|
||
|
// No-op debug hook; replaced by console.error when options.debug is set.
Minimatch.prototype.debug = function () {}

Minimatch.prototype.make = make
// Compile this.pattern into this.set: negation -> brace expansion ->
// split on '/' -> per-segment regexps (or the GLOBSTAR sentinel).
// Idempotent via this._made (note: _made is never assigned here —
// presumably a guard kept for API compatibility; confirm upstream).
function make () {
  // don't do it more than once.
  if (this._made) return

  var pattern = this.pattern
  var options = this.options

  // empty patterns and comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    this.comment = true
    return
  }
  if (!pattern) {
    this.empty = true
    return
  }

  // step 1: figure out negation, etc.
  this.parseNegate()

  // step 2: expand braces
  var set = this.globSet = this.braceExpand()

  if (options.debug) this.debug = console.error

  this.debug(this.pattern, set)

  // step 3: now we have a set, so turn each one into a series of path-portion
  // matching patterns.
  // These will be regexps, except in the case of "**", which is
  // set to the GLOBSTAR object for globstar behavior,
  // and will not contain any / characters
  set = this.globParts = set.map(function (s) {
    return s.split(slashSplit)
  })

  this.debug(this.pattern, set)

  // glob --> regexps
  set = set.map(function (s, si, set) {
    return s.map(this.parse, this)
  }, this)

  this.debug(this.pattern, set)

  // filter out everything that didn't compile properly.
  set = set.filter(function (s) {
    return s.indexOf(false) === -1
  })

  this.debug(this.pattern, set)

  this.set = set
}

Minimatch.prototype.parseNegate = parseNegate
|
||
|
// Consume leading '!' characters from this.pattern: each one toggles
// this.negate, and the consumed prefix is stripped from the pattern.
// Disabled entirely when options.nonegate is set.
function parseNegate () {
  var pattern = this.pattern
  var negate = false
  var options = this.options
  var negateOffset = 0

  if (options.nonegate) return

  while (negateOffset < pattern.length &&
         pattern.charAt(negateOffset) === '!') {
    negate = !negate
    negateOffset++
  }

  if (negateOffset) this.pattern = pattern.substr(negateOffset)
  this.negate = negate
}
|
||
|
|
||
|
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch.braceExpand = function (pattern, options) {
  return braceExpand(pattern, options)
}

Minimatch.prototype.braceExpand = braceExpand

// Expand one glob pattern into an array of brace-free patterns.
// Usable both as a method (pattern/options default from `this`) and as a
// free function. Delegates real expansion to the vendored expand module.
function braceExpand (pattern, options) {
  if (!options) {
    if (this instanceof Minimatch) {
      options = this.options
    } else {
      options = {}
    }
  }

  pattern = typeof pattern === 'undefined'
    ? this.pattern : pattern

  if (typeof pattern === 'undefined') {
    throw new TypeError('undefined pattern')
  }

  if (options.nobrace ||
    !pattern.match(/\{.*\}/)) {
    // shortcut. no need to expand.
    return [pattern]
  }

  return expand(pattern)
}
|
||
|
|
||
|
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch.prototype.parse = parse
// Sentinel passed as `isSub` for recursive sub-parses (e.g. the contents
// of an invalid character class); makes parse return [re, hasMagic]
// instead of a finished value.
var SUBPARSE = {}
// Compile a single (slash-free) pattern segment. Returns:
//   GLOBSTAR      for "**" (unless noglobstar),
//   ''            for the empty segment,
//   false         when the segment is invalid (contains '/'),
//   a plain string for non-magic segments (unescaped literal), or
//   a RegExp (with _glob/_src attached) for magic segments.
// Single-pass character scan with explicit escaping / class / extglob state.
function parse (pattern, isSub) {
  // defend against pathological patterns.
  if (pattern.length > 1024 * 64) {
    throw new TypeError('pattern is too long')
  }

  var options = this.options

  // shortcuts
  if (!options.noglobstar && pattern === '**') return GLOBSTAR
  if (pattern === '') return ''

  var re = ''
  var hasMagic = !!options.nocase
  var escaping = false
  // ? => one single character
  var patternListStack = []
  var negativeLists = []
  var stateChar
  var inClass = false
  var reClassStart = -1
  var classStart = -1
  // . and .. never match anything that doesn't start with .,
  // even when options.dot is set.
  var patternStart = pattern.charAt(0) === '.' ? '' // anything
  // not (start or / followed by . or .. followed by / or end)
  : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
  : '(?!\\.)'
  var self = this

  // Flush a pending '*', '?', or extglob prefix char into the regexp.
  function clearStateChar () {
    if (stateChar) {
      // we had some state-tracking character
      // that wasn't consumed by this pass.
      switch (stateChar) {
        case '*':
          re += star
          hasMagic = true
        break
        case '?':
          re += qmark
          hasMagic = true
        break
        default:
          re += '\\' + stateChar
        break
      }
      self.debug('clearStateChar %j %j', stateChar, re)
      stateChar = false
    }
  }

  for (var i = 0, len = pattern.length, c
    ; (i < len) && (c = pattern.charAt(i))
    ; i++) {
    this.debug('%s\t%s %s %j', pattern, i, re, c)

    // skip over any that are escaped.
    if (escaping && reSpecials[c]) {
      re += '\\' + c
      escaping = false
      continue
    }

    switch (c) {
      case '/':
        // completely not allowed, even escaped.
        // Should already be path-split by now.
        return false

      case '\\':
        clearStateChar()
        escaping = true
      continue

      // the various stateChar values
      // for the "extglob" stuff.
      case '?':
      case '*':
      case '+':
      case '@':
      case '!':
        this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)

        // all of those are literals inside a class, except that
        // the glob [!a] means [^a] in regexp
        if (inClass) {
          this.debug(' in class')
          if (c === '!' && i === classStart + 1) c = '^'
          re += c
          continue
        }

        // if we already have a stateChar, then it means
        // that there was something like ** or +? in there.
        // Handle the stateChar, then proceed with this one.
        self.debug('call clearStateChar %j', stateChar)
        clearStateChar()
        stateChar = c
        // if extglob is disabled, then +(asdf|foo) isn't a thing.
        // just clear the statechar *now*, rather than even diving into
        // the patternList stuff.
        if (options.noext) clearStateChar()
      continue

      case '(':
        if (inClass) {
          re += '('
          continue
        }

        // a '(' with no preceding extglob char is a literal paren.
        if (!stateChar) {
          re += '\\('
          continue
        }

        patternListStack.push({
          type: stateChar,
          start: i - 1,
          reStart: re.length,
          open: plTypes[stateChar].open,
          close: plTypes[stateChar].close
        })
        // negation is (?:(?!js)[^/]*)
        re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
        this.debug('plType %j %j', stateChar, re)
        stateChar = false
      continue

      case ')':
        if (inClass || !patternListStack.length) {
          re += '\\)'
          continue
        }

        clearStateChar()
        hasMagic = true
        var pl = patternListStack.pop()
        // negation is (?:(?!js)[^/]*)
        // The others are (?:<pattern>)<type>
        re += pl.close
        if (pl.type === '!') {
          negativeLists.push(pl)
        }
        pl.reEnd = re.length
      continue

      case '|':
        if (inClass || !patternListStack.length || escaping) {
          re += '\\|'
          escaping = false
          continue
        }

        clearStateChar()
        re += '|'
      continue

      // these are mostly the same in regexp and glob
      case '[':
        // swallow any state-tracking char before the [
        clearStateChar()

        if (inClass) {
          re += '\\' + c
          continue
        }

        inClass = true
        classStart = i
        reClassStart = re.length
        re += c
      continue

      case ']':
        // a right bracket shall lose its special
        // meaning and represent itself in
        // a bracket expression if it occurs
        // first in the list. -- POSIX.2 2.8.3.2
        if (i === classStart + 1 || !inClass) {
          re += '\\' + c
          escaping = false
          continue
        }

        // handle the case where we left a class open.
        // "[z-a]" is valid, equivalent to "\[z-a\]"
        if (inClass) {
          // split where the last [ was, make sure we don't have
          // an invalid re. if so, re-walk the contents of the
          // would-be class to re-translate any characters that
          // were passed through as-is
          // TODO: It would probably be faster to determine this
          // without a try/catch and a new RegExp, but it's tricky
          // to do safely. For now, this is safe and works.
          var cs = pattern.substring(classStart + 1, i)
          try {
            RegExp('[' + cs + ']')
          } catch (er) {
            // not a valid class!
            var sp = this.parse(cs, SUBPARSE)
            re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
            hasMagic = hasMagic || sp[1]
            inClass = false
            continue
          }
        }

        // finish up the class.
        hasMagic = true
        inClass = false
        re += c
      continue

      default:
        // swallow any state char that wasn't consumed
        clearStateChar()

        if (escaping) {
          // no need
          escaping = false
        } else if (reSpecials[c]
          && !(c === '^' && inClass)) {
          re += '\\'
        }

        re += c

    } // switch
  } // for

  // handle the case where we left a class open.
  // "[abc" is valid, equivalent to "\[abc"
  if (inClass) {
    // split where the last [ was, and escape it
    // this is a huge pita. We now have to re-walk
    // the contents of the would-be class to re-translate
    // any characters that were passed through as-is
    cs = pattern.substr(classStart + 1)
    sp = this.parse(cs, SUBPARSE)
    re = re.substr(0, reClassStart) + '\\[' + sp[0]
    hasMagic = hasMagic || sp[1]
  }

  // handle the case where we had a +( thing at the *end*
  // of the pattern.
  // each pattern list stack adds 3 chars, and we need to go through
  // and escape any | chars that were passed through as-is for the regexp.
  // Go through and escape them, taking care not to double-escape any
  // | chars that were already escaped.
  for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
    var tail = re.slice(pl.reStart + pl.open.length)
    this.debug('setting tail', re, pl)
    // maybe some even number of \, then maybe 1 \, followed by a |
    tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
      if (!$2) {
        // the | isn't already escaped, so escape it.
        $2 = '\\'
      }

      // need to escape all those slashes *again*, without escaping the
      // one that we need for escaping the | character. As it works out,
      // escaping an even number of slashes can be done by simply repeating
      // it exactly after itself. That's why this trick works.
      //
      // I am sorry that you have to see this.
      return $1 + $1 + $2 + '|'
    })

    this.debug('tail=%j\n %s', tail, tail, pl, re)
    var t = pl.type === '*' ? star
      : pl.type === '?' ? qmark
      : '\\' + pl.type

    hasMagic = true
    re = re.slice(0, pl.reStart) + t + '\\(' + tail
  }

  // handle trailing things that only matter at the very end.
  clearStateChar()
  if (escaping) {
    // trailing \\
    re += '\\\\'
  }

  // only need to apply the nodot start if the re starts with
  // something that could conceivably capture a dot
  var addPatternStart = false
  switch (re.charAt(0)) {
    case '.':
    case '[':
    case '(': addPatternStart = true
  }

  // Hack to work around lack of negative lookbehind in JS
  // A pattern like: *.!(x).!(y|z) needs to ensure that a name
  // like 'a.xyz.yz' doesn't match. So, the first negative
  // lookahead, has to look ALL the way ahead, to the end of
  // the pattern.
  for (var n = negativeLists.length - 1; n > -1; n--) {
    var nl = negativeLists[n]

    var nlBefore = re.slice(0, nl.reStart)
    var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
    var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
    var nlAfter = re.slice(nl.reEnd)

    nlLast += nlAfter

    // Handle nested stuff like *(*.js|!(*.json)), where open parens
    // mean that we should *not* include the ) in the bit that is considered
    // "after" the negated section.
    var openParensBefore = nlBefore.split('(').length - 1
    var cleanAfter = nlAfter
    for (i = 0; i < openParensBefore; i++) {
      cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
    }
    nlAfter = cleanAfter

    var dollar = ''
    if (nlAfter === '' && isSub !== SUBPARSE) {
      dollar = '$'
    }
    var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
    re = newRe
  }

  // if the re is not "" at this point, then we need to make sure
  // it doesn't match against an empty path part.
  // Otherwise a/* will match a/, which it should not.
  if (re !== '' && hasMagic) {
    re = '(?=.)' + re
  }

  if (addPatternStart) {
    re = patternStart + re
  }

  // parsing just a piece of a larger pattern.
  if (isSub === SUBPARSE) {
    return [re, hasMagic]
  }

  // skip the regexp for non-magical patterns
  // unescape anything in it, though, so that it'll be
  // an exact match against a file etc.
  if (!hasMagic) {
    return globUnescape(pattern)
  }

  var flags = options.nocase ? 'i' : ''
  try {
    var regExp = new RegExp('^' + re + '$', flags)
  } catch (er) {
    // If it was an invalid regular expression, then it can't match
    // anything. This trick looks for a character after the end of
    // the string, which is of course impossible, except in multi-line
    // mode, but it's not a /m regex.
    return new RegExp('$.')
  }

  regExp._glob = pattern
  regExp._src = re

  return regExp
}
|
||
|
|
||
|
// Convenience wrapper: compile `pattern` and return one RegExp for the
// whole glob (or false if it cannot be compiled).
minimatch.makeRe = function (pattern, options) {
  return new Minimatch(pattern, options || {}).makeRe()
}

Minimatch.prototype.makeRe = makeRe
// Build (and memoize on this.regexp) a single RegExp equivalent to the
// whole compiled pattern set. Returns false when compilation fails.
function makeRe () {
  // memoized result (false is a valid cached value).
  if (this.regexp || this.regexp === false) return this.regexp

  // at this point, this.set is a 2d array of partial
  // pattern strings, or "**".
  //
  // It's better to use .match(). This function shouldn't
  // be used, really, but it's pretty convenient sometimes,
  // when you just want to work with a regex.
  var set = this.set

  if (!set.length) {
    this.regexp = false
    return this.regexp
  }
  var options = this.options

  // regexp fragment standing in for a "**" segment.
  var twoStar = options.noglobstar ? star
    : options.dot ? twoStarDot
    : twoStarNoDot
  var flags = options.nocase ? 'i' : ''

  // join segments with escaped '/', alternatives with '|'.
  var re = set.map(function (pattern) {
    return pattern.map(function (p) {
      return (p === GLOBSTAR) ? twoStar
      : (typeof p === 'string') ? regExpEscape(p)
      : p._src
    }).join('\\\/')
  }).join('|')

  // must match entire pattern
  // ending in a * or ** will make it less strict.
  re = '^(?:' + re + ')$'

  // can match anything, as long as it's not this.
  if (this.negate) re = '^(?!' + re + ').*$'

  try {
    this.regexp = new RegExp(re, flags)
  } catch (ex) {
    this.regexp = false
  }
  return this.regexp
}
|
||
|
|
||
|
// Filter `list` down to the entries matching `pattern`.
// With options.nonull, a list that matched nothing gets the pattern
// itself pushed in (shell-like behavior).
minimatch.match = function (list, pattern, options) {
  options = options || {}
  var mm = new Minimatch(pattern, options)
  list = list.filter(function (f) {
    return mm.match(f)
  })
  if (mm.options.nonull && !list.length) {
    list.push(pattern)
  }
  return list
}
|
||
|
|
||
|
Minimatch.prototype.match = match
// Test path `f` against this compiled pattern. `partial` allows the path
// to run out before the pattern does (used for directory-walk pruning).
function match (f, partial) {
  this.debug('match', f, this.pattern)
  // short-circuit in the case of busted things.
  // comments, etc.
  if (this.comment) return false
  if (this.empty) return f === ''

  if (f === '/' && partial) return true

  var options = this.options

  // windows: need to use /, not \
  if (path.sep !== '/') {
    f = f.split(path.sep).join('/')
  }

  // treat the test path as a set of pathparts.
  f = f.split(slashSplit)
  this.debug(this.pattern, 'split', f)

  // just ONE of the pattern sets in this.set needs to match
  // in order for it to be valid. If negating, then just one
  // match means that we have failed.
  // Either way, return on the first hit.

  var set = this.set
  this.debug(this.pattern, 'set', set)

  // Find the basename of the path by looking for the last non-empty segment
  var filename
  var i
  for (i = f.length - 1; i >= 0; i--) {
    filename = f[i]
    if (filename) break
  }

  for (i = 0; i < set.length; i++) {
    var pattern = set[i]
    var file = f
    // matchBase: a single-segment pattern is tested against the basename.
    if (options.matchBase && pattern.length === 1) {
      file = [filename]
    }
    var hit = this.matchOne(file, pattern, partial)
    if (hit) {
      if (options.flipNegate) return true
      return !this.negate
    }
  }

  // didn't get any hits. this is success if it's a negative
  // pattern, failure otherwise.
  if (options.flipNegate) return false
  return this.negate
}
|
||
|
|
||
|
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
//
// `file` and `pattern` are parallel arrays of path segments; pattern
// entries are literal strings, RegExps, or the GLOBSTAR sentinel.
Minimatch.prototype.matchOne = function (file, pattern, partial) {
  var options = this.options

  this.debug('matchOne',
    { 'this': this, file: file, pattern: pattern })

  this.debug('matchOne', file.length, pattern.length)

  for (var fi = 0,
    pi = 0,
    fl = file.length,
    pl = pattern.length
    ; (fi < fl) && (pi < pl)
    ; fi++, pi++) {
    this.debug('matchOne loop')
    var p = pattern[pi]
    var f = file[fi]

    this.debug(pattern, p, f)

    // should be impossible.
    // some invalid regexp stuff in the set.
    if (p === false) return false

    if (p === GLOBSTAR) {
      this.debug('GLOBSTAR', [pattern, p, f])

      // "**"
      // a/**/b/**/c would match the following:
      // a/b/x/y/z/c
      // a/x/y/z/b/c
      // a/b/x/b/x/c
      // a/b/c
      // To do this, take the rest of the pattern after
      // the **, and see if it would match the file remainder.
      // If so, return success.
      // If not, the ** "swallows" a segment, and try again.
      // This is recursively awful.
      //
      // a/**/b/**/c matching a/b/x/y/z/c
      // - a matches a
      // - doublestar
      //   - matchOne(b/x/y/z/c, b/**/c)
      //     - b matches b
      //     - doublestar
      //       - matchOne(x/y/z/c, c) -> no
      //       - matchOne(y/z/c, c) -> no
      //       - matchOne(z/c, c) -> no
      //       - matchOne(c, c) yes, hit
      var fr = fi
      var pr = pi + 1
      if (pr === pl) {
        this.debug('** at the end')
        // a ** at the end will just swallow the rest.
        // We have found a match.
        // however, it will not swallow /.x, unless
        // options.dot is set.
        // . and .. are *never* matched by **, for explosively
        // exponential reasons.
        for (; fi < fl; fi++) {
          if (file[fi] === '.' || file[fi] === '..' ||
            (!options.dot && file[fi].charAt(0) === '.')) return false
        }
        return true
      }

      // ok, let's see if we can swallow whatever we can.
      while (fr < fl) {
        var swallowee = file[fr]

        this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)

        // XXX remove this slice. Just pass the start index.
        if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
          this.debug('globstar found match!', fr, fl, swallowee)
          // found a match.
          return true
        } else {
          // can't swallow "." or ".." ever.
          // can only swallow ".foo" when explicitly asked.
          if (swallowee === '.' || swallowee === '..' ||
            (!options.dot && swallowee.charAt(0) === '.')) {
            this.debug('dot detected!', file, fr, pattern, pr)
            break
          }

          // ** swallows a segment, and continue.
          this.debug('globstar swallow a segment, and continue')
          fr++
        }
      }

      // no match was found.
      // However, in partial mode, we can't say this is necessarily over.
      // If there's more *pattern* left, then
      if (partial) {
        // ran out of file
        this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
        if (fr === fl) return true
      }
      return false
    }

    // something other than **
    // non-magic patterns just have to match exactly
    // patterns with magic have been turned into regexps.
    var hit
    if (typeof p === 'string') {
      if (options.nocase) {
        hit = f.toLowerCase() === p.toLowerCase()
      } else {
        hit = f === p
      }
      this.debug('string match', p, f, hit)
    } else {
      hit = f.match(p)
      this.debug('pattern match', p, f, hit)
    }

    if (!hit) return false
  }

  // Note: ending in / means that we'll get a final ""
  // at the end of the pattern. This can only match a
  // corresponding "" at the end of the file.
  // If the file ends in /, then it can only match a
  // a pattern that ends in /, unless the pattern just
  // doesn't have any more for it. But, a/b/ should *not*
  // match "a/b/*", even though "" matches against the
  // [^/]*? pattern, except in partial mode, where it might
  // simply not be reached yet.
  // However, a/b/ should still satisfy a/*

  // now either we fell off the end of the pattern, or we're done.
  if (fi === fl && pi === pl) {
    // ran out of pattern and filename at the same time.
    // an exact hit!
    return true
  } else if (fi === fl) {
    // ran out of file, but still had pattern left.
    // this is ok if we're doing the match as part of
    // a glob fs traversal.
    return partial
  } else if (pi === pl) {
    // ran out of pattern, still have file left.
    // this is only acceptable if we're on the very last
    // empty segment of a file with a trailing slash.
    // a/* should match a/b/
    var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
    return emptyFileEnd
  }

  // should be unreachable.
  throw new Error('wtf?')
}
|
||
|
|
||
|
// Undo backslash escaping in a glob pattern: every "\X" pair becomes the
// literal "X" (e.g. "\*" -> "*").  A trailing lone backslash is kept as-is.
function globUnescape (s) {
  return s.replace(/\\(.)/g, function (_, ch) {
    return ch
  })
}
|
||
|
|
||
|
// Escape every regular-expression metacharacter in s so the result can be
// embedded in a RegExp and match s literally.
function regExpEscape (s) {
  var special = /[-[\]{}()*+?.,\\^$|#\s]/g
  return s.replace(special, '\\$&')
}
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 117:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
// Copyright Joyent, Inc. and other Node contributors.
|
||
|
//
|
||
|
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
|
// copy of this software and associated documentation files (the
|
||
|
// "Software"), to deal in the Software without restriction, including
|
||
|
// without limitation the rights to use, copy, modify, merge, publish,
|
||
|
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
|
// persons to whom the Software is furnished to do so, subject to the
|
||
|
// following conditions:
|
||
|
//
|
||
|
// The above copyright notice and this permission notice shall be included
|
||
|
// in all copies or substantial portions of the Software.
|
||
|
//
|
||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
|
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
|
||
|
var pathModule = __webpack_require__(622);
|
||
|
var isWindows = process.platform === 'win32';
|
||
|
var fs = __webpack_require__(747);
|
||
|
|
||
|
// JavaScript implementation of realpath, ported from node pre-v6
|
||
|
|
||
|
var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
|
||
|
|
||
|
// Build a substitute callback for fs calls where the caller forgot one.
// Returns a function that reports (or rethrows) any error it receives,
// honoring the process deprecation flags.
function rethrow() {
  // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
  // is fairly slow to generate.
  var callback;
  if (DEBUG) {
    // Capture the stack now, so the eventual report points at the call
    // site that omitted the callback rather than fs internals.
    var backtrace = new Error;
    callback = debugCallback;
  } else
    callback = missingCallback;

  return callback;

  // Debug-mode reporter: graft the error message onto the pre-captured
  // backtrace, then fall through to the normal reporter.
  function debugCallback(err) {
    if (err) {
      backtrace.message = err.message;
      err = backtrace;
      missingCallback(err);
    }
  }

  // Normal reporter: throw, trace, or log, depending on the process-wide
  // deprecation settings.
  function missingCallback(err) {
    if (err) {
      if (process.throwDeprecation)
        throw err;  // Forgot a callback but don't know where? Use NODE_DEBUG=fs
      else if (!process.noDeprecation) {
        var msg = 'fs: missing callback ' + (err.stack || err.message);
        if (process.traceDeprecation)
          console.trace(msg);
        else
          console.error(msg);
      }
    }
  }
}
|
||
|
|
||
|
// Return cb when it is callable; otherwise substitute the rethrow()
// reporter so errors are not silently dropped.
function maybeCallback(cb) {
  if (typeof cb === 'function') {
    return cb;
  }
  return rethrow();
}
|
||
|
|
||
|
var normalize = pathModule.normalize;

// Regexp that finds the next portion of a (partial) path
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
// NOTE: this is a stateful /g regex; callers drive it by setting
// lastIndex before each exec.
if (isWindows) {
  var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
} else {
  var nextPartRe = /(.*?)(?:[\/]+|$)/g;
}

// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
// On Windows this also matches UNC roots like '\\\\server\\share\\'.
if (isWindows) {
  var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
} else {
  var splitRootRe = /^[\/]*/;
}
|
||
|
|
||
|
// Synchronous realpath: resolve '.', '..' and symlinks in p, returning
// the canonical absolute path.  `cache` (optional) maps already-resolved
// paths to their targets and is both consulted and updated here.
exports.realpathSync = function realpathSync(p, cache) {
  // make p is absolute
  p = pathModule.resolve(p);

  if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
    return cache[p];
  }

  var original = p,
      seenLinks = {},   // symlink dev:ino id -> target, avoids re-reading links
      knownHard = {};   // path -> true for segments proven not to be symlinks

  // current character position in p
  var pos;
  // the partial path so far, including a trailing slash if any
  var current;
  // the partial path without a trailing slash (except when pointing at a root)
  var base;
  // the partial path scanned in the previous round, with slash
  var previous;

  start();

  // (Re)initialize the scan state at the root of p; called again every
  // time a symlink rewrites p.
  function start() {
    // Skip over roots
    var m = splitRootRe.exec(p);
    pos = m[0].length;
    current = m[0];
    base = m[0];
    previous = '';

    // On windows, check that the root exists. On unix there is no need.
    if (isWindows && !knownHard[base]) {
      fs.lstatSync(base);
      knownHard[base] = true;
    }
  }

  // walk down the path, swapping out linked pathparts for their real
  // values
  // NB: p.length changes.
  while (pos < p.length) {
    // find the next part
    nextPartRe.lastIndex = pos;
    var result = nextPartRe.exec(p);
    previous = current;
    current += result[0];
    base = previous + result[1];
    pos = nextPartRe.lastIndex;

    // continue if not a symlink
    if (knownHard[base] || (cache && cache[base] === base)) {
      continue;
    }

    var resolvedLink;
    if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
      // some known symbolic link.  no need to stat again.
      resolvedLink = cache[base];
    } else {
      var stat = fs.lstatSync(base);
      if (!stat.isSymbolicLink()) {
        knownHard[base] = true;
        if (cache) cache[base] = base;
        continue;
      }

      // read the link if it wasn't read before
      // dev/ino always return 0 on windows, so skip the check.
      var linkTarget = null;
      if (!isWindows) {
        var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
        if (seenLinks.hasOwnProperty(id)) {
          linkTarget = seenLinks[id];
        }
      }
      if (linkTarget === null) {
        // stat first so a dangling link throws here rather than later
        fs.statSync(base);
        linkTarget = fs.readlinkSync(base);
      }
      resolvedLink = pathModule.resolve(previous, linkTarget);
      // track this, if given a cache.
      if (cache) cache[base] = resolvedLink;
      if (!isWindows) seenLinks[id] = linkTarget;
    }

    // resolve the link, then start over
    p = pathModule.resolve(resolvedLink, p.slice(pos));
    start();
  }

  if (cache) cache[original] = p;

  return p;
};
|
||
|
|
||
|
|
||
|
// Asynchronous realpath: resolve '.', '..' and symlinks in p, delivering
// the canonical absolute path via cb(er, resolved).  `cache` (optional)
// maps already-resolved paths and is both consulted and updated.
exports.realpath = function realpath(p, cache, cb) {
  // support realpath(p, cb) — shift arguments when cache is the callback
  if (typeof cb !== 'function') {
    cb = maybeCallback(cache);
    cache = null;
  }

  // make p is absolute
  p = pathModule.resolve(p);

  if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
    return process.nextTick(cb.bind(null, null, cache[p]));
  }

  var original = p,
      seenLinks = {},   // symlink dev:ino id -> target, avoids re-reading links
      knownHard = {};   // path -> true for segments proven not to be symlinks

  // current character position in p
  var pos;
  // the partial path so far, including a trailing slash if any
  var current;
  // the partial path without a trailing slash (except when pointing at a root)
  var base;
  // the partial path scanned in the previous round, with slash
  var previous;

  start();

  // (Re)initialize the scan state at the root of p; called again every
  // time a symlink rewrites p.
  function start() {
    // Skip over roots
    var m = splitRootRe.exec(p);
    pos = m[0].length;
    current = m[0];
    base = m[0];
    previous = '';

    // On windows, check that the root exists. On unix there is no need.
    if (isWindows && !knownHard[base]) {
      fs.lstat(base, function(err) {
        if (err) return cb(err);
        knownHard[base] = true;
        LOOP();
      });
    } else {
      process.nextTick(LOOP);
    }
  }

  // walk down the path, swapping out linked pathparts for their real
  // values
  function LOOP() {
    // stop if scanned past end of path
    if (pos >= p.length) {
      if (cache) cache[original] = p;
      return cb(null, p);
    }

    // find the next part
    nextPartRe.lastIndex = pos;
    var result = nextPartRe.exec(p);
    previous = current;
    current += result[0];
    base = previous + result[1];
    pos = nextPartRe.lastIndex;

    // continue if not a symlink
    if (knownHard[base] || (cache && cache[base] === base)) {
      return process.nextTick(LOOP);
    }

    if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
      // known symbolic link.  no need to stat again.
      return gotResolvedLink(cache[base]);
    }

    return fs.lstat(base, gotStat);
  }

  // Handle the lstat of the current segment.
  function gotStat(err, stat) {
    if (err) return cb(err);

    // if not a symlink, skip to the next path part
    if (!stat.isSymbolicLink()) {
      knownHard[base] = true;
      if (cache) cache[base] = base;
      return process.nextTick(LOOP);
    }

    // stat & read the link if not read before
    // call gotTarget as soon as the link target is known
    // dev/ino always return 0 on windows, so skip the check.
    if (!isWindows) {
      var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
      if (seenLinks.hasOwnProperty(id)) {
        return gotTarget(null, seenLinks[id], base);
      }
    }
    fs.stat(base, function(err) {
      if (err) return cb(err);

      fs.readlink(base, function(err, target) {
        if (!isWindows) seenLinks[id] = target;
        // NOTE(review): `base` is not passed here, so gotTarget's `base`
        // parameter is undefined on this path and any cache write lands
        // under the key "undefined" — confirm against upstream intent.
        gotTarget(err, target);
      });
    });
  }

  // Record the symlink target in the cache and substitute it into p.
  function gotTarget(err, target, base) {
    if (err) return cb(err);

    var resolvedLink = pathModule.resolve(previous, target);
    if (cache) cache[base] = resolvedLink;
    gotResolvedLink(resolvedLink);
  }

  function gotResolvedLink(resolvedLink) {
    // resolve the link, then start over
    p = pathModule.resolve(resolvedLink, p.slice(pos));
    start();
  }
};
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 120:
|
||
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
||
|
// Approach:
|
||
|
//
|
||
|
// 1. Get the minimatch set
|
||
|
// 2. For each pattern in the set, PROCESS(pattern, false)
|
||
|
// 3. Store matches per-set, then uniq them
|
||
|
//
|
||
|
// PROCESS(pattern, inGlobStar)
|
||
|
// Get the first [n] items from pattern that are all strings
|
||
|
// Join these together. This is PREFIX.
|
||
|
// If there is no more remaining, then stat(PREFIX) and
|
||
|
// add to matches if it succeeds. END.
|
||
|
//
|
||
|
// If inGlobStar and PREFIX is symlink and points to dir
|
||
|
// set ENTRIES = []
|
||
|
// else readdir(PREFIX) as ENTRIES
|
||
|
// If fail, END
|
||
|
//
|
||
|
// with ENTRIES
|
||
|
// If pattern[n] is GLOBSTAR
|
||
|
// // handle the case where the globstar match is empty
|
||
|
// // by pruning it out, and testing the resulting pattern
|
||
|
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
|
||
|
// // handle other cases.
|
||
|
// for ENTRY in ENTRIES (not dotfiles)
|
||
|
// // attach globstar + tail onto the entry
|
||
|
// // Mark that this entry is a globstar match
|
||
|
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
|
||
|
//
|
||
|
// else // not globstar
|
||
|
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
|
||
|
// Test ENTRY against pattern[n]
|
||
|
// If fails, continue
|
||
|
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
|
||
|
//
|
||
|
// Caveat:
|
||
|
// Cache all stats and readdirs results to minimize syscall. Since all
|
||
|
// we ever care about is existence and directory-ness, we can just keep
|
||
|
// `true` for files, and [children,...] for directories, or `false` for
|
||
|
// things that don't exist.
|
||
|
|
||
|
module.exports = glob
|
||
|
|
||
|
var fs = __webpack_require__(747)
|
||
|
var rp = __webpack_require__(302)
|
||
|
var minimatch = __webpack_require__(93)
|
||
|
var Minimatch = minimatch.Minimatch
|
||
|
var inherits = __webpack_require__(689)
|
||
|
var EE = __webpack_require__(614).EventEmitter
|
||
|
var path = __webpack_require__(622)
|
||
|
var assert = __webpack_require__(357)
|
||
|
var isAbsolute = __webpack_require__(681)
|
||
|
var globSync = __webpack_require__(245)
|
||
|
var common = __webpack_require__(856)
|
||
|
var alphasort = common.alphasort
|
||
|
var alphasorti = common.alphasorti
|
||
|
var setopts = common.setopts
|
||
|
var ownProp = common.ownProp
|
||
|
var inflight = __webpack_require__(674)
|
||
|
var util = __webpack_require__(669)
|
||
|
var childrenIgnored = common.childrenIgnored
|
||
|
var isIgnored = common.isIgnored
|
||
|
|
||
|
var once = __webpack_require__(49)
|
||
|
|
||
|
// Public entry point.  glob(pattern[, options][, cb]) starts an async
// walk and returns the Glob instance; with options.sync it delegates to
// globSync (which never takes a callback).
function glob (pattern, options, cb) {
  // Support glob(pattern, cb) by shifting arguments.
  if (typeof options === 'function') {
    cb = options
    options = {}
  }
  options = options || {}

  if (!options.sync)
    return new Glob(pattern, options, cb)

  if (cb)
    throw new TypeError('callback provided to sync glob')
  return globSync(pattern, options)
}
|
||
|
|
||
|
// Expose the synchronous implementation and its class on the main export.
glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync

// old api surface
glob.glob = glob
|
||
|
|
||
|
// Shallow-copy every own enumerable key of `add` onto `origin`.
// A null or non-object `add` is ignored.  Returns `origin`.
function extend (origin, add) {
  if (add === null || typeof add !== 'object') {
    return origin
  }

  var keys = Object.keys(add)
  for (var k = 0; k < keys.length; k++) {
    var key = keys[k]
    origin[key] = add[key]
  }
  return origin
}
|
||
|
|
||
|
// True when `pattern` contains glob magic (braces, wildcards, etc.)
// under the given options.  A falsy pattern is never magic.
glob.hasMagic = function (pattern, options_) {
  // parse without walking the filesystem
  var options = extend({}, options_)
  options.noprocess = true

  var g = new Glob(pattern, options)
  var set = g.minimatch.set

  if (!pattern)
    return false

  // brace expansion produced multiple patterns: definitely magic
  if (set.length > 1)
    return true

  // magic iff any piece of the single pattern compiled to a non-string
  return set[0].some(function (piece) {
    return typeof piece !== 'string'
  })
}
|
||
|
|
||
|
glob.Glob = Glob
inherits(Glob, EE)

// Async glob walker.  An EventEmitter that emits 'match' per result and
// 'end' with the full match list; 'error', 'abort', 'pause' and 'resume'
// are also emitted.  When `cb` is supplied it is wired to error/end.
function Glob (pattern, options, cb) {
  // support Glob(pattern, cb)
  if (typeof options === 'function') {
    cb = options
    options = null
  }

  // sync mode is handled by the GlobSync class and takes no callback
  if (options && options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return new GlobSync(pattern, options)
  }

  if (!(this instanceof Glob))
    return new Glob(pattern, options, cb)

  setopts(this, pattern, options)
  this._didRealPath = false

  // process each pattern in the minimatch set
  var n = this.minimatch.set.length

  // The matches are stored as {<filename>: true,...} so that
  // duplicates are automagically pruned.
  // Later, we do an Object.keys() on these.
  // Keep them as a list so we can fill in when nonull is set.
  this.matches = new Array(n)

  if (typeof cb === 'function') {
    cb = once(cb)
    this.on('error', cb)
    this.on('end', function (matches) {
      cb(null, matches)
    })
  }

  var self = this
  this._processing = 0

  this._emitQueue = []
  this._processQueue = []
  this.paused = false

  // noprocess: caller only wanted the parsed pattern (see hasMagic)
  if (this.noprocess)
    return this

  if (n === 0)
    return done()

  // `sync` is true only while the _process calls below are on the stack.
  // If everything completes synchronously, _finish is deferred to the
  // next tick so 'end' cannot fire before listeners are attached.
  var sync = true
  for (var i = 0; i < n; i ++) {
    this._process(this.minimatch.set[i], i, false, done)
  }
  sync = false

  // Countdown callback shared by all _process branches.
  function done () {
    --self._processing
    if (self._processing <= 0) {
      if (sync) {
        process.nextTick(function () {
          self._finish()
        })
      } else {
        self._finish()
      }
    }
  }
}
|
||
|
|
||
|
// Called once all processing completes: optionally resolve realpaths,
// then finalize the match list and emit 'end'.
Glob.prototype._finish = function () {
  assert(this instanceof Glob)
  if (this.aborted)
    return

  // NOTE(review): the constructor initializes `_didRealPath` (capital P)
  // while this reads `_didRealpath`; it works because undefined is falsy
  // and _realpath() sets the lowercase spelling before re-entering here.
  if (this.realpath && !this._didRealpath)
    return this._realpath()

  common.finish(this)
  this.emit('end', this.found)
}
|
||
|
|
||
|
// Replace every match with its realpath, then re-run _finish.
// Guarded so it only ever runs once per Glob instance.
Glob.prototype._realpath = function () {
  if (this._didRealpath)
    return

  this._didRealpath = true

  var n = this.matches.length
  if (n === 0)
    return this._finish()

  var self = this
  for (var i = 0; i < this.matches.length; i++)
    this._realpathSet(i, next)

  // countdown: fires _finish after every match-set has been resolved
  function next () {
    if (--n === 0)
      self._finish()
  }
}
|
||
|
|
||
|
// Resolve the realpath of every match in matches[index], rebuilding the
// set keyed by resolved paths.  Calls cb() once the whole set is done.
Glob.prototype._realpathSet = function (index, cb) {
  var matchset = this.matches[index]
  if (!matchset)
    return cb()

  var found = Object.keys(matchset)
  var self = this
  var n = found.length

  if (n === 0)
    return cb()

  var set = this.matches[index] = Object.create(null)
  found.forEach(function (p, i) {
    // If there's a problem with the stat, then it means that
    // one or more of the links in the realpath couldn't be
    // resolved.  just return the abs value in that case.
    p = self._makeAbs(p)
    rp.realpath(p, self.realpathCache, function (er, real) {
      if (!er)
        set[real] = true
      else if (er.syscall === 'stat')
        set[p] = true
      else
        self.emit('error', er) // srsly wtf right here

      // last realpath finished: publish the rebuilt set
      if (--n === 0) {
        self.matches[index] = set
        cb()
      }
    })
  })
}
|
||
|
|
||
|
// Apply the `mark` option: append '/' to p when it names a directory.
Glob.prototype._mark = function (p) {
  return common.mark(this, p)
}
|
||
|
|
||
|
// Resolve f against this glob's cwd/root into an absolute path.
Glob.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
}
|
||
|
|
||
|
// Stop the walk permanently; pending callbacks check this.aborted and
// become no-ops.  Emits 'abort'.
Glob.prototype.abort = function () {
  this.aborted = true
  this.emit('abort')
}
|
||
|
|
||
|
// Suspend matching/processing; work queues up until resume() is called.
// Emits 'pause' only on the transition.
Glob.prototype.pause = function () {
  if (!this.paused) {
    this.paused = true
    this.emit('pause')
  }
}
|
||
|
|
||
|
// Resume a paused walk: first flush the queued matches, then the queued
// _process calls that accumulated while paused.  Emits 'resume'.
Glob.prototype.resume = function () {
  if (this.paused) {
    this.emit('resume')
    this.paused = false
    if (this._emitQueue.length) {
      // copy-then-clear so re-entrant pauses re-queue cleanly
      var eq = this._emitQueue.slice(0)
      this._emitQueue.length = 0
      for (var i = 0; i < eq.length; i ++) {
        var e = eq[i]
        this._emitMatch(e[0], e[1])
      }
    }
    if (this._processQueue.length) {
      var pq = this._processQueue.slice(0)
      this._processQueue.length = 0
      for (var i = 0; i < pq.length; i ++) {
        var p = pq[i]
        // _process will increment again; decrement first to stay balanced
        this._processing--
        this._process(p[0], p[1], p[2], p[3])
      }
    }
  }
}
|
||
|
|
||
|
// Core dispatcher for one minimatch pattern (an array of literal string
// parts, regexes and GLOBSTAR markers).  Splits off the literal prefix,
// then hands the remainder to the readdir or globstar strategy.  cb()
// fires when this branch of the walk completes.
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
  assert(this instanceof Glob)
  assert(typeof cb === 'function')

  if (this.aborted)
    return

  this._processing++
  if (this.paused) {
    // replayed verbatim by resume()
    this._processQueue.push([pattern, index, inGlobStar, cb])
    return
  }

  //console.error('PROCESS %d', this._processing, pattern)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n ++
  }
  // now n is the index of the first one that is *not* a string.

  // see if there's anything else
  var prefix
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index, cb)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // get the list of entries.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  //if ignored, skip _processing
  if (childrenIgnored(this, read))
    return cb()

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
|
||
|
|
||
|
// Handle a non-globstar pattern part: read the directory, then match its
// entries in _processReaddir2.
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  this._readdir(abs, inGlobStar, function (er, entries) {
    return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  })
}
|
||
|
|
||
|
// Match the directory entries against the current pattern part.  If this
// was the final part, emit matches directly; otherwise recurse with each
// matched entry substituted for the part.
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return cb()

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      // a top-level negated pattern inverts the per-entry match
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return cb()

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.

  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i ++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      // mount absolute results onto the root unless the user opted out
      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return cb()
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i ++) {
    var e = matchedEntries[i]
    var newPattern  // NOTE(review): declared but never used
    if (prefix) {
      if (prefix !== '/')
        e = prefix + '/' + e
      else
        e = prefix + e
    }
    this._process([e].concat(remain), index, inGlobStar, cb)
  }
  cb()
}
|
||
|
|
||
|
// Record e as a match for pattern `index` and emit 'match', applying the
// mark/absolute/nodir options and de-duplicating.  Queued when paused.
Glob.prototype._emitMatch = function (index, e) {
  if (this.aborted)
    return

  if (isIgnored(this, e))
    return

  if (this.paused) {
    // replayed by resume()
    this._emitQueue.push([index, e])
    return
  }

  var abs = isAbsolute(e) ? e : this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute)
    e = abs

  // already reported for this pattern: skip duplicates
  if (this.matches[index][e])
    return

  // nodir: suppress anything the cache says is a directory
  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  // re-emit a cached stat object when one exists for this path
  var st = this.statCache[abs]
  if (st)
    this.emit('stat', e, st)

  this.emit('match', e)
}
|
||
|
|
||
|
// readdir for a path reached through '**': lstat first so symlinked
// directories are recorded (they are not descended into again unless
// options.follow is set).
Glob.prototype._readdirInGlobStar = function (abs, cb) {
  if (this.aborted)
    return

  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false, cb)

  var lstatkey = 'lstat\0' + abs
  var self = this
  var lstatcb = inflight(lstatkey, lstatcb_)

  // inflight() returns null when an identical lstat is already pending
  if (lstatcb)
    fs.lstat(abs, lstatcb)

  function lstatcb_ (er, lstat) {
    if (er && er.code === 'ENOENT')
      return cb()

    var isSym = lstat && lstat.isSymbolicLink()
    self.symlinks[abs] = isSym

    // If it's not a symlink or a dir, then it's definitely a regular file.
    // don't bother doing a readdir in that case.
    if (!isSym && lstat && !lstat.isDirectory()) {
      self.cache[abs] = 'FILE'
      cb()
    } else
      self._readdir(abs, false, cb)
  }
}
|
||
|
|
||
|
// Read directory entries for `abs`, consulting (and later filling)
// this.cache.  Results are delivered as cb(er, entries); concurrent
// reads of the same key are coalesced through inflight().
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
  if (this.aborted)
    return

  cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
  if (!cb)
    return  // an identical readdir is already in flight; it will call us

  //console.error('RD %j %j', +inGlobStar, abs)
  // inside '**' and not yet known to be a symlink: lstat first
  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs, cb)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    // known non-directory (false or 'FILE'): no entries to report
    if (!c || c === 'FILE')
      return cb()

    if (Array.isArray(c))
      return cb(null, c)
  }

  fs.readdir(abs, readdirCb(this, abs, cb))
}
|
||
|
|
||
|
// Build the fs.readdir completion callback for `abs`, dispatching to the
// error or success handler on `self` (the Glob instance).
function readdirCb (self, abs, cb) {
  return function onReaddir (er, entries) {
    var handler = er ? self._readdirError : self._readdirEntries
    var payload = er || entries
    handler.call(self, abs, payload, cb)
  }
}
|
||
|
|
||
|
// Success path of _readdir: prime the cache with each child path and
// with the entry list for abs itself, then deliver the entries.
Glob.prototype._readdirEntries = function (abs, entries, cb) {
  if (this.aborted)
    return

  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i ++) {
      var e = entries[i]
      // avoid a doubled slash when listing the filesystem root
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  this.cache[abs] = entries
  return cb(null, entries)
}
|
||
|
|
||
|
// Error path of _readdir: classify the error, cache the outcome so the
// path is not retried, and optionally surface it (strict mode).
Glob.prototype._readdirError = function (f, er, cb) {
  if (this.aborted)
    return

  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      // a cwd that is not a directory is fatal for the whole walk
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        this.emit('error', error)
        this.abort()
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict) {
        this.emit('error', er)
        // If the error is handled, then we abort
        // if not, we threw out of here
        this.abort()
      }
      if (!this.silent)
        console.error('glob error', er)
      break
  }

  return cb()
}
|
||
|
|
||
|
// Handle a '**' pattern part: read the directory, then expand the
// globstar in _processGlobStar2.
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  this._readdir(abs, inGlobStar, function (er, entries) {
    self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  })
}
|
||
|
|
||
|
|
||
|
// Expand '**': try the pattern with the globstar removed (it may match
// zero segments), then for every non-dot entry try it both with the
// globstar consumed and with it kept for deeper levels.
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  //console.error('pgs2', prefix, remain[0], entries)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return cb()

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [ prefix ] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false, cb)

  var isSym = this.symlinks[abs]
  var len = entries.length

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return cb()

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar)
    this._process(instead, index, true, cb)

    var below = gspref.concat(entries[i], remain)
    this._process(below, index, true, cb)
  }

  cb()
}
|
||
|
|
||
|
// A pattern with no magic left: just stat it and report it if it exists.
Glob.prototype._processSimple = function (prefix, index, cb) {
  // XXX review this.  Shouldn't it be doing the mounting etc
  // before doing stat?  kinda weird?
  var self = this
  this._stat(prefix, function (er, exists) {
    self._processSimple2(prefix, index, er, exists, cb)
  })
}
|
||
|
// Completion of _processSimple: mount the path onto the root if needed,
// normalize separators on Windows, and emit the match.
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {

  //console.error('ps2', prefix, exists)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return cb()

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    // remember a trailing slash, since path.resolve strips it
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      if (trail)
        prefix += '/'
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
  cb()
}
|
||
|
|
||
|
// Returns either 'DIR', 'FILE', or false
// Stat f (relative to the glob root), answering via cb(er, type[, stat]).
// Consults this.cache / this.statCache first; symlinks are resolved to
// their target unless the target is missing, in which case the link's
// own lstat is used.
Glob.prototype._stat = function (f, cb) {
  var abs = this._makeAbs(f)
  // a trailing slash means the caller requires a directory
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return cb()

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return cb(null, c)

    if (needDir && c === 'FILE')
      return cb()

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var exists  // NOTE(review): declared but never used
  var stat = this.statCache[abs]
  if (stat !== undefined) {
    if (stat === false)
      return cb(null, stat)
    else {
      var type = stat.isDirectory() ? 'DIR' : 'FILE'
      if (needDir && type === 'FILE')
        return cb()
      else
        return cb(null, type, stat)
    }
  }

  var self = this
  // coalesce concurrent stats of the same path
  var statcb = inflight('stat\0' + abs, lstatcb_)
  if (statcb)
    fs.lstat(abs, statcb)

  function lstatcb_ (er, lstat) {
    if (lstat && lstat.isSymbolicLink()) {
      // If it's a symlink, then treat it as the target, unless
      // the target does not exist, then treat it as a file.
      return fs.stat(abs, function (er, stat) {
        if (er)
          self._stat2(f, abs, null, lstat, cb)
        else
          self._stat2(f, abs, er, stat, cb)
      })
    } else {
      self._stat2(f, abs, er, lstat, cb)
    }
  }
}
|
||
|
|
||
|
// Shared tail of _stat: interpret the (l)stat result and fill the caches.
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
  // ENOENT/ENOTDIR mean "definitely not there": cache the negative.
  if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
    this.statCache[abs] = false
    return cb()
  }

  var needDir = f.slice(-1) === '/'
  this.statCache[abs] = stat

  // A path spelled with a trailing slash can never match a non-directory.
  if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
    return cb(null, false, stat)

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'
  // Keep any richer cached value (e.g. an entry array) over the bare kind.
  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return cb()

  return cb(null, c, stat)
}
/***/ }),
|
||
|
|
||
|
/***/ 141:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
|
||
|
var net = __webpack_require__(631);
|
||
|
var tls = __webpack_require__(16);
|
||
|
var http = __webpack_require__(605);
|
||
|
var https = __webpack_require__(211);
|
||
|
var events = __webpack_require__(614);
|
||
|
var assert = __webpack_require__(357);
|
||
|
var util = __webpack_require__(669);
|
||
|
|
||
|
|
||
|
exports.httpOverHttp = httpOverHttp;
|
||
|
exports.httpsOverHttp = httpsOverHttp;
|
||
|
exports.httpOverHttps = httpOverHttps;
|
||
|
exports.httpsOverHttps = httpsOverHttps;
|
||
|
|
||
|
|
||
|
// Agent for plain HTTP requests tunneled through an HTTP proxy.
function httpOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  return agent;
}
// Agent for HTTPS requests tunneled through an HTTP proxy:
// CONNECT over plain HTTP, then TLS on the tunneled socket.
function httpsOverHttp(options) {
  var agent = new TunnelingAgent(options);
  agent.request = http.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}
// Agent for plain HTTP requests tunneled through an HTTPS proxy.
function httpOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  return agent;
}
// Agent for HTTPS requests tunneled through an HTTPS proxy:
// CONNECT over TLS, then a second TLS session inside the tunnel.
function httpsOverHttps(options) {
  var agent = new TunnelingAgent(options);
  agent.request = https.request;
  agent.createSocket = createSecureSocket;
  agent.defaultPort = 443;
  return agent;
}
|
// HTTP(S) agent that establishes proxy tunnels (CONNECT) for its requests.
function TunnelingAgent(options) {
  var agent = this;
  agent.options = options || {};
  agent.proxyOptions = agent.options.proxy || {};
  agent.maxSockets = agent.options.maxSockets || http.Agent.defaultMaxSockets;
  agent.requests = []; // requests queued while all sockets are busy
  agent.sockets = [];  // sockets (or placeholders) currently in use

  agent.on('free', function onFree(socket, host, port, localAddress) {
    var opts = toOptions(host, port, localAddress);
    // Hand the freed socket to a queued request for the same origin.
    for (var i = 0, len = agent.requests.length; i < len; ++i) {
      var pending = agent.requests[i];
      if (pending.host === opts.host && pending.port === opts.port) {
        agent.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    // Nobody is waiting: close the socket so the agent keeps no idle ones.
    socket.destroy();
    agent.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
|
// Route an outgoing request through the pool: queue it when at capacity,
// otherwise open a fresh tunnel and attach lifecycle listeners.
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  // Over the socket limit: park the request until one frees up.
  if (self.sockets.length >= this.maxSockets) {
    self.requests.push(options);
    return;
  }

  // Under maxSockets: establish a new tunnel for this request.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    function onFree() {
      self.emit('free', socket, options);
    }

    function onCloseOrRemove(err) {
      // Drop the socket and detach everything we attached above.
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
|
// Issue a CONNECT request to the proxy and hand the resulting raw socket
// to `cb`. A placeholder occupies the pool slot until the tunnel is up.
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated/unsafe `new Buffer(string)`
    // constructor (Node DEP0005); behavior is identical for strings.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    // Anything but 200 means the proxy refused the tunnel.
    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
                            'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    // A CONNECT response must carry no body.
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the real socket into the slot the placeholder reserved.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
|
// Drop a socket (or placeholder) from the pool and, if any request is
// queued, immediately start a replacement tunnel for it.
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var pos = this.sockets.indexOf(socket)
  if (pos === -1) {
    return;
  }
  this.sockets.splice(pos, 1);

  var pending = this.requests.shift();
  if (pending) {
    // A slot just opened up: build a new tunnel for the oldest
    // queued request so the pool stays saturated.
    this.createSocket(pending, function(socket) {
      pending.request.onSocket(socket);
    });
  }
};
|
// createSocket variant for https-over-* agents: once the raw tunnel is
// open, wrap it in TLS before handing it to the request.
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    // SNI servername: host header without any :port suffix.
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
|
// Normalize the two addRequest calling conventions into one options
// object: legacy positional (host, port, localAddress) vs. an options
// object passed as the first argument.
function toOptions(host, port, localAddress) {
  if (typeof host === 'string') { // since v0.10
    return {
      host: host,
      port: port,
      localAddress: localAddress
    };
  }
  return host; // for v0.11 or later
}
|
// Shallow-merge every object argument into `target`, left to right.
// Later sources win; keys whose value is `undefined` are skipped.
function mergeOptions(target) {
  for (var i = 1; i < arguments.length; ++i) {
    var source = arguments[i];
    if (typeof source !== 'object') {
      continue;
    }
    var keys = Object.keys(source);
    for (var j = 0; j < keys.length; ++j) {
      var key = keys[j];
      if (source[key] !== undefined) {
        target[key] = source[key];
      }
    }
  }
  return target;
}
|
// Debug logging: active only when NODE_DEBUG contains the word
// "tunnel"; otherwise a no-op so hot paths pay nothing.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    // Prefix every message so tunnel output is easy to grep.
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
|
exports.debug = debug; // for test
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 211:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("https");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 214:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const artifact_client_1 = __webpack_require__(359);
|
||
|
/**
|
||
|
* Constructs an ArtifactClient
|
||
|
*/
|
||
|
/**
 * Constructs an ArtifactClient via the default factory.
 */
function create() {
  return artifact_client_1.DefaultArtifactClient.create();
}
exports.create = create;
|
//# sourceMappingURL=artifact-client.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 221:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const core_1 = __webpack_require__(470);
|
||
|
/**
|
||
|
* Upload Status Reporter that displays information about the progress/status of an artifact that is being uploaded
|
||
|
*
|
||
|
* Every 10 seconds, the total status of the upload gets displayed. If there is a large file that is being uploaded,
|
||
|
* extra information about the individual status of an upload can also be displayed
|
||
|
*/
|
||
|
/**
 * Upload Status Reporter that displays information about the progress/status
 * of an artifact that is being uploaded.
 *
 * Every 10 seconds, the total status of the upload gets displayed. If there is
 * a large file that is being uploaded, extra information about the individual
 * status of that upload is displayed every second.
 */
class UploadStatusReporter {
    constructor() {
        this.totalNumberOfFilesToUpload = 0;
        this.processedCount = 0;
        this.largeUploads = new Map();
        // Interval handles; undefined until start() is called.
        this.totalUploadStatus = undefined;
        this.largeFileUploadStatus = undefined;
    }
    setTotalNumberOfFilesToUpload(fileTotal) {
        this.totalNumberOfFilesToUpload = fileTotal;
    }
    start() {
        // displays information about the total upload status every 10 seconds
        this.totalUploadStatus = setInterval(() => {
            // display 1 decimal place without any rounding
            const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToUpload);
            core_1.info(`Total file(s): ${this.totalNumberOfFilesToUpload} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`);
        }, 10000);
        // displays extra information about any large files that take a significant amount of time to upload every 1 second
        this.largeFileUploadStatus = setInterval(() => {
            for (const value of Array.from(this.largeUploads.values())) {
                core_1.info(value);
            }
            // delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added
            this.largeUploads = new Map();
        }, 1000);
    }
    // Record per-file progress (numerator/denominator) for a large upload;
    // the periodic interval in start() displays and then clears it.
    updateLargeFileStatus(fileName, numerator, denominator) {
        // display 1 decimal place without any rounding
        const percentage = this.formatPercentage(numerator, denominator);
        const displayInformation = `Uploading ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`;
        // any previously added display information should be overwritten for the specific large file because a map is being used
        this.largeUploads.set(fileName, displayInformation);
    }
    // Stop both status intervals; safe to call before start().
    stop() {
        if (this.totalUploadStatus) {
            clearInterval(this.totalUploadStatus);
        }
        if (this.largeFileUploadStatus) {
            clearInterval(this.largeFileUploadStatus);
        }
    }
    incrementProcessedCount() {
        this.processedCount++;
    }
    formatPercentage(numerator, denominator) {
        // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed
        return ((numerator / denominator) * 100).toFixed(4).toString();
    }
}
|
exports.UploadStatusReporter = UploadStatusReporter;
|
||
|
//# sourceMappingURL=upload-status-reporter.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 226:
|
||
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
// Attaches an RFC 7617 Basic Authorization header built from a
// username/password pair.
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    // Pre-authorize: set the header on every outgoing request.
    prepareRequest(options) {
        const encoded = Buffer.from(this.username + ':' + this.password).toString('base64');
        options.headers['Authorization'] = 'Basic ' + encoded;
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
|
exports.BasicCredentialHandler = BasicCredentialHandler;
|
||
|
// Attaches an OAuth2-style Bearer token Authorization header.
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        options.headers['Authorization'] = 'Bearer ' + this.token;
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
|
exports.BearerCredentialHandler = BearerCredentialHandler;
|
||
|
// Attaches an Azure-DevOps-style PAT header: Basic auth with the
// literal username "PAT" and the token as the password.
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        const encoded = Buffer.from('PAT:' + this.token).toString('base64');
        options.headers['Authorization'] = 'Basic ' + encoded;
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
|
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 245:
|
||
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
||
|
module.exports = globSync
|
||
|
globSync.GlobSync = GlobSync
|
||
|
|
||
|
var fs = __webpack_require__(747)
|
||
|
var rp = __webpack_require__(302)
|
||
|
var minimatch = __webpack_require__(93)
|
||
|
var Minimatch = minimatch.Minimatch
|
||
|
var Glob = __webpack_require__(120).Glob
|
||
|
var util = __webpack_require__(669)
|
||
|
var path = __webpack_require__(622)
|
||
|
var assert = __webpack_require__(357)
|
||
|
var isAbsolute = __webpack_require__(681)
|
||
|
var common = __webpack_require__(856)
|
||
|
var alphasort = common.alphasort
|
||
|
var alphasorti = common.alphasorti
|
||
|
var setopts = common.setopts
|
||
|
var ownProp = common.ownProp
|
||
|
var childrenIgnored = common.childrenIgnored
|
||
|
var isIgnored = common.isIgnored
|
||
|
|
||
|
// Synchronous glob entry point: run the whole walk and return matches.
function globSync (pattern, options) {
  // Guard against callers passing an async-style callback.
  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  return new GlobSync(pattern, options).found
}
|
// Synchronous walker. Constructing it performs the entire match; results
// land on this.found once _finish() runs.
function GlobSync (pattern, options) {
  if (!pattern)
    throw new Error('must provide pattern')

  // Guard against callers passing an async-style callback.
  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  if (!(this instanceof GlobSync))
    return new GlobSync(pattern, options)

  setopts(this, pattern, options)

  if (this.noprocess)
    return this

  // Process each brace-expanded pattern set, then finalize results.
  var n = this.minimatch.set.length
  this.matches = new Array(n)
  for (var i = 0; i < n; i++) {
    this._process(this.minimatch.set[i], i, false)
  }
  this._finish()
}
|
// Post-processing: optionally resolve matches to real paths, then let the
// shared helper sort/dedupe and populate this.found.
GlobSync.prototype._finish = function () {
  assert(this instanceof GlobSync)
  if (this.realpath) {
    var self = this
    this.matches.forEach(function (matchset, index) {
      // Rebuild each match set keyed by realpath so duplicates collapse.
      var set = self.matches[index] = Object.create(null)
      for (var p in matchset) {
        try {
          p = self._makeAbs(p)
          var real = rp.realpathSync(p, self.realpathCache)
          set[real] = true
        } catch (er) {
          // A stat failure just means the path itself is the best answer;
          // anything else is a genuine error.
          if (er.syscall === 'stat')
            set[self._makeAbs(p)] = true
          else
            throw er
        }
      }
    })
  }
  common.finish(this)
}
|
// Core dispatcher: split the pattern into its literal head and magic tail,
// then route to simple/globstar/readdir processing.
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
  assert(this instanceof GlobSync)

  // Count the leading segments that are plain strings (no magic).
  var n = 0
  while (typeof pattern[n] === 'string') {
    n++
  }
  // now n is the index of the first one that is *not* a string.

  var prefix
  switch (n) {
    // Entirely literal: just stat it and record the result.
    case pattern.length:
      this._processSimple(pattern.join('/'), index)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // Decide which directory to read entries from.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  // if ignored, skip processing
  if (childrenIgnored(this, read))
    return

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
}
||
|
// Match the next pattern segment against the entries of one directory,
// emitting matches directly when the pattern is exhausted or recursing
// into _process for deeper segments.
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
  var entries = this._readdir(abs, inGlobStar)

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return

  // If this is the last remaining pattern bit, then no need for an extra
  // stat *unless* the user asked for mark or stat explicitly — readdir
  // already proved these entries exist.
  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix.slice(-1) !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return
  }

  // Otherwise every matched entry stands in for this segment, and the
  // rest of the pattern is processed beneath it.
  remain.shift()
  for (var i = 0; i < len; i++) {
    var e = matchedEntries[i]
    var newPattern
    if (prefix)
      newPattern = [prefix, e]
    else
      newPattern = [e]
    this._process(newPattern.concat(remain), index, inGlobStar)
  }
}
|
||
|
// Record a single match for pattern slot `index`, applying the ignore,
// mark, absolute and nodir options first.
GlobSync.prototype._emitMatch = function (index, e) {
  if (isIgnored(this, e))
    return

  var abs = this._makeAbs(e)

  // Append the trailing / for directories when mark is on.
  if (this.mark)
    e = this._mark(e)

  if (this.absolute) {
    e = abs
  }

  // Already recorded: nothing to do.
  if (this.matches[index][e])
    return

  // nodir: suppress anything we know to be a directory.
  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  if (this.stat)
    this._stat(e)
}
||
|
// readdir variant used while expanding **: lstat first so symlinked
// directories are noticed (and not recursed into forever).
GlobSync.prototype._readdirInGlobStar = function (abs) {
  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false)

  var entries
  var lstat
  try {
    lstat = fs.lstatSync(abs)
  } catch (er) {
    if (er.code === 'ENOENT') {
      // lstat failed, doesn't exist
      return null
    }
  }

  var isSym = lstat && lstat.isSymbolicLink()
  this.symlinks[abs] = isSym

  // If it's not a symlink or a dir, then it's definitely a regular file.
  // don't bother doing a readdir in that case.
  if (!isSym && lstat && !lstat.isDirectory())
    this.cache[abs] = 'FILE'
  else
    entries = this._readdir(abs, false)

  return entries
}
||
|
// Cached directory read: answer from this.cache when possible, otherwise
// hit the filesystem and record the outcome.
GlobSync.prototype._readdir = function (abs, inGlobStar) {
  // Globstar expansion needs symlink info first, unless already known.
  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    // Known to be missing or a plain file: no entries to return.
    if (!c || c === 'FILE')
      return null

    if (Array.isArray(c))
      return c
  }

  try {
    return this._readdirEntries(abs, fs.readdirSync(abs))
  } catch (er) {
    this._readdirError(abs, er)
    return null
  }
}
||
|
// Record a successful readdir in the caches and return the entries.
GlobSync.prototype._readdirEntries = function (abs, entries) {
  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  // mark and cache dir-ness
  this.cache[abs] = entries
  return entries
}
||
|
// Classify a readdir failure and cache what it tells us about the path.
GlobSync.prototype._readdirError = function (f, er) {
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      // An unreadable cwd makes every pattern unmatchable: fail loudly.
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        throw error
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error. Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict)
        throw er
      if (!this.silent)
        console.error('glob error', er)
      break
  }
}
||
|
// Expand a ** segment: try the pattern with the globstar removed, and
// re-enter the globstar state beneath every non-dot child entry.
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
  var entries = this._readdir(abs, inGlobStar)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [ prefix ] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false)

  var len = entries.length
  var isSym = this.symlinks[abs]

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(e, remainWithoutGlobStar)
    this._process(instead, index, true)

    var below = gspref.concat(e, remain)
    this._process(below, index, true)
  }
}
||
|
// A pattern with no magic left: stat it once and record the result.
GlobSync.prototype._processSimple = function (prefix, index) {
  // XXX review this. Shouldn't it be doing the mounting etc
  // before doing stat? kinda weird?
  var exists = this._stat(prefix)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var hadTrailingSlash = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      // path.resolve strips a trailing slash; restore it.
      if (hadTrailingSlash)
        prefix += '/'
    }
  }

  // Normalize separators on Windows.
  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
}
|
// Returns either 'DIR', 'FILE', or false
GlobSync.prototype._stat = function (f) {
  var abs = this._makeAbs(f)
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return false

  // Try to answer from the readdir cache before touching the disk.
  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    // A stored entry list means we already know it is a directory.
    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return c

    if (needDir && c === 'FILE')
      return false

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var stat = this.statCache[abs]
  if (!stat) {
    var lstat
    try {
      lstat = fs.lstatSync(abs)
    } catch (er) {
      if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
        // Definitely absent: cache the negative result.
        this.statCache[abs] = false
        return false
      }
    }

    // Symlinks resolve to their target, unless the target is missing,
    // in which case the link itself stands in.
    if (lstat && lstat.isSymbolicLink()) {
      try {
        stat = fs.statSync(abs)
      } catch (er) {
        stat = lstat
      }
    } else {
      stat = lstat
    }
  }

  this.statCache[abs] = stat

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'

  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return false

  return c
}
|
// Delegate trailing-slash marking to the shared common helper.
GlobSync.prototype._mark = function (p) {
  return common.mark(this, p)
}
||
|
// Delegate path absolutization to the shared common helper.
GlobSync.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
}
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 281:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
});
|
||
|
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const internal_globber_1 = __webpack_require__(297);
|
||
|
/**
|
||
|
* Constructs a globber
|
||
|
*
|
||
|
* @param patterns Patterns separated by newlines
|
||
|
* @param options Glob options
|
||
|
*/
|
||
|
/**
 * Constructs a globber
 *
 * @param patterns Patterns separated by newlines
 * @param options Glob options
 */
function create(patterns, options) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield internal_globber_1.DefaultGlobber.create(patterns, options);
    });
}
exports.create = create;
|
//# sourceMappingURL=glob.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 297:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
// TypeScript-emitted helper that implements async/await down-leveled to a
// generator: each yielded value is adopted into a Promise and the generator
// is resumed with the settled result.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap plain values in the target Promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Resume the generator until done, resolving with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
// TypeScript-emitted helper: obtains an async iterator for `o`, falling back
// to wrapping its sync iterator so every value is delivered via a Promise.
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    // Prefer a native async iterator; otherwise adapt the sync iterator.
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    // Forward next/throw/return through a Promise-returning wrapper.
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
|
||
|
// TypeScript-emitted marker used by __asyncGenerator to distinguish values
// that must be awaited from values that are plain `yield`s.
var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
|
||
|
// TypeScript-emitted helper implementing `async function*` down-leveled: a
// queue `q` serializes concurrent next/throw/return calls while the inner
// generator `g` runs.
var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var g = generator.apply(thisArg, _arguments || []), i, q = [];
    return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
    // Queue a request; only start resuming when the queue was previously empty.
    function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
    function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
    // __await-wrapped values are awaited; others settle the pending request.
    function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
    function fulfill(value) { resume("next", value); }
    function reject(value) { resume("throw", value); }
    // Settle the head request, then drain the next queued one if any.
    function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const core = __webpack_require__(470);
|
||
|
const fs = __webpack_require__(747);
|
||
|
const globOptionsHelper = __webpack_require__(601);
|
||
|
const path = __webpack_require__(622);
|
||
|
const patternHelper = __webpack_require__(597);
|
||
|
const internal_match_kind_1 = __webpack_require__(327);
|
||
|
const internal_pattern_1 = __webpack_require__(923);
|
||
|
const internal_search_state_1 = __webpack_require__(728);
|
||
|
const IS_WINDOWS = process.platform === 'win32';
|
||
|
/**
 * Globber implementation backing `create()`: parses newline-delimited glob
 * patterns and walks the file system from the derived search paths, yielding
 * matched item paths.
 */
class DefaultGlobber {
    constructor(options) {
        this.patterns = [];
        this.searchPaths = [];
        this.options = globOptionsHelper.getOptions(options);
    }
    /** Returns a copy of the search paths derived from the patterns. */
    getSearchPaths() {
        // Return a copy
        return this.searchPaths.slice();
    }
    /** Collects every path produced by globGenerator() into an array. */
    glob() {
        var e_1, _a;
        return __awaiter(this, void 0, void 0, function* () {
            const result = [];
            try {
                // Drain the async generator into `result`.
                for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
                    const itemPath = _c.value;
                    result.push(itemPath);
                }
            }
            catch (e_1_1) { e_1 = { error: e_1_1 }; }
            finally {
                try {
                    // Close the iterator if the loop exited early.
                    if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
                }
                finally { if (e_1) throw e_1.error; }
            }
            return result;
        });
    }
    /**
     * Async generator that performs the actual depth-first search and yields
     * each matched path.
     */
    globGenerator() {
        return __asyncGenerator(this, arguments, function* globGenerator_1() {
            // Fill in defaults options
            const options = globOptionsHelper.getOptions(this.options);
            // Implicit descendants?
            const patterns = [];
            for (const pattern of this.patterns) {
                patterns.push(pattern);
                if (options.implicitDescendants &&
                    (pattern.trailingSeparator ||
                        pattern.segments[pattern.segments.length - 1] !== '**')) {
                    // Also match everything under a matched directory.
                    patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
                }
            }
            // Push the search paths
            const stack = [];
            for (const searchPath of patternHelper.getSearchPaths(patterns)) {
                core.debug(`Search path '${searchPath}'`);
                // Exists?
                try {
                    // Intentionally using lstat. Detection for broken symlink
                    // will be performed later (if following symlinks).
                    yield __await(fs.promises.lstat(searchPath));
                }
                catch (err) {
                    // Missing search paths are simply skipped.
                    if (err.code === 'ENOENT') {
                        continue;
                    }
                    throw err;
                }
                stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
            }
            // Search
            const traversalChain = []; // used to detect cycles
            while (stack.length) {
                // Pop
                const item = stack.pop();
                // Match?
                const match = patternHelper.match(patterns, item.path);
                const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
                if (!match && !partialMatch) {
                    continue;
                }
                // Stat
                const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
                // Broken symlink, or symlink cycle detected, or no longer exists
                );
                // Broken symlink, or symlink cycle detected, or no longer exists
                if (!stats) {
                    continue;
                }
                // Directory
                if (stats.isDirectory()) {
                    // Matched
                    if (match & internal_match_kind_1.MatchKind.Directory) {
                        yield yield __await(item.path);
                    }
                    // Descend?
                    else if (!partialMatch) {
                        continue;
                    }
                    // Push the child items in reverse
                    const childLevel = item.level + 1;
                    const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
                    stack.push(...childItems.reverse());
                }
                // File
                else if (match & internal_match_kind_1.MatchKind.File) {
                    yield yield __await(item.path);
                }
            }
        });
    }
    /**
     * Constructs a DefaultGlobber
     */
    static create(patterns, options) {
        return __awaiter(this, void 0, void 0, function* () {
            const result = new DefaultGlobber(options);
            if (IS_WINDOWS) {
                // Normalize CRLF / CR line endings to LF before splitting.
                patterns = patterns.replace(/\r\n/g, '\n');
                patterns = patterns.replace(/\r/g, '\n');
            }
            const lines = patterns.split('\n').map(x => x.trim());
            for (const line of lines) {
                // Empty or comment
                if (!line || line.startsWith('#')) {
                    continue;
                }
                // Pattern
                else {
                    result.patterns.push(new internal_pattern_1.Pattern(line));
                }
            }
            result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
            return result;
        });
    }
    /**
     * Stats a search item, following symlinks when configured. Returns
     * undefined for broken symlinks (when omitted) or detected cycles.
     */
    static stat(item, options, traversalChain) {
        return __awaiter(this, void 0, void 0, function* () {
            // Note:
            // `stat` returns info about the target of a symlink (or symlink chain)
            // `lstat` returns info about a symlink itself
            let stats;
            if (options.followSymbolicLinks) {
                try {
                    // Use `stat` (following symlinks)
                    stats = yield fs.promises.stat(item.path);
                }
                catch (err) {
                    if (err.code === 'ENOENT') {
                        if (options.omitBrokenSymbolicLinks) {
                            core.debug(`Broken symlink '${item.path}'`);
                            return undefined;
                        }
                        throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
                    }
                    throw err;
                }
            }
            else {
                // Use `lstat` (not following symlinks)
                stats = yield fs.promises.lstat(item.path);
            }
            // Note, isDirectory() returns false for the lstat of a symlink
            if (stats.isDirectory() && options.followSymbolicLinks) {
                // Get the realpath
                const realPath = yield fs.promises.realpath(item.path);
                // Fixup the traversal chain to match the item level
                while (traversalChain.length >= item.level) {
                    traversalChain.pop();
                }
                // Test for a cycle
                if (traversalChain.some((x) => x === realPath)) {
                    core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
                    return undefined;
                }
                // Update the traversal chain
                traversalChain.push(realPath);
            }
            return stats;
        });
    }
}
|
||
|
exports.DefaultGlobber = DefaultGlobber;
|
||
|
//# sourceMappingURL=internal-globber.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 302:
|
||
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
||
|
// Drop-in replacement for fs.realpath/realpathSync that falls back to a
// pure-JS implementation when the native one fails with the error codes
// newer Node versions surface (see newError below).
module.exports = realpath
realpath.realpath = realpath
realpath.sync = realpathSync
realpath.realpathSync = realpathSync
realpath.monkeypatch = monkeypatch
realpath.unmonkeypatch = unmonkeypatch

var fs = __webpack_require__(747)
// Keep references to the originals so unmonkeypatch() can restore them.
var origRealpath = fs.realpath
var origRealpathSync = fs.realpathSync

var version = process.version
// On Node v0.x–v5.x the native realpath already behaves as desired,
// so the wrappers pass straight through.
var ok = /^v[0-5]\./.test(version)
var old = __webpack_require__(117)
|
||
|
|
||
|
// True when `er` is a realpath failure the JS fallback implementation can
// handle (error behavior introduced by newer Node versions).
function newError (er) {
  var retryable = er &&
    er.syscall === 'realpath' &&
    ['ELOOP', 'ENOMEM', 'ENAMETOOLONG'].indexOf(er.code) !== -1
  return retryable
}
|
||
|
|
||
|
// Async realpath: delegate to the native implementation, but on the error
// codes matched by newError() retry with the old pure-JS algorithm.
function realpath (p, cache, cb) {
  // Old Node versions (v0.x–v5.x) need no fallback.
  if (ok) {
    return origRealpath(p, cache, cb)
  }

  // `cache` is optional; shift arguments when only a callback was given.
  if (typeof cache === 'function') {
    cb = cache
    cache = null
  }
  origRealpath(p, cache, function (er, result) {
    if (newError(er)) {
      // Fall back to the JS implementation for ELOOP/ENOMEM/ENAMETOOLONG.
      old.realpath(p, cache, cb)
    } else {
      cb(er, result)
    }
  })
}
|
||
|
|
||
|
// Sync realpath with the same fallback strategy as the async version.
function realpathSync (p, cache) {
  // Old Node versions (v0.x–v5.x) need no fallback.
  if (ok) {
    return origRealpathSync(p, cache)
  }

  try {
    return origRealpathSync(p, cache)
  } catch (er) {
    if (newError(er)) {
      // Fall back to the JS implementation for ELOOP/ENOMEM/ENAMETOOLONG.
      return old.realpathSync(p, cache)
    } else {
      throw er
    }
  }
}
|
||
|
|
||
|
// Globally replace fs.realpath/realpathSync with the fallback-aware wrappers.
function monkeypatch () {
  fs.realpath = realpath
  fs.realpathSync = realpathSync
}
|
||
|
|
||
|
// Restore the original fs.realpath/realpathSync implementations.
function unmonkeypatch () {
  fs.realpath = origRealpath
  fs.realpathSync = origRealpathSync
}
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 306:
|
||
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
||
|
var concatMap = __webpack_require__(896);
|
||
|
var balanced = __webpack_require__(621);
|
||
|
|
||
|
module.exports = expandTop;

// Unique sentinel strings used to hide backslash-escaped specials from the
// brace-expansion parser; the Math.random() component makes collisions with
// user input vanishingly unlikely.
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
|
||
|
|
||
|
// Interpret a sequence endpoint: a decimal string becomes its number,
// anything else becomes the char code of its first character.
function numeric(str) {
  var parsed = parseInt(str, 10);
  if (parsed == str) {
    return parsed;
  }
  return str.charCodeAt(0);
}
|
||
|
|
||
|
// Replace backslash-escaped specials with sentinel strings so the parser
// never treats them as structural characters.
function escapeBraces(str) {
  var out = str.split('\\\\').join(escSlash);
  out = out.split('\\{').join(escOpen);
  out = out.split('\\}').join(escClose);
  out = out.split('\\,').join(escComma);
  return out.split('\\.').join(escPeriod);
}
|
||
|
|
||
|
// Restore the literal characters hidden by escapeBraces.
function unescapeBraces(str) {
  var out = str.split(escSlash).join('\\');
  out = out.split(escOpen).join('{');
  out = out.split(escClose).join('}');
  out = out.split(escComma).join(',');
  return out.split(escPeriod).join('.');
}
|
||
|
|
||
|
|
||
|
// Basically just str.split(","), but handling cases
|
||
|
// where we have nested braced sections, which should be
|
||
|
// treated as individual members, like {a,{b,c},d}
|
||
|
function parseCommaParts(str) {
|
||
|
if (!str)
|
||
|
return [''];
|
||
|
|
||
|
var parts = [];
|
||
|
var m = balanced('{', '}', str);
|
||
|
|
||
|
if (!m)
|
||
|
return str.split(',');
|
||
|
|
||
|
var pre = m.pre;
|
||
|
var body = m.body;
|
||
|
var post = m.post;
|
||
|
var p = pre.split(',');
|
||
|
|
||
|
p[p.length-1] += '{' + body + '}';
|
||
|
var postParts = parseCommaParts(post);
|
||
|
if (post.length) {
|
||
|
p[p.length-1] += postParts.shift();
|
||
|
p.push.apply(p, postParts);
|
||
|
}
|
||
|
|
||
|
parts.push.apply(parts, p);
|
||
|
|
||
|
return parts;
|
||
|
}
|
||
|
|
||
|
// Entry point: escape user-escaped specials, expand, then restore them.
function expandTop(str) {
  if (!str)
    return [];

  // I don't know why Bash 4.3 does this, but it does.
  // Anything starting with {} will have the first two bytes preserved
  // but *only* at the top level, so {},a}b will not expand to anything,
  // but a{},b}c will be expanded to [a}c,abc].
  // One could argue that this is a bug in Bash, but since the goal of
  // this module is to match Bash's rules, we escape a leading {}
  if (str.substr(0, 2) === '{}') {
    str = '\\{\\}' + str.substr(2);
  }

  return expand(escapeBraces(str), true).map(unescapeBraces);
}
|
||
|
|
||
|
// Pass-through used where no transformation is needed.
function identity(value) {
  return value;
}
|
||
|
|
||
|
// Re-wrap a fragment in literal braces.
function embrace(str) {
  return ['{', str, '}'].join('');
}
|
||
|
// True when a numeric endpoint has a leading zero (e.g. "01", "-05"),
// meaning the expansion should zero-pad its output.
function isPadded(el) {
  var leadingZero = /^-?0\d/;
  return leadingZero.test(el);
}
|
||
|
|
||
|
// Inclusive bound check for ascending sequence iteration.
function lte(i, y) {
  return y >= i;
}
|
||
|
// Inclusive bound check for descending sequence iteration.
function gte(i, y) {
  return y <= i;
}
|
||
|
|
||
|
// Core recursive expansion. `isTop` marks the outermost call, which drops
// empty expansions of non-sequence sets to mirror Bash.
function expand(str, isTop) {
  var expansions = [];

  var m = balanced('{', '}', str);
  // No balanced braces, or the braces belong to a `${...}` construct:
  // nothing to expand.
  if (!m || /\$$/.test(m.pre)) return [str];

  var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
  var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
  var isSequence = isNumericSequence || isAlphaSequence;
  var isOptions = m.body.indexOf(',') >= 0;
  if (!isSequence && !isOptions) {
    // {a},b}
    if (m.post.match(/,.*\}/)) {
      // The close brace matched the wrong open brace; re-escape it and retry.
      str = m.pre + '{' + m.body + escClose + m.post;
      return expand(str);
    }
    return [str];
  }

  var n;
  if (isSequence) {
    n = m.body.split(/\.\./);
  } else {
    n = parseCommaParts(m.body);
    if (n.length === 1) {
      // x{{a,b}}y ==> x{a}y x{b}y
      n = expand(n[0], false).map(embrace);
      if (n.length === 1) {
        var post = m.post.length
          ? expand(m.post, false)
          : [''];
        return post.map(function(p) {
          return m.pre + n[0] + p;
        });
      }
    }
  }

  // at this point, n is the parts, and we know it's not a comma set
  // with a single entry.

  // no need to expand pre, since it is guaranteed to be free of brace-sets
  var pre = m.pre;
  var post = m.post.length
    ? expand(m.post, false)
    : [''];

  var N;

  if (isSequence) {
    var x = numeric(n[0]);
    var y = numeric(n[1]);
    var width = Math.max(n[0].length, n[1].length)
    var incr = n.length == 3
      ? Math.abs(numeric(n[2]))
      : 1;
    var test = lte;
    var reverse = y < x;
    if (reverse) {
      incr *= -1;
      test = gte;
    }
    var pad = n.some(isPadded);

    N = [];

    for (var i = x; test(i, y); i += incr) {
      var c;
      if (isAlphaSequence) {
        c = String.fromCharCode(i);
        // A literal backslash in an alpha range is dropped, as in Bash.
        if (c === '\\')
          c = '';
      } else {
        c = String(i);
        if (pad) {
          var need = width - c.length;
          if (need > 0) {
            var z = new Array(need + 1).join('0');
            // Keep the minus sign in front of the zero padding.
            if (i < 0)
              c = '-' + z + c.slice(1);
            else
              c = z + c;
          }
        }
      }
      N.push(c);
    }
  } else {
    // Comma set: expand each member recursively.
    N = concatMap(n, function(el) { return expand(el, false) });
  }

  // Cross product of each expanded member with each expanded suffix.
  for (var j = 0; j < N.length; j++) {
    for (var k = 0; k < post.length; k++) {
      var expansion = pre + N[j] + post[k];
      if (!isTop || isSequence || expansion)
        expansions.push(expansion);
    }
  }

  return expansions;
}
|
||
|
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 315:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
// Polyfill of util.inherits that avoids pulling in the full 'util' module.
if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    if (superCtor) {
      ctor.super_ = superCtor
      ctor.prototype = Object.create(superCtor.prototype, {
        constructor: {
          value: ctor,
          enumerable: false,
          writable: true,
          configurable: true
        }
      })
    }
  };
} else {
  // old school shim for old browsers
  module.exports = function inherits(ctor, superCtor) {
    if (superCtor) {
      ctor.super_ = superCtor
      // Intermediate constructor avoids invoking superCtor's constructor.
      var TempCtor = function () {}
      TempCtor.prototype = superCtor.prototype
      ctor.prototype = new TempCtor()
      ctor.prototype.constructor = ctor
    }
  }
}
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 327:
|
||
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
/**
 * Indicates whether a pattern matches a path.
 * Values are combinable bit flags (All === Directory | File); callers test
 * membership with `match & MatchKind.Directory` / `match & MatchKind.File`.
 */
var MatchKind;
(function (MatchKind) {
    /** Not matched */
    MatchKind[MatchKind["None"] = 0] = "None";
    /** Matched if the path is a directory */
    MatchKind[MatchKind["Directory"] = 1] = "Directory";
    /** Matched if the path is a regular file */
    MatchKind[MatchKind["File"] = 2] = "File";
    /** Matched */
    MatchKind[MatchKind["All"] = 3] = "All";
})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
|
||
|
//# sourceMappingURL=internal-match-kind.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 357:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("assert");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 359:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
// TypeScript-emitted helper that implements async/await down-leveled to a
// generator: each yielded value is adopted into a Promise and the generator
// is resumed with the settled result.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap plain values in the target Promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Resume the generator until done, resolving with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
// TypeScript-emitted helper: adapt a CommonJS export object into an
// ES-module-style namespace object with the original value on `default`.
// ES-module exports (`__esModule` set) pass through untouched.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var k in mod) {
            if (Object.hasOwnProperty.call(mod, k)) {
                result[k] = mod[k];
            }
        }
    }
    result["default"] = mod;
    return result;
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const core = __importStar(__webpack_require__(470));
|
||
|
const upload_specification_1 = __webpack_require__(590);
|
||
|
const upload_http_client_1 = __webpack_require__(608);
|
||
|
const utils_1 = __webpack_require__(870);
|
||
|
const download_http_client_1 = __webpack_require__(855);
|
||
|
const download_specification_1 = __webpack_require__(532);
|
||
|
const config_variables_1 = __webpack_require__(401);
|
||
|
const path_1 = __webpack_require__(622);
|
||
|
/**
 * Default artifact client: uploads and downloads workflow artifacts through
 * the Actions artifact HTTP service.
 */
class DefaultArtifactClient {
    /**
     * Constructs a DefaultArtifactClient
     */
    static create() {
        return new DefaultArtifactClient();
    }
    /**
     * Uploads an artifact
     * @param name artifact name (validated by checkArtifactName)
     * @param files file paths to upload
     * @param rootDirectory directory the relative artifact paths are computed from
     * @param options upload options forwarded to the HTTP client
     * @returns summary with uploaded items, reported size and failed items
     */
    uploadArtifact(name, files, rootDirectory, options) {
        return __awaiter(this, void 0, void 0, function* () {
            utils_1.checkArtifactName(name);
            // Get specification for the files being uploaded
            const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files);
            const uploadResponse = {
                artifactName: name,
                artifactItems: [],
                size: 0,
                failedItems: []
            };
            const uploadHttpClient = new upload_http_client_1.UploadHttpClient();
            if (uploadSpecification.length === 0) {
                core.warning(`No files found that can be uploaded`);
            }
            else {
                // Create an entry for the artifact in the file container
                const response = yield uploadHttpClient.createArtifactInFileContainer(name);
                if (!response.fileContainerResourceUrl) {
                    core.debug(response.toString());
                    throw new Error('No URL provided by the Artifact Service to upload an artifact to');
                }
                core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`);
                // Upload each of the files that were found concurrently
                const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options);
                // Update the size of the artifact to indicate we are done uploading
                // The uncompressed size is used for display when downloading a zip of the artifact from the UI
                yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name);
                core.info(`Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`);
                uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath);
                uploadResponse.size = uploadResult.uploadSize;
                uploadResponse.failedItems = uploadResult.failedItems;
            }
            return uploadResponse;
        });
    }
    /**
     * Downloads a single artifact by name into `path`; falls back to the
     * workspace directory when no path is given.
     */
    downloadArtifact(name, path, options) {
        var _a;
        return __awaiter(this, void 0, void 0, function* () {
            const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
            const artifacts = yield downloadHttpClient.listArtifacts();
            if (artifacts.count === 0) {
                throw new Error(`Unable to find any artifacts for the associated workflow`);
            }
            const artifactToDownload = artifacts.value.find(artifact => {
                return artifact.name === name;
            });
            if (!artifactToDownload) {
                throw new Error(`Unable to find an artifact with the name: ${name}`);
            }
            const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl);
            if (!path) {
                path = config_variables_1.getWorkSpaceDirectory();
            }
            path = path_1.normalize(path);
            path = path_1.resolve(path);
            // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
            const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, ((_a = options) === null || _a === void 0 ? void 0 : _a.createArtifactFolder) || false);
            if (downloadSpecification.filesToDownload.length === 0) {
                core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`);
            }
            else {
                // Create all necessary directories recursively before starting any download
                yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure);
                yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
            }
            return {
                artifactName: name,
                downloadPath: downloadSpecification.rootDownloadLocation
            };
        });
    }
    /**
     * Downloads every artifact of the workflow run, each into its own folder
     * under `path` (defaults to the workspace directory).
     */
    downloadAllArtifacts(path) {
        return __awaiter(this, void 0, void 0, function* () {
            const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
            const response = [];
            const artifacts = yield downloadHttpClient.listArtifacts();
            if (artifacts.count === 0) {
                core.info('Unable to find any artifacts for the associated workflow');
                return response;
            }
            if (!path) {
                path = config_variables_1.getWorkSpaceDirectory();
            }
            path = path_1.normalize(path);
            path = path_1.resolve(path);
            let downloadedArtifacts = 0;
            while (downloadedArtifacts < artifacts.count) {
                const currentArtifactToDownload = artifacts.value[downloadedArtifacts];
                downloadedArtifacts += 1;
                // Get container entries for the specific artifact
                const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl);
                const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true);
                if (downloadSpecification.filesToDownload.length === 0) {
                    core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`);
                }
                else {
                    yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure);
                    yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
                }
                response.push({
                    artifactName: currentArtifactToDownload.name,
                    downloadPath: downloadSpecification.rootDownloadLocation
                });
            }
            return response;
        });
    }
}
|
||
|
exports.DefaultArtifactClient = DefaultArtifactClient;
|
||
|
//# sourceMappingURL=artifact-client.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 383:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const assert = __webpack_require__(357);
|
||
|
const path = __webpack_require__(622);
|
||
|
const pathHelper = __webpack_require__(972);
|
||
|
const IS_WINDOWS = process.platform === 'win32';
|
||
|
/**
 * Helper class for parsing paths into segments
 */
class Path {
    /**
     * Constructs a Path
     * @param itemPath Path or array of segments
     */
    constructor(itemPath) {
        this.segments = [];
        // String
        if (typeof itemPath === 'string') {
            assert(itemPath, `Parameter 'itemPath' must not be empty`);
            // Normalize slashes and trim unnecessary trailing slash
            itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
            // Not rooted
            if (!pathHelper.hasRoot(itemPath)) {
                this.segments = itemPath.split(path.sep);
            }
            // Rooted
            else {
                // Add all segments, while not at the root
                let remaining = itemPath;
                let dir = pathHelper.dirname(remaining);
                // dirname() is a fixed point at the root, which terminates the loop.
                while (dir !== remaining) {
                    // Add the segment
                    const basename = path.basename(remaining);
                    this.segments.unshift(basename);
                    // Truncate the last segment
                    remaining = dir;
                    dir = pathHelper.dirname(remaining);
                }
                // Remainder is the root
                this.segments.unshift(remaining);
            }
        }
        // Array
        else {
            // Must not be empty
            assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
            // Each segment
            for (let i = 0; i < itemPath.length; i++) {
                let segment = itemPath[i];
                // Must not be empty
                assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
                // Normalize slashes
                segment = pathHelper.normalizeSeparators(itemPath[i]);
                // Root segment
                if (i === 0 && pathHelper.hasRoot(segment)) {
                    segment = pathHelper.safeTrimTrailingSeparator(segment);
                    assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
                    this.segments.push(segment);
                }
                // All other segments
                else {
                    // Must not contain slash
                    assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
                    this.segments.push(segment);
                }
            }
        }
    }
    /**
     * Converts the path to it's string representation
     */
    toString() {
        // First segment
        let result = this.segments[0];
        // All others
        // Skip the joining separator right after a root like `/` or `C:`.
        let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
        for (let i = 1; i < this.segments.length; i++) {
            if (skipSlash) {
                skipSlash = false;
            }
            else {
                result += path.sep;
            }
            result += this.segments[i];
        }
        return result;
    }
}
|
||
|
exports.Path = Path;
|
||
|
//# sourceMappingURL=internal-path.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 385:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
// TypeScript-emitted helper that implements async/await down-leveled to a
// generator: each yielded value is adopted into a Promise and the generator
// is resumed with the settled result.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap plain values in the target Promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Resume the generator until done, resolving with its return value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
// TypeScript-emitted helper: adapt a CommonJS export object into an
// ES-module-style namespace object with the original value on `default`;
// ES-module exports (`__esModule` set) pass through untouched.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const core = __importStar(__webpack_require__(470));
|
||
|
const artifact_1 = __webpack_require__(214);
|
||
|
const constants_1 = __webpack_require__(694);
|
||
|
const search_1 = __webpack_require__(575);
|
||
|
// Action entry point: read the `name`/`path` inputs, find the files to
// upload, and upload them as a single artifact.
function run() {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            const name = core.getInput(constants_1.Inputs.Name, { required: false });
            const path = core.getInput(constants_1.Inputs.Path, { required: true });
            const searchResult = yield search_1.findFilesToUpload(path);
            if (searchResult.filesToUpload.length === 0) {
                core.warning(`No files were found for the provided path: ${path}. No artifacts will be uploaded.`);
            }
            else {
                core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} files uploaded`);
                core.debug(`Root artifact directory is ${searchResult.rootDirectory}`);
                const artifactClient = artifact_1.create();
                const options = {
                    continueOnError: true
                };
                // Fall back to the default artifact name when no `name` input is set.
                yield artifactClient.uploadArtifact(name || constants_1.getDefaultArtifactName(), searchResult.filesToUpload, searchResult.rootDirectory, options);
                core.info('Artifact upload has finished successfully!');
            }
        }
        catch (err) {
            // Surface any failure as the action's failure status.
            core.setFailed(err.message);
        }
    });
}
run();
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 401:
/***/ (function(__unusedmodule, exports) {

"use strict";

// config-variables: tunables and runner-environment lookups shared by the
// artifact upload/download client.
Object.defineProperty(exports, "__esModule", { value: true });
// Number of files uploaded concurrently.
function getUploadFileConcurrency() {
    return 2;
}
exports.getUploadFileConcurrency = getUploadFileConcurrency;
function getUploadChunkSize() {
    return 4 * 1024 * 1024; // 4 MB Chunks
}
exports.getUploadChunkSize = getUploadChunkSize;
// Number of retries attempted for a failed chunk upload.
function getUploadRetryCount() {
    return 3;
}
exports.getUploadRetryCount = getUploadRetryCount;
// Delay between upload retries, in milliseconds.
function getRetryWaitTimeInMilliseconds() {
    return 10000;
}
exports.getRetryWaitTimeInMilliseconds = getRetryWaitTimeInMilliseconds;
// Number of files downloaded concurrently.
function getDownloadFileConcurrency() {
    return 2;
}
exports.getDownloadFileConcurrency = getDownloadFileConcurrency;
// Token used to authenticate against the runtime service.
// Throws when the variable is absent (i.e. outside an Actions runner).
function getRuntimeToken() {
    const token = process.env['ACTIONS_RUNTIME_TOKEN'];
    if (!token) {
        throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable');
    }
    return token;
}
exports.getRuntimeToken = getRuntimeToken;
// Base URL of the runtime service that stores artifacts.
function getRuntimeUrl() {
    const runtimeUrl = process.env['ACTIONS_RUNTIME_URL'];
    if (!runtimeUrl) {
        throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable');
    }
    return runtimeUrl;
}
exports.getRuntimeUrl = getRuntimeUrl;
// Identifier of the current workflow run.
function getWorkFlowRunId() {
    const workFlowRunId = process.env['GITHUB_RUN_ID'];
    if (!workFlowRunId) {
        throw new Error('Unable to get GITHUB_RUN_ID env variable');
    }
    return workFlowRunId;
}
exports.getWorkFlowRunId = getWorkFlowRunId;
// Absolute path of the checked-out workspace on the runner.
function getWorkSpaceDirectory() {
    const workspaceDirectory = process.env['GITHUB_WORKSPACE'];
    if (!workspaceDirectory) {
        throw new Error('Unable to get GITHUB_WORKSPACE env variable');
    }
    return workspaceDirectory;
}
exports.getWorkSpaceDirectory = getWorkSpaceDirectory;
//# sourceMappingURL=config-variables.js.map


/***/ }),
|
||
|
|
||
|
/***/ 402:
/***/ (function(module, __unusedexports, __webpack_require__) {

/*!
 * Tmp
 *
 * Copyright (c) 2011-2017 KARASZI Istvan <github@spam.raszi.hu>
 *
 * MIT Licensed
 */

/*
 * Module dependencies.
 */
const fs = __webpack_require__(747);
const os = __webpack_require__(87);
const path = __webpack_require__(622);
const crypto = __webpack_require__(417);
// Prefer the documented fs/os constants; fall back to the deprecated
// process.binding('constants') only on very old Node versions that lack them.
const _c = fs.constants && os.constants ?
  { fs: fs.constants, os: os.constants } :
  process.binding('constants');
const rimraf = __webpack_require__(569);

/*
 * The working inner variables.
 */
const
  // the random characters to choose from
  RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',

  // placeholder that mkstemp-style templates must contain
  TEMPLATE_PATTERN = /XXXXXX/,

  DEFAULT_TRIES = 3,

  // open flags: create exclusively, read/write (fails if the path exists)
  CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),

  EBADF = _c.EBADF || _c.os.errno.EBADF,
  ENOENT = _c.ENOENT || _c.os.errno.ENOENT,

  DIR_MODE = 448 /* 0o700 */,
  FILE_MODE = 384 /* 0o600 */,

  EXIT = 'exit',

  SIGINT = 'SIGINT',

  // this will hold the objects need to be removed on exit
  _removeObjects = [];

var
  _gracefulCleanup = false;
|
||
|
|
||
|
/**
 * Random name generator based on crypto.
 * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
 *
 * @param {number} howMany
 * @returns {string} the generated random name
 * @private
 */
function _randomChars(howMany) {
  var
    value = [],
    rnd = null;

  // make sure that we do not fail because we ran out of entropy
  try {
    rnd = crypto.randomBytes(howMany);
  } catch (e) {
    // NOTE(review): pseudoRandomBytes is deprecated on modern Node; kept only
    // as a last-resort fallback for entropy exhaustion on old versions.
    rnd = crypto.pseudoRandomBytes(howMany);
  }

  // map each random byte onto the alphabet (the slight modulo bias is
  // acceptable here -- names only need to be unique, not uniform)
  for (var i = 0; i < howMany; i++) {
    value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]);
  }

  return value.join('');
}
|
||
|
|
||
|
/**
 * Tells whether the given value is `undefined`.
 *
 * @param {Object} obj value to inspect
 * @returns {boolean} true if the value is undefined
 * @private
 */
function _isUndefined(obj) {
  return obj === undefined;
}
|
||
|
|
||
|
/**
 * Parses the function arguments.
 *
 * This function helps to have optional arguments.
 *
 * @param {(Options|Function)} options
 * @param {Function} callback
 * @returns {Array} parsed arguments as an [opts, callback] pair
 * @private
 */
function _parseArguments(options, callback) {
  /* istanbul ignore else */
  if (typeof options === 'function') {
    // only a callback was supplied; fall back to empty options
    return [{}, options];
  }

  /* istanbul ignore else */
  if (_isUndefined(options)) {
    return [{}, callback];
  }

  return [options, callback];
}
|
||
|
|
||
|
/**
 * Generates a new temporary name.
 *
 * Resolution order: explicit opts.name, then a mkstemp-like opts.template,
 * then a generated "prefix + pid + random chars + postfix" name.
 *
 * @param {Object} opts
 * @returns {string} the new random name according to opts
 * @private
 */
function _generateTmpName(opts) {

  const tmpDir = _getTmpDir();

  // fail early on missing tmp dir
  if (isBlank(opts.dir) && isBlank(tmpDir)) {
    throw new Error('No tmp dir specified');
  }

  /* istanbul ignore else */
  if (!isBlank(opts.name)) {
    return path.join(opts.dir || tmpDir, opts.name);
  }

  // mkstemps like template
  // opts.template has already been guarded in tmpName() below
  /* istanbul ignore else */
  if (opts.template) {
    var template = opts.template;
    // make sure that we prepend the tmp path if none was given
    /* istanbul ignore else */
    if (path.basename(template) === template)
      template = path.join(opts.dir || tmpDir, template);
    // replace the first XXXXXX run with six random characters
    return template.replace(TEMPLATE_PATTERN, _randomChars(6));
  }

  // prefix and postfix
  const name = [
    (isBlank(opts.prefix) ? 'tmp-' : opts.prefix),
    process.pid,
    _randomChars(12),
    (opts.postfix ? opts.postfix : '')
  ].join('');

  return path.join(opts.dir || tmpDir, name);
}

/**
 * Gets a temporary file name.
 *
 * Retries (opts.tries, default 3) until a generated path does not exist on
 * disk; a fixed opts.name gets exactly one try.
 *
 * @param {(Options|tmpNameCallback)} options options or callback
 * @param {?tmpNameCallback} callback the callback function
 */
function tmpName(options, callback) {
  var
    args = _parseArguments(options, callback),
    opts = args[0],
    cb = args[1],
    tries = !isBlank(opts.name) ? 1 : opts.tries || DEFAULT_TRIES;

  /* istanbul ignore else */
  if (isNaN(tries) || tries < 0)
    return cb(new Error('Invalid tries'));

  /* istanbul ignore else */
  if (opts.template && !opts.template.match(TEMPLATE_PATTERN))
    return cb(new Error('Invalid template provided'));

  (function _getUniqueName() {
    try {
      const name = _generateTmpName(opts);

      // check whether the path exists then retry if needed
      fs.stat(name, function (err) {
        /* istanbul ignore else */
        if (!err) {
          // the name already exists: retry until the budget is exhausted
          /* istanbul ignore else */
          if (tries-- > 0) return _getUniqueName();

          return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name));
        }

        // stat failed, so the name is free
        cb(null, name);
      });
    } catch (err) {
      cb(err);
    }
  }());
}
|
||
|
|
||
|
/**
 * Synchronous version of tmpName.
 *
 * @param {Object} options
 * @returns {string} the generated random name
 * @throws {Error} if the options are invalid or could not generate a filename
 */
function tmpNameSync(options) {
  var
    args = _parseArguments(options),
    opts = args[0],
    tries = !isBlank(opts.name) ? 1 : opts.tries || DEFAULT_TRIES;

  /* istanbul ignore else */
  if (isNaN(tries) || tries < 0)
    throw new Error('Invalid tries');

  /* istanbul ignore else */
  if (opts.template && !opts.template.match(TEMPLATE_PATTERN))
    throw new Error('Invalid template provided');

  do {
    const name = _generateTmpName(opts);
    try {
      // statSync throwing means the path does not exist yet -> usable
      fs.statSync(name);
    } catch (e) {
      return name;
    }
  } while (tries-- > 0);

  throw new Error('Could not get a unique tmp filename, max tries reached');
}

/**
 * Creates and opens a temporary file.
 *
 * @param {(Options|fileCallback)} options the config options or the callback function
 * @param {?fileCallback} callback receives (err, name, fd, removeCallback)
 */
function file(options, callback) {
  var
    args = _parseArguments(options, callback),
    opts = args[0],
    cb = args[1];

  // gets a temporary filename
  tmpName(opts, function _tmpNameCreated(err, name) {
    /* istanbul ignore else */
    if (err) return cb(err);

    // create and open the file
    fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
      /* istanbul ignore else */
      if (err) return cb(err);

      if (opts.discardDescriptor) {
        return fs.close(fd, function _discardCallback(err) {
          /* istanbul ignore else */
          if (err) {
            // Low probability, and the file exists, so this could be
            // ignored. If it isn't we certainly need to unlink the
            // file, and if that fails too its error is more
            // important.
            try {
              fs.unlinkSync(name);
            } catch (e) {
              if (!isENOENT(e)) {
                err = e;
              }
            }
            return cb(err);
          }
          // fd of -1 signals the cleanup callback there is nothing to close
          cb(null, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts));
        });
      }
      /* istanbul ignore else */
      if (opts.detachDescriptor) {
        // caller owns the descriptor; cleanup will not close it
        return cb(null, name, fd, _prepareTmpFileRemoveCallback(name, -1, opts));
      }
      cb(null, name, fd, _prepareTmpFileRemoveCallback(name, fd, opts));
    });
  });
}
|
||
|
|
||
|
/**
 * Synchronous version of file.
 *
 * @param {Options} options
 * @returns {FileSyncObject} object consists of name, fd and removeCallback
 * @throws {Error} if cannot create a file
 */
function fileSync(options) {
  var
    args = _parseArguments(options),
    opts = args[0];

  // with either flag set, the cleanup callback must not try to close the fd
  const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
  const name = tmpNameSync(opts);
  var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
  /* istanbul ignore else */
  if (opts.discardDescriptor) {
    fs.closeSync(fd);
    fd = undefined;
  }

  return {
    name: name,
    fd: fd,
    removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts)
  };
}

/**
 * Creates a temporary directory.
 *
 * @param {(Options|dirCallback)} options the options or the callback function
 * @param {?dirCallback} callback receives (err, name, removeCallback)
 */
function dir(options, callback) {
  var
    args = _parseArguments(options, callback),
    opts = args[0],
    cb = args[1];

  // gets a temporary filename
  tmpName(opts, function _tmpNameCreated(err, name) {
    /* istanbul ignore else */
    if (err) return cb(err);

    // create the directory
    fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
      /* istanbul ignore else */
      if (err) return cb(err);

      cb(null, name, _prepareTmpDirRemoveCallback(name, opts));
    });
  });
}

/**
 * Synchronous version of dir.
 *
 * @param {Options} options
 * @returns {DirSyncObject} object consists of name and removeCallback
 * @throws {Error} if it cannot create a directory
 */
function dirSync(options) {
  var
    args = _parseArguments(options),
    opts = args[0];

  const name = tmpNameSync(opts);
  fs.mkdirSync(name, opts.mode || DIR_MODE);

  return {
    name: name,
    removeCallback: _prepareTmpDirRemoveCallback(name, opts)
  };
}
|
||
|
|
||
|
/**
 * Removes files asynchronously.
 *
 * @param {Object} fdPath an [fd, path] pair; fd < 0 means "nothing to close"
 * @param {Function} next
 * @private
 */
function _removeFileAsync(fdPath, next) {
  const _handler = function (err) {
    if (err && !isENOENT(err)) {
      // reraise any unanticipated error
      return next(err);
    }
    next();
  }

  if (0 <= fdPath[0])
    fs.close(fdPath[0], function (err) {
      // close errors are deliberately ignored; unlinking is what matters
      fs.unlink(fdPath[1], _handler);
    });
  else fs.unlink(fdPath[1], _handler);
}

/**
 * Removes files synchronously.
 *
 * @param {Object} fdPath an [fd, path] pair; fd < 0 means "nothing to close"
 * @private
 */
function _removeFileSync(fdPath) {
  try {
    if (0 <= fdPath[0]) fs.closeSync(fdPath[0]);
  } catch (e) {
    // reraise any unanticipated error
    if (!isEBADF(e) && !isENOENT(e)) throw e;
  } finally {
    // unlink runs even if closing threw
    try {
      fs.unlinkSync(fdPath[1]);
    }
    catch (e) {
      // reraise any unanticipated error
      if (!isENOENT(e)) throw e;
    }
  }
}

/**
 * Prepares the callback for removal of the temporary file.
 *
 * @param {string} name the path of the file
 * @param {number} fd file descriptor
 * @param {Object} opts
 * @returns {fileCallback} the async cleanup callback
 * @private
 */
function _prepareTmpFileRemoveCallback(name, fd, opts) {
  const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name]);
  const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], removeCallbackSync);

  // register the sync variant so it can run from the process-exit handler
  if (!opts.keep) _removeObjects.unshift(removeCallbackSync);

  return removeCallback;
}
|
||
|
|
||
|
/**
 * Simple wrapper for rimraf.
 *
 * @param {string} dirPath
 * @param {Function} next
 * @private
 */
function _rimrafRemoveDirWrapper(dirPath, next) {
  rimraf(dirPath, next);
}

/**
 * Simple wrapper for rimraf.sync, given a callback shape so it is
 * interchangeable with the async wrapper above.
 *
 * @param {string} dirPath
 * @private
 */
function _rimrafRemoveDirSyncWrapper(dirPath, next) {
  try {
    return next(null, rimraf.sync(dirPath));
  } catch (err) {
    return next(err);
  }
}

/**
 * Prepares the callback for removal of the temporary directory.
 *
 * unsafeCleanup selects a recursive (rimraf) removal; otherwise a plain
 * rmdir is used, which fails on non-empty directories.
 *
 * @param {string} name
 * @param {Object} opts
 * @returns {Function} the callback
 * @private
 */
function _prepareTmpDirRemoveCallback(name, opts) {
  const removeFunction = opts.unsafeCleanup ? _rimrafRemoveDirWrapper : fs.rmdir.bind(fs);
  const removeFunctionSync = opts.unsafeCleanup ? _rimrafRemoveDirSyncWrapper : fs.rmdirSync.bind(fs);
  const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name);
  const removeCallback = _prepareRemoveCallback(removeFunction, name, removeCallbackSync);
  if (!opts.keep) _removeObjects.unshift(removeCallbackSync);

  return removeCallback;
}

/**
 * Creates a guarded function wrapping the removeFunction call.
 *
 * The returned callback runs at most once, deregisters itself (or the
 * supplied sync twin) from the exit-time cleanup list, and dispatches on
 * removeFunction's declared arity: single-argument functions are invoked
 * synchronously inside try/catch, others receive `next` as a callback.
 *
 * @param {Function} removeFunction
 * @param {Object} arg
 * @returns {Function}
 * @private
 */
function _prepareRemoveCallback(removeFunction, arg, cleanupCallbackSync) {
  var called = false;

  return function _cleanupCallback(next) {
    next = next || function () {};
    if (!called) {
      // deregister from the exit-time cleanup list
      const toRemove = cleanupCallbackSync || _cleanupCallback;
      const index = _removeObjects.indexOf(toRemove);
      /* istanbul ignore else */
      if (index >= 0) _removeObjects.splice(index, 1);

      called = true;
      // sync?
      if (removeFunction.length === 1) {
        try {
          removeFunction(arg);
          return next(null);
        }
        catch (err) {
          // if no next is provided and since we are
          // in silent cleanup mode on process exit,
          // we will ignore the error
          return next(err);
        }
      } else return removeFunction(arg, next);
    } else return next(new Error('cleanup callback has already been called'));
  };
}
|
||
|
|
||
|
/**
 * The garbage collector.
 *
 * Runs every registered cleanup callback; a no-op unless
 * setGracefulCleanup() was called.
 *
 * @private
 */
function _garbageCollector() {
  /* istanbul ignore else */
  if (!_gracefulCleanup) return;

  // the function being called removes itself from _removeObjects,
  // loop until _removeObjects is empty
  while (_removeObjects.length) {
    try {
      _removeObjects[0]();
    } catch (e) {
      // already removed?
    }
  }
}

/**
 * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows.
 */
function isEBADF(error) {
  return isExpectedError(error, -EBADF, 'EBADF');
}

/**
 * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows.
 */
function isENOENT(error) {
  return isExpectedError(error, -ENOENT, 'ENOENT');
}
|
||
|
|
||
|
/**
 * Helper to determine whether the expected error code matches the actual
 * code and errno, which differ between the supported Node versions:
 *
 * - Node >= 7.0:  error.code {string}, error.errno {string|number} (numbers negated)
 * - Node >= 4.0:  error.code {string}, error.errno {number} (negated)
 * - Node >= 0.10: error.code {number} (negated), error.errno n/a
 *
 * @param {Error} error the error to inspect
 * @param {number} code the negated numeric errno expected on old Node
 * @param {string} errno the string code expected on modern Node
 * @returns {boolean} true when error.code matches either form
 */
function isExpectedError(error, code, errno) {
  const actual = error.code;
  return actual === code || actual === errno;
}
|
||
|
|
||
|
/**
 * Helper which determines whether a string s is blank, that is undefined,
 * null, empty, or whitespace only.
 *
 * @private
 * @param {string} s
 * @returns {Boolean} true whether the string s is blank, false otherwise
 */
function isBlank(s) {
  if (s === null || s === undefined) {
    return true;
  }
  return s.trim().length === 0;
}
|
||
|
|
||
|
/**
 * Sets the graceful cleanup.
 *
 * Once enabled, temporary entries that were not flagged `keep` are removed
 * by the exit/SIGINT handlers installed below.
 */
function setGracefulCleanup() {
  _gracefulCleanup = true;
}

/**
 * Returns the currently configured tmp dir from os.tmpdir().
 *
 * @private
 * @returns {string} the currently configured tmp dir
 */
function _getTmpDir() {
  return os.tmpdir();
}
|
||
|
|
||
|
/**
 * If there are multiple different versions of tmp in place, make sure that
 * we recognize the old listeners so they can be deduplicated.
 *
 * A legacy listener is identified by its function name and by the
 * `_garbageCollector();` call appearing in its source text.
 *
 * @param {Function} listener
 * @private
 * @returns {Boolean} true whether listener is a legacy listener
 */
function _is_legacy_listener(listener) {
  const hasLegacyName =
    listener.name === '_exit' || listener.name === '_uncaughtExceptionThrown';
  return hasLegacyName && listener.toString().includes('_garbageCollector();');
}
|
||
|
|
||
|
/**
 * Safely install SIGINT listener.
 *
 * NOTE: this will only work on OSX and Linux.
 *
 * @private
 */
function _safely_install_sigint_listener() {

  const listeners = process.listeners(SIGINT);
  const existingListeners = [];
  // collect and detach SIGINT listeners installed by other tmp copies
  for (let i = 0, length = listeners.length; i < length; i++) {
    const lstnr = listeners[i];
    /* istanbul ignore else */
    if (lstnr.name === '_tmp$sigint_listener') {
      existingListeners.push(lstnr);
      process.removeListener(SIGINT, lstnr);
    }
  }
  process.on(SIGINT, function _tmp$sigint_listener(doExit) {
    for (let i = 0, length = existingListeners.length; i < length; i++) {
      // let the existing listener do the garbage collection (e.g. jest sandbox)
      try {
        existingListeners[i](false);
      } catch (err) {
        // ignore
      }
    }
    try {
      // force the garbage collector even it is called again in the exit listener
      _garbageCollector();
    } finally {
      if (!!doExit) {
        process.exit(0);
      }
    }
  });
}

/**
 * Safely install process exit listener.
 *
 * @private
 */
function _safely_install_exit_listener() {
  const listeners = process.listeners(EXIT);

  // collect any existing listeners
  const existingListeners = [];
  for (let i = 0, length = listeners.length; i < length; i++) {
    const lstnr = listeners[i];
    /* istanbul ignore else */
    // TODO: remove support for legacy listeners once release 1.0.0 is out
    if (lstnr.name === '_tmp$safe_listener' || _is_legacy_listener(lstnr)) {
      // we must forget about the uncaughtException listener, hopefully it is ours
      if (lstnr.name !== '_uncaughtExceptionThrown') {
        existingListeners.push(lstnr);
      }
      process.removeListener(EXIT, lstnr);
    }
  }
  // TODO: what was the data parameter good for?
  process.addListener(EXIT, function _tmp$safe_listener(data) {
    for (let i = 0, length = existingListeners.length; i < length; i++) {
      // let the existing listener do the garbage collection (e.g. jest sandbox)
      try {
        existingListeners[i](data);
      } catch (err) {
        // ignore
      }
    }
    _garbageCollector();
  });
}

// install the cleanup hooks as soon as the module loads
_safely_install_exit_listener();
_safely_install_sigint_listener();
|
||
|
|
||
|
/**
|
||
|
* Configuration options.
|
||
|
*
|
||
|
* @typedef {Object} Options
|
||
|
* @property {?number} tries the number of tries before give up the name generation
|
||
|
* @property {?string} template the "mkstemp" like filename template
|
||
|
* @property {?string} name fix name
|
||
|
* @property {?string} dir the tmp directory to use
|
||
|
* @property {?string} prefix prefix for the generated name
|
||
|
* @property {?string} postfix postfix for the generated name
|
||
|
* @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @typedef {Object} FileSyncObject
|
||
|
* @property {string} name the name of the file
|
||
|
* @property {string} fd the file descriptor
|
||
|
* @property {fileCallback} removeCallback the callback function to remove the file
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @typedef {Object} DirSyncObject
|
||
|
* @property {string} name the name of the directory
|
||
|
* @property {fileCallback} removeCallback the callback function to remove the directory
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @callback tmpNameCallback
|
||
|
* @param {?Error} err the error object if anything goes wrong
|
||
|
* @param {string} name the temporary file name
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @callback fileCallback
|
||
|
* @param {?Error} err the error object if anything goes wrong
|
||
|
* @param {string} name the temporary file name
|
||
|
* @param {number} fd the file descriptor
|
||
|
* @param {cleanupCallback} fn the cleanup callback function
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* @callback dirCallback
|
||
|
* @param {?Error} err the error object if anything goes wrong
|
||
|
* @param {string} name the temporary file name
|
||
|
* @param {cleanupCallback} fn the cleanup callback function
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* Removes the temporary created file or directory.
|
||
|
*
|
||
|
* @callback cleanupCallback
|
||
|
* @param {simpleCallback} [next] function to call after entry was removed
|
||
|
*/
|
||
|
|
||
|
/**
|
||
|
* Callback function for function composition.
|
||
|
* @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
|
||
|
*
|
||
|
* @callback simpleCallback
|
||
|
*/
|
||
|
|
||
|
// exporting all the needed methods

// evaluate os.tmpdir() lazily, mainly for simplifying testing but it also will
// allow users to reconfigure the temporary directory
Object.defineProperty(module.exports, 'tmpdir', {
  enumerable: true,
  configurable: false,
  get: function () {
    return _getTmpDir();
  }
});

// directory creation (async / sync)
module.exports.dir = dir;
module.exports.dirSync = dirSync;

// file creation (async / sync)
module.exports.file = file;
module.exports.fileSync = fileSync;

// name generation only, no filesystem entry is created (async / sync)
module.exports.tmpName = tmpName;
module.exports.tmpNameSync = tmpNameSync;

// opt-in removal of leftover entries on process exit
module.exports.setGracefulCleanup = setGracefulCleanup;


/***/ }),
|
||
|
|
||
|
/***/ 413:
/***/ (function(module, __unusedexports, __webpack_require__) {

// Thin alias: re-exports bundled module 141 under a second webpack id.
module.exports = __webpack_require__(141);


/***/ }),
|
||
|
|
||
|
/***/ 417:
/***/ (function(module) {

// Node's built-in crypto module, surfaced under a webpack module id.
module.exports = require("crypto");

/***/ }),
|
||
|
|
||
|
/***/ 431:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

// TypeScript-emitted namespace-import helper (copies own enumerable props).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(__webpack_require__(87));
/**
 * Commands
 *
 * Command Format:
 * ::name key=value,key=value::message
 *
 * Examples:
 * ::warning::This is the message
 * ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    // the runner parses workflow commands from stdout, one command per line
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
// Shorthand for a command carrying no properties.
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
// Serializes one workflow command into the ::name k=v,k=v::message wire
// format, escaping property values and the message body.
class Command {
    constructor(command, properties, message) {
        if (!command) {
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    // falsy property values are skipped entirely
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
|
||
|
// Percent-encodes the characters that would corrupt a workflow command's
// message portion ('%', CR, LF); null/undefined input yields ''.
function escapeData(s) {
    const replacements = { '%': '%25', '\r': '%0D', '\n': '%0A' };
    return (s || '').replace(/[%\r\n]/g, (ch) => replacements[ch]);
}
|
||
|
// Percent-encodes the characters that would corrupt a workflow command's
// property portion ('%', CR, LF, plus the ':' and ',' delimiters);
// null/undefined input yields ''.
function escapeProperty(s) {
    const replacements = {
        '%': '%25',
        '\r': '%0D',
        '\n': '%0A',
        ':': '%3A',
        ',': '%2C'
    };
    return (s || '').replace(/[%\r\n:,]/g, (ch) => replacements[ch]);
}
|
||
|
//# sourceMappingURL=command.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 452:
/***/ (function(__unusedmodule, exports, __webpack_require__) {

"use strict";

Object.defineProperty(exports, "__esModule", { value: true });
const utils_1 = __webpack_require__(870);
/**
 * Used for managing http clients during either upload or download
 */
class HttpManager {
    constructor(clientCount) {
        if (clientCount < 1) {
            throw new Error('There must be at least one client');
        }
        // NOTE(review): Array.fill reuses the single client instance created
        // here, so every slot initially shares one client -- confirm this is
        // intended; disposeAndReplaceClient assigns distinct instances later.
        this.clients = new Array(clientCount).fill(utils_1.createHttpClient());
    }
    getClient(index) {
        return this.clients[index];
    }
    // client disposal is necessary if a keep-alive connection is used to properly close the connection
    // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292
    disposeAndReplaceClient(index) {
        this.clients[index].dispose();
        this.clients[index] = utils_1.createHttpClient();
    }
    disposeAndReplaceAllClients() {
        for (const [index] of this.clients.entries()) {
            this.disposeAndReplaceClient(index);
        }
    }
}
exports.HttpManager = HttpManager;
//# sourceMappingURL=http-manager.js.map


/***/ }),
|
||
|
|
||
|
/***/ 470:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
// TypeScript-emitted helper: generator-driven down-level async/await.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted namespace-import helper (copies own enumerable props).
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const command_1 = __webpack_require__(431);
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
/**
 * The code to exit an action
 */
var ExitCode;
// TypeScript enum emit: builds a bidirectional name<->value map on ExitCode.
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
||
|
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job
 * @param name the name of the variable to set
 * @param val the value of the variable
 */
function exportVariable(name, val) {
    // Visible to the current process immediately...
    process.env[name] = val;
    // ...and persisted for later job steps via the runner's 'set-env' command.
    // NOTE(review): 'set-env' is the legacy workflow-command mechanism --
    // confirm the target runner still honors it before relying on this bundle.
    command_1.issueCommand('set-env', { name }, val);
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath the path to prepend
 */
function addPath(inputPath) {
    // Ask the runner to persist the addition for subsequent steps...
    command_1.issueCommand('add-path', {}, inputPath);
    // ...and update PATH for the current process right away.
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
|
||
|
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * Inputs are surfaced by the runner as environment variables named
 * INPUT_<NAME> with spaces replaced by underscores and upper-cased.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const rawValue = process.env[envKey] || '';
    if (options && options.required && !rawValue) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return rawValue.trim();
}
|
||
|
exports.getInput = getInput;
|
||
|
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store
 */
function setOutput(name, value) {
    // Delegates to the runner's 'set-output' workflow command.
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
|
||
|
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    // Mark the process for a failing exit without exiting immediately,
    // so any remaining cleanup code still runs.
    process.exitCode = ExitCode.Failure;
    // Also surface the message as an error annotation.
    error(message);
}
exports.setFailed = setFailed;
|
||
|
//-----------------------------------------------------------------------
|
||
|
// Logging Commands
|
||
|
//-----------------------------------------------------------------------
|
||
|
/**
 * Gets whether Actions Step Debug is on or not
 * (the runner sets RUNNER_DEBUG to '1' when step debugging is enabled).
 */
function isDebug() {
    const debugFlag = process.env['RUNNER_DEBUG'];
    return debugFlag === '1';
}
|
||
|
exports.isDebug = isDebug;
|
||
|
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue (annotation)
 * @param message error issue message
 */
function error(message) {
    command_1.issue('error', message);
}
exports.error = error;
/**
 * Adds a warning issue (annotation)
 * @param message warning issue message
 */
function warning(message) {
    command_1.issue('warning', message);
}
exports.warning = warning;
|
||
|
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
    const line = message + os.EOL;
    process.stdout.write(line);
}
|
||
|
exports.info = info;
|
||
|
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
|
||
|
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even when fn() throws.
            endGroup();
        }
        return result;
    });
}
exports.group = group;
|
||
|
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store
 */
function saveState(name, value) {
    // The runner exposes saved state to the post step as STATE_<name>
    // (see getState below).
    command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
|
||
|
/**
 * Gets the value of an state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string (empty string when the state was never saved)
 */
function getState(name) {
    const stored = process.env[`STATE_${name}`];
    return stored || '';
}
|
||
|
exports.getState = getState;
|
||
|
//# sourceMappingURL=core.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 532:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
// TypeScript emit helper (duplicate of the copy in module 470): wraps a
// CommonJS export object so it can be consumed like an ES namespace import.
// Generated code -- do not hand-edit.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const path = __importStar(__webpack_require__(622));
|
||
|
/**
 * Creates a specification for a set of files that will be downloaded
 * @param artifactName the name of the artifact
 * @param artifactEntries a set of container entries that describe that files that make up an artifact
 * @param downloadPath the path where the artifact will be downloaded to
 * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
 */
function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) {
    // Unique parent directories that must exist before files are written.
    const parentDirectories = new Set();
    const rootDownloadLocation = includeRootDirectory
        ? path.join(downloadPath, artifactName)
        : downloadPath;
    const specifications = {
        rootDownloadLocation,
        directoryStructure: [],
        filesToDownload: []
    };
    for (const entry of artifactEntries) {
        // Ignore artifacts in the container that don't begin with the same name.
        const belongsToArtifact = entry.path.startsWith(`${artifactName}/`) ||
            entry.path.startsWith(`${artifactName}\\`);
        if (!belongsToArtifact) {
            continue;
        }
        // Normalize all separators to the local OS.
        const normalizedEntryPath = path.normalize(entry.path);
        // entry.path always starts with the artifact name; when the root
        // directory is not wanted, strip the name from the front.
        const relativePath = includeRootDirectory
            ? normalizedEntryPath
            : normalizedEntryPath.replace(artifactName, '');
        const targetPath = path.join(downloadPath, relativePath);
        // Case insensitive folder structure maintained in the backend, not every
        // folder is created so the 'folder' itemType cannot be relied upon.
        // The file must be used to determine the directory structure.
        if (entry.itemType === 'file') {
            parentDirectories.add(path.dirname(targetPath));
            specifications.filesToDownload.push({
                sourceLocation: entry.contentLocation,
                targetPath
            });
        }
    }
    specifications.directoryStructure = [...parentDirectories];
    return specifications;
}
|
||
|
exports.getDownloadSpecification = getDownloadSpecification;
|
||
|
//# sourceMappingURL=download-specification.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 539:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const url = __webpack_require__(835);
|
||
|
const http = __webpack_require__(605);
|
||
|
const https = __webpack_require__(211);
|
||
|
const pm = __webpack_require__(950);
|
||
|
// Lazily-loaded 'tunnel' module; only required when a proxy is in use (see _getAgent).
let tunnel;
// HTTP status codes used throughout the client.
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
// Well-known request header names (lower-cased, matching _mergeHeaders).
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
// Well-known MIME types used by the *Json convenience methods.
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
||
|
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
function getProxyUrl(serverUrl) {
    let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
    // Empty string when no proxy applies to this URL.
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
|
||
|
// 3xx codes that trigger automatic redirect following in request().
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
// 5xx codes considered transient and therefore retryable.
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
// Only idempotent verbs are ever retried (see request()).
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Backoff delay = ExponentialBackoffTimeSlice * 2^retry (ms), retry capped at the ceiling.
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
|
||
|
/**
 * Wraps an incoming response message (a readable stream) and exposes a
 * helper to read the full body as a string.
 */
class HttpClientResponse {
    /**
     * @param message the underlying response message/stream
     */
    constructor(message) {
        this.message = message;
    }
    /**
     * Reads the entire response stream and resolves with its contents as a
     * string (default Buffer#toString decoding).
     *
     * Fix: the previous implementation registered no 'error' handler, so the
     * returned promise never settled when the stream failed (leaking the
     * pending promise and keeping the socket open); it also used the
     * async-executor anti-pattern. The promise now rejects on stream error.
     */
    readBody() {
        return new Promise((resolve, reject) => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
            // Reject instead of hanging forever when the stream fails.
            this.message.on('error', (err) => {
                reject(err);
            });
        });
    }
}
|
||
|
exports.HttpClientResponse = HttpClientResponse;
|
||
|
/**
 * Determines whether a request URL uses the https: scheme.
 * @param requestUrl fully-qualified URL to inspect
 * @returns true when the parsed protocol is exactly 'https:'
 */
function isHttps(requestUrl) {
    const { protocol } = url.parse(requestUrl);
    return protocol === 'https:';
}
|
||
|
exports.isHttps = isHttps;
|
||
|
class HttpClient {
|
||
|
/**
 * @param userAgent value sent as the user-agent header on every request (may be undefined)
 * @param handlers optional authentication handlers consulted on 401 responses
 * @param requestOptions optional client-wide settings (SSL, redirects, retries, keep-alive, socket timeout, headers)
 */
constructor(userAgent, handlers, requestOptions) {
    // Conservative defaults; individual fields are overridden below when the
    // caller supplies requestOptions.
    this._ignoreSslError = false;
    this._allowRedirects = true;
    this._allowRedirectDowngrade = false;
    this._maxRedirects = 50;
    this._allowRetries = false;
    this._maxRetries = 1;
    this._keepAlive = false;
    this._disposed = false;
    this.userAgent = userAgent;
    this.handlers = handlers || [];
    this.requestOptions = requestOptions;
    if (requestOptions) {
        if (requestOptions.ignoreSslError != null) {
            this._ignoreSslError = requestOptions.ignoreSslError;
        }
        this._socketTimeout = requestOptions.socketTimeout;
        if (requestOptions.allowRedirects != null) {
            this._allowRedirects = requestOptions.allowRedirects;
        }
        if (requestOptions.allowRedirectDowngrade != null) {
            this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
        }
        if (requestOptions.maxRedirects != null) {
            // Negative values are clamped to 0 (no redirects).
            this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
        }
        if (requestOptions.keepAlive != null) {
            this._keepAlive = requestOptions.keepAlive;
        }
        if (requestOptions.allowRetries != null) {
            this._allowRetries = requestOptions.allowRetries;
        }
        if (requestOptions.maxRetries != null) {
            this._maxRetries = requestOptions.maxRetries;
        }
    }
}
|
||
|
// Convenience wrappers: each verb method delegates to request() with the
// corresponding HTTP method string; the *Json variants additionally set
// JSON accept/content-type headers and deserialize via _processResponse().
options(requestUrl, additionalHeaders) {
    return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
}
get(requestUrl, additionalHeaders) {
    return this.request('GET', requestUrl, null, additionalHeaders || {});
}
del(requestUrl, additionalHeaders) {
    return this.request('DELETE', requestUrl, null, additionalHeaders || {});
}
post(requestUrl, data, additionalHeaders) {
    return this.request('POST', requestUrl, data, additionalHeaders || {});
}
patch(requestUrl, data, additionalHeaders) {
    return this.request('PATCH', requestUrl, data, additionalHeaders || {});
}
put(requestUrl, data, additionalHeaders) {
    return this.request('PUT', requestUrl, data, additionalHeaders || {});
}
head(requestUrl, additionalHeaders) {
    return this.request('HEAD', requestUrl, null, additionalHeaders || {});
}
// Sends a readable stream as the request body.
sendStream(verb, requestUrl, stream, additionalHeaders) {
    return this.request(verb, requestUrl, stream, additionalHeaders);
}
/**
 * Gets a typed object from an endpoint
 * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
 */
async getJson(requestUrl, additionalHeaders = {}) {
    additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
    let res = await this.get(requestUrl, additionalHeaders);
    return this._processResponse(res, this.requestOptions);
}
async postJson(requestUrl, obj, additionalHeaders = {}) {
    let data = JSON.stringify(obj, null, 2);
    additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
    additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
    let res = await this.post(requestUrl, data, additionalHeaders);
    return this._processResponse(res, this.requestOptions);
}
async putJson(requestUrl, obj, additionalHeaders = {}) {
    let data = JSON.stringify(obj, null, 2);
    additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
    additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
    let res = await this.put(requestUrl, data, additionalHeaders);
    return this._processResponse(res, this.requestOptions);
}
async patchJson(requestUrl, obj, additionalHeaders = {}) {
    let data = JSON.stringify(obj, null, 2);
    additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
    additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
    let res = await this.patch(requestUrl, data, additionalHeaders);
    return this._processResponse(res, this.requestOptions);
}
|
||
|
/**
 * Makes a raw http request.
 * All other methods such as get, post, patch, and request ultimately call this.
 * Prefer get, del, post and patch
 *
 * Handles, in order: 401 hand-off to a registered authentication handler,
 * automatic 3xx redirect following (with an HTTPS->HTTP downgrade guard),
 * and retries of idempotent verbs on transient 5xx codes with backoff.
 */
async request(verb, requestUrl, data, headers) {
    if (this._disposed) {
        throw new Error("Client has already been disposed.");
    }
    let parsedUrl = url.parse(requestUrl);
    let info = this._prepareRequest(verb, parsedUrl, headers);
    // Only perform retries on reads since writes may not be idempotent.
    let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
    let numTries = 0;
    let response;
    while (numTries < maxTries) {
        response = await this.requestRaw(info, data);
        // Check if it's an authentication challenge
        if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
            let authenticationHandler;
            for (let i = 0; i < this.handlers.length; i++) {
                if (this.handlers[i].canHandleAuthentication(response)) {
                    // First matching handler wins.
                    authenticationHandler = this.handlers[i];
                    break;
                }
            }
            if (authenticationHandler) {
                return authenticationHandler.handleAuthentication(this, info, data);
            }
            else {
                // We have received an unauthorized response but have no handlers to handle it.
                // Let the response return to the caller.
                return response;
            }
        }
        let redirectsRemaining = this._maxRedirects;
        while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
            && this._allowRedirects
            && redirectsRemaining > 0) {
            const redirectUrl = response.message.headers["location"];
            if (!redirectUrl) {
                // if there's no location to redirect to, we won't
                break;
            }
            let parsedRedirectUrl = url.parse(redirectUrl);
            if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
                throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
            }
            // we need to finish reading the response before reassigning response
            // which will leak the open socket.
            await response.readBody();
            // let's make the request with the new redirectUrl
            info = this._prepareRequest(verb, parsedRedirectUrl, headers);
            response = await this.requestRaw(info, data);
            redirectsRemaining--;
        }
        if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
            // If not a retry code, return immediately instead of retrying
            return response;
        }
        numTries += 1;
        if (numTries < maxTries) {
            // Drain the body (release the socket) before the backoff + retry.
            await response.readBody();
            await this._performExponentialBackoff(numTries);
        }
    }
    return response;
}
|
||
|
/**
 * Needs to be called if keepAlive is set to true in request options.
 */
dispose() {
    if (this._agent) {
        // Close any sockets held open by the keep-alive agent.
        this._agent.destroy();
    }
    // Subsequent request() calls will throw.
    this._disposed = true;
}
/**
 * Raw request.
 * @param info request info produced by _prepareRequest
 * @param data string body, readable stream, or null
 */
requestRaw(info, data) {
    return new Promise((resolve, reject) => {
        let callbackForResult = function (err, res) {
            if (err) {
                reject(err);
            }
            // NOTE(review): no early return after reject() above -- resolve()
            // still runs, which is a harmless no-op once the promise has
            // settled, but an explicit return would be clearer.
            resolve(res);
        };
        this.requestRawWithCallback(info, data, callbackForResult);
    });
}
|
||
|
/**
 * Raw request with callback.
 * @param info request info produced by _prepareRequest
 * @param data string body, readable stream, or null
 * @param onResult invoked at most once with (err, response)
 */
requestRawWithCallback(info, data, onResult) {
    let socket;
    if (typeof (data) === 'string') {
        info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
    }
    // Guard so onResult fires at most once even if multiple events occur
    // (e.g. a timeout followed by the resulting socket error).
    let callbackCalled = false;
    let handleResult = (err, res) => {
        if (!callbackCalled) {
            callbackCalled = true;
            onResult(err, res);
        }
    };
    let req = info.httpModule.request(info.options, (msg) => {
        let res = new HttpClientResponse(msg);
        handleResult(null, res);
    });
    req.on('socket', (sock) => {
        // Kept so the timeout handler below can close the connection.
        socket = sock;
    });
    // If we ever get disconnected, we want the socket to timeout eventually
    req.setTimeout(this._socketTimeout || 3 * 60000, () => {
        if (socket) {
            socket.end();
        }
        handleResult(new Error('Request timeout: ' + info.options.path), null);
    });
    req.on('error', function (err) {
        // err has statusCode property
        // res should have headers
        handleResult(err, null);
    });
    if (data && typeof (data) === 'string') {
        req.write(data, 'utf8');
    }
    if (data && typeof (data) !== 'string') {
        // Stream body: end the request when the source stream closes.
        data.on('close', function () {
            req.end();
        });
        data.pipe(req);
    }
    else {
        req.end();
    }
}
|
||
|
/**
 * Gets an http agent. This function is useful when you need an http agent that handles
 * routing through a proxy server - depending upon the url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
getAgent(serverUrl) {
    let parsedUrl = url.parse(serverUrl);
    return this._getAgent(parsedUrl);
}
// Builds the { parsedUrl, httpModule, options } bundle consumed by requestRaw.
_prepareRequest(method, requestUrl, headers) {
    const info = {};
    info.parsedUrl = requestUrl;
    const usingSsl = info.parsedUrl.protocol === 'https:';
    info.httpModule = usingSsl ? https : http;
    const defaultPort = usingSsl ? 443 : 80;
    info.options = {};
    info.options.host = info.parsedUrl.hostname;
    info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
    info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
    info.options.method = method;
    info.options.headers = this._mergeHeaders(headers);
    if (this.userAgent != null) {
        info.options.headers["user-agent"] = this.userAgent;
    }
    info.options.agent = this._getAgent(info.parsedUrl);
    // gives handlers an opportunity to participate
    if (this.handlers) {
        this.handlers.forEach((handler) => {
            handler.prepareRequest(info.options);
        });
    }
    return info;
}
// Merges per-request headers over client-default headers; all keys are
// lower-cased so later lookups are case-insensitive. Per-request keys win.
_mergeHeaders(headers) {
    const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
    if (this.requestOptions && this.requestOptions.headers) {
        return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
    }
    return lowercaseKeys(headers || {});
}
// Resolution order: per-call header, then client-default header, then _default.
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
    const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
    let clientHeader;
    if (this.requestOptions && this.requestOptions.headers) {
        clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
    }
    return additionalHeaders[header] || clientHeader || _default;
}
|
||
|
// Selects (and caches, when keepAlive) the http/https agent for a URL:
// a tunneling proxy agent when proxy env vars apply, a private keep-alive
// agent when requested, otherwise the module's global agent.
_getAgent(parsedUrl) {
    let agent;
    let proxyUrl = pm.getProxyUrl(parsedUrl);
    let useProxy = proxyUrl && proxyUrl.hostname;
    if (this._keepAlive && useProxy) {
        agent = this._proxyAgent;
    }
    if (this._keepAlive && !useProxy) {
        agent = this._agent;
    }
    // if agent is already assigned use that agent.
    if (!!agent) {
        return agent;
    }
    const usingSsl = parsedUrl.protocol === 'https:';
    let maxSockets = 100;
    if (!!this.requestOptions) {
        maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
    }
    if (useProxy) {
        // If using proxy, need tunnel -- loaded lazily so non-proxy users
        // never pay for it.
        if (!tunnel) {
            tunnel = __webpack_require__(413);
        }
        const agentOptions = {
            maxSockets: maxSockets,
            keepAlive: this._keepAlive,
            proxy: {
                proxyAuth: proxyUrl.auth,
                host: proxyUrl.hostname,
                port: proxyUrl.port
            },
        };
        let tunnelAgent;
        const overHttps = proxyUrl.protocol === 'https:';
        // Pick the tunnel flavor from the target scheme x proxy scheme.
        if (usingSsl) {
            tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
        }
        else {
            tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
        }
        agent = tunnelAgent(agentOptions);
        this._proxyAgent = agent;
    }
    // if reusing agent across request and tunneling agent isn't assigned create a new agent
    if (this._keepAlive && !agent) {
        const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
        agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
        this._agent = agent;
    }
    // if not using private agent and tunnel agent isn't setup then use global agent
    if (!agent) {
        agent = usingSsl ? https.globalAgent : http.globalAgent;
    }
    if (usingSsl && this._ignoreSslError) {
        // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
        // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
        // we have to cast it to any and change it directly
        agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
    }
    return agent;
}
|
||
|
// Resolves after ExponentialBackoffTimeSlice * 2^retryNumber ms, with
// retryNumber capped at ExponentialBackoffCeiling.
_performExponentialBackoff(retryNumber) {
    retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
    const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
    return new Promise(resolve => setTimeout(() => resolve(), ms));
}
// JSON.parse reviver: converts any string value that parses as a valid Date
// into a Date object; all other values pass through unchanged.
static dateTimeDeserializer(key, value) {
    if (typeof value === 'string') {
        let a = new Date(value);
        if (!isNaN(a.valueOf())) {
            return a;
        }
    }
    return value;
}
|
||
|
// Reads and JSON-parses the response body, resolving with
// { statusCode, result, headers }; rejects with an Error (carrying
// statusCode and, when available, the parsed body) for codes > 299.
async _processResponse(res, options) {
    return new Promise(async (resolve, reject) => {
        const statusCode = res.message.statusCode;
        const response = {
            statusCode: statusCode,
            result: null,
            headers: {}
        };
        // not found leads to null obj returned
        if (statusCode == HttpCodes.NotFound) {
            resolve(response);
        }
        // NOTE(review): there is no return after the resolve() above, so the
        // body is still read and the >299 branch below still runs for 404s.
        // The later resolve/reject calls are no-ops on a settled promise, but
        // this looks unintentional -- confirm before changing.
        let obj;
        let contents;
        // get the result from the body
        try {
            contents = await res.readBody();
            if (contents && contents.length > 0) {
                if (options && options.deserializeDates) {
                    // Opt-in reviver turns date-like strings into Date objects.
                    obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                }
                else {
                    obj = JSON.parse(contents);
                }
                response.result = obj;
            }
            response.headers = res.message.headers;
        }
        catch (err) {
            // Invalid resource (contents not json); leaving result obj null
        }
        // note that 3xx redirects are handled by the http layer.
        if (statusCode > 299) {
            let msg;
            // if exception/error in body, attempt to get better error
            if (obj && obj.message) {
                msg = obj.message;
            }
            else if (contents && contents.length > 0) {
                // it may be the case that the exception is in the body message as string
                msg = contents;
            }
            else {
                msg = "Failed request: (" + statusCode + ")";
            }
            let err = new Error(msg);
            // attach statusCode and body obj (if available) to the error object
            err['statusCode'] = statusCode;
            if (response.result) {
                err['result'] = response.result;
            }
            reject(err);
        }
        else {
            resolve(response);
        }
    });
}
|
||
|
}
|
||
|
exports.HttpClient = HttpClient;
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 569:
|
||
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
||
|
module.exports = rimraf
|
||
|
rimraf.sync = rimrafSync
|
||
|
|
||
|
var assert = __webpack_require__(357)
|
||
|
var path = __webpack_require__(622)
|
||
|
var fs = __webpack_require__(747)
|
||
|
var glob = __webpack_require__(120)
|
||
|
// 0666 (rw-rw-rw-) file mode, spelled via parseInt for engines without
// modern octal literals.
var _0666 = parseInt('666', 8)

var defaultGlobOpts = {
  nosort: true,
  silent: true
}

// for EMFILE handling: grows while EMFILE errors persist, reset on success.
var timeout = 0

var isWindows = (process.platform === "win32")
|
||
|
|
||
|
// Fills in option defaults in place: fs method overrides (async and Sync
// variants), retry limits, and glob behavior.
function defaults (options) {
  var methods = [
    'unlink',
    'chmod',
    'stat',
    'lstat',
    'rmdir',
    'readdir'
  ]
  methods.forEach(function(m) {
    // Allow callers (e.g. graceful-fs users) to inject their own fs methods;
    // fall back to the real fs implementation.
    options[m] = options[m] || fs[m]
    m = m + 'Sync'
    options[m] = options[m] || fs[m]
  })

  options.maxBusyTries = options.maxBusyTries || 3
  options.emfileWait = options.emfileWait || 1000
  if (options.glob === false) {
    options.disableGlob = true
  }
  options.disableGlob = options.disableGlob || false
  options.glob = options.glob || defaultGlobOpts
}
|
||
|
|
||
|
// Asynchronously removes path p (optionally a glob pattern), retrying on
// EBUSY/ENOTEMPTY/EPERM (up to maxBusyTries) and EMFILE (up to emfileWait);
// cb receives the first error encountered, if any.
function rimraf (p, options, cb) {
  if (typeof options === 'function') {
    // options omitted: shift arguments.
    cb = options
    options = {}
  }

  assert(p, 'rimraf: missing path')
  assert.equal(typeof p, 'string', 'rimraf: path should be a string')
  assert.equal(typeof cb, 'function', 'rimraf: callback function required')
  assert(options, 'rimraf: invalid options argument provided')
  assert.equal(typeof options, 'object', 'rimraf: options should be object')

  defaults(options)

  var busyTries = 0
  var errState = null
  var n = 0

  if (options.disableGlob || !glob.hasMagic(p))
    return afterGlob(null, [p])

  options.lstat(p, function (er, stat) {
    // If the path exists as-is, skip glob expansion entirely.
    if (!er)
      return afterGlob(null, [p])

    glob(p, options.glob, afterGlob)
  })

  // Joins the parallel deletions: remembers the first error and invokes cb
  // once all n entries have completed.
  function next (er) {
    errState = errState || er
    if (--n === 0)
      cb(errState)
  }

  function afterGlob (er, results) {
    if (er)
      return cb(er)

    n = results.length
    if (n === 0)
      return cb()

    results.forEach(function (p) {
      rimraf_(p, options, function CB (er) {
        if (er) {
          if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
              busyTries < options.maxBusyTries) {
            busyTries ++
            // Linearly increasing delay between busy retries.
            var time = busyTries * 100
            // try again, with the same exact callback as this one.
            return setTimeout(function () {
              rimraf_(p, options, CB)
            }, time)
          }

          // this one won't happen if graceful-fs is used.
          if (er.code === "EMFILE" && timeout < options.emfileWait) {
            return setTimeout(function () {
              rimraf_(p, options, CB)
            }, timeout ++)
          }

          // already gone
          if (er.code === "ENOENT") er = null
        }

        timeout = 0
        next(er)
      })
    })
  }
}
|
||
|
|
||
|
// Two possible strategies.
|
||
|
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
|
||
|
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
|
||
|
//
|
||
|
// Both result in an extra syscall when you guess wrong. However, there
|
||
|
// are likely far more normal files in the world than directories. This
|
||
|
// is based on the assumption that a the average number of files per
|
||
|
// directory is >= 1.
|
||
|
//
|
||
|
// If anyone ever complains about this, then I guess the strategy could
|
||
|
// be made configurable somehow. But until then, YAGNI.
|
||
|
// Two possible strategies.
// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong. However, there
// are likely far more normal files in the world than directories, so we
// guess "file" first. Invokes cb exactly once with (error | null).
function rimraf_ (p, options, cb) {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  // sunos lets the root user unlink directories, which is... weird.
  // so we have to lstat here and make sure it's not a dir.
  options.lstat(p, function (er, st) {
    if (er && er.code === "ENOENT")
      return cb(null)

    // Windows can EPERM on stat. Life is suffering.
    // BUGFIX: must `return` here — fixWinEPERM invokes cb itself, and
    // falling through would also run options.unlink below, firing the
    // callback twice and corrupting the caller's completion counter.
    if (er && er.code === "EPERM" && isWindows)
      return fixWinEPERM(p, options, er, cb)

    if (st && st.isDirectory())
      return rmdir(p, options, er, cb)

    options.unlink(p, function (er) {
      if (er) {
        if (er.code === "ENOENT")
          return cb(null)
        if (er.code === "EPERM")
          return (isWindows)
            ? fixWinEPERM(p, options, er, cb)
            : rmdir(p, options, er, cb)
        if (er.code === "EISDIR")
          return rmdir(p, options, er, cb)
      }
      return cb(er)
    })
  })
}
|
||
|
|
||
|
// On Windows an EPERM often just means the entry is read-only. Chmod it
// to 0666, re-stat, and retry the delete. ENOENT at any step means the
// entry is already gone (treated as success); any other failure reports
// the ORIGINAL error `er`.
function fixWinEPERM (p, options, er, cb) {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')
  if (er)
    assert(er instanceof Error)

  options.chmod(p, _0666, function (chmodErr) {
    if (chmodErr) {
      // Gone already? Then the delete effectively succeeded.
      cb(chmodErr.code === "ENOENT" ? null : er)
      return
    }
    options.stat(p, function (statErr, stats) {
      if (statErr) {
        cb(statErr.code === "ENOENT" ? null : er)
      } else if (stats.isDirectory()) {
        rmdir(p, options, er, cb)
      } else {
        options.unlink(p, cb)
      }
    })
  })
}
|
||
|
|
||
|
// Synchronous twin of fixWinEPERM: chmod the entry writable (0666),
// re-stat, then rmdir/unlink. ENOENT anywhere means it is already gone,
// so just return; other failures re-throw the ORIGINAL error `er`.
function fixWinEPERMSync (p, options, er) {
  var stats

  assert(p)
  assert(options)
  if (er)
    assert(er instanceof Error)

  try {
    options.chmodSync(p, _0666)
  } catch (chmodErr) {
    if (chmodErr.code === "ENOENT")
      return
    throw er
  }

  try {
    stats = options.statSync(p)
  } catch (statErr) {
    if (statErr.code === "ENOENT")
      return
    throw er
  }

  if (stats.isDirectory()) {
    rmdirSync(p, options, er)
  } else {
    options.unlinkSync(p)
  }
}
|
||
|
|
||
|
// Try to rmdir first; only recurse into the children on ENOTEMPTY,
// EEXIST (SunOS) or EPERM. If the path turned out not to be a directory
// (ENOTDIR), report the error we were originally trying to recover from.
function rmdir (p, options, originalEr, cb) {
  assert(p)
  assert(options)
  if (originalEr)
    assert(originalEr instanceof Error)
  assert(typeof cb === 'function')

  options.rmdir(p, function (er) {
    if (!er) {
      cb(er)
      return
    }
    switch (er.code) {
      case "ENOTEMPTY":
      case "EEXIST":
      case "EPERM":
        rmkids(p, options, cb)
        break
      case "ENOTDIR":
        cb(originalEr)
        break
      default:
        cb(er)
    }
  })
}
|
||
|
|
||
|
// Remove every entry inside directory `p` (recursively, via rimraf),
// then rmdir `p` itself once the last child is gone. The first child
// error wins; completions arriving after a failure are ignored.
function rmkids (p, options, cb) {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  options.readdir(p, function (readErr, files) {
    if (readErr)
      return cb(readErr)

    var remaining = files.length
    if (remaining === 0)
      return options.rmdir(p, cb)

    var firstError
    files.forEach(function (child) {
      rimraf(path.join(p, child), options, function (childErr) {
        if (firstError)
          return
        if (childErr)
          return cb(firstError = childErr)
        if (--remaining === 0)
          options.rmdir(p, cb)
      })
    })
  })
}
|
||
|
|
||
|
// this looks simpler, and is strictly *faster*, but will
|
||
|
// tie up the JavaScript thread and fail on excessively
|
||
|
// deep directory trees.
|
||
|
// Synchronous form of rimraf. Simpler and strictly *faster*, but ties
// up the JavaScript thread and can fail on excessively deep directory
// trees.
function rimrafSync (p, options) {
  options = options || {}
  defaults(options)

  assert(p, 'rimraf: missing path')
  assert.equal(typeof p, 'string', 'rimraf: path should be a string')
  assert(options, 'rimraf: missing options')
  assert.equal(typeof options, 'object', 'rimraf: options should be object')

  var results

  // Resolve the target list: the literal path when globbing is disabled
  // or the pattern has no magic; otherwise prefer the literal path if it
  // exists as-is and only fall back to a glob expansion when lstat fails.
  if (options.disableGlob || !glob.hasMagic(p)) {
    results = [p]
  } else {
    try {
      options.lstatSync(p)
      results = [p]
    } catch (er) {
      results = glob.sync(p, options.glob)
    }
  }

  if (!results.length)
    return

  for (var i = 0; i < results.length; i++) {
    // NOTE: deliberately shadows the `p` parameter for the rest of
    // the loop body.
    var p = results[i]

    try {
      var st = options.lstatSync(p)
    } catch (er) {
      // NOTE(review): this `return` aborts the remaining glob results,
      // not just this entry — mirrors upstream rimraf, but verify
      // before relying on it.
      if (er.code === "ENOENT")
        return

      // Windows can EPERM on stat. Life is suffering.
      if (er.code === "EPERM" && isWindows)
        fixWinEPERMSync(p, options, er)
    }

    try {
      // sunos lets the root user unlink directories, which is... weird.
      if (st && st.isDirectory())
        rmdirSync(p, options, null)
      else
        options.unlinkSync(p)
    } catch (er) {
      if (er.code === "ENOENT")
        return
      if (er.code === "EPERM")
        return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
      if (er.code !== "EISDIR")
        throw er

      // guessed "file" but it was a directory
      rmdirSync(p, options, er)
    }
  }
}
|
||
|
|
||
|
// Synchronous directory removal: attempt rmdirSync, and on ENOTEMPTY,
// EEXIST (SunOS) or EPERM fall back to clearing the children first.
// ENOTDIR means the file-vs-dir guess was wrong, so re-throw the error
// we were originally recovering from; ENOENT means already gone.
function rmdirSync (p, options, originalEr) {
  assert(p)
  assert(options)
  if (originalEr)
    assert(originalEr instanceof Error)

  try {
    options.rmdirSync(p)
  } catch (er) {
    switch (er.code) {
      case "ENOENT":
        return
      case "ENOTDIR":
        throw originalEr
      case "ENOTEMPTY":
      case "EEXIST":
      case "EPERM":
        rmkidsSync(p, options)
    }
  }
}
|
||
|
|
||
|
// Synchronously remove every child of `p` via rimrafSync, then retry
// rmdirSync on `p` itself.
function rmkidsSync (p, options) {
  assert(p)
  assert(options)
  options.readdirSync(p).forEach(function (f) {
    rimrafSync(path.join(p, f), options)
  })

  // We only end up here once we got ENOTEMPTY at least once, and
  // at this point, we are guaranteed to have removed all the kids.
  // So, we know that it won't be ENOENT or ENOTDIR or anything else.
  // try really hard to delete stuff on windows, because it has a
  // PROFOUNDLY annoying habit of not closing handles promptly when
  // files are deleted, resulting in spurious ENOTEMPTY errors.
  var retries = isWindows ? 100 : 1
  var i = 0
  do {
    var threw = true
    try {
      // NOTE(review): the second argument is ignored by Node's
      // fs.rmdirSync; presumably harmless — confirm against any custom
      // rmdirSync supplied via `options`.
      var ret = options.rmdirSync(p, options)
      threw = false
      return ret
    } finally {
      // A `continue` inside `finally` swallows the pending exception and
      // retries the rmdir; once retries are exhausted, the exception
      // from the last attempt propagates normally.
      if (++i < retries && threw)
        continue
    }
  } while (true)
}
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 575:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
// TypeScript-emitted helper: drives the generator produced from a
// compiled `async` function, resolving/rejecting a Promise as the
// generator steps. Reuses a pre-existing global helper when present.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap plain yielded values in the target Promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
// TypeScript-emitted helper: emulates `import * as ns` for CommonJS
// modules by copying own enumerable properties onto a fresh namespace
// object and exposing the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const glob = __importStar(__webpack_require__(281));
|
||
|
const core_1 = __webpack_require__(470);
|
||
|
const fs_1 = __webpack_require__(747);
|
||
|
const path_1 = __webpack_require__(622);
|
||
|
/**
 * Default glob options used when the caller supplies none: follow
 * symlinks, expand directories into their descendants, and skip broken
 * symbolic links.
 * @returns a fresh glob-options object
 */
function getDefaultGlobOptions() {
    const defaultOptions = {
        followSymbolicLinks: true,
        implicitDescendants: true,
        omitBrokenSymbolicLinks: true
    };
    return defaultOptions;
}
|
||
|
/**
 * Expands `searchPath` with the actions glob library and returns the
 * files to upload plus the root directory that upload paths are made
 * relative to.
 * NOTE(review): assumes a single search pattern — multiple search paths
 * reported by the globber are rejected below.
 */
function findFilesToUpload(searchPath, globOptions) {
    return __awaiter(this, void 0, void 0, function* () {
        const searchResults = [];
        const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions());
        const rawSearchResults = yield globber.glob();
        /*
          Directories will be rejected if attempted to be uploaded. This includes just empty
          directories so filter any directories out from the raw search results
        */
        for (const searchResult of rawSearchResults) {
            if (!fs_1.lstatSync(searchResult).isDirectory()) {
                core_1.debug(`File:${searchResult} was found using the provided searchPath`);
                searchResults.push(searchResult);
            }
            else {
                core_1.debug(`Removing ${searchResult} from rawSearchResults because it is a directory`);
            }
        }
        /*
          Only a single search pattern is being included so only 1 searchResult is expected. In the future if multiple search patterns are
          simultaneously supported this will change
        */
        const searchPaths = globber.getSearchPaths();
        if (searchPaths.length > 1) {
            throw new Error('Only 1 search path should be returned');
        }
        /*
          Special case for a single file artifact that is uploaded without a directory or wildcard pattern. The directory structure is
          not preserved and the root directory will be the single file's parent directory
        */
        if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) {
            return {
                filesToUpload: searchResults,
                rootDirectory: path_1.dirname(searchResults[0])
            };
        }
        return {
            filesToUpload: searchResults,
            rootDirectory: searchPaths[0]
        };
    });
}
|
||
|
exports.findFilesToUpload = findFilesToUpload;
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 590:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
// TypeScript-emitted helper: emulates `import * as ns` for CommonJS
// modules by copying own enumerable properties onto a fresh namespace
// object and exposing the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const fs = __importStar(__webpack_require__(747));
|
||
|
const core_1 = __webpack_require__(470);
|
||
|
const path_1 = __webpack_require__(622);
|
||
|
const utils_1 = __webpack_require__(870);
|
||
|
/**
|
||
|
* Creates a specification that describes how each file that is part of the artifact will be uploaded
|
||
|
* @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server
|
||
|
* @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file
|
||
|
* @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
|
||
|
*/
|
||
|
function getUploadSpecification(artifactName, rootDirectory, artifactFiles) {
    // Reject invalid artifact names early (forbidden characters, etc.).
    utils_1.checkArtifactName(artifactName);
    const specifications = [];
    if (!fs.existsSync(rootDirectory)) {
        throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`);
    }
    if (!fs.lstatSync(rootDirectory).isDirectory()) {
        throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`);
    }
    // Normalize and resolve, this allows for either absolute or relative paths to be used
    rootDirectory = path_1.normalize(rootDirectory);
    rootDirectory = path_1.resolve(rootDirectory);
    /*
      Example to demonstrate behavior

      Input:
        artifactName: my-artifact
        rootDirectory: '/home/user/files/plz-upload'
        artifactFiles: [
          '/home/user/files/plz-upload/file1.txt',
          '/home/user/files/plz-upload/file2.txt',
          '/home/user/files/plz-upload/dir/file3.txt'
        ]

      Output:
        specifications: [
          ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'],
          ['/home/user/files/plz-upload/file2.txt', 'my-artifact/file2.txt'],
          ['/home/user/files/plz-upload/dir/file3.txt', 'my-artifact/dir/file3.txt']
        ]
    */
    for (let file of artifactFiles) {
        if (!fs.existsSync(file)) {
            throw new Error(`File ${file} does not exist`);
        }
        if (!fs.lstatSync(file).isDirectory()) {
            // Normalize and resolve, this allows for either absolute or relative paths to be used
            file = path_1.normalize(file);
            file = path_1.resolve(file);
            // NOTE(review): plain prefix check — a sibling such as
            // `/root-extra/f` would also pass startsWith('/root');
            // confirm callers only pass files truly under rootDirectory.
            if (!file.startsWith(rootDirectory)) {
                throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`);
            }
            // Check for forbidden characters in file paths that will be rejected during upload
            const uploadPath = file.replace(rootDirectory, '');
            utils_1.checkArtifactFilePath(uploadPath);
            /*
              uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
              be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts

              path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt
                join('artifact-name/', 'file-to-upload.txt')
                join('artifact-name/', '/file-to-upload.txt')
                join('artifact-name', 'file-to-upload.txt')
                join('artifact-name', '/file-to-upload.txt')
            */
            specifications.push({
                absoluteFilePath: file,
                uploadFilePath: path_1.join(artifactName, uploadPath)
            });
        }
        else {
            // Directories are rejected by the server during upload
            core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`);
        }
    }
    return specifications;
}
|
||
|
exports.getUploadSpecification = getUploadSpecification;
|
||
|
//# sourceMappingURL=upload-specification.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 597:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const pathHelper = __webpack_require__(972);
|
||
|
const internal_match_kind_1 = __webpack_require__(327);
|
||
|
const IS_WINDOWS = process.platform === 'win32';
|
||
|
/**
|
||
|
* Given an array of patterns, returns an array of paths to search.
|
||
|
* Duplicates and paths under other included paths are filtered out.
|
||
|
*/
|
||
|
/**
 * Given an array of patterns, returns an array of paths to search.
 * Duplicates and paths nested under other included paths are filtered
 * out. Negate patterns never contribute search paths.
 */
function getSearchPaths(patterns) {
    // Ignore negate patterns
    const includePatterns = patterns.filter(x => !x.negate);
    // Windows paths compare case-insensitively.
    const toKey = (searchPath) => IS_WINDOWS ? searchPath.toUpperCase() : searchPath;
    // Register every candidate search path up front so ancestor lookups
    // below can see all of them.
    const searchPathMap = {};
    for (const pattern of includePatterns) {
        searchPathMap[toKey(pattern.searchPath)] = 'candidate';
    }
    const result = [];
    for (const pattern of includePatterns) {
        const key = toKey(pattern.searchPath);
        // Skip exact duplicates that were already emitted.
        if (searchPathMap[key] === 'included') {
            continue;
        }
        // Walk up the directory chain looking for an ancestor that is
        // itself a search path; if found, this path is redundant.
        let foundAncestor = false;
        let tempKey = key;
        let parent = pathHelper.dirname(tempKey);
        while (parent !== tempKey) {
            if (searchPathMap[parent]) {
                foundAncestor = true;
                break;
            }
            tempKey = parent;
            parent = pathHelper.dirname(tempKey);
        }
        if (!foundAncestor) {
            result.push(pattern.searchPath);
            searchPathMap[key] = 'included';
        }
    }
    return result;
}
|
||
|
exports.getSearchPaths = getSearchPaths;
|
||
|
/**
|
||
|
* Matches the patterns against the path
|
||
|
*/
|
||
|
/**
 * Matches the patterns against the path, combining each pattern's match
 * kind bitwise: positive patterns OR their bits in, negate patterns
 * mask theirs out.
 */
function match(patterns, itemPath) {
    let result = internal_match_kind_1.MatchKind.None;
    for (const pattern of patterns) {
        const kind = pattern.match(itemPath);
        result = pattern.negate ? (result & ~kind) : (result | kind);
    }
    return result;
}
|
||
|
exports.match = match;
|
||
|
/**
|
||
|
* Checks whether to descend further into the directory
|
||
|
*/
|
||
|
/**
 * Checks whether to descend further into the directory: true when any
 * non-negate pattern partially matches the path.
 */
function partialMatch(patterns, itemPath) {
    for (const pattern of patterns) {
        if (!pattern.negate && pattern.partialMatch(itemPath)) {
            return true;
        }
    }
    return false;
}
|
||
|
exports.partialMatch = partialMatch;
|
||
|
//# sourceMappingURL=internal-pattern-helper.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 601:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const core = __webpack_require__(470);
|
||
|
/**
|
||
|
* Returns a copy with defaults filled in.
|
||
|
*/
|
||
|
/**
 * Returns a copy of the glob options with defaults filled in. Only
 * boolean-typed fields on `copy` override the defaults; everything else
 * is ignored. Each accepted override is echoed via core.debug.
 */
function getOptions(copy) {
    const result = {
        followSymbolicLinks: true,
        implicitDescendants: true,
        omitBrokenSymbolicLinks: true
    };
    if (copy) {
        const booleanFields = [
            'followSymbolicLinks',
            'implicitDescendants',
            'omitBrokenSymbolicLinks'
        ];
        for (const name of booleanFields) {
            if (typeof copy[name] === 'boolean') {
                result[name] = copy[name];
                core.debug(`${name} '${result[name]}'`);
            }
        }
    }
    return result;
}
|
||
|
exports.getOptions = getOptions;
|
||
|
//# sourceMappingURL=internal-glob-options-helper.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 605:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
// Webpack shim: module 605 simply re-exports Node's built-in `http`.
module.exports = require("http");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 608:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
// TypeScript-emitted helper: drives the generator produced from a
// compiled `async` function, resolving/rejecting a Promise as the
// generator steps. Reuses a pre-existing global helper when present.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap plain yielded values in the target Promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
||
|
// TypeScript-emitted helper: emulates `import * as ns` for CommonJS
// modules by copying own enumerable properties onto a fresh namespace
// object and exposing the module itself as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const fs = __importStar(__webpack_require__(747));
|
||
|
const tmp = __importStar(__webpack_require__(875));
|
||
|
const stream = __importStar(__webpack_require__(794));
|
||
|
const utils_1 = __webpack_require__(870);
|
||
|
const config_variables_1 = __webpack_require__(401);
|
||
|
const util_1 = __webpack_require__(669);
|
||
|
const url_1 = __webpack_require__(835);
|
||
|
const perf_hooks_1 = __webpack_require__(630);
|
||
|
const upload_status_reporter_1 = __webpack_require__(221);
|
||
|
const core_1 = __webpack_require__(470);
|
||
|
const http_manager_1 = __webpack_require__(452);
|
||
|
const upload_gzip_1 = __webpack_require__(647);
|
||
|
const stat = util_1.promisify(fs.stat);
|
||
|
class UploadHttpClient {
|
||
|
    /**
     * Creates an HttpManager pool sized to the configured upload file
     * concurrency, plus a status reporter for progress logging.
     */
    constructor() {
        this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency());
        this.statusReporter = new upload_status_reporter_1.UploadStatusReporter();
    }
|
||
|
/**
|
||
|
* Creates a file container for the new artifact in the remote blob storage/file service
|
||
|
* @param {string} artifactName Name of the artifact being created
|
||
|
* @returns The response from the Artifact Service if the file container was successfully created
|
||
|
*/
|
||
|
    createArtifactInFileContainer(artifactName) {
        return __awaiter(this, void 0, void 0, function* () {
            const parameters = {
                Type: 'actions_storage',
                Name: artifactName
            };
            const data = JSON.stringify(parameters, null, 2);
            const artifactUrl = utils_1.getArtifactUrl();
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.uploadHttpManager.getClient(0);
            const requestOptions = utils_1.getRequestOptions('application/json', false, false);
            const rawResponse = yield client.post(artifactUrl, data, requestOptions);
            const body = yield rawResponse.readBody();
            if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
                return JSON.parse(body);
            }
            else {
                // Dump the raw response to aid debugging before failing.
                // eslint-disable-next-line no-console
                console.log(rawResponse);
                throw new Error(`Unable to create a container for the artifact ${artifactName}`);
            }
        });
    }
|
||
|
/**
|
||
|
* Concurrently upload all of the files in chunks
|
||
|
* @param {string} uploadUrl Base Url for the artifact that was created
|
||
|
* @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
|
||
|
* @returns The size of all the files uploaded in bytes
|
||
|
*/
|
||
|
    uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) {
        return __awaiter(this, void 0, void 0, function* () {
            const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency();
            const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize();
            core_1.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`);
            const parameters = [];
            // by default, file uploads will continue if there is an error unless specified differently in the options
            let continueOnError = true;
            if (options) {
                if (options.continueOnError === false) {
                    continueOnError = false;
                }
            }
            // prepare the necessary parameters to upload all the files
            for (const file of filesToUpload) {
                const resourceUrl = new url_1.URL(uploadUrl);
                resourceUrl.searchParams.append('itemPath', file.uploadFilePath);
                parameters.push({
                    file: file.absoluteFilePath,
                    resourceUrl: resourceUrl.toString(),
                    maxChunkSize: MAX_CHUNK_SIZE,
                    continueOnError
                });
            }
            // One async worker per concurrency slot; each worker claims the
            // next file index off the shared `currentFile` counter until all
            // files have been claimed.
            const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()];
            const failedItemsToReport = [];
            let currentFile = 0;
            let completedFiles = 0;
            let uploadFileSize = 0;
            let totalFileSize = 0;
            let abortPendingFileUploads = false;
            this.statusReporter.setTotalNumberOfFilesToUpload(filesToUpload.length);
            this.statusReporter.start();
            // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
            yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () {
                while (currentFile < filesToUpload.length) {
                    const currentFileParameters = parameters[currentFile];
                    currentFile += 1;
                    if (abortPendingFileUploads) {
                        failedItemsToReport.push(currentFileParameters.file);
                        continue;
                    }
                    const startTime = perf_hooks_1.performance.now();
                    const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters);
                    core_1.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`);
                    uploadFileSize += uploadFileResult.successfullUploadSize;
                    totalFileSize += uploadFileResult.totalSize;
                    if (uploadFileResult.isSuccess === false) {
                        failedItemsToReport.push(currentFileParameters.file);
                        if (!continueOnError) {
                            // existing uploads will be able to finish however all pending uploads will fail fast
                            abortPendingFileUploads = true;
                        }
                    }
                    this.statusReporter.incrementProcessedCount();
                }
            })));
            this.statusReporter.stop();
            // done uploading, safety dispose all connections
            this.uploadHttpManager.disposeAndReplaceAllClients();
            core_1.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`);
            return {
                uploadSize: uploadFileSize,
                totalSize: totalFileSize,
                failedItems: failedItemsToReport
            };
        });
    }
|
||
|
/**
|
||
|
* Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space.
|
||
|
* If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
|
||
|
* @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls
|
||
|
* @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
|
||
|
* @returns The size of the file that was uploaded in bytes along with any failed uploads
|
||
|
*/
|
||
|
uploadFileAsync(httpClientIndex, parameters) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
const totalFileSize = (yield stat(parameters.file)).size;
|
||
|
let offset = 0;
|
||
|
let isUploadSuccessful = true;
|
||
|
let failedChunkSizes = 0;
|
||
|
let uploadFileSize = 0;
|
||
|
let isGzip = true;
|
||
|
// the file that is being uploaded is less than 64k in size, to increase thoroughput and to minimize disk I/O
|
||
|
// for creating a new GZip file, an in-memory buffer is used for compression
|
||
|
if (totalFileSize < 65536) {
|
||
|
const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file);
|
||
|
let uploadStream;
|
||
|
if (totalFileSize < buffer.byteLength) {
|
||
|
// compression did not help with reducing the size, use a readable stream from the original file for upload
|
||
|
uploadStream = fs.createReadStream(parameters.file);
|
||
|
isGzip = false;
|
||
|
uploadFileSize = totalFileSize;
|
||
|
}
|
||
|
else {
|
||
|
// create a readable stream using a PassThrough stream that is both readable and writable
|
||
|
const passThrough = new stream.PassThrough();
|
||
|
passThrough.end(buffer);
|
||
|
uploadStream = passThrough;
|
||
|
uploadFileSize = buffer.byteLength;
|
||
|
}
|
||
|
const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, uploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize);
|
||
|
if (!result) {
|
||
|
// chunk failed to upload
|
||
|
isUploadSuccessful = false;
|
||
|
failedChunkSizes += uploadFileSize;
|
||
|
core_1.warning(`Aborting upload for ${parameters.file} due to failure`);
|
||
|
}
|
||
|
return {
|
||
|
isSuccess: isUploadSuccessful,
|
||
|
successfullUploadSize: uploadFileSize - failedChunkSizes,
|
||
|
totalSize: totalFileSize
|
||
|
};
|
||
|
}
|
||
|
else {
|
||
|
// the file that is being uploaded is greater than 64k in size, a temprorary file gets created on disk using the
|
||
|
// npm tmp-promise package and this file gets used during compression for the GZip file that gets created
|
||
|
return tmp
|
||
|
.file()
|
||
|
.then((tmpFile) => __awaiter(this, void 0, void 0, function* () {
|
||
|
// create a GZip file of the original file being uploaded, the original file should not be modified in any way
|
||
|
uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tmpFile.path);
|
||
|
let uploadFilePath = tmpFile.path;
|
||
|
// compression did not help with size reduction, use the original file for upload and delete the temp GZip file
|
||
|
if (totalFileSize < uploadFileSize) {
|
||
|
uploadFileSize = totalFileSize;
|
||
|
uploadFilePath = parameters.file;
|
||
|
isGzip = false;
|
||
|
tmpFile.cleanup();
|
||
|
}
|
||
|
let abortFileUpload = false;
|
||
|
// upload only a single chunk at a time
|
||
|
while (offset < uploadFileSize) {
|
||
|
const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
|
||
|
if (abortFileUpload) {
|
||
|
// if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
|
||
|
failedChunkSizes += chunkSize;
|
||
|
continue;
|
||
|
}
|
||
|
// if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status
|
||
|
if (uploadFileSize > 104857600) {
|
||
|
this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize);
|
||
|
}
|
||
|
const start = offset;
|
||
|
const end = offset + chunkSize - 1;
|
||
|
offset += parameters.maxChunkSize;
|
||
|
const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, fs.createReadStream(uploadFilePath, {
|
||
|
start,
|
||
|
end,
|
||
|
autoClose: false
|
||
|
}), start, end, uploadFileSize, isGzip, totalFileSize);
|
||
|
if (!result) {
|
||
|
// Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
|
||
|
// successfully uploaded so the server may report a different size for what was uploaded
|
||
|
isUploadSuccessful = false;
|
||
|
failedChunkSizes += chunkSize;
|
||
|
core_1.warning(`Aborting upload for ${parameters.file} due to failure`);
|
||
|
abortFileUpload = true;
|
||
|
}
|
||
|
}
|
||
|
}))
|
||
|
.then(() => __awaiter(this, void 0, void 0, function* () {
|
||
|
// only after the file upload is complete and the temporary file is deleted, return the UploadResult
|
||
|
return new Promise(resolve => {
|
||
|
resolve({
|
||
|
isSuccess: isUploadSuccessful,
|
||
|
successfullUploadSize: uploadFileSize - failedChunkSizes,
|
||
|
totalSize: totalFileSize
|
||
|
});
|
||
|
});
|
||
|
}));
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
/**
|
||
|
* Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code
|
||
|
* indicates a retryable status, we try to upload the chunk as well
|
||
|
* @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
|
||
|
* @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
|
||
|
* @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded
|
||
|
* @param {number} start Starting byte index of file that the chunk belongs to
|
||
|
* @param {number} end Ending byte index of file that the chunk belongs to
|
||
|
* @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
|
||
|
* @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream
|
||
|
* @param {number} totalFileSize Original total size of the file that is being uploaded
|
||
|
* @returns if the chunk was successfully uploaded
|
||
|
*/
|
||
|
uploadChunk(httpClientIndex, resourceUrl, data, start, end, uploadFileSize, isGzip, totalFileSize) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
// prepare all the necessary headers before making any http call
|
||
|
const requestOptions = utils_1.getRequestOptions('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize));
|
||
|
const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
|
||
|
const client = this.uploadHttpManager.getClient(httpClientIndex);
|
||
|
return yield client.sendStream('PUT', resourceUrl, data, requestOptions);
|
||
|
});
|
||
|
let retryCount = 0;
|
||
|
const retryLimit = config_variables_1.getUploadRetryCount();
|
||
|
// allow for failed chunks to be retried multiple times
|
||
|
while (retryCount <= retryLimit) {
|
||
|
try {
|
||
|
const response = yield uploadChunkRequest();
|
||
|
// Always read the body of the response. There is potential for a resource leak if the body is not read which will
|
||
|
// result in the connection remaining open along with unintended consequences when trying to dispose of the client
|
||
|
yield response.readBody();
|
||
|
if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
|
||
|
return true;
|
||
|
}
|
||
|
else if (utils_1.isRetryableStatusCode(response.message.statusCode)) {
|
||
|
retryCount++;
|
||
|
if (retryCount > retryLimit) {
|
||
|
core_1.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`);
|
||
|
return false;
|
||
|
}
|
||
|
else {
|
||
|
core_1.info(`HTTP ${response.message.statusCode} during chunk upload, will retry at offset ${start} after ${config_variables_1.getRetryWaitTimeInMilliseconds} milliseconds. Retry count #${retryCount}. URL ${resourceUrl}`);
|
||
|
this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex);
|
||
|
yield new Promise(resolve => setTimeout(resolve, config_variables_1.getRetryWaitTimeInMilliseconds()));
|
||
|
}
|
||
|
}
|
||
|
else {
|
||
|
core_1.info(`#ERROR# Unable to upload chunk to ${resourceUrl}`);
|
||
|
// eslint-disable-next-line no-console
|
||
|
console.log(response);
|
||
|
return false;
|
||
|
}
|
||
|
}
|
||
|
catch (error) {
|
||
|
// eslint-disable-next-line no-console
|
||
|
console.log(error);
|
||
|
retryCount++;
|
||
|
if (retryCount > retryLimit) {
|
||
|
core_1.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`);
|
||
|
return false;
|
||
|
}
|
||
|
else {
|
||
|
core_1.info(`Retrying chunk upload after encountering an error`);
|
||
|
this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex);
|
||
|
yield new Promise(resolve => setTimeout(resolve, config_variables_1.getRetryWaitTimeInMilliseconds()));
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
return false;
|
||
|
});
|
||
|
}
|
||
|
/**
|
||
|
* Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact.
|
||
|
* Updating the size indicates that we are done uploading all the contents of the artifact
|
||
|
*/
|
||
|
patchArtifactSize(size, artifactName) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
const requestOptions = utils_1.getRequestOptions('application/json', false, false);
|
||
|
const resourceUrl = new url_1.URL(utils_1.getArtifactUrl());
|
||
|
resourceUrl.searchParams.append('artifactName', artifactName);
|
||
|
const parameters = { Size: size };
|
||
|
const data = JSON.stringify(parameters, null, 2);
|
||
|
core_1.debug(`URL is ${resourceUrl.toString()}`);
|
||
|
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediatly
|
||
|
const client = this.uploadHttpManager.getClient(0);
|
||
|
const rawResponse = yield client.patch(resourceUrl.toString(), data, requestOptions);
|
||
|
const body = yield rawResponse.readBody();
|
||
|
if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode)) {
|
||
|
core_1.debug(`Artifact ${artifactName} has been successfully uploaded, total size ${size}`);
|
||
|
}
|
||
|
else if (rawResponse.message.statusCode === 404) {
|
||
|
throw new Error(`An Artifact with the name ${artifactName} was not found`);
|
||
|
}
|
||
|
else {
|
||
|
// eslint-disable-next-line no-console
|
||
|
console.log(body);
|
||
|
throw new Error(`Unable to finish uploading artifact ${artifactName}`);
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
exports.UploadHttpClient = UploadHttpClient;
|
||
|
//# sourceMappingURL=upload-http-client.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 614:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("events");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 621:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
module.exports = balanced;
|
||
|
/**
 * Finds the first balanced pair of delimiters `a`…`b` in `str`.
 * Regexp delimiters are first resolved to their first match in `str`.
 * @returns an object {start, end, pre, body, post}, or undefined when no
 *          balanced pair exists.
 */
function balanced(a, b, str) {
  const open = a instanceof RegExp ? maybeMatch(a, str) : a;
  const close = b instanceof RegExp ? maybeMatch(b, str) : b;

  const r = range(open, close, str);

  return (
    r && {
      start: r[0],
      end: r[1],
      pre: str.slice(0, r[0]),
      body: str.slice(r[0] + open.length, r[1]),
      post: str.slice(r[1] + close.length)
    }
  );
}

// Resolve a regexp to its first match in `str`, or null when it does not match.
function maybeMatch(reg, str) {
  const m = str.match(reg);
  return m === null ? null : m[0];
}

balanced.range = range;
/**
 * Returns the [start, end] indexes of the first balanced `a`…`b` pair,
 * or undefined when none is found.
 */
function range(a, b, str) {
  let result;
  let ai = str.indexOf(a);
  let bi = str.indexOf(b, ai + 1);
  let i = ai;

  if (ai >= 0 && bi > 0) {
    const begs = [];
    let left = str.length;
    let right;

    while (i >= 0 && !result) {
      if (i === ai) {
        // found another opener: remember it and look for the next
        begs.push(i);
        ai = str.indexOf(a, i + 1);
      } else if (begs.length === 1) {
        // a single pending opener closed by this closer: balanced pair found
        result = [begs.pop(), bi];
      } else {
        // nested opener closed: track the left-most complete inner pair
        const beg = begs.pop();
        if (beg < left) {
          left = beg;
          right = bi;
        }
        bi = str.indexOf(b, i + 1);
      }

      // advance to whichever delimiter occurs next
      i = ai < bi && ai >= 0 ? ai : bi;
    }

    if (begs.length) {
      // unclosed openers remain: fall back to the inner-most completed pair
      result = [left, right];
    }
  }

  return result;
}
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 622:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("path");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 630:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("perf_hooks");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 631:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("net");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 647:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
});
|
||
|
};
|
||
|
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||
|
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||
|
var m = o[Symbol.asyncIterator], i;
|
||
|
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||
|
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||
|
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||
|
};
|
||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||
|
if (mod && mod.__esModule) return mod;
|
||
|
var result = {};
|
||
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||
|
result["default"] = mod;
|
||
|
return result;
|
||
|
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const fs = __importStar(__webpack_require__(747));
|
||
|
const zlib = __importStar(__webpack_require__(761));
|
||
|
const util_1 = __webpack_require__(669);
|
||
|
const stat = util_1.promisify(fs.stat);
|
||
|
/**
 * Creates a Gzip compressed file of an original file at the provided temporary filepath location
 * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
 * @param {string} tempFilePath the location of where the Gzip file will be created
 * @returns the size of gzip file that gets created
 */
function createGZipFileOnDisk(originalFilePath, tempFilePath) {
    return new Promise((resolve, reject) => {
        const inputStream = fs.createReadStream(originalFilePath);
        const gzip = zlib.createGzip();
        const outputStream = fs.createWriteStream(tempFilePath);
        // BUGFIX: the original never rejected — its error handler contained the bare
        // expression `reject;` and read/compress errors were not wired up at all, so a
        // failed compression left the returned promise pending forever. Propagate every
        // stage's 'error' event so the promise always settles.
        inputStream.on('error', reject);
        gzip.on('error', reject);
        inputStream.pipe(gzip).pipe(outputStream);
        outputStream.on('finish', () => {
            // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload
            fs.promises.stat(tempFilePath).then(stats => resolve(stats.size), reject);
        });
        outputStream.on('error', error => {
            // eslint-disable-next-line no-console
            console.log(error);
            reject(error);
        });
    });
}
|
||
|
exports.createGZipFileOnDisk = createGZipFileOnDisk;
|
||
|
/**
 * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O
 * @param originalFilePath the path to the original file that is being GZipped
 * @returns a buffer with the GZip file
 */
async function createGZipFileInBuffer(originalFilePath) {
    // FIX: the original wrapped an async executor in `new Promise(...)`; any error thrown
    // inside that executor became an unhandled rejection and the outer promise never
    // settled. A plain async function lets failures reject normally for the caller.
    const inputStream = fs.createReadStream(originalFilePath);
    const gzip = zlib.createGzip();
    // surface read errors on the gzip stream so the `for await` below rejects instead of hanging
    inputStream.on('error', error => gzip.destroy(error));
    inputStream.pipe(gzip);
    // read stream into buffer, using experimental async itterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043
    const chunks = [];
    for await (const chunk of gzip) {
        chunks.push(chunk);
    }
    return Buffer.concat(chunks);
}
|
||
|
exports.createGZipFileInBuffer = createGZipFileInBuffer;
|
||
|
//# sourceMappingURL=upload-gzip.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 669:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("util");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 674:
|
||
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
||
|
var wrappy = __webpack_require__(11)
|
||
|
var reqs = Object.create(null)
|
||
|
var once = __webpack_require__(49)
|
||
|
|
||
|
module.exports = wrappy(inflight)
|
||
|
|
||
|
// Deduplicates concurrent async operations by key: the first caller for a key
// gets back a wrapped callback (via makeres) that fans results out to every
// queued callback; subsequent callers are queued and receive null.
function inflight (key, cb) {
  var pending = reqs[key]
  if (pending) {
    pending.push(cb)
    return null
  }
  reqs[key] = [cb]
  return makeres(key)
}
|
||
|
|
||
|
// Builds the once-wrapped result callback for `key`: when invoked, it replays
// its arguments to every callback queued in reqs[key] at call time, then either
// reschedules itself (if new callbacks arrived while dispatching) or clears the
// key so a future inflight() call starts a fresh operation.
function makeres (key) {
  return once(function RES () {
    var cbs = reqs[key]
    var len = cbs.length
    // snapshot the arguments so a rescheduled RES replays the same result
    var args = slice(arguments)

    // XXX It's somewhat ambiguous whether a new callback added in this
    // pass should be queued for later execution if something in the
    // list of callbacks throws, or if it should just be discarded.
    // However, it's such an edge case that it hardly matters, and either
    // choice is likely as surprising as the other.
    // As it happens, we do go ahead and schedule it for later execution.
    try {
      for (var i = 0; i < len; i++) {
        cbs[i].apply(null, args)
      }
    } finally {
      // cleanup runs even if one of the callbacks above threw
      if (cbs.length > len) {
        // added more in the interim.
        // de-zalgo, just in case, but don't call again.
        cbs.splice(0, len)
        process.nextTick(function () {
          RES.apply(null, args)
        })
      } else {
        delete reqs[key]
      }
    }
  })
}
|
||
|
|
||
|
// Copies an array-like (typically `arguments`) into a real Array.
function slice (args) {
  var out = []
  for (var i = 0, n = args.length; i < n; i++) {
    out.push(args[i])
  }
  return out
}
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 681:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
|
||
|
// A POSIX path is absolute exactly when it begins with a forward slash.
function posix(path) {
  return path.startsWith('/');
}
|
||
|
|
||
|
// Windows absolute-path check: absolute when rooted by a slash/backslash after
// an optional device (drive letter or UNC prefix), or when the device is UNC.
function win32(path) {
  // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
  var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
  var parts = splitDeviceRe.exec(path);
  var device = parts[1] || '';
  var isUnc = Boolean(device && device.charAt(1) !== ':');

  // UNC paths are always absolute
  return Boolean(parts[2] || isUnc);
}
|
||
|
|
||
|
module.exports = process.platform === 'win32' ? win32 : posix;
|
||
|
module.exports.posix = posix;
|
||
|
module.exports.win32 = win32;
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 689:
|
||
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
||
|
try {
|
||
|
var util = __webpack_require__(669);
|
||
|
/* istanbul ignore next */
|
||
|
if (typeof util.inherits !== 'function') throw '';
|
||
|
module.exports = util.inherits;
|
||
|
} catch (e) {
|
||
|
/* istanbul ignore next */
|
||
|
module.exports = __webpack_require__(315);
|
||
|
}
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 694:
|
||
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
// String-enum of the action's inputs (compiled from a TypeScript `enum`);
// the IIFE both populates the local object and exposes it as exports.Inputs.
var Inputs;
(function (Inputs) {
    Inputs["Name"] = "name";
    Inputs["Path"] = "path";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||
|
/**
 * Fallback artifact name used when the `name` input is not supplied.
 * @returns {string} the default artifact name
 */
function getDefaultArtifactName() {
    return 'artifact';
}
|
||
|
exports.getDefaultArtifactName = getDefaultArtifactName;
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 728:
|
||
|
/***/ (function(__unusedmodule, exports) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
/**
 * Value object pairing a filesystem path with its depth (`level`) in the
 * directory traversal that produced it.
 */
class SearchState {
    constructor(path, level) {
        // single-statement initialization of both public fields
        Object.assign(this, { path, level });
    }
}
|
||
|
exports.SearchState = SearchState;
|
||
|
//# sourceMappingURL=internal-search-state.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 747:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("fs");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 761:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("zlib");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 794:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("stream");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 835:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = require("url");
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 855:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
});
|
||
|
};
|
||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||
|
if (mod && mod.__esModule) return mod;
|
||
|
var result = {};
|
||
|
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||
|
result["default"] = mod;
|
||
|
return result;
|
||
|
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const fs = __importStar(__webpack_require__(747));
|
||
|
const zlib = __importStar(__webpack_require__(761));
|
||
|
const utils_1 = __webpack_require__(870);
|
||
|
const url_1 = __webpack_require__(835);
|
||
|
const http_manager_1 = __webpack_require__(452);
|
||
|
const config_variables_1 = __webpack_require__(401);
|
||
|
const core_1 = __webpack_require__(470);
|
||
|
/**
 * HTTP client wrapper for downloading artifacts: lists artifacts/container
 * items and downloads individual files with retry handling and optional
 * gzip decompression.
 */
class DownloadHttpClient {
    constructor() {
        // pool of clients sized to the configured download concurrency
        this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency());
    }
    /**
     * Gets a list of all artifacts that are in a specific container
     */
    listArtifacts() {
        return __awaiter(this, void 0, void 0, function* () {
            const artifactUrl = utils_1.getArtifactUrl();
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediatly
            const client = this.downloadHttpManager.getClient(0);
            const requestOptions = utils_1.getRequestOptions('application/json');
            const rawResponse = yield client.get(artifactUrl, requestOptions);
            const body = yield rawResponse.readBody();
            if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
                return JSON.parse(body);
            }
            // eslint-disable-next-line no-console
            console.log(rawResponse);
            throw new Error(`Unable to list artifacts for the run`);
        });
    }
    /**
     * Fetches a set of container items that describe the contents of an artifact
     * @param artifactName the name of the artifact
     * @param containerUrl the artifact container URL for the run
     */
    getContainerItems(artifactName, containerUrl) {
        return __awaiter(this, void 0, void 0, function* () {
            // the itemPath search parameter controls which containers will be returned
            const resourceUrl = new url_1.URL(containerUrl);
            resourceUrl.searchParams.append('itemPath', artifactName);
            // no concurrent calls so a single httpClient without the http-manager is sufficient
            const client = utils_1.createHttpClient();
            // no keep-alive header, client disposal is not necessary
            const requestOptions = utils_1.getRequestOptions('application/json');
            const rawResponse = yield client.get(resourceUrl.toString(), requestOptions);
            const body = yield rawResponse.readBody();
            if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
                return JSON.parse(body);
            }
            // eslint-disable-next-line no-console
            console.log(rawResponse);
            throw new Error(`Unable to get ContainersItems from ${resourceUrl}`);
        });
    }
    /**
     * Concurrently downloads all the files that are part of an artifact
     * @param downloadItems information about what items to download and where to save them
     */
    downloadSingleArtifact(downloadItems) {
        return __awaiter(this, void 0, void 0, function* () {
            const DOWNLOAD_CONCURRENCY = config_variables_1.getDownloadFileConcurrency();
            // limit the number of files downloaded at a single time
            const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()];
            let downloadedFiles = 0;
            // each worker pulls the next un-downloaded item until the list is exhausted
            yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () {
                while (downloadedFiles < downloadItems.length) {
                    const currentFileToDownload = downloadItems[downloadedFiles];
                    downloadedFiles += 1;
                    yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath);
                }
            })));
            // done downloading, safety dispose all connections
            this.downloadHttpManager.disposeAndReplaceAllClients();
        });
    }
    /**
     * Downloads an individual file
     * @param httpClientIndex the index of the http client that is used to make all of the calls
     * @param artifactLocation origin location where a file will be downloaded from
     * @param downloadPath destination location for the file being downloaded
     */
    downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) {
        return __awaiter(this, void 0, void 0, function* () {
            const stream = fs.createWriteStream(downloadPath);
            const client = this.downloadHttpManager.getClient(httpClientIndex);
            const requestOptions = utils_1.getRequestOptions('application/octet-stream', true);
            const response = yield client.get(artifactLocation, requestOptions);
            // check the response headers to determine if the file was compressed using gzip
            const isGzip = (headers) => {
                return ('content-encoding' in headers && headers['content-encoding'] === 'gzip');
            };
            if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
                yield this.pipeResponseToStream(response, stream, isGzip(response.message.headers));
            }
            else if (utils_1.isRetryableStatusCode(response.message.statusCode)) {
                core_1.warning(`Received http ${response.message.statusCode} during file download, will retry ${artifactLocation} after 10 seconds`);
                // if an error is encountered, dispose of the http connection, and create a new one
                this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex);
                yield new Promise(resolve => setTimeout(resolve, config_variables_1.getRetryWaitTimeInMilliseconds()));
                // NOTE(review): the retry GET omits `requestOptions` — confirm whether the
                // gzip/accept headers should be re-sent on retry
                const retryResponse = yield client.get(artifactLocation);
                if (utils_1.isSuccessStatusCode(retryResponse.message.statusCode)) {
                    // BUGFIX: the original piped the failed first `response` (and read its
                    // headers) here; pipe the successful retry response instead
                    yield this.pipeResponseToStream(retryResponse, stream, isGzip(retryResponse.message.headers));
                }
                else {
                    // eslint-disable-next-line no-console
                    console.log(retryResponse);
                    throw new Error(`Unable to download ${artifactLocation}`);
                }
            }
            else {
                // eslint-disable-next-line no-console
                console.log(response);
                throw new Error(`Unable to download ${artifactLocation}`);
            }
        });
    }
    /**
     * Pipes the response from downloading an individual file to the appropriate stream
     * @param response the http response recieved when downloading a file
     * @param stream the stream where the file should be written to
     * @param isGzip does the response need to be be uncompressed
     */
    pipeResponseToStream(response, stream, isGzip) {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise(resolve => {
                if (isGzip) {
                    // pipe the response into gunzip to decompress
                    const gunzip = zlib.createGunzip();
                    response.message
                        .pipe(gunzip)
                        .pipe(stream)
                        .on('close', () => {
                        resolve();
                    });
                }
                else {
                    response.message.pipe(stream).on('close', () => {
                        resolve();
                    });
                }
            });
        });
    }
}
|
||
|
exports.DownloadHttpClient = DownloadHttpClient;
|
||
|
//# sourceMappingURL=download-http-client.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 856:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
exports.alphasort = alphasort
|
||
|
exports.alphasorti = alphasorti
|
||
|
exports.setopts = setopts
|
||
|
exports.ownProp = ownProp
|
||
|
exports.makeAbs = makeAbs
|
||
|
exports.finish = finish
|
||
|
exports.mark = mark
|
||
|
exports.isIgnored = isIgnored
|
||
|
exports.childrenIgnored = childrenIgnored
|
||
|
|
||
|
// hasOwnProperty check that works even when `obj` has no prototype or
// shadows the hasOwnProperty method.
function ownProp (obj, field) {
  var hasOwn = Object.prototype.hasOwnProperty
  return hasOwn.call(obj, field)
}
|
||
|
|
||
|
var path = __webpack_require__(622)
|
||
|
var minimatch = __webpack_require__(93)
|
||
|
var isAbsolute = __webpack_require__(681)
|
||
|
var Minimatch = minimatch.Minimatch
|
||
|
|
||
|
// Case-insensitive locale-aware comparator for sorting glob results.
function alphasorti (a, b) {
  var lowerA = a.toLowerCase()
  var lowerB = b.toLowerCase()
  return lowerA.localeCompare(lowerB)
}
|
||
|
|
||
|
// Case-sensitive locale-aware comparator for sorting glob results.
function alphasort (a, b) {
  var order = a.localeCompare(b)
  return order
}
|
||
|
|
||
|
// Normalize options.ignore (string or array of patterns) into an array of
// {matcher, gmatcher} entries stored on self.ignore.
function setupIgnores (self, options) {
  var patterns = options.ignore || []

  if (!Array.isArray(patterns))
    patterns = [patterns]

  self.ignore = patterns.length ? patterns.map(ignoreMap) : patterns
}
|
||
|
|
||
|
// ignore patterns are always in dot:true mode.
// Compile one ignore pattern into a matcher; patterns ending in '/**' also get
// a gmatcher for the parent directory itself (so the directory is skipped too).
function ignoreMap (pattern) {
  var gmatcher = null

  if (pattern.endsWith('/**')) {
    var gpattern = pattern.replace(/(\/\*\*)+$/, '')
    gmatcher = new Minimatch(gpattern, { dot: true })
  }

  return {
    matcher: new Minimatch(pattern, { dot: true }),
    gmatcher: gmatcher
  }
}
|
||
|
|
||
|
// Populate a glob instance (`self`) from the pattern and user options:
// normalizes every option flag onto the instance, sets up ignore matchers,
// resolves cwd/root, and builds the Minimatch matcher.
function setopts (self, pattern, options) {
  if (!options)
    options = {}

  // base-matching: just use globstar for that.
  if (options.matchBase && -1 === pattern.indexOf("/")) {
    if (options.noglobstar) {
      throw new Error("base matching requires globstar")
    }
    pattern = "**/" + pattern
  }

  // copy option flags onto the instance, coercing to real booleans
  self.silent = !!options.silent
  self.pattern = pattern
  self.strict = options.strict !== false
  self.realpath = !!options.realpath
  self.realpathCache = options.realpathCache || Object.create(null)
  self.follow = !!options.follow
  self.dot = !!options.dot
  self.mark = !!options.mark
  self.nodir = !!options.nodir
  // nodir relies on trailing-slash marking, so it forces mark on
  if (self.nodir)
    self.mark = true
  self.sync = !!options.sync
  self.nounique = !!options.nounique
  self.nonull = !!options.nonull
  self.nosort = !!options.nosort
  self.nocase = !!options.nocase
  self.stat = !!options.stat
  self.noprocess = !!options.noprocess
  self.absolute = !!options.absolute

  self.maxLength = options.maxLength || Infinity
  // caches may be passed in to share stat/readdir results across glob calls
  self.cache = options.cache || Object.create(null)
  self.statCache = options.statCache || Object.create(null)
  self.symlinks = options.symlinks || Object.create(null)

  setupIgnores(self, options)

  // track whether an explicit cwd differs from the process cwd
  self.changedCwd = false
  var cwd = process.cwd()
  if (!ownProp(options, "cwd"))
    self.cwd = cwd
  else {
    self.cwd = path.resolve(options.cwd)
    self.changedCwd = self.cwd !== cwd
  }

  self.root = options.root || path.resolve(self.cwd, "/")
  self.root = path.resolve(self.root)
  if (process.platform === "win32")
    self.root = self.root.replace(/\\/g, "/")

  // TODO: is an absolute `cwd` supposed to be resolved against `root`?
  // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
  self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
  if (process.platform === "win32")
    self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
  self.nomount = !!options.nomount

  // disable comments and negation in Minimatch.
  // Note that they are not supported in Glob itself anyway.
  options.nonegate = true
  options.nocomment = true

  self.minimatch = new Minimatch(pattern, options)
  self.options = self.minimatch.options
}
|
||
|
|
||
|
// Flatten self.matches (one entry per expanded pattern) into the final
// self.found array: dedupe (unless nounique), apply nonull literal fallbacks,
// sort, apply mark/nodir filtering, and drop ignored entries.
function finish (self) {
  var nou = self.nounique
  // nounique keeps duplicates (array accumulator); otherwise an object is used
  // as a set for deduplication
  var all = nou ? [] : Object.create(null)

  for (var i = 0, l = self.matches.length; i < l; i ++) {
    var matches = self.matches[i]
    if (!matches || Object.keys(matches).length === 0) {
      if (self.nonull) {
        // do like the shell, and spit out the literal glob
        var literal = self.minimatch.globSet[i]
        if (nou)
          all.push(literal)
        else
          all[literal] = true
      }
    } else {
      // had matches
      var m = Object.keys(matches)
      if (nou)
        all.push.apply(all, m)
      else
        m.forEach(function (m) {
          all[m] = true
        })
    }
  }

  // collapse the set-object back into an array of unique paths
  if (!nou)
    all = Object.keys(all)

  if (!self.nosort)
    all = all.sort(self.nocase ? alphasorti : alphasort)

  // at *some* point we statted all of these
  if (self.mark) {
    for (var i = 0; i < all.length; i++) {
      all[i] = self._mark(all[i])
    }
    if (self.nodir) {
      // drop directories: anything marked with a trailing slash, or anything
      // the cache says is a directory
      all = all.filter(function (e) {
        var notDir = !(/\/$/.test(e))
        var c = self.cache[e] || self.cache[makeAbs(self, e)]
        if (notDir && c)
          notDir = c !== 'DIR' && !Array.isArray(c)
        return notDir
      })
    }
  }

  if (self.ignore.length)
    all = all.filter(function(m) {
      return !isIgnored(self, m)
    })

  self.found = all
}
|
||
|
|
||
|
// Apply the `mark` option to one result: append '/' to known directories and
// strip a stray trailing '/' from non-directories, based on the stat cache.
function mark (self, p) {
  var abs = makeAbs(self, p)
  var cached = self.cache[abs]
  var marked = p

  if (cached) {
    var isDir = cached === 'DIR' || Array.isArray(cached)
    var endsWithSlash = p.slice(-1) === '/'

    if (isDir && !endsWithSlash) {
      marked += '/'
    } else if (!isDir && endsWithSlash) {
      marked = marked.slice(0, -1)
    }

    if (marked !== p) {
      // mirror the cache entries under the rewritten key so later lookups hit
      var mabs = makeAbs(self, marked)
      self.statCache[mabs] = self.statCache[abs]
      self.cache[mabs] = self.cache[abs]
    }
  }

  return marked
}
|
||
|
|
||
|
// lotta situps...
|
||
|
function makeAbs (self, f) {
  // Convert `f` to an absolute path, honoring the configured root/cwd.
  var abs
  if (f.charAt(0) === '/') {
    // Rooted pattern: re-root it under self.root.
    abs = path.join(self.root, f)
  } else if (f === '' || isAbsolute(f)) {
    abs = f
  } else {
    abs = self.changedCwd ? path.resolve(self.cwd, f) : path.resolve(f)
  }

  // Normalize to forward slashes on Windows.
  if (process.platform === 'win32')
    abs = abs.replace(/\\/g, '/')

  return abs
}
|
||
|
|
||
|
|
||
|
// Return true if the pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to the ignore list along with its contents
|
||
|
function isIgnored (self, path) {
  // Nothing configured: nothing is ignored.
  if (!self.ignore.length)
    return false

  // Ignored when any pattern matches directly, or via its globstar
  // parent matcher (gmatcher) when present.
  return self.ignore.some(function (item) {
    if (item.matcher.match(path))
      return true
    return !!(item.gmatcher && item.gmatcher.match(path))
  })
}
|
||
|
|
||
|
function childrenIgnored (self, path) {
  if (!self.ignore.length)
    return false

  // Only globstar patterns (those with a gmatcher) can ignore an
  // entire subtree; plain matchers never do.
  return self.ignore.some(function (item) {
    return Boolean(item.gmatcher && item.gmatcher.match(path))
  })
}
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 870:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||
|
});
|
||
|
};
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const core_1 = __webpack_require__(470);
|
||
|
const fs_1 = __webpack_require__(747);
|
||
|
const http_client_1 = __webpack_require__(539);
|
||
|
const auth_1 = __webpack_require__(226);
|
||
|
const config_variables_1 = __webpack_require__(401);
|
||
|
/**
|
||
|
* Parses a env variable that is a number
|
||
|
*/
|
||
|
function parseEnvNumber(key) {
    // Non-numeric or negative values mean "not configured".
    // NOTE(review): an empty-string env var parses as 0 and is returned as 0.
    const value = Number(process.env[key]);
    return Number.isNaN(value) || value < 0 ? undefined : value;
}
|
||
|
exports.parseEnvNumber = parseEnvNumber;
|
||
|
/**
|
||
|
* Various utility functions to help with the necessary API calls
|
||
|
*/
|
||
|
function getApiVersion() {
    // API version pinned for every artifact service call.
    const apiVersion = '6.0-preview';
    return apiVersion;
}
|
||
|
exports.getApiVersion = getApiVersion;
|
||
|
function isSuccessStatusCode(statusCode) {
    // Only the 2xx family counts as success; undefined/0 never does.
    return statusCode ? statusCode >= 200 && statusCode < 300 : false;
}
|
||
|
exports.isSuccessStatusCode = isSuccessStatusCode;
|
||
|
function isRetryableStatusCode(statusCode) {
    // No status code means the request never completed.
    if (!statusCode) {
        return false;
    }
    // Transient gateway-level failures are worth retrying.
    switch (statusCode) {
        case http_client_1.HttpCodes.BadGateway:
        case http_client_1.HttpCodes.ServiceUnavailable:
        case http_client_1.HttpCodes.GatewayTimeout:
            return true;
        default:
            return false;
    }
}
|
||
|
exports.isRetryableStatusCode = isRetryableStatusCode;
|
||
|
function getContentRange(start, end, total) {
    // Format: `bytes start-end/fileSize`, start and end inclusive.
    // e.g. a 200 byte chunk starting at byte 0 => "bytes 0-199/200"
    return ['bytes ', start, '-', end, '/', total].join('');
}
|
||
|
exports.getContentRange = getContentRange;
|
||
|
/**
|
||
|
* Sets all the necessary headers when making HTTP calls
|
||
|
* @param {string} contentType the type of content being uploaded
|
||
|
* @param {boolean} isKeepAlive is the same connection being used to make multiple calls
|
||
|
* @param {boolean} isGzip is the connection being used to upload GZip compressed content
|
||
|
* @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed
|
||
|
* @param {number} contentLength the length of the content that is being uploaded
|
||
|
* @param {string} contentRange the range of the content that is being uploaded
|
||
|
* @returns appropriate request options to make a specific http call
|
||
|
*/
|
||
|
function getRequestOptions(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange) {
    // Header set shared by every artifact API call.
    const headers = {
        Accept: `application/json;api-version=${getApiVersion()}`
    };
    if (contentType) {
        headers['Content-Type'] = contentType;
    }
    if (isKeepAlive) {
        headers['Connection'] = 'Keep-Alive';
        // keep alive for at least 10 seconds before closing the connection
        headers['Keep-Alive'] = '10';
    }
    if (isGzip) {
        headers['Content-Encoding'] = 'gzip';
        // the server needs the original (uncompressed) size of gzipped content
        headers['x-tfs-filelength'] = uncompressedLength;
    }
    if (contentLength) {
        headers['Content-Length'] = contentLength;
    }
    if (contentRange) {
        headers['Content-Range'] = contentRange;
    }
    return headers;
}
|
||
|
exports.getRequestOptions = getRequestOptions;
|
||
|
function createHttpClient() {
    // Authenticate using the runtime token issued for this workflow run.
    const credentialHandler = new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken());
    return new http_client_1.HttpClient('action/artifact', [credentialHandler]);
}
|
||
|
exports.createHttpClient = createHttpClient;
|
||
|
function getArtifactUrl() {
    // Pipeline-artifact endpoint for the current workflow run.
    const runtimeUrl = config_variables_1.getRuntimeUrl();
    const workflowRunId = config_variables_1.getWorkFlowRunId();
    const artifactUrl = `${runtimeUrl}_apis/pipelines/workflows/${workflowRunId}/artifacts?api-version=${getApiVersion()}`;
    core_1.debug(`Artifact Url: ${artifactUrl}`);
    return artifactUrl;
}
|
||
|
exports.getArtifactUrl = getArtifactUrl;
|
||
|
/**
|
||
|
* Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
|
||
|
* from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
|
||
|
* file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an
|
||
|
* individual filesystem/platform will not be supported on all fileSystems/platforms
|
||
|
*
|
||
|
* FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
|
||
|
*/
|
||
|
const invalidArtifactFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?', ' '];
// Artifact names additionally forbid path separators (paths obviously may contain them).
const invalidArtifactNameCharacters = [...invalidArtifactFilePathCharacters, '\\', '/'];
|
||
|
/**
|
||
|
* Scans the name of the artifact to make sure there are no illegal characters
|
||
|
*/
|
||
|
function checkArtifactName(name) {
    // Reject empty/undefined names up front.
    if (!name) {
        throw new Error(`Artifact name: ${name}, is incorrectly provided`);
    }
    // Throw on the first forbidden character found.
    invalidArtifactNameCharacters.forEach(invalidChar => {
        if (name.includes(invalidChar)) {
            throw new Error(`Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid artifact name characters include: ${invalidArtifactNameCharacters.toString()}.`);
        }
    });
}
|
||
|
exports.checkArtifactName = checkArtifactName;
|
||
|
/**
|
||
|
* Scans the name of the filePath used to make sure there are no illegal characters
|
||
|
*/
|
||
|
function checkArtifactFilePath(path) {
    // Reject empty/undefined paths up front.
    if (!path) {
        throw new Error(`Artifact path: ${path}, is incorrectly provided`);
    }
    // Throw on the first forbidden character found.
    invalidArtifactFilePathCharacters.forEach(invalidChar => {
        if (path.includes(invalidChar)) {
            throw new Error(`Artifact path is not valid: ${path}. Contains character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.`);
        }
    });
}
|
||
|
exports.checkArtifactFilePath = checkArtifactFilePath;
|
||
|
function createDirectoriesForArtifact(directories) {
    // Sequentially ensure every target directory exists (mkdir -p semantics).
    return __awaiter(this, void 0, void 0, function* () {
        for (const dir of directories) {
            yield fs_1.promises.mkdir(dir, { recursive: true });
        }
    });
}
|
||
|
exports.createDirectoriesForArtifact = createDirectoriesForArtifact;
|
||
|
//# sourceMappingURL=utils.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 875:
|
||
|
/***/ (function(module, __unusedexports, __webpack_require__) {
|
||
|
|
||
|
const {promisify} = __webpack_require__(669);
|
||
|
const tmp = __webpack_require__(402);
|
||
|
|
||
|
// file
|
||
|
module.exports.fileSync = tmp.fileSync;
// Promise wrapper around tmp.file; the resolved object carries a
// promisified cleanup function alongside the path and fd.
const fileWithOptions = promisify((options, cb) => {
  tmp.file(options, (err, path, fd, cleanup) => {
    if (err) {
      cb(err);
    } else {
      cb(undefined, { path, fd, cleanup: promisify(cleanup) });
    }
  });
});
module.exports.file = async (options) => fileWithOptions(options);
|
||
|
|
||
|
module.exports.withFile = async function withFile(fn, options) {
  // Create a temp file, pass it to `fn`, and guarantee cleanup afterwards.
  const { fd, path, cleanup } = await module.exports.file(options);
  try {
    const result = await fn({ path, fd });
    return result;
  } finally {
    await cleanup();
  }
};
|
||
|
|
||
|
|
||
|
// directory
|
||
|
module.exports.dirSync = tmp.dirSync;
// Promise wrapper around tmp.dir; the resolved object carries a
// promisified cleanup function alongside the path.
const dirWithOptions = promisify((options, cb) => {
  tmp.dir(options, (err, path, cleanup) => {
    if (err) {
      cb(err);
    } else {
      cb(undefined, { path, cleanup: promisify(cleanup) });
    }
  });
});
module.exports.dir = async (options) => dirWithOptions(options);
|
||
|
|
||
|
module.exports.withDir = async function withDir(fn, options) {
  // Create a temp directory, pass it to `fn`, and guarantee cleanup afterwards.
  const { path, cleanup } = await module.exports.dir(options);
  try {
    const result = await fn({ path });
    return result;
  } finally {
    await cleanup();
  }
};
|
||
|
|
||
|
|
||
|
// name generation
|
||
|
// Promisified temp-name generation.
module.exports.tmpNameSync = tmp.tmpNameSync;
module.exports.tmpName = promisify(tmp.tmpName);

// Direct pass-throughs to the underlying tmp module (see tmp's docs).
module.exports.tmpdir = tmp.tmpdir;

module.exports.setGracefulCleanup = tmp.setGracefulCleanup;
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 896:
|
||
|
/***/ (function(module) {
|
||
|
|
||
|
module.exports = function (xs, fn) {
|
||
|
var res = [];
|
||
|
for (var i = 0; i < xs.length; i++) {
|
||
|
var x = fn(xs[i], i);
|
||
|
if (isArray(x)) res.push.apply(res, x);
|
||
|
else res.push(x);
|
||
|
}
|
||
|
return res;
|
||
|
};
|
||
|
|
||
|
// Prefer the native check; fall back to the toString tag for old engines.
var isArray = Array.isArray || function (xs) {
    var tag = Object.prototype.toString.call(xs);
    return tag === '[object Array]';
};
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 923:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const assert = __webpack_require__(357);
|
||
|
const os = __webpack_require__(87);
|
||
|
const path = __webpack_require__(622);
|
||
|
const pathHelper = __webpack_require__(972);
|
||
|
const minimatch_1 = __webpack_require__(93);
|
||
|
const internal_match_kind_1 = __webpack_require__(327);
|
||
|
const internal_path_1 = __webpack_require__(383);
|
||
|
const IS_WINDOWS = process.platform === 'win32';
|
||
|
/**
 * A parsed glob pattern. Wraps a Minimatch matcher and additionally tracks:
 * negation ('!' prefix), the literal search path (path prior to the first
 * glob segment), and whether a trailing separator restricts the match to
 * directories only.
 */
class Pattern {
    /**
     * Overloaded constructor: pass a glob pattern string, or pass a negate
     * flag plus an array of literal path segments (the first segment must
     * be a root path).
     */
    constructor(patternOrNegate, segments) {
        /**
         * Indicates whether matches should be excluded from the result set
         */
        this.negate = false;
        // Pattern overload
        let pattern;
        if (typeof patternOrNegate === 'string') {
            pattern = patternOrNegate.trim();
        }
        // Segments overload
        else {
            // Convert to pattern
            segments = segments || [];
            assert(segments.length, `Parameter 'segments' must not empty`);
            const root = Pattern.getLiteral(segments[0]);
            assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
            pattern = new internal_path_1.Path(segments).toString().trim();
            if (patternOrNegate) {
                pattern = `!${pattern}`;
            }
        }
        // Negate (an even number of leading '!' cancels out)
        while (pattern.startsWith('!')) {
            this.negate = !this.negate;
            pattern = pattern.substr(1).trim();
        }
        // Normalize slashes and ensure an absolute root
        pattern = Pattern.fixupPattern(pattern);
        // Segments
        this.segments = new internal_path_1.Path(pattern).segments;
        // Trailing slash indicates the pattern should only match directories, not regular files
        this.trailingSeparator = pathHelper
            .normalizeSeparators(pattern)
            .endsWith(path.sep);
        pattern = pathHelper.safeTrimTrailingSeparator(pattern);
        // Search path (literal path prior to the first glob segment)
        let foundGlob = false;
        const searchSegments = this.segments
            .map(x => Pattern.getLiteral(x))
            .filter(x => !foundGlob && !(foundGlob = x === ''));
        this.searchPath = new internal_path_1.Path(searchSegments).toString();
        // Root RegExp (required when determining partial match)
        this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
        // Create minimatch
        const minimatchOptions = {
            dot: true,
            nobrace: true,
            nocase: IS_WINDOWS,
            nocomment: true,
            noext: true,
            nonegate: true
        };
        pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern;
        this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);
    }
    /**
     * Matches the pattern against the specified path
     */
    match(itemPath) {
        // Last segment is globstar?
        if (this.segments[this.segments.length - 1] === '**') {
            // Normalize slashes
            itemPath = pathHelper.normalizeSeparators(itemPath);
            // Append a trailing slash. Otherwise Minimatch will not match the directory immediately
            // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
            // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
            if (!itemPath.endsWith(path.sep)) {
                // Note, this is safe because the constructor ensures the pattern has an absolute root.
                // For example, formats like C: and C:foo on Windows are resolved to an absolute root.
                itemPath = `${itemPath}${path.sep}`;
            }
        }
        else {
            // Normalize slashes and trim unnecessary trailing slash
            itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
        }
        // Match
        if (this.minimatch.match(itemPath)) {
            return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;
        }
        return internal_match_kind_1.MatchKind.None;
    }
    /**
     * Indicates whether the pattern may match descendants of the specified path
     */
    partialMatch(itemPath) {
        // Normalize slashes and trim unnecessary trailing slash
        itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
        // matchOne does not handle root path correctly
        if (pathHelper.dirname(itemPath) === itemPath) {
            return this.rootRegExp.test(itemPath);
        }
        return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true);
    }
    /**
     * Escapes glob patterns within a path
     */
    static globEscape(s) {
        return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS
            .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment
            .replace(/\?/g, '[?]') // escape '?'
            .replace(/\*/g, '[*]'); // escape '*'
    }
    /**
     * Normalizes slashes and ensures absolute root
     */
    static fixupPattern(pattern) {
        // Empty
        assert(pattern, 'pattern cannot be empty');
        // Must not contain `.` segment, unless first segment
        // Must not contain `..` segment
        const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
        assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
        // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
        assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
        // Normalize slashes
        pattern = pathHelper.normalizeSeparators(pattern);
        // Replace leading `.` segment (expands to the escaped cwd)
        if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {
            pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);
        }
        // Replace leading `~` segment (expands to the escaped HOME directory)
        else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
            const homedir = os.homedir();
            assert(homedir, 'Unable to determine HOME directory');
            assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
            pattern = Pattern.globEscape(homedir) + pattern.substr(1);
        }
        // Replace relative drive root, e.g. pattern is C: or C:foo
        else if (IS_WINDOWS &&
            (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) {
            let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2));
            if (pattern.length > 2 && !root.endsWith('\\')) {
                root += '\\';
            }
            pattern = Pattern.globEscape(root) + pattern.substr(2);
        }
        // Replace relative root, e.g. pattern is \ or \foo
        else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) {
            let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\');
            if (!root.endsWith('\\')) {
                root += '\\';
            }
            pattern = Pattern.globEscape(root) + pattern.substr(1);
        }
        // Otherwise ensure absolute root
        else {
            pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);
        }
        return pathHelper.normalizeSeparators(pattern);
    }
    /**
     * Attempts to unescape a pattern segment to create a literal path segment.
     * Otherwise returns empty string.
     */
    static getLiteral(segment) {
        let literal = '';
        for (let i = 0; i < segment.length; i++) {
            const c = segment[i];
            // Escape
            if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) {
                literal += segment[++i];
                continue;
            }
            // Wildcard
            else if (c === '*' || c === '?') {
                return '';
            }
            // Character set
            else if (c === '[' && i + 1 < segment.length) {
                let set = '';
                let closed = -1;
                for (let i2 = i + 1; i2 < segment.length; i2++) {
                    const c2 = segment[i2];
                    // Escape
                    if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) {
                        set += segment[++i2];
                        continue;
                    }
                    // Closed
                    else if (c2 === ']') {
                        closed = i2;
                        break;
                    }
                    // Otherwise
                    else {
                        set += c2;
                    }
                }
                // Closed?
                if (closed >= 0) {
                    // Cannot convert (multi-character sets are real wildcards)
                    if (set.length > 1) {
                        return '';
                    }
                    // Convert single-character set to a literal, e.g. [a] => a
                    if (set) {
                        literal += set;
                        i = closed;
                        continue;
                    }
                }
                // Otherwise fall thru
            }
            // Append
            literal += c;
        }
        return literal;
    }
    /**
     * Escapes regexp special characters
     * https://javascript.info/regexp-escaping
     */
    static regExpEscape(s) {
        return s.replace(/[[\\^$.|?*+()]/g, '\\$&');
    }
}
|
||
|
exports.Pattern = Pattern;
|
||
|
//# sourceMappingURL=internal-pattern.js.map
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 950:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const url = __webpack_require__(835);
|
||
|
function getProxyUrl(reqUrl) {
    let proxyUrl;
    // Hosts listed in no_proxy never go through a proxy.
    if (checkBypass(reqUrl)) {
        return proxyUrl;
    }
    // Pick the environment variable matching the request scheme.
    let usingSsl = reqUrl.protocol === 'https:';
    const proxyVar = usingSsl
        ? process.env["https_proxy"] || process.env["HTTPS_PROXY"]
        : process.env["http_proxy"] || process.env["HTTP_PROXY"];
    if (proxyVar) {
        proxyUrl = url.parse(proxyVar);
    }
    return proxyUrl;
}
|
||
|
exports.getProxyUrl = getProxyUrl;
|
||
|
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the request port: explicit, or implied by the scheme.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate host forms: bare hostname, plus hostname:port when known.
    let upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Bypass when any comma-separated no_proxy entry matches exactly
    // (case-insensitive; blank entries are ignored).
    const noProxyItems = noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x);
    return noProxyItems.some(item => upperReqHosts.some(host => host === item));
}
|
||
|
exports.checkBypass = checkBypass;
|
||
|
|
||
|
|
||
|
/***/ }),
|
||
|
|
||
|
/***/ 972:
|
||
|
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||
|
|
||
|
"use strict";
|
||
|
|
||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||
|
const assert = __webpack_require__(357);
|
||
|
const path = __webpack_require__(622);
|
||
|
const IS_WINDOWS = process.platform === 'win32';
|
||
|
/**
|
||
|
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
||
|
*
|
||
|
* For example, on Linux/macOS:
|
||
|
* - `/ => /`
|
||
|
* - `/hello => /`
|
||
|
*
|
||
|
* For example, on Windows:
|
||
|
* - `C:\ => C:\`
|
||
|
* - `C:\hello => C:\`
|
||
|
* - `C: => C:`
|
||
|
* - `C:hello => C:`
|
||
|
* - `\ => \`
|
||
|
* - `\hello => \`
|
||
|
* - `\\hello => \\hello`
|
||
|
* - `\\hello\world => \\hello\world`
|
||
|
*/
|
||
|
function dirname(p) {
    // Normalize slashes and trim unnecessary trailing slash.
    p = safeTrimTrailingSeparator(p);
    // A Windows UNC root (\\hello or \\hello\world) is its own dirname.
    if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) {
        return p;
    }
    let result = path.dirname(p);
    // path.dirname leaves a trailing slash on a UNC root (\\hello\world\); trim it.
    if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) {
        result = safeTrimTrailingSeparator(result);
    }
    return result;
}
|
||
|
exports.dirname = dirname;
|
||
|
/**
|
||
|
* Roots the path if not already rooted. On Windows, relative roots like `\`
|
||
|
* or `C:` are expanded based on the current working directory.
|
||
|
*/
|
||
|
function ensureAbsoluteRoot(root, itemPath) {
    assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
    assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
    // Nothing to do when the path already has an absolute root.
    if (hasAbsoluteRoot(itemPath)) {
        return itemPath;
    }
    if (IS_WINDOWS) {
        // itemPath like C: or C:foo — expand relative drive roots against cwd.
        if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
            let cwd = process.cwd();
            assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
            if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
                // Same drive as cwd.
                if (itemPath.length === 2) {
                    // Drive only, e.g. C: — preserve the specified letter case.
                    return `${itemPath[0]}:\\${cwd.substr(3)}`;
                }
                // Drive + relative path, e.g. C:foo.
                if (!cwd.endsWith('\\')) {
                    cwd += '\\';
                }
                // Preserve specified drive letter case (upper or lower).
                return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`;
            }
            // Different drive: treat the path as rooted on that drive.
            return `${itemPath[0]}:\\${itemPath.substr(2)}`;
        }
        // itemPath like \ or \foo — root onto the cwd's drive.
        if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
            const cwd = process.cwd();
            assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
            return `${cwd[0]}:\\${itemPath.substr(1)}`;
        }
    }
    assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
    // Join root and path, inserting a separator only when needed.
    const rootEndsWithSeparator = root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'));
    return rootEndsWithSeparator ? root + itemPath : root + path.sep + itemPath;
}
|
||
|
exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
|
||
|
/**
|
||
|
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
|
||
|
* `\\hello\share` and `C:\hello` (and using alternate separator).
|
||
|
*/
|
||
|
function hasAbsoluteRoot(itemPath) {
    assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
    // Normalize separators before inspecting the prefix.
    const normalized = normalizeSeparators(itemPath);
    if (IS_WINDOWS) {
        // UNC share (\\hello\share) or drive root (C:\hello).
        return normalized.startsWith('\\\\') || /^[A-Z]:\\/i.test(normalized);
    }
    // POSIX: absolutely rooted iff it starts with '/'.
    return normalized.startsWith('/');
}
|
||
|
exports.hasAbsoluteRoot = hasAbsoluteRoot;
|
||
|
/**
|
||
|
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
|
||
|
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
|
||
|
*/
|
||
|
function hasRoot(itemPath) {
    assert(itemPath, `isRooted parameter 'itemPath' must not be empty`);
    // Normalize separators before inspecting the prefix.
    const normalized = normalizeSeparators(itemPath);
    if (IS_WINDOWS) {
        // Relative or absolute roots: \, \hello, \\hello, C:, C:\hello.
        return normalized.startsWith('\\') || /^[A-Z]:/i.test(normalized);
    }
    // POSIX: rooted iff it starts with '/'.
    return normalized.startsWith('/');
}
|
||
|
exports.hasRoot = hasRoot;
|
||
|
/**
|
||
|
* Removes redundant slashes and converts `/` to `\` on Windows
|
||
|
*/
|
||
|
function normalizeSeparators(p) {
    p = p || '';
    if (!IS_WINDOWS) {
        // POSIX: just collapse repeated slashes.
        return p.replace(/\/\/+/g, '/');
    }
    // Windows: convert / to \, then collapse repeats while preserving a
    // leading \\ for UNC paths.
    const backslashed = p.replace(/\//g, '\\');
    const isUnc = /^\\\\+[^\\]/.test(backslashed); // e.g. \\hello
    return (isUnc ? '\\' : '') + backslashed.replace(/\\\\+/g, '\\');
}
|
||
|
exports.normalizeSeparators = normalizeSeparators;
|
||
|
/**
|
||
|
* Normalizes the path separators and trims the trailing separator (when safe).
|
||
|
* For example, `/foo/ => /foo` but `/ => /`
|
||
|
*/
|
||
|
function safeTrimTrailingSeparator(p) {
    // Short-circuit if empty.
    if (!p) {
        return '';
    }
    p = normalizeSeparators(p);
    // Keep the separator when trimming would change the path's meaning:
    // no trailing slash at all, the filesystem root itself ('/' or '\'),
    // or a Windows drive root (C:\).
    if (!p.endsWith(path.sep) ||
        p === path.sep ||
        (IS_WINDOWS && /^[A-Z]:\\$/i.test(p))) {
        return p;
    }
    // Otherwise trim the trailing slash.
    return p.substr(0, p.length - 1);
}
|
||
|
exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
||
|
//# sourceMappingURL=internal-path-helper.js.map
|
||
|
|
||
|
/***/ })
|
||
|
|
||
|
/******/ });
|