chore: audit fix

Signed-off-by: Dwi Siswanto <git@dw1.io>
Dwi Siswanto 2025-01-12 09:06:52 +07:00
parent 31149f3261
commit 6604e0979a
18 changed files with 178 additions and 85 deletions

node_modules/.package-lock.json generated vendored

@@ -571,10 +571,11 @@
}
},
"node_modules/minipass": {
"version": "4.2.4",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz",
"integrity": "sha512-lwycX3cBMTvcejsHITUgYj6Gy6A7Nh4Q6h9NP4sTHY1ccJlC7yKzDmiShEHsJ16Jf1nKGDEaiHxiltsJEvk0nQ==",
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
"integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=8"
}
@@ -704,14 +705,15 @@
}
},
"node_modules/tar": {
"version": "6.1.13",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz",
"integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==",
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"dev": true,
"license": "ISC",
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^4.0.0",
"minipass": "^5.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"

node_modules/minipass/README.md generated vendored

@@ -92,9 +92,9 @@ Example:
```js
// hybrid module, either works
import Minipass from 'minipass'
import { Minipass } from 'minipass'
// or:
const Minipass = require('minipass')
const { Minipass } = require('minipass')
const stream = new Minipass()
stream.on('data', () => console.log('data event'))
@@ -116,9 +116,9 @@ options, or by setting `stream.async = true` later on.
```js
// hybrid module, either works
import Minipass from 'minipass'
import { Minipass } from 'minipass'
// or:
const Minipass = require('minipass')
const { Minipass } = require('minipass')
const asyncStream = new Minipass({ async: true })
asyncStream.on('data', () => console.log('data event'))
@@ -135,7 +135,7 @@ Switching _out_ of async mode is unsafe, as it could cause data
corruption, and so is not enabled. Example:
```js
import Minipass from 'minipass'
import { Minipass } from 'minipass'
const stream = new Minipass({ encoding: 'utf8' })
stream.on('data', chunk => console.log(chunk))
stream.async = true
@@ -156,7 +156,7 @@ To avoid this problem, once set into async mode, any attempt to
make the stream sync again will be ignored.
```js
const Minipass = require('minipass')
const { Minipass } = require('minipass')
const stream = new Minipass({ encoding: 'utf8' })
stream.on('data', chunk => console.log(chunk))
stream.async = true
@@ -384,7 +384,7 @@ It's a stream! Use it like a stream and it'll most likely do what
you want.
```js
import Minipass from 'minipass'
import { Minipass } from 'minipass'
const mp = new Minipass(options) // optional: { encoding, objectMode }
mp.write('foo')
mp.pipe(someOtherStream)
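The README hunks above document the single breaking change in minipass 5: the class is exposed as a named export instead of the default/module export (matching the `exports.Minipass = Minipass` change in index.js further down). A minimal migration sketch for downstream code, assuming nothing beyond what this diff shows:

```js
// minipass <= 4: the class was the module itself
//   const Minipass = require('minipass')   // CJS
//   import Minipass from 'minipass'        // ESM
// minipass >= 5: the class is a named export
const { Minipass } = require('minipass')

const stream = new Minipass({ encoding: 'utf8' })
stream.on('data', chunk => console.log(chunk)) // logs 'hello'
stream.end('hello')
```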

node_modules/minipass/index.d.ts generated vendored

@@ -6,58 +6,62 @@
import { EventEmitter } from 'events'
import { Stream } from 'stream'
declare namespace Minipass {
type Encoding = BufferEncoding | 'buffer' | null
export namespace Minipass {
export type Encoding = BufferEncoding | 'buffer' | null
interface Writable extends EventEmitter {
export interface Writable extends EventEmitter {
end(): any
write(chunk: any, ...args: any[]): any
}
interface Readable extends EventEmitter {
export interface Readable extends EventEmitter {
pause(): any
resume(): any
pipe(): any
}
type DualIterable<T> = Iterable<T> & AsyncIterable<T>
export type DualIterable<T> = Iterable<T> & AsyncIterable<T>
type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string
export type ContiguousData =
| Buffer
| ArrayBufferLike
| ArrayBufferView
| string
type BufferOrString = Buffer | string
export type BufferOrString = Buffer | string
interface SharedOptions {
export interface SharedOptions {
async?: boolean
signal?: AbortSignal
}
interface StringOptions extends SharedOptions {
export interface StringOptions extends SharedOptions {
encoding: BufferEncoding
objectMode?: boolean
}
interface BufferOptions extends SharedOptions {
export interface BufferOptions extends SharedOptions {
encoding?: null | 'buffer'
objectMode?: boolean
}
interface ObjectModeOptions extends SharedOptions {
export interface ObjectModeOptions extends SharedOptions {
objectMode: true
}
interface PipeOptions {
export interface PipeOptions {
end?: boolean
proxyErrors?: boolean
}
type Options<T> = T extends string
export type Options<T> = T extends string
? StringOptions
: T extends Buffer
? BufferOptions
: ObjectModeOptions
}
declare class Minipass<
export class Minipass<
RType extends any = Buffer,
WType extends any = RType extends Minipass.BufferOrString
? Minipass.ContiguousData
@@ -146,5 +150,3 @@ declare class Minipass<
[Symbol.iterator](): Generator<RType, void, void>
[Symbol.asyncIterator](): AsyncGenerator<RType, void, void>
}
export = Minipass

node_modules/minipass/index.js generated vendored

@@ -593,18 +593,21 @@ class Minipass extends Stream {
const onerr = er => {
this.removeListener('data', ondata)
this.removeListener('end', onend)
this.removeListener(DESTROYED, ondestroy)
stop()
reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.removeListener(DESTROYED, ondestroy)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
this.removeListener(DESTROYED, ondestroy)
stop()
resolve({ done: true })
}
@@ -635,6 +638,7 @@ class Minipass extends Stream {
const stop = () => {
this.pause()
this.removeListener(ERROR, stop)
this.removeListener(DESTROYED, stop)
this.removeListener('end', stop)
stopped = true
return { done: true }
@@ -647,6 +651,7 @@ class Minipass extends Stream {
}
this.once('end', stop)
this.once(ERROR, stop)
this.once(DESTROYED, stop)
return {
next,
@@ -694,4 +699,4 @@ class Minipass extends Stream {
}
}
module.exports = Minipass
exports.Minipass = Minipass

node_modules/minipass/index.mjs generated vendored

@@ -97,7 +97,7 @@ class PipeProxyErrors extends Pipe {
}
}
class Minipass extends Stream {
export class Minipass extends Stream {
constructor(options) {
super()
this[FLOWING] = false
@@ -593,18 +593,21 @@ class Minipass extends Stream {
const onerr = er => {
this.removeListener('data', ondata)
this.removeListener('end', onend)
this.removeListener(DESTROYED, ondestroy)
stop()
reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.removeListener(DESTROYED, ondestroy)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
this.removeListener(DESTROYED, ondestroy)
stop()
resolve({ done: true })
}
@@ -635,6 +638,7 @@ class Minipass extends Stream {
const stop = () => {
this.pause()
this.removeListener(ERROR, stop)
this.removeListener(DESTROYED, stop)
this.removeListener('end', stop)
stopped = true
return { done: true }
@@ -647,6 +651,7 @@ class Minipass extends Stream {
}
this.once('end', stop)
this.once(ERROR, stop)
this.once(DESTROYED, stop)
return {
next,
@@ -694,4 +699,4 @@ class Minipass extends Stream {
}
}
export default Minipass

node_modules/minipass/package.json generated vendored

@@ -1,6 +1,6 @@
{
"name": "minipass",
"version": "4.2.4",
"version": "5.0.0",
"description": "minimal implementation of a PassThrough stream",
"main": "./index.js",
"module": "./index.mjs",

node_modules/tar/README.md generated vendored

@@ -115,6 +115,8 @@ Handlers receive 3 arguments:
encountered an error which prevented it from being unpacked. This occurs
when:
- an unrecoverable fs error happens during unpacking,
- an entry is trying to extract into an excessively deep
location (by default, limited to 1024 subfolders),
- an entry has `..` in the path and `preservePaths` is not set, or
- an entry is extracting through a symbolic link, when `preservePaths` is
not set.
@@ -427,6 +429,10 @@ The following options are supported:
`process.umask()` to determine the default umask value, since tar will
extract with whatever mode is provided, and let the process `umask` apply
normally.
- `maxDepth` The maximum depth of subfolders to extract into. This
defaults to 1024. Anything deeper than the limit will raise a
warning and skip the entry. Set to `Infinity` to remove the
limitation.
The following options are mostly internal, but can be modified in some
advanced use cases, such as re-using caches between runs.
@@ -749,6 +755,10 @@ Most unpack errors will cause a `warn` event to be emitted. If the
`process.umask()` to determine the default umask value, since tar will
extract with whatever mode is provided, and let the process `umask` apply
normally.
- `maxDepth` The maximum depth of subfolders to extract into. This
defaults to 1024. Anything deeper than the limit will raise a
warning and skip the entry. Set to `Infinity` to remove the
limitation.
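As a rough illustration of the `maxDepth` option described above (archive and directory names here are hypothetical, not part of this change), a depth-limited extraction might look like:

```js
const tar = require('tar')

// entries nested more than 64 subfolders deep raise a TAR_ENTRY_ERROR
// warning and are skipped rather than written to disk
tar.x({
  file: 'archive.tar', // hypothetical archive
  cwd: 'out',          // hypothetical destination
  maxDepth: 64,        // default is 1024; Infinity removes the limit
})
```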
### class tar.Unpack.Sync

node_modules/tar/lib/normalize-unicode.js generated vendored

@@ -6,7 +6,7 @@ const normalizeCache = Object.create(null)
const { hasOwnProperty } = Object.prototype
module.exports = s => {
if (!hasOwnProperty.call(normalizeCache, s)) {
normalizeCache[s] = s.normalize('NFKD')
normalizeCache[s] = s.normalize('NFD')
}
return normalizeCache[s]
}

node_modules/tar/lib/pack.js generated vendored

@@ -22,7 +22,7 @@ class PackJob {
}
}
const MiniPass = require('minipass')
const { Minipass } = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
@@ -56,7 +56,7 @@ const path = require('path')
const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js')
const Pack = warner(class Pack extends MiniPass {
const Pack = warner(class Pack extends Minipass {
constructor (opt) {
super(opt)
opt = opt || Object.create(null)
@@ -79,14 +79,26 @@ const Pack = warner(class Pack extends MiniPass {
this.portable = !!opt.portable
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object') {
opt.gzip = {}
if (opt.gzip || opt.brotli) {
if (opt.gzip && opt.brotli) {
throw new TypeError('gzip and brotli are mutually exclusive')
}
if (this.portable) {
opt.gzip.portable = true
if (opt.gzip) {
if (typeof opt.gzip !== 'object') {
opt.gzip = {}
}
if (this.portable) {
opt.gzip.portable = true
}
this.zip = new zlib.Gzip(opt.gzip)
}
if (opt.brotli) {
if (typeof opt.brotli !== 'object') {
opt.brotli = {}
}
this.zip = new zlib.BrotliCompress(opt.brotli)
}
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
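Pack now accepts a `brotli` option in addition to `gzip`, and passing both throws. A hedged sketch of creating a brotli-compressed archive via the usual high-level `tar.c` entry point (file and directory names are hypothetical):

```js
const tar = require('tar')

// gzip and brotli are mutually exclusive; setting both throws a TypeError
tar.c(
  { file: 'archive.tbr', brotli: true }, // `brotli` may also be an options object for BrotliCompress
  ['src/']                               // hypothetical input directory
).then(() => console.log('wrote archive.tbr'))
```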

node_modules/tar/lib/parse.js generated vendored

@@ -97,6 +97,16 @@ module.exports = warner(class Parser extends EE {
this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// Unlike gzip, brotli doesn't have any magic bytes to identify it
// Users need to explicitly tell us they're extracting a brotli file
// Or we infer from the file extension
const isTBR = (opt.file && (
opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')))
// if it's a tbr file it MIGHT be brotli, but we don't know until
// we look at it and verify it's not a valid tar file.
this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli
: isTBR ? undefined
: false
// have to set this so that streams are ok piping into it
this.writable = true
@@ -347,7 +357,9 @@ module.exports = warner(class Parser extends EE {
}
// first write, might be gzipped
if (this[UNZIP] === null && chunk) {
const needSniff = this[UNZIP] === null ||
this.brotli === undefined && this[UNZIP] === false
if (needSniff && chunk) {
if (this[BUFFER]) {
chunk = Buffer.concat([this[BUFFER], chunk])
this[BUFFER] = null
@@ -356,15 +368,45 @@
this[BUFFER] = chunk
return true
}
// look for gzip header
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i]) {
this[UNZIP] = false
}
}
if (this[UNZIP] === null) {
const maybeBrotli = this.brotli === undefined
if (this[UNZIP] === false && maybeBrotli) {
// read the first header to see if it's a valid tar file. If so,
// we can safely assume that it's not actually brotli, despite the
// .tbr or .tar.br file extension.
// if we ended before getting a full chunk, yes, def brotli
if (chunk.length < 512) {
if (this[ENDED]) {
this.brotli = true
} else {
this[BUFFER] = chunk
return true
}
} else {
// if it's tar, it's pretty reliably not brotli, chances of
// that happening are astronomical.
try {
new Header(chunk.slice(0, 512))
this.brotli = false
} catch (_) {
this.brotli = true
}
}
}
if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = new zlib.Unzip()
this[UNZIP] = this[UNZIP] === null
? new zlib.Unzip()
: new zlib.BrotliDecompress()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => {
@@ -502,6 +544,7 @@ module.exports = warner(class Parser extends EE {
this[UNZIP].end(chunk)
} else {
this[ENDED] = true
if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0)
this.write(chunk)
}
}
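On the read side brotli has no magic bytes, so the parser either honors an explicit `brotli` option or, for `.tbr`/`.tar.br` files, sniffs the first 512-byte block to rule out a plain tar archive. A hedged sketch of both paths (file names are hypothetical):

```js
const fs = require('fs')
const tar = require('tar')

// file-based: brotli is inferred from the .tbr / .tar.br extension,
// then confirmed by checking that the first block is not a valid tar header
tar.x({ file: 'archive.tbr', cwd: 'out' })

// stream-based: there is no file name to inspect, so brotli must be explicit
fs.createReadStream('archive.tbr').pipe(tar.x({ cwd: 'out', brotli: true }))
```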

node_modules/tar/lib/path-reservations.js generated vendored

@@ -123,7 +123,7 @@ module.exports = () => {
// effectively removing all parallelization on windows.
paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
// don't need normPath, because we skip this entirely for windows
return normalize(stripSlashes(join(p))).toLowerCase()
return stripSlashes(join(normalize(p))).toLowerCase()
})
const dirs = new Set(

node_modules/tar/lib/read-entry.js generated vendored

@@ -1,9 +1,9 @@
'use strict'
const MiniPass = require('minipass')
const { Minipass } = require('minipass')
const normPath = require('./normalize-windows-path.js')
const SLURP = Symbol('slurp')
module.exports = class ReadEntry extends MiniPass {
module.exports = class ReadEntry extends Minipass {
constructor (header, ex, gex) {
super()
// read entries always start life paused. this is to avoid the

node_modules/tar/lib/replace.js generated vendored

@@ -23,7 +23,7 @@ module.exports = (opt_, files, cb) => {
throw new TypeError('file is required')
}
if (opt.gzip) {
if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
throw new TypeError('cannot append to compressed archives')
}
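`replace` (`tar.r`) and `update` (`tar.u`, further down) now refuse brotli archives as well as gzip ones, since appending to a compressed archive is not possible. A small sketch of the guard (file names are hypothetical):

```js
const tar = require('tar')

try {
  tar.r({ file: 'archive.tbr' }, ['extra.txt'])
} catch (er) {
  console.error(er.message) // 'cannot append to compressed archives'
}
```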

node_modules/tar/lib/unpack.js generated vendored

@@ -48,6 +48,7 @@ const crypto = require('crypto')
const getFlag = require('./get-write-flag.js')
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
const DEFAULT_MAX_DEPTH = 1024
// Unlinks on Windows are not atomic.
//
@@ -105,7 +106,7 @@ const uint32 = (a, b, c) =>
// Note that on windows, we always drop the entire cache whenever a
// symbolic link is encountered, because 8.3 filenames are impossible
// to reason about, and collisions are hazards rather than just failures.
const cacheKeyNormalize = path => normalize(stripSlash(normPath(path)))
const cacheKeyNormalize = path => stripSlash(normPath(normalize(path)))
.toLowerCase()
const pruneCache = (cache, abs) => {
@@ -181,6 +182,12 @@ class Unpack extends Parser {
this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid() : null
// prevent excessively deep nesting of subfolders
// set to `Infinity` to remove this restriction
this.maxDepth = typeof opt.maxDepth === 'number'
? opt.maxDepth
: DEFAULT_MAX_DEPTH
// mostly just for testing, but useful in some cases.
// Forcibly trigger a chown on every entry, no matter what
this.forceChown = opt.forceChown === true
@@ -238,13 +245,13 @@
}
[CHECKPATH] (entry) {
const p = normPath(entry.path)
const parts = p.split('/')
if (this.strip) {
const parts = normPath(entry.path).split('/')
if (parts.length < this.strip) {
return false
}
entry.path = parts.slice(this.strip).join('/')
if (entry.type === 'Link') {
const linkparts = normPath(entry.linkpath).split('/')
if (linkparts.length >= this.strip) {
@@ -253,11 +260,21 @@
return false
}
}
parts.splice(0, this.strip)
entry.path = parts.join('/')
}
if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
entry,
path: p,
depth: parts.length,
maxDepth: this.maxDepth,
})
return false
}
if (!this.preservePaths) {
const p = normPath(entry.path)
const parts = p.split('/')
if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
entry,

node_modules/tar/lib/update.js generated vendored

@@ -13,7 +13,7 @@ module.exports = (opt_, files, cb) => {
throw new TypeError('file is required')
}
if (opt.gzip) {
if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
throw new TypeError('cannot append to compressed archives')
}

node_modules/tar/lib/write-entry.js generated vendored

@@ -1,5 +1,5 @@
'use strict'
const MiniPass = require('minipass')
const { Minipass } = require('minipass')
const Pax = require('./pax.js')
const Header = require('./header.js')
const fs = require('fs')
@@ -41,7 +41,7 @@ const stripAbsolutePath = require('./strip-absolute-path.js')
const modeFix = require('./mode-fix.js')
const WriteEntry = warner(class WriteEntry extends MiniPass {
const WriteEntry = warner(class WriteEntry extends Minipass {
constructor (p, opt) {
opt = opt || {}
super(opt)
@@ -417,7 +417,7 @@ class WriteEntrySync extends WriteEntry {
}
}
const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
const WriteEntryTar = warner(class WriteEntryTar extends Minipass {
constructor (readEntry, opt) {
opt = opt || {}
super(opt)

node_modules/tar/package.json generated vendored

@@ -2,32 +2,27 @@
"author": "GitHub Inc.",
"name": "tar",
"description": "tar for node",
"version": "6.1.13",
"version": "6.2.1",
"repository": {
"type": "git",
"url": "https://github.com/npm/node-tar.git"
"url": "https://github.com/isaacs/node-tar.git"
},
"scripts": {
"genparse": "node scripts/generate-parse-fixtures.js",
"template-oss-apply": "template-oss-apply --force",
"lint": "eslint \"**/*.js\"",
"postlint": "template-oss-check",
"lintfix": "npm run lint -- --fix",
"snap": "tap",
"test": "tap",
"posttest": "npm run lint"
"test": "tap"
},
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^4.0.0",
"minipass": "^5.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.10.0",
"@npmcli/template-oss": "4.11.0",
"chmodr": "^1.2.0",
"end-of-stream": "^1.4.3",
"events-to-array": "^2.0.3",
@@ -55,7 +50,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.10.0",
"version": "4.11.0",
"content": "scripts/template-oss",
"engines": ">=10",
"distPaths": [

package-lock.json generated

@@ -591,10 +591,11 @@
}
},
"node_modules/minipass": {
"version": "4.2.4",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz",
"integrity": "sha512-lwycX3cBMTvcejsHITUgYj6Gy6A7Nh4Q6h9NP4sTHY1ccJlC7yKzDmiShEHsJ16Jf1nKGDEaiHxiltsJEvk0nQ==",
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
"integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
"dev": true,
"license": "ISC",
"engines": {
"node": ">=8"
}
@@ -724,14 +725,15 @@
}
},
"node_modules/tar": {
"version": "6.1.13",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz",
"integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==",
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"dev": true,
"license": "ISC",
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^4.0.0",
"minipass": "^5.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"
@@ -1319,9 +1321,9 @@
}
},
"minipass": {
"version": "4.2.4",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz",
"integrity": "sha512-lwycX3cBMTvcejsHITUgYj6Gy6A7Nh4Q6h9NP4sTHY1ccJlC7yKzDmiShEHsJ16Jf1nKGDEaiHxiltsJEvk0nQ==",
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
"integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
"dev": true
},
"minizlib": {
@@ -1422,14 +1424,14 @@
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="
},
"tar": {
"version": "6.1.13",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz",
"integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==",
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"dev": true,
"requires": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^4.0.0",
"minipass": "^5.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"