Add files

Ecolipsy 2021-12-04 20:00:33 +01:00
commit a2f355ac82
1757 changed files with 320133 additions and 0 deletions

1
.gitignore vendored Normal file

@@ -0,0 +1 @@
token.txt

81
index.js Normal file

@@ -0,0 +1,81 @@
const dc = require("discord.js");
const client = new dc.Client();
const fs = require("fs");
// As written, token.txt must contain the bot token as a JSON-encoded string (i.e. wrapped in quotes).
client.login(JSON.parse(fs.readFileSync("token.txt").toString()));
const ytdl = require("ytdl-core");
const vc = "879091976832684062"; // music voice channel ID
const gu = "879081086817288264"; // guild ID
let loop = false;
client.on("ready", () => {
    console.log("Ready!");
});
// Returns true if the given string is an https YouTube video URL.
function isYT(url){
    try{
        const urlObj = new URL(url);
        if(urlObj.protocol !== "https:") return false;
        if(urlObj.host !== "youtube.com" && urlObj.host !== "www.youtube.com" && urlObj.host !== "youtu.be") return false;
        // youtu.be links carry the video ID in the path; youtube.com links use the ?v= query parameter.
        if(urlObj.host === "youtu.be") return urlObj.pathname.length > 1;
        return urlObj.searchParams.has("v");
    } catch(e){
        return false;
    }
}
// Streams the YouTube video into the voice connection, replaying it while looping is enabled.
function play(conn, url){
    const dispatcher = conn.play(ytdl(url));
    dispatcher.on("finish", () => {
        if(loop){
            play(conn, url);
        }
    });
}
client.on("message", (msg) => {
    if(msg.mentions.has(client.user.id)) return msg.channel.send("I'LL BING YOU TOO BITCH <@!" + msg.author.id + ">");
    if(!msg.content.startsWith("m!")) return;
    if(msg.content.startsWith("m!join")){
        client.channels.cache.get(vc).join().then((conn) => {
            conn.voice.setDeaf(true); // the bot only sends audio, so deafen it
            msg.channel.send("I have joined the channel.");
        }).catch(e => {
            console.log(e);
            msg.channel.send("An error occurred: ```js\n" + e.stack + "```");
        });
    } else if(msg.content.startsWith("m!play")){
        const args = msg.content.split(" ");
        args.shift(); // drop "m!play" itself, leaving only the URL
        if(args.length < 1) return msg.channel.send("You're missing the url of the video to play.");
        const conn = client.voice.connections.get(gu);
        if(!conn) return msg.channel.send("I'm not in the music voice channel, please do m!join and try again.");
        if(isYT(args[0])){
            console.log("YT");
            try{
                play(conn, args[0]);
                msg.channel.send("I have started playing the song.");
            } catch(e){
                msg.channel.send("Hm that doesn't seem to have worked, check console for error.");
                console.log(e);
            }
        } else{
            console.log("Non-YT");
            try{
                // Non-YouTube input is handed to the connection directly (e.g. a direct audio URL or file path).
                conn.play(args[0]);
                msg.channel.send("I have started playing the song.");
            } catch(e){
                msg.channel.send("Hm that doesn't seem to have worked, check console for error.");
                console.log(e);
            }
        }
    } else if(msg.content.startsWith("m!loop")){
        loop = !loop;
        const status = loop ? "now loop" : "no longer loop";
        msg.channel.send("I will " + status + " all the songs played.");
    }
});
// Keep the bot itself server-deafened (but not muted) whenever its own voice state changes.
client.on("voiceStateUpdate", (oldState, newState) => {
    if(newState.id !== client.user.id) return;
    newState.setDeaf(true);
    newState.setMute(false);
});

12
node_modules/.bin/color-support generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../color-support/bin.js" "$@"
else
exec node "$basedir/../color-support/bin.js" "$@"
fi

17
node_modules/.bin/color-support.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\color-support\bin.js" %*

28
node_modules/.bin/color-support.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../color-support/bin.js" $args
} else {
& "$basedir/node$exe" "$basedir/../color-support/bin.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../color-support/bin.js" $args
} else {
& "node$exe" "$basedir/../color-support/bin.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/detect-libc generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../detect-libc/bin/detect-libc.js" "$@"
else
exec node "$basedir/../detect-libc/bin/detect-libc.js" "$@"
fi

17
node_modules/.bin/detect-libc.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\detect-libc\bin\detect-libc.js" %*

28
node_modules/.bin/detect-libc.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
} else {
& "$basedir/node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
} else {
& "node$exe" "$basedir/../detect-libc/bin/detect-libc.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/mkdirp generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@"
else
exec node "$basedir/../mkdirp/bin/cmd.js" "$@"
fi

17
node_modules/.bin/mkdirp.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mkdirp\bin\cmd.js" %*

28
node_modules/.bin/mkdirp.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
} else {
& "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
} else {
& "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/node-pre-gyp generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../@discordjs/node-pre-gyp/bin/node-pre-gyp" "$@"
else
exec node "$basedir/../@discordjs/node-pre-gyp/bin/node-pre-gyp" "$@"
fi

17
node_modules/.bin/node-pre-gyp.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\@discordjs\node-pre-gyp\bin\node-pre-gyp" %*

28
node_modules/.bin/node-pre-gyp.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../@discordjs/node-pre-gyp/bin/node-pre-gyp" $args
} else {
& "$basedir/node$exe" "$basedir/../@discordjs/node-pre-gyp/bin/node-pre-gyp" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../@discordjs/node-pre-gyp/bin/node-pre-gyp" $args
} else {
& "node$exe" "$basedir/../@discordjs/node-pre-gyp/bin/node-pre-gyp" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/nopt generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../nopt/bin/nopt.js" "$@"
else
exec node "$basedir/../nopt/bin/nopt.js" "$@"
fi

17
node_modules/.bin/nopt.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nopt\bin\nopt.js" %*

28
node_modules/.bin/nopt.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../nopt/bin/nopt.js" $args
} else {
& "$basedir/node$exe" "$basedir/../nopt/bin/nopt.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../nopt/bin/nopt.js" $args
} else {
& "node$exe" "$basedir/../nopt/bin/nopt.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/rimraf generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../rimraf/bin.js" "$@"
else
exec node "$basedir/../rimraf/bin.js" "$@"
fi

17
node_modules/.bin/rimraf.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rimraf\bin.js" %*

28
node_modules/.bin/rimraf.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../rimraf/bin.js" $args
} else {
& "$basedir/node$exe" "$basedir/../rimraf/bin.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../rimraf/bin.js" $args
} else {
& "node$exe" "$basedir/../rimraf/bin.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

12
node_modules/.bin/semver generated vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@"
else
exec node "$basedir/../semver/bin/semver.js" "$@"
fi

17
node_modules/.bin/semver.cmd generated vendored Normal file

@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %*

28
node_modules/.bin/semver.ps1 generated vendored Normal file

@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../semver/bin/semver.js" $args
} else {
& "node$exe" "$basedir/../semver/bin/semver.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

887
node_modules/.package-lock.json generated vendored Normal file

@@ -0,0 +1,887 @@
{
"name": "moosiktest",
"version": "1.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"node_modules/@derhuerst/http-basic": {
"version": "8.2.1",
"resolved": "https://registry.npmjs.org/@derhuerst/http-basic/-/http-basic-8.2.1.tgz",
"integrity": "sha512-Rmn7qQQulw2sxJ8qGfZ7OuqMWuhz8V+L5xnYKMF5cXVcYqmgWqlVEAme90pF7Ya8OVhxVxLmhh0rI2k6t7ITWw==",
"dependencies": {
"caseless": "^0.12.0",
"concat-stream": "^1.6.2",
"http-response-object": "^3.0.1",
"parse-cache-control": "^1.0.1"
},
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@discordjs/collection": {
"version": "0.1.6",
"resolved": "https://registry.npmjs.org/@discordjs/collection/-/collection-0.1.6.tgz",
"integrity": "sha512-utRNxnd9kSS2qhyivo9lMlt5qgAUasH2gb7BEOn6p0efFh24gjGomHzWKMAPn2hEReOPQZCJaRKoURwRotKucQ=="
},
"node_modules/@discordjs/form-data": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/@discordjs/form-data/-/form-data-3.0.1.tgz",
"integrity": "sha512-ZfFsbgEXW71Rw/6EtBdrP5VxBJy4dthyC0tpQKGKmYFImlmmrykO14Za+BiIVduwjte0jXEBlhSKf0MWbFp9Eg==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/@discordjs/node-pre-gyp": {
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/@discordjs/node-pre-gyp/-/node-pre-gyp-0.4.2.tgz",
"integrity": "sha512-V239Czn+DXFGLhhuccwEDBoTdgMGrRu30dOlzm1GzrSIjwFj01ZJerNX7x+CEX1NG1Q/1gGfOOkeZFNHjycrRA==",
"dependencies": {
"detect-libc": "^1.0.3",
"https-proxy-agent": "^5.0.0",
"make-dir": "^3.1.0",
"node-fetch": "^2.6.5",
"nopt": "^5.0.0",
"npmlog": "^5.0.1",
"rimraf": "^3.0.2",
"semver": "^7.3.5",
"tar": "^6.1.11"
},
"bin": {
"node-pre-gyp": "bin/node-pre-gyp"
}
},
"node_modules/@discordjs/opus": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/@discordjs/opus/-/opus-0.5.3.tgz",
"integrity": "sha512-IQhCwCy2WKXLe+qkOkwO1Wjgk20uqeAbqM62tCbzIqbTsXX4YAge8Me9RFnI77Lx+UTkgm4rSVM3VPVdS/GsUw==",
"hasInstallScript": true,
"dependencies": {
"@discordjs/node-pre-gyp": "^0.4.0",
"node-addon-api": "^3.2.1"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/@types/node": {
"version": "10.17.60",
"resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.60.tgz",
"integrity": "sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw=="
},
"node_modules/abbrev": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
"integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="
},
"node_modules/abort-controller": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
"dependencies": {
"event-target-shim": "^5.0.0"
},
"engines": {
"node": ">=6.5"
}
},
"node_modules/agent-base": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
"dependencies": {
"debug": "4"
},
"engines": {
"node": ">= 6.0.0"
}
},
"node_modules/ansi-regex": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
"integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=",
"engines": {
"node": ">=4"
}
},
"node_modules/aproba": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz",
"integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ=="
},
"node_modules/are-we-there-yet": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz",
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==",
"dependencies": {
"delegates": "^1.0.0",
"readable-stream": "^3.6.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/are-we-there-yet/node_modules/readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
},
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/buffer-from": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
},
"node_modules/caseless": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
},
"node_modules/chownr": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
"integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
"engines": {
"node": ">=10"
}
},
"node_modules/color-support": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
"bin": {
"color-support": "bin.js"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"node_modules/concat-stream": {
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz",
"integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==",
"engines": [
"node >= 0.8"
],
"dependencies": {
"buffer-from": "^1.0.0",
"inherits": "^2.0.3",
"readable-stream": "^2.2.2",
"typedarray": "^0.0.6"
}
},
"node_modules/console-control-strings": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
"integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4="
},
"node_modules/core-util-is": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="
},
"node_modules/debug": {
"version": "4.3.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz",
"integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==",
"dependencies": {
"ms": "2.1.2"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/delegates": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
"integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o="
},
"node_modules/detect-libc": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
"integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=",
"bin": {
"detect-libc": "bin/detect-libc.js"
},
"engines": {
"node": ">=0.10"
}
},
"node_modules/discord.js": {
"version": "12.5.3",
"resolved": "https://registry.npmjs.org/discord.js/-/discord.js-12.5.3.tgz",
"integrity": "sha512-D3nkOa/pCkNyn6jLZnAiJApw2N9XrIsXUAdThf01i7yrEuqUmDGc7/CexVWwEcgbQR97XQ+mcnqJpmJ/92B4Aw==",
"deprecated": "no longer supported",
"dependencies": {
"@discordjs/collection": "^0.1.6",
"@discordjs/form-data": "^3.0.1",
"abort-controller": "^3.0.0",
"node-fetch": "^2.6.1",
"prism-media": "^1.2.9",
"setimmediate": "^1.0.5",
"tweetnacl": "^1.0.3",
"ws": "^7.4.4"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/env-paths": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz",
"integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==",
"engines": {
"node": ">=6"
}
},
"node_modules/event-target-shim": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
"engines": {
"node": ">=6"
}
},
"node_modules/ffmpeg": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/ffmpeg/-/ffmpeg-0.0.4.tgz",
"integrity": "sha1-HEYN+OfaUSf2LO70v6BsWciWMMs=",
"dependencies": {
"when": ">= 0.0.1"
}
},
"node_modules/ffmpeg-static": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/ffmpeg-static/-/ffmpeg-static-4.4.0.tgz",
"integrity": "sha512-NIJHVPXlSsIK9pYvsTPh4ZlppauorpPLLeOaIG7VOXWQck4Fx4Qi7Ahe+j8mj8KZXhWwCg3Hx46JdWAIOWLcpg==",
"hasInstallScript": true,
"dependencies": {
"@derhuerst/http-basic": "^8.2.0",
"env-paths": "^2.2.0",
"https-proxy-agent": "^5.0.0",
"progress": "^2.0.3"
},
"engines": {
"node": ">=10"
}
},
"node_modules/fs-minipass": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
"integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
"dependencies": {
"minipass": "^3.0.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
},
"node_modules/gauge": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.1.tgz",
"integrity": "sha512-6STz6KdQgxO4S/ko+AbjlFGGdGcknluoqU+79GOFCDqqyYj5OanQf9AjxwN0jCidtT+ziPMmPSt9E4hfQ0CwIQ==",
"dependencies": {
"aproba": "^1.0.3 || ^2.0.0",
"color-support": "^1.1.2",
"console-control-strings": "^1.0.0",
"has-unicode": "^2.0.1",
"object-assign": "^4.1.1",
"signal-exit": "^3.0.0",
"string-width": "^1.0.1 || ^2.0.0",
"strip-ansi": "^3.0.1 || ^4.0.0",
"wide-align": "^1.1.2"
},
"engines": {
"node": ">=10"
}
},
"node_modules/glob": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
"integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/has-unicode": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
"integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk="
},
"node_modules/http-response-object": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/http-response-object/-/http-response-object-3.0.2.tgz",
"integrity": "sha512-bqX0XTF6fnXSQcEJ2Iuyr75yVakyjIDCqroJQ/aHfSdlM743Cwqoi2nDYMzLGWUcuTWGWy8AAvOKXTfiv6q9RA==",
"dependencies": {
"@types/node": "^10.0.3"
}
},
"node_modules/https-proxy-agent": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz",
"integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==",
"dependencies": {
"agent-base": "6",
"debug": "4"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"dependencies": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"node_modules/is-fullwidth-code-point": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
"integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=",
"engines": {
"node": ">=4"
}
},
"node_modules/isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/m3u8stream": {
"version": "0.8.4",
"resolved": "https://registry.npmjs.org/m3u8stream/-/m3u8stream-0.8.4.tgz",
"integrity": "sha512-sco80Db+30RvcaIOndenX6E6oQNgTiBKeJbFPc+yDXwPQIkryfboEbCvXPlBRq3mQTCVPQO93TDVlfRwqpD35w==",
"dependencies": {
"miniget": "^4.0.0",
"sax": "^1.2.4"
},
"engines": {
"node": ">=10"
}
},
"node_modules/make-dir": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
"integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
"dependencies": {
"semver": "^6.0.0"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/make-dir/node_modules/semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/mime-db": {
"version": "1.51.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz",
"integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.34",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz",
"integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==",
"dependencies": {
"mime-db": "1.51.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/miniget": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/miniget/-/miniget-4.2.1.tgz",
"integrity": "sha512-O/DduzDR6f+oDtVype9S/Qu5hhnx73EDYGyZKwU/qN82lehFZdfhoa4DT51SpsO+8epYrB3gcRmws56ROfTIoQ==",
"engines": {
"node": ">=10"
}
},
"node_modules/minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/minipass": {
"version": "3.1.5",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.5.tgz",
"integrity": "sha512-+8NzxD82XQoNKNrl1d/FSi+X8wAEWR+sbYAfIvub4Nz0d22plFG72CEVVaufV8PNf4qSslFTD8VMOxNVhHCjTw==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/minizlib": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
"integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
"dependencies": {
"minipass": "^3.0.0",
"yallist": "^4.0.0"
},
"engines": {
"node": ">= 8"
}
},
"node_modules/mkdirp": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
"integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"bin": {
"mkdirp": "bin/cmd.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/node-addon-api": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz",
"integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A=="
},
"node_modules/node-fetch": {
"version": "2.6.6",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.6.tgz",
"integrity": "sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA==",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
}
},
"node_modules/nopt": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
"integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==",
"dependencies": {
"abbrev": "1"
},
"bin": {
"nopt": "bin/nopt.js"
},
"engines": {
"node": ">=6"
}
},
"node_modules/npmlog": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
"integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==",
"dependencies": {
"are-we-there-yet": "^2.0.0",
"console-control-strings": "^1.1.0",
"gauge": "^3.0.0",
"set-blocking": "^2.0.0"
}
},
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/opusscript": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/opusscript/-/opusscript-0.0.8.tgz",
"integrity": "sha512-VSTi1aWFuCkRCVq+tx/BQ5q9fMnQ9pVZ3JU4UHKqTkf0ED3fKEPdr+gKAAl3IA2hj9rrP6iyq3hlcJq3HELtNQ==",
"optional": true,
"peer": true
},
"node_modules/parse-cache-control": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/parse-cache-control/-/parse-cache-control-1.0.1.tgz",
"integrity": "sha1-juqz5U+laSD+Fro493+iGqzC104="
},
"node_modules/path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/prism-media": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/prism-media/-/prism-media-1.3.2.tgz",
"integrity": "sha512-L6UsGHcT6i4wrQhFF1aPK+MNYgjRqR2tUoIqEY+CG1NqVkMjPRKzS37j9f8GiYPlD6wG9ruBj+q5Ax+bH8Ik1g==",
"peerDependencies": {
"@discordjs/opus": "^0.5.0",
"ffmpeg-static": "^4.2.7 || ^3.0.0 || ^2.4.0",
"node-opus": "^0.3.3",
"opusscript": "^0.0.8"
},
"peerDependenciesMeta": {
"@discordjs/opus": {
"optional": true
},
"ffmpeg-static": {
"optional": true
},
"node-opus": {
"optional": true
},
"opusscript": {
"optional": true
}
}
},
"node_modules/process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
"integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="
},
"node_modules/progress": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",
"integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/readable-stream": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
"integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"node_modules/rimraf": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
"dependencies": {
"glob": "^7.1.3"
},
"bin": {
"rimraf": "bin.js"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/safe-buffer": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
},
"node_modules/sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"node_modules/semver": {
"version": "7.3.5",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
"integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/set-blocking": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc="
},
"node_modules/setimmediate": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
"integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU="
},
"node_modules/signal-exit": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz",
"integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ=="
},
"node_modules/string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"dependencies": {
"safe-buffer": "~5.1.0"
}
},
"node_modules/string-width": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
"integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
"dependencies": {
"is-fullwidth-code-point": "^2.0.0",
"strip-ansi": "^4.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/strip-ansi": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
"integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
"dependencies": {
"ansi-regex": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/tar": {
"version": "6.1.11",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz",
"integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==",
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^3.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"
},
"engines": {
"node": ">= 10"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
},
"node_modules/tweetnacl": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz",
"integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw=="
},
"node_modules/typedarray": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz",
"integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c="
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8="
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE="
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/when": {
"version": "3.7.8",
"resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz",
"integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I="
},
"node_modules/wide-align": {
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz",
"integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==",
"dependencies": {
"string-width": "^1.0.2 || 2 || 3 || 4"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
},
"node_modules/ws": {
"version": "7.5.6",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.5.6.tgz",
"integrity": "sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA==",
"engines": {
"node": ">=8.3.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/ytdl-core": {
"version": "4.9.1",
"resolved": "https://registry.npmjs.org/ytdl-core/-/ytdl-core-4.9.1.tgz",
"integrity": "sha512-6Jbp5RDhUEozlaJQAR+l8oV8AHsx3WUXxSyPxzE6wOIAaLql7Hjiy0ZM58wZoyj1YEenlEPjEqcJIjKYKxvHtQ==",
"dependencies": {
"m3u8stream": "^0.8.3",
"miniget": "^4.0.0",
"sax": "^1.1.3"
},
"engines": {
"node": ">=10"
}
},
"node_modules/ytdl-core-discord": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/ytdl-core-discord/-/ytdl-core-discord-1.3.1.tgz",
"integrity": "sha512-KW8zYY35jRSkxZTEQtT9EiR2exFwYKhCE8QZbRg5Ge9a0YWDDhBOixSdWb8Cn41B1uHhz8FR15E4E/k0kHNX3w==",
"dependencies": {
"@types/node": "^15.12.2",
"prism-media": "^1.3.1",
"ytdl-core": "^4.8.2"
}
},
"node_modules/ytdl-core-discord/node_modules/@types/node": {
"version": "15.14.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-15.14.9.tgz",
"integrity": "sha512-qjd88DrCxupx/kJD5yQgZdcYKZKSIGBVDIBE1/LTGcNm3d2Np/jxojkdePDdfnBHJc5W7vSMpbJ1aB7p/Py69A=="
}
}
}

19
node_modules/@derhuerst/http-basic/LICENSE generated vendored Normal file

@@ -0,0 +1,19 @@
Copyright (c) 2014 Forbes Lindesay
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

97
node_modules/@derhuerst/http-basic/README.md generated vendored Normal file

@@ -0,0 +1,97 @@
# http-basic
Simple wrapper around http.request/https.request
[![Build Status](https://img.shields.io/travis/ForbesLindesay/http-basic/master.svg)](https://travis-ci.org/ForbesLindesay/http-basic)
[![Dependency Status](https://img.shields.io/david/ForbesLindesay/http-basic.svg)](https://david-dm.org/ForbesLindesay/http-basic)
[![NPM version](https://img.shields.io/npm/v/http-basic.svg)](https://www.npmjs.org/package/http-basic)
## Installation
npm install http-basic
## Usage
```js
var request = require('http-basic');
var options = {followRedirects: true, gzip: true, cache: 'memory'};
var req = request('GET', 'http://example.com', options, function (err, res) {
if (err) throw err;
console.dir(res.statusCode);
res.body.resume();
});
req.end();
```
**method:**
The http method (e.g. `GET`, `POST`, `PUT`, `DELETE` etc.)
**url:**
The url as a string (e.g. `http://example.com`). It must be fully qualified and either http or https.
**options:**
- `headers` - (default `{}`) http headers
- `agent` - (default: `false`) controls keep-alive (see http://nodejs.org/api/http.html#http_http_request_options_callback)
- `duplex` - (default: `true` except for `GET`, `OPTIONS` and `HEAD` requests) allows you to explicitly set a body on a request that uses a method that normally would not have a body
- `followRedirects` - (default: `false`) - if true, redirects are followed (note that this only affects the result in the callback)
- `maxRedirects` - (default: `Infinity`) - limit the number of redirects allowed.
- `allowRedirectHeaders` (default: `null`) - an array of headers allowed for redirects (none if `null`).
- `gzip` (default: `false`) - automatically accept gzip and deflate encodings. This is kept completely transparent to the user.
- `cache` - (default: `null`) - `'memory'` or `'file'` to use the default built in caches or you can pass your own cache implementation.
- `timeout` (default: `false`) - times out if no response is returned within the given number of milliseconds.
- `socketTimeout` (default: `false`) - calls `req.setTimeout` internally which causes the request to timeout if no new data is seen for the given number of milliseconds.
- `retry` (default: `false`) - retry GET requests. Set this to `true` to retry when the request errors or returns a status code greater than or equal to 400 (can also be a function that takes `(err, req, attemptNo) => shouldRetry`)
- `retryDelay` (default: `200`) - the delay between retries (can also be set to a function that takes `(err, res, attemptNo) => delay`)
- `maxRetries` (default: `5`) - the number of times to retry before giving up.
- `ignoreFailedInvalidation` (default: `false`) - whether the cache should swallow errors if there is a problem removing a cached response. Note that enabling this setting may result in incorrect, cached data being returned to the user.
- `isMatch` - `(requestHeaders: Headers, cachedResponse: CachedResponse, defaultValue: boolean) => boolean` - override the default behaviour for testing whether a cached response matches a request.
- `isExpired` - `(cachedResponse: CachedResponse, defaultValue: boolean) => boolean` - override the default behaviour for testing whether a cached response has expired
- `canCache` - `(res: Response<NodeJS.ReadableStream>, defaultValue: boolean) => boolean` - override the default behaviour for testing whether a response can be cached
**callback:**
The callback is called with `err` as the first argument and `res` as the second argument. `res` is an [http-response-object](https://github.com/ForbesLindesay/http-response-object). It has the following properties:
- `statusCode` - a number representing the HTTP Status Code
- `headers` - an object representing the HTTP headers
- `body` - a readable stream representing the response body.
- `url` - the URL that was requested (in the case of redirects, this is the final url that was requested)
**returns:**
If the method is `GET`, `DELETE` or `HEAD`, it returns `undefined`.
Otherwise, it returns a writable stream for the body of the request.
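For example, an illustrative sketch combining several of the options documented above (the option names are the documented ones; the URL and values are made up):
```js
var request = require('http-basic');

// GET with gzip, redirects, retries, a timeout and the built-in file cache.
request('GET', 'http://example.com/data.json', {
  headers: {'accept': 'application/json'},
  gzip: true,
  followRedirects: true,
  maxRedirects: 5,
  timeout: 5000,     // fail if no response within 5 seconds
  retry: true,       // retry failed GET requests
  maxRetries: 3,
  retryDelay: 500,
  cache: 'file'      // use the built-in file cache
}, function (err, res) {
  if (err) throw err;
  console.log(res.statusCode, res.url);
  res.body.pipe(process.stdout); // res.body is a readable stream
});
```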
## Implementing a Cache
A `Cache` is an object with three methods:
- `getResponse(url, callback)` - retrieve a cached response object
- `setResponse(url, response)` - cache a response object
- `invalidateResponse(url, callback)` - remove a response which is no longer valid
A cached response object is an object with the following properties:
- `statusCode` - Number
- `headers` - Object (key value pairs of strings)
- `body` - Stream (a stream of binary data)
- `requestHeaders` - Object (key value pairs of strings)
- `requestTimestamp` - Number
`getResponse` should call the callback with an optional error and either `null` or a cached response object, depending on whether the url can be found in the cache. Only `GET`s are cached.
`setResponse` should just swallow any errors it has (or report them using `console.warn`).
`invalidateResponse` should call the callback with an optional error if it is unable to invalidate a response.
A cache may also define any of the methods from `lib/cache-utils.js` to override behaviour for what gets cached. It is currently still only possible to cache "get" requests, although this could be changed.
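For illustration, a minimal in-memory cache that satisfies this contract might look like the following sketch (the `TinyCache` name and the body-buffering strategy are this example's own, not part of the library):
```js
var stream = require('stream');

function TinyCache() {
  this._store = {}; // url -> cached response object, with the body kept as a Buffer
}
TinyCache.prototype.getResponse = function (url, callback) {
  var hit = this._store[url];
  if (!hit) return callback(null, null);
  var body = new stream.PassThrough();
  body.end(hit.body); // replay the buffered body as a fresh stream
  callback(null, {
    statusCode: hit.statusCode,
    headers: hit.headers,
    body: body,
    requestHeaders: hit.requestHeaders,
    requestTimestamp: hit.requestTimestamp
  });
};
TinyCache.prototype.setResponse = function (url, response) {
  var chunks = [];
  var store = this._store;
  response.body
    .on('data', function (chunk) { chunks.push(chunk); })
    .on('error', function (err) { console.warn('cache write failed: ' + err.message); }) // swallow errors
    .on('end', function () {
      store[url] = {
        statusCode: response.statusCode,
        headers: response.headers,
        body: Buffer.concat(chunks),
        requestHeaders: response.requestHeaders,
        requestTimestamp: response.requestTimestamp
      };
    });
};
TinyCache.prototype.invalidateResponse = function (url, callback) {
  delete this._store[url];
  callback(null);
};

// Pass an instance as the `cache` option:
// request('GET', 'http://example.com', {cache: new TinyCache()}, callback);
```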
## License
MIT


@@ -0,0 +1,9 @@
import { Headers } from './Headers';
interface CachedResponse {
statusCode: number;
headers: Headers;
body: NodeJS.ReadableStream;
requestHeaders: Headers;
requestTimestamp: number;
}
export { CachedResponse };


@@ -0,0 +1,2 @@
"use strict";
exports.__esModule = true;

3
node_modules/@derhuerst/http-basic/lib/Callback.d.ts generated vendored Normal file

@@ -0,0 +1,3 @@
import Response = require('http-response-object');
declare type Callback = (err: NodeJS.ErrnoException | null, response?: Response<NodeJS.ReadableStream>) => void;
export { Callback };

2
node_modules/@derhuerst/http-basic/lib/Callback.js generated vendored Normal file

@@ -0,0 +1,2 @@
"use strict";
exports.__esModule = true;

11
node_modules/@derhuerst/http-basic/lib/FileCache.d.ts generated vendored Normal file

@@ -0,0 +1,11 @@
import { ICache } from './ICache';
import { CachedResponse } from './CachedResponse';
export default class FileCache implements ICache {
private readonly _location;
constructor(location: string);
getResponse(url: string, callback: (err: null | Error, response: null | CachedResponse) => void): void;
setResponse(url: string, response: CachedResponse): void;
updateResponseHeaders(url: string, response: Pick<CachedResponse, 'headers' | 'requestTimestamp'>): void;
invalidateResponse(url: string, callback: (err: NodeJS.ErrnoException | null) => void): void;
getCacheKey(url: string): string;
}

107
node_modules/@derhuerst/http-basic/lib/FileCache.js generated vendored Normal file

@@ -0,0 +1,107 @@
'use strict';
exports.__esModule = true;
var fs = require("fs");
var path_1 = require("path");
var crypto_1 = require("crypto");
function jsonParse(data, cb) {
var result = null;
try {
result = JSON.parse(data);
}
catch (ex) {
return cb(ex);
}
cb(null, result);
}
var FileCache = /** @class */ (function () {
function FileCache(location) {
this._location = location;
}
FileCache.prototype.getResponse = function (url, callback) {
var key = path_1.resolve(this._location, this.getCacheKey(url));
fs.readFile(key + '.json', 'utf8', function (err, data) {
if (err && err.code === 'ENOENT')
return callback(null, null);
else if (err)
return callback(err, null);
jsonParse(data, function (err, response) {
if (err) {
return callback(err, null);
}
var body = fs.createReadStream(key + '.body');
response.body = body;
callback(null, response);
});
});
};
FileCache.prototype.setResponse = function (url, response) {
var key = path_1.resolve(this._location, this.getCacheKey(url));
var errored = false;
fs.mkdir(this._location, { recursive: true }, function (err) {
if (err && err.code !== 'EEXIST') {
console.warn('Error creating cache: ' + err.message);
return;
}
response.body.pipe(fs.createWriteStream(key + '.body')).on('error', function (err) {
errored = true;
console.warn('Error writing to cache: ' + err.message);
}).on('close', function () {
if (!errored) {
fs.writeFile(key + '.json', JSON.stringify({
statusCode: response.statusCode,
headers: response.headers,
requestHeaders: response.requestHeaders,
requestTimestamp: response.requestTimestamp
}, null, ' '), function (err) {
if (err) {
console.warn('Error writing to cache: ' + err.message);
}
});
}
});
});
};
FileCache.prototype.updateResponseHeaders = function (url, response) {
var key = path_1.resolve(this._location, this.getCacheKey(url));
fs.readFile(key + '.json', 'utf8', function (err, data) {
if (err) {
console.warn('Error writing to cache: ' + err.message);
return;
}
var parsed = null;
try {
parsed = JSON.parse(data);
}
catch (ex) {
console.warn('Error writing to cache: ' + ex.message);
return;
}
fs.writeFile(key + '.json', JSON.stringify({
statusCode: parsed.statusCode,
headers: response.headers,
requestHeaders: parsed.requestHeaders,
requestTimestamp: response.requestTimestamp
}, null, ' '), function (err) {
if (err) {
console.warn('Error writing to cache: ' + err.message);
}
});
});
};
FileCache.prototype.invalidateResponse = function (url, callback) {
var key = path_1.resolve(this._location, this.getCacheKey(url));
fs.unlink(key + '.json', function (err) {
if (err && err.code === 'ENOENT')
return callback(null);
else
callback(err || null);
});
};
FileCache.prototype.getCacheKey = function (url) {
var hash = crypto_1.createHash('sha512');
hash.update(url);
return hash.digest('hex');
};
return FileCache;
}());
exports["default"] = FileCache;

2
node_modules/@derhuerst/http-basic/lib/Headers.d.ts generated vendored Normal file

@@ -0,0 +1,2 @@
import { IncomingHttpHeaders } from 'http';
export declare type Headers = IncomingHttpHeaders;

2
node_modules/@derhuerst/http-basic/lib/Headers.js generated vendored Normal file

@@ -0,0 +1,2 @@
"use strict";
exports.__esModule = true;

2
node_modules/@derhuerst/http-basic/lib/HttpVerb.d.ts generated vendored Normal file

@@ -0,0 +1,2 @@
declare type HttpVerb = ('GET' | 'HEAD' | 'POST' | 'PUT' | 'DELETE' | 'CONNECT' | 'OPTIONS' | 'TRACE' | 'PATCH');
export { HttpVerb };

2
node_modules/@derhuerst/http-basic/lib/HttpVerb.js generated vendored Normal file

@@ -0,0 +1,2 @@
"use strict";
exports.__esModule = true;

8
node_modules/@derhuerst/http-basic/lib/ICache.d.ts generated vendored Normal file

@@ -0,0 +1,8 @@
import { CachedResponse } from './CachedResponse';
interface ICache {
getResponse(url: string, cb: (err: Error | null, response: CachedResponse | null) => void): void;
setResponse(url: string, response: CachedResponse | null): void;
updateResponseHeaders?: (url: string, response: Pick<CachedResponse, 'headers' | 'requestTimestamp'>) => void;
invalidateResponse(url: string, cb: (err: Error | null) => void): void;
}
export { ICache };

2
node_modules/@derhuerst/http-basic/lib/ICache.js generated vendored Normal file

@@ -0,0 +1,2 @@
"use strict";
exports.__esModule = true;


@@ -0,0 +1,8 @@
import { CachedResponse } from './CachedResponse';
export default class MemoryCache {
private readonly _cache;
getResponse(url: string, callback: (err: null | Error, response: null | CachedResponse) => void): void;
updateResponseHeaders(url: string, response: Pick<CachedResponse, 'headers' | 'requestTimestamp'>): void;
setResponse(url: string, response: CachedResponse): void;
invalidateResponse(url: string, callback: (err: NodeJS.ErrnoException | null) => void): void;
}

56
node_modules/@derhuerst/http-basic/lib/MemoryCache.js generated vendored Normal file

@@ -0,0 +1,56 @@
'use strict';
var __assign = (this && this.__assign) || Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
exports.__esModule = true;
var stream_1 = require("stream");
var concat = require("concat-stream");
var MemoryCache = /** @class */ (function () {
function MemoryCache() {
this._cache = {};
}
MemoryCache.prototype.getResponse = function (url, callback) {
var cache = this._cache;
if (cache[url]) {
var body = new stream_1.PassThrough();
body.end(cache[url].body);
callback(null, {
statusCode: cache[url].statusCode,
headers: cache[url].headers,
body: body,
requestHeaders: cache[url].requestHeaders,
requestTimestamp: cache[url].requestTimestamp
});
}
else {
callback(null, null);
}
};
MemoryCache.prototype.updateResponseHeaders = function (url, response) {
this._cache[url] = __assign({}, this._cache[url], { headers: response.headers, requestTimestamp: response.requestTimestamp });
};
MemoryCache.prototype.setResponse = function (url, response) {
var cache = this._cache;
response.body.pipe(concat(function (body) {
cache[url] = {
statusCode: response.statusCode,
headers: response.headers,
body: body,
requestHeaders: response.requestHeaders,
requestTimestamp: response.requestTimestamp
};
}));
};
MemoryCache.prototype.invalidateResponse = function (url, callback) {
var cache = this._cache;
delete cache[url];
callback(null);
};
return MemoryCache;
}());
exports["default"] = MemoryCache;

25
node_modules/@derhuerst/http-basic/lib/Options.d.ts generated vendored Normal file

@@ -0,0 +1,25 @@
import { Agent } from 'http';
import { Headers } from './Headers';
import { ICache } from './ICache';
import Response = require('http-response-object');
import { CachedResponse } from './CachedResponse';
interface Options {
agent?: Agent | boolean;
allowRedirectHeaders?: string[];
cache?: 'file' | 'memory' | ICache;
duplex?: boolean;
followRedirects?: boolean;
gzip?: boolean;
headers?: Headers;
ignoreFailedInvalidation?: boolean;
maxRedirects?: number;
maxRetries?: number;
retry?: boolean | ((err: NodeJS.ErrnoException | null, res: Response<NodeJS.ReadableStream> | void, attemptNumber: number) => boolean);
retryDelay?: number | ((err: NodeJS.ErrnoException | null, res: Response<NodeJS.ReadableStream> | void, attemptNumber: number) => number);
socketTimeout?: number;
timeout?: number;
isMatch?: (requestHeaders: Headers, cachedResponse: CachedResponse, defaultValue: boolean) => boolean;
isExpired?: (cachedResponse: CachedResponse, defaultValue: boolean) => boolean;
canCache?: (res: Response<NodeJS.ReadableStream>, defaultValue: boolean) => boolean;
}
export { Options };

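To give a sense of how these options combine, here is a hedged sketch of a plain GET (URL and header values are placeholders):
```javascript
const request = require('@derhuerst/http-basic');

request('GET', 'https://example.com/', {
  gzip: true,             // ask for and transparently decode gzip/deflate bodies
  followRedirects: true,
  maxRedirects: 5,
  retry: true,            // retries only apply to GET requests (see index.js)
  retryDelay: 500,
  maxRetries: 3,
  timeout: 10000,         // overall timeout in milliseconds
  headers: { 'user-agent': 'example-client' }
}, (err, res) => {
  if (err) throw err;
  console.log(res.statusCode);
  res.body.resume();
});
```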
2
node_modules/@derhuerst/http-basic/lib/Options.js generated vendored Normal file
View file

@ -0,0 +1,2 @@
"use strict";
exports.__esModule = true;

14
node_modules/@derhuerst/http-basic/lib/cache-control-utils.d.ts generated vendored Normal file
View file

@ -0,0 +1,14 @@
import { CachedResponse } from './CachedResponse';
import Response = require('http-response-object');
export declare type Policy = {
maxage: number | null;
};
/**
* returns true if this response is cacheable (according to cache-control headers)
*/
export declare function isCacheable<T>(res: Response<T> | CachedResponse): boolean;
/**
* if the response is cacheable, returns an object detailing the maxage of the cache
* otherwise returns null
*/
export declare function cachePolicy<T>(res: Response<T> | CachedResponse): Policy | null;

53
node_modules/@derhuerst/http-basic/lib/cache-control-utils.js generated vendored Normal file
View file

@ -0,0 +1,53 @@
"use strict";
exports.__esModule = true;
var parseCacheControl = require('parse-cache-control');
function parseCacheControlHeader(res) {
var cacheControl = res.headers['cache-control'];
if (!cacheControl) {
return null;
}
return parseCacheControl(cacheControl);
}
// for the purposes of this library, we err on the side of caution and do not cache anything except public (or implicit public)
var nonCaching = ['private', 'no-cache', 'no-store', 'no-transform', 'must-revalidate', 'proxy-revalidate'];
function isCacheControlCacheable(parsedCacheControl) {
if (!parsedCacheControl) {
return false;
}
if (parsedCacheControl.public) {
return true;
}
// note that the library does not currently support s-maxage
if (parsedCacheControl["max-age"]) {
// https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9.3
// The max-age directive on a response implies that the response is cacheable (i.e., "public") unless some other, more restrictive cache directive is also present.
for (var i = 0; i < nonCaching.length; i++) {
if (parsedCacheControl[nonCaching[i]]) {
return false;
}
}
return true;
}
return false;
}
/**
* returns true if this response is cacheable (according to cache-control headers)
*/
function isCacheable(res) {
return isCacheControlCacheable(parseCacheControlHeader(res));
}
exports.isCacheable = isCacheable;
function buildPolicy(parsedCacheControl) {
// note that the library does not currently support s-maxage
return { maxage: parsedCacheControl['max-age'] || null };
}
/**
* if the response is cacheable, returns an object detailing the maxage of the cache
* otherwise returns null
*/
function cachePolicy(res) {
var parsed = parseCacheControlHeader(res);
return parsed && isCacheControlCacheable(parsed) ? buildPolicy(parsed) : null;
}
exports.cachePolicy = cachePolicy;

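Both exports only look at `res.headers`, so their behaviour can be illustrated with plain objects (expected results follow from the parsing above; this is a sketch, not part of the package):
```javascript
const { isCacheable, cachePolicy } = require('@derhuerst/http-basic/lib/cache-control-utils');

console.log(isCacheable({ headers: { 'cache-control': 'public, max-age=60' } }));  // true
console.log(isCacheable({ headers: { 'cache-control': 'private, max-age=60' } })); // false (non-caching directive)
console.log(cachePolicy({ headers: { 'cache-control': 'max-age=120' } }));         // { maxage: 120 }
console.log(cachePolicy({ headers: {} }));                                         // null (no cache-control header)
```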
6
node_modules/@derhuerst/http-basic/lib/cache-utils.d.ts generated vendored Normal file
View file

@ -0,0 +1,6 @@
import Response = require('http-response-object');
import { Headers } from './Headers';
import { CachedResponse } from './CachedResponse';
export declare function isMatch(requestHeaders: Headers, cachedResponse: CachedResponse): boolean;
export declare function isExpired(cachedResponse: CachedResponse): boolean;
export declare function canCache<T>(res: Response<T>): boolean;

44
node_modules/@derhuerst/http-basic/lib/cache-utils.js generated vendored Normal file
View file

@ -0,0 +1,44 @@
"use strict";
exports.__esModule = true;
var cache_control_utils_1 = require("./cache-control-utils");
function isMatch(requestHeaders, cachedResponse) {
var vary = cachedResponse.headers['vary'];
if (vary && cachedResponse.requestHeaders) {
vary = '' + vary;
return vary.split(',').map(function (header) { return header.trim().toLowerCase(); }).every(function (header) {
return requestHeaders[header] === cachedResponse.requestHeaders[header];
});
}
else {
return true;
}
}
exports.isMatch = isMatch;
;
function isExpired(cachedResponse) {
var policy = cache_control_utils_1.cachePolicy(cachedResponse);
if (policy) {
var time = (Date.now() - cachedResponse.requestTimestamp) / 1000;
if (policy.maxage !== null && policy.maxage > time) {
return false;
}
}
if (cachedResponse.statusCode === 301 || cachedResponse.statusCode === 308)
return false;
return true;
}
exports.isExpired = isExpired;
;
function canCache(res) {
if (res.headers['etag'])
return true;
if (res.headers['last-modified'])
return true;
if (cache_control_utils_1.isCacheable(res))
return true;
if (res.statusCode === 301 || res.statusCode === 308)
return true;
return false;
}
exports.canCache = canCache;
;

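A short sketch of how these helpers treat the `Vary` header and `max-age` (the header values are invented for illustration):
```javascript
const { isMatch, isExpired } = require('@derhuerst/http-basic/lib/cache-utils');

const cached = {
  statusCode: 200,
  headers: { vary: 'Accept-Encoding', 'cache-control': 'max-age=60' },
  requestHeaders: { 'accept-encoding': 'gzip' },
  requestTimestamp: Date.now(),
  body: null // not inspected by these helpers
};

console.log(isMatch({ 'accept-encoding': 'gzip' }, cached));     // true  (Vary-ed header values agree)
console.log(isMatch({ 'accept-encoding': 'identity' }, cached)); // false (Vary mismatch)
console.log(isExpired(cached));                                  // false (still within max-age)
```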
16
node_modules/@derhuerst/http-basic/lib/index.d.ts generated vendored Normal file
View file

@ -0,0 +1,16 @@
import { Callback } from './Callback';
import { CachedResponse } from './CachedResponse';
import { HttpVerb } from './HttpVerb';
import { ICache } from './ICache';
import { Options } from './Options';
import Response = require('http-response-object');
import { URL } from 'url';
declare function request(method: HttpVerb, url: string | URL, options: Options | null | void, callback: Callback): void | NodeJS.WritableStream;
declare function request(method: HttpVerb, url: string | URL, callback: Callback): void | NodeJS.WritableStream;
export default request;
export { HttpVerb };
export { Options };
export { Callback };
export { Response };
export { CachedResponse };
export { ICache };

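The `void | NodeJS.WritableStream` return type matters for non-GET calls: for methods that are duplex by default (such as POST) the compiled implementation below returns the underlying request stream so a body can be piped in. A hedged sketch (URL, content type and file path are placeholders):
```javascript
const fs = require('fs');
const request = require('@derhuerst/http-basic');

const req = request('POST', 'https://example.com/upload', {
  headers: { 'content-type': 'application/octet-stream' }
}, (err, res) => {
  if (err) throw err;
  console.log('status:', res.statusCode);
  res.body.resume();
});

// POST is duplex by default, so `req` is the writable request stream.
fs.createReadStream('./some-file.bin').pipe(req);
```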
380
node_modules/@derhuerst/http-basic/lib/index.js generated vendored Normal file
View file

@ -0,0 +1,380 @@
"use strict";
var __assign = (this && this.__assign) || Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
exports.__esModule = true;
var cacheUtils = require("./cache-utils");
var FileCache_1 = require("./FileCache");
var MemoryCache_1 = require("./MemoryCache");
var http_1 = require("http");
var zlib_1 = require("zlib");
var url_1 = require("url");
var stream_1 = require("stream");
var https_1 = require("https");
var Response = require("http-response-object");
exports.Response = Response;
var caseless = require('caseless');
var fileCache = new FileCache_1["default"](__dirname + '/cache');
var memoryCache = new MemoryCache_1["default"]();
function requestProtocol(protocol, options, callback) {
if (protocol === 'http') {
return http_1.request(options, callback);
}
else if (protocol === 'https') {
return https_1.request(options, callback);
}
throw new Error('Unsupported protocol ' + protocol);
}
function request(method, url, options, callback) {
if (typeof options === 'function') {
callback = options;
options = null;
}
if (options === null || options === undefined) {
options = {};
}
if (typeof options !== 'object') {
throw new TypeError('options must be an object (or null)');
}
if (typeof callback !== 'function') {
throw new TypeError('callback must be a function');
}
return _request(method, ((url && typeof url === 'object') ? url.href : url), options, callback);
}
function _request(method, url, options, callback) {
var start = Date.now();
if (typeof method !== 'string') {
throw new TypeError('The method must be a string.');
}
if (typeof url !== 'string') {
throw new TypeError('The URL/path must be a string or a URL object.');
}
method = method.toUpperCase();
var urlObject = url_1.parse(url);
var protocol = (urlObject.protocol || '').replace(/\:$/, '');
if (protocol !== 'http' && protocol !== 'https') {
throw new TypeError('The protocol "' + protocol + '" is not supported, cannot load "' + url + '"');
}
var rawHeaders = options.headers || {};
var headers = caseless(rawHeaders);
if (urlObject.auth) {
headers.set('Authorization', 'Basic ' + (Buffer.from(urlObject.auth)).toString('base64'));
}
var agent = 'agent' in options ? options.agent : false;
var cache = options.cache;
if (typeof cache === 'string') {
if (cache === 'file') {
cache = fileCache;
}
else if (cache === 'memory') {
cache = memoryCache;
}
}
if (cache && !(typeof cache === 'object' && typeof cache.getResponse === 'function' && typeof cache.setResponse === 'function' && typeof cache.invalidateResponse === 'function')) {
throw new TypeError(cache + ' is not a valid cache, caches must have `getResponse`, `setResponse` and `invalidateResponse` methods.');
}
var ignoreFailedInvalidation = options.ignoreFailedInvalidation;
if (options.duplex !== undefined && typeof options.duplex !== 'boolean') {
throw new Error('expected options.duplex to be a boolean if provided');
}
var duplex = options.duplex !== undefined ? options.duplex : !(method === 'GET' || method === 'DELETE' || method === 'HEAD');
var unsafe = !(method === 'GET' || method === 'OPTIONS' || method === 'HEAD');
if (options.gzip) {
headers.set('Accept-Encoding', headers.has('Accept-Encoding') ? headers.get('Accept-Encoding') + ',gzip,deflate' : 'gzip,deflate');
return _request(method, url, {
allowRedirectHeaders: options.allowRedirectHeaders,
duplex: duplex,
headers: rawHeaders,
agent: agent,
followRedirects: options.followRedirects,
retry: options.retry,
retryDelay: options.retryDelay,
maxRetries: options.maxRetries,
cache: cache,
timeout: options.timeout
}, function (err, res) {
if (err)
return callback(err);
if (!res)
return callback(new Error('Response should not be undefined if there is no error.'));
var newHeaders = __assign({}, res.headers);
var newBody = res.body;
switch (newHeaders['content-encoding']) {
case 'gzip':
delete newHeaders['content-encoding'];
newBody = res.body.pipe(zlib_1.createGunzip());
break;
case 'deflate':
delete newHeaders['content-encoding'];
newBody = res.body.pipe(zlib_1.createInflate());
break;
}
return callback(err, new Response(res.statusCode, newHeaders, newBody, res.url));
});
}
if (options.followRedirects) {
return _request(method, url, {
allowRedirectHeaders: options.allowRedirectHeaders,
duplex: duplex,
headers: rawHeaders,
agent: agent,
retry: options.retry,
retryDelay: options.retryDelay,
maxRetries: options.maxRetries,
cache: cache,
timeout: options.timeout
}, function (err, res) {
if (err)
return callback(err);
if (!res)
return callback(new Error('Response should not be undefined if there is no error.'));
if (options.followRedirects && isRedirect(res.statusCode)) {
// prevent leakage of file handles
res.body.resume();
if (method === 'DELETE' && res.statusCode === 303) {
// 303 See Other should convert to GET for duplex
// requests and for DELETE
method = 'GET';
}
if (options.maxRedirects === 0) {
var err_1 = new Error('Maximum number of redirects exceeded');
err_1.res = res;
return callback(err_1, res);
}
options = __assign({}, options, { duplex: false, maxRedirects: options.maxRedirects && options.maxRedirects !== Infinity ? options.maxRedirects - 1 : options.maxRedirects });
// don't maintain headers through redirects
// This fixes a problem where a POST to http://example.com
// might result in a GET to http://example.co.uk that includes "content-length"
// as a header
var headers_1 = caseless(options.headers);
var redirectHeaders = {};
if (options.allowRedirectHeaders) {
for (var i = 0; i < options.allowRedirectHeaders.length; i++) {
var headerName = options.allowRedirectHeaders[i];
var headerValue = headers_1.get(headerName);
if (headerValue) {
redirectHeaders[headerName] = headerValue;
}
}
}
options.headers = redirectHeaders;
var location = res.headers.location;
if (typeof location !== 'string') {
return callback(new Error('Cannot redirect to non string location: ' + location));
}
return request(duplex ? 'GET' : method, url_1.resolve(url, location), options, callback);
}
else {
return callback(null, res);
}
});
}
if (cache && method === 'GET' && !duplex) {
var timestamp_1 = Date.now();
return cache.getResponse(url, function (err, cachedResponse) {
if (err) {
console.warn('Error reading from cache: ' + err.message);
}
var isMatch = !!(cachedResponse && cacheUtils.isMatch(rawHeaders, cachedResponse));
if (cachedResponse && (options.isMatch ? options.isMatch(rawHeaders, cachedResponse, isMatch) : isMatch)) {
var isExpired = cacheUtils.isExpired(cachedResponse);
if (!(options.isExpired ? options.isExpired(cachedResponse, isExpired) : isExpired)) {
var res = new Response(cachedResponse.statusCode, cachedResponse.headers, cachedResponse.body, url);
res.fromCache = true;
res.fromNotModified = false;
return callback(null, res);
}
else {
if (cachedResponse.headers['etag']) {
headers.set('If-None-Match', cachedResponse.headers['etag']);
}
if (cachedResponse.headers['last-modified']) {
headers.set('If-Modified-Since', cachedResponse.headers['last-modified']);
}
}
}
request('GET', url, {
allowRedirectHeaders: options.allowRedirectHeaders,
headers: rawHeaders,
retry: options.retry,
retryDelay: options.retryDelay,
maxRetries: options.maxRetries,
agent: agent,
timeout: options.timeout
}, function (err, res) {
if (err)
return callback(err);
if (!res)
return callback(new Error('Response should not be undefined if there is no error.'));
if (res.statusCode === 304 && cachedResponse) { // Not Modified
// prevent leakage of file handles
res.body.resume();
var resultBody = cachedResponse.body;
var c = cache;
if (c.updateResponseHeaders) {
c.updateResponseHeaders(url, {
headers: res.headers,
requestTimestamp: timestamp_1
});
}
else {
var cachedResponseBody_1 = new stream_1.PassThrough();
var newResultBody_1 = new stream_1.PassThrough();
resultBody.on('data', function (data) {
cachedResponseBody_1.write(data);
newResultBody_1.write(data);
});
resultBody.on('end', function () {
cachedResponseBody_1.end();
newResultBody_1.end();
});
resultBody = newResultBody_1;
cache.setResponse(url, {
statusCode: cachedResponse.statusCode,
headers: res.headers,
body: cachedResponseBody_1,
requestHeaders: cachedResponse.requestHeaders,
requestTimestamp: timestamp_1
});
}
var response = new Response(cachedResponse.statusCode, cachedResponse.headers, resultBody, url);
response.fromCache = true;
response.fromNotModified = true;
return callback(null, response);
}
// prevent leakage of file handles
cachedResponse && cachedResponse.body.resume();
var canCache = cacheUtils.canCache(res);
if (options.canCache ? options.canCache(res, canCache) : canCache) {
var cachedResponseBody_2 = new stream_1.PassThrough();
var resultResponseBody_1 = new stream_1.PassThrough();
res.body.on('data', function (data) {
cachedResponseBody_2.write(data);
resultResponseBody_1.write(data);
});
res.body.on('end', function () { cachedResponseBody_2.end(); resultResponseBody_1.end(); });
var resultResponse = new Response(res.statusCode, res.headers, resultResponseBody_1, url);
cache.setResponse(url, {
statusCode: res.statusCode,
headers: res.headers,
body: cachedResponseBody_2,
requestHeaders: rawHeaders,
requestTimestamp: timestamp_1
});
return callback(null, resultResponse);
}
else {
return callback(null, res);
}
});
});
}
function attempt(n) {
return _request(method, url, {
allowRedirectHeaders: options.allowRedirectHeaders,
headers: rawHeaders,
agent: agent,
timeout: options.timeout
}, function (err, res) {
var retry = err || !res || res.statusCode >= 400;
if (typeof options.retry === 'function') {
retry = options.retry(err, res, n + 1);
}
if (n >= (options.maxRetries || 5)) {
retry = false;
}
if (retry) {
var delay = options.retryDelay;
if (typeof delay === 'function') {
delay = delay(err, res, n + 1);
}
delay = delay || 200;
setTimeout(function () {
attempt(n + 1);
}, delay);
}
else {
callback(err, res);
}
});
}
if (options.retry && method === 'GET' && !duplex) {
return attempt(0);
}
var responded = false;
var timeout = null;
var req = requestProtocol(protocol, {
host: urlObject.hostname,
port: urlObject.port == null ? undefined : +urlObject.port,
path: urlObject.path,
method: method,
headers: rawHeaders,
agent: agent
}, function (res) {
var end = Date.now();
if (responded)
return res.resume();
responded = true;
if (timeout !== null)
clearTimeout(timeout);
var result = new Response(res.statusCode || 0, res.headers, res, url);
if (cache && unsafe && res.statusCode && res.statusCode < 400) {
cache.invalidateResponse(url, function (err) {
if (err && !ignoreFailedInvalidation) {
callback(new Error('Error invalidating the cache for ' + url + ': ' + err.message), result);
}
else {
callback(null, result);
}
});
}
else {
callback(null, result);
}
}).on('error', function (err) {
if (responded)
return;
responded = true;
if (timeout !== null)
clearTimeout(timeout);
callback(err);
});
function onTimeout() {
if (responded)
return;
responded = true;
if (timeout !== null)
clearTimeout(timeout);
req.abort();
var duration = Date.now() - start;
var err = new Error('Request timed out after ' + duration + 'ms');
err.timeout = true;
err.duration = duration;
callback(err);
}
if (options.socketTimeout) {
req.setTimeout(options.socketTimeout, onTimeout);
}
if (options.timeout) {
timeout = setTimeout(onTimeout, options.timeout);
}
if (duplex) {
return req;
}
else {
req.end();
}
return undefined;
}
function isRedirect(statusCode) {
return statusCode === 301 || statusCode === 302 || statusCode === 303 || statusCode === 307 || statusCode === 308;
}
exports["default"] = request;
module.exports = request;
module.exports["default"] = request;
module.exports.Response = Response;

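The `attempt` helper above also accepts functions for `retry` and `retryDelay`, which allows a custom policy such as exponential backoff. A hypothetical sketch (the predicate and delays are examples, not part of the library):
```javascript
const request = require('@derhuerst/http-basic');

request('GET', 'https://example.com/flaky-endpoint', {
  // retry on network errors or 5xx responses...
  retry: (err, res) => !!err || !res || res.statusCode >= 500,
  // ...with exponential backoff between attempts, capped by maxRetries
  retryDelay: (err, res, attemptNumber) => 200 * Math.pow(2, attemptNumber),
  maxRetries: 4
}, (err, res) => {
  if (err) throw err;
  console.log(res.statusCode);
  res.body.resume();
});
```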
50
node_modules/@derhuerst/http-basic/package.json generated vendored Normal file
View file

@ -0,0 +1,50 @@
{
"name": "@derhuerst/http-basic",
"version": "8.2.1",
"main": "lib/index.js",
"types": "lib/index.d.ts",
"files": [
"lib"
],
"description": "Very low level wrapper arround http.request/https.request",
"keywords": [
"http",
"https",
"request",
"fetch",
"gzip",
"deflate",
"redirect",
"cache",
"etag",
"cache-control"
],
"dependencies": {
"caseless": "^0.12.0",
"concat-stream": "^1.6.2",
"http-response-object": "^3.0.1",
"parse-cache-control": "^1.0.1"
},
"devDependencies": {
"@types/concat-stream": "^1.6.0",
"@types/node": "^11.9.0",
"flowgen2": "^2.2.1",
"rimraf": "^2.5.4",
"serve-static": "^1.11.1",
"typescript": "^2.3.4"
},
"scripts": {
"build": "tsc && flowgen lib/**/*",
"pretest": "npm run build",
"test": "node test/index && node test/cache && node test/cache-invalidation && rimraf lib/cache"
},
"engines": {
"node": ">=6.0.0"
},
"repository": {
"type": "git",
"url": "https://github.com/ForbesLindesay/http-basic.git"
},
"author": "ForbesLindesay",
"license": "MIT"
}

190
node_modules/@discordjs/collection/LICENSE generated vendored Normal file
View file

@ -0,0 +1,190 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
Copyright 2015 - 2020 Amish Shah
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

3
node_modules/@discordjs/collection/README.md generated vendored Normal file
View file

@ -0,0 +1,3 @@
# Collection
Utility data structure used in Discord.js.

319
node_modules/@discordjs/collection/dist/index.d.ts generated vendored Normal file
View file

@ -0,0 +1,319 @@
export interface CollectionConstructor {
new (): Collection<unknown, unknown>;
new <K, V>(entries?: ReadonlyArray<readonly [K, V]> | null): Collection<K, V>;
new <K, V>(iterable: Iterable<readonly [K, V]>): Collection<K, V>;
readonly prototype: Collection<unknown, unknown>;
readonly [Symbol.species]: CollectionConstructor;
}
/**
* A Map with additional utility methods. This is used throughout discord.js rather than Arrays for anything that has
* an ID, for significantly improved performance and ease-of-use.
* @extends {Map}
* @property {number} size - The amount of elements in this collection.
*/
declare class Collection<K, V> extends Map<K, V> {
private _array;
private _keyArray;
static readonly default: typeof Collection;
['constructor']: typeof Collection;
constructor(entries?: ReadonlyArray<readonly [K, V]> | null);
/**
* Identical to [Map.get()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/get).
* Gets an element with the specified key, and returns its value, or `undefined` if the element does not exist.
* @param {*} key - The key to get from this collection
* @returns {* | undefined}
*/
get(key: K): V | undefined;
/**
* Identical to [Map.set()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/set).
* Sets a new element in the collection with the specified key and value.
* @param {*} key - The key of the element to add
* @param {*} value - The value of the element to add
* @returns {Collection}
*/
set(key: K, value: V): this;
/**
* Identical to [Map.has()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/has).
* Checks if an element exists in the collection.
* @param {*} key - The key of the element to check for
* @returns {boolean} `true` if the element exists, `false` if it does not exist.
*/
has(key: K): boolean;
/**
* Identical to [Map.delete()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/delete).
* Deletes an element from the collection.
* @param {*} key - The key to delete from the collection
* @returns {boolean} `true` if the element was removed, `false` if the element does not exist.
*/
delete(key: K): boolean;
/**
* Identical to [Map.clear()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/clear).
* Removes all elements from the collection.
* @returns {undefined}
*/
clear(): void;
/**
* Creates an ordered array of the values of this collection, and caches it internally. The array will only be
* reconstructed if an item is added to or removed from the collection, or if you change the length of the array
* itself. If you don't want this caching behavior, use `[...collection.values()]` or
* `Array.from(collection.values())` instead.
* @returns {Array}
*/
array(): V[];
/**
* Creates an ordered array of the keys of this collection, and caches it internally. The array will only be
* reconstructed if an item is added to or removed from the collection, or if you change the length of the array
* itself. If you don't want this caching behavior, use `[...collection.keys()]` or
* `Array.from(collection.keys())` instead.
* @returns {Array}
*/
keyArray(): K[];
/**
* Obtains the first value(s) in this collection.
* @param {number} [amount] Amount of values to obtain from the beginning
* @returns {*|Array<*>} A single value if no amount is provided or an array of values, starting from the end if
* amount is negative
*/
first(): V | undefined;
first(amount: number): V[];
/**
* Obtains the first key(s) in this collection.
* @param {number} [amount] Amount of keys to obtain from the beginning
* @returns {*|Array<*>} A single key if no amount is provided or an array of keys, starting from the end if
* amount is negative
*/
firstKey(): K | undefined;
firstKey(amount: number): K[];
/**
* Obtains the last value(s) in this collection. This relies on {@link Collection#array}, and thus the caching
* mechanism applies here as well.
* @param {number} [amount] Amount of values to obtain from the end
* @returns {*|Array<*>} A single value if no amount is provided or an array of values, starting from the start if
* amount is negative
*/
last(): V | undefined;
last(amount: number): V[];
/**
* Obtains the last key(s) in this collection. This relies on {@link Collection#keyArray}, and thus the caching
* mechanism applies here as well.
* @param {number} [amount] Amount of keys to obtain from the end
* @returns {*|Array<*>} A single key if no amount is provided or an array of keys, starting from the start if
* amount is negative
*/
lastKey(): K | undefined;
lastKey(amount: number): K[];
/**
* Obtains unique random value(s) from this collection. This relies on {@link Collection#array}, and thus the caching
* mechanism applies here as well.
* @param {number} [amount] Amount of values to obtain randomly
* @returns {*|Array<*>} A single value if no amount is provided or an array of values
*/
random(): V;
random(amount: number): V[];
/**
* Obtains unique random key(s) from this collection. This relies on {@link Collection#keyArray}, and thus the caching
* mechanism applies here as well.
* @param {number} [amount] Amount of keys to obtain randomly
* @returns {*|Array<*>} A single key if no amount is provided or an array
*/
randomKey(): K;
randomKey(amount: number): K[];
/**
* Searches for a single item where the given function returns a truthy value. This behaves like
* [Array.find()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/find).
* <warn>All collections used in Discord.js are mapped using their `id` property, and if you want to find by id you
* should use the `get` method. See
* [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/get) for details.</warn>
* @param {Function} fn The function to test with (should return boolean)
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {*}
* @example collection.find(user => user.username === 'Bob');
*/
find(fn: (value: V, key: K, collection: this) => boolean): V | undefined;
find<T>(fn: (this: T, value: V, key: K, collection: this) => boolean, thisArg: T): V | undefined;
/**
* Searches for the key of a single item where the given function returns a truthy value. This behaves like
* [Array.findIndex()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/findIndex),
* but returns the key rather than the positional index.
* @param {Function} fn The function to test with (should return boolean)
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {*}
* @example collection.findKey(user => user.username === 'Bob');
*/
findKey(fn: (value: V, key: K, collection: this) => boolean): K | undefined;
findKey<T>(fn: (this: T, value: V, key: K, collection: this) => boolean, thisArg: T): K | undefined;
/**
* Removes items that satisfy the provided filter function.
* @param {Function} fn Function used to test (should return a boolean)
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {number} The number of removed entries
*/
sweep(fn: (value: V, key: K, collection: this) => boolean): number;
sweep<T>(fn: (this: T, value: V, key: K, collection: this) => boolean, thisArg: T): number;
/**
* Identical to
* [Array.filter()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/filter),
* but returns a Collection instead of an Array.
* @param {Function} fn The function to test with (should return boolean)
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {Collection}
* @example collection.filter(user => user.username === 'Bob');
*/
filter(fn: (value: V, key: K, collection: this) => boolean): this;
filter<T>(fn: (this: T, value: V, key: K, collection: this) => boolean, thisArg: T): this;
/**
* Partitions the collection into two collections where the first collection
* contains the items that passed and the second contains the items that failed.
* @param {Function} fn Function used to test (should return a boolean)
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {Collection[]}
* @example const [big, small] = collection.partition(guild => guild.memberCount > 250);
*/
partition(fn: (value: V, key: K, collection: this) => boolean): [this, this];
partition<T>(fn: (this: T, value: V, key: K, collection: this) => boolean, thisArg: T): [this, this];
/**
* Maps each item into a Collection, then joins the results into a single Collection. Identical in behavior to
* [Array.flatMap()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/flatMap).
* @param {Function} fn Function that produces a new Collection
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {Collection}
* @example collection.flatMap(guild => guild.members.cache);
*/
flatMap<T>(fn: (value: V, key: K, collection: this) => Collection<K, T>): Collection<K, T>;
flatMap<T, This>(fn: (this: This, value: V, key: K, collection: this) => Collection<K, T>, thisArg: This): Collection<K, T>;
/**
* Maps each item to another value into an array. Identical in behavior to
* [Array.map()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map).
* @param {Function} fn Function that produces an element of the new array, taking three arguments
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {Array}
* @example collection.map(user => user.tag);
*/
map<T>(fn: (value: V, key: K, collection: this) => T): T[];
map<This, T>(fn: (this: This, value: V, key: K, collection: this) => T, thisArg: This): T[];
/**
* Maps each item to another value into a collection. Identical in behavior to
* [Array.map()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map).
* @param {Function} fn Function that produces an element of the new collection, taking three arguments
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {Collection}
* @example collection.mapValues(user => user.tag);
*/
mapValues<T>(fn: (value: V, key: K, collection: this) => T): Collection<K, T>;
mapValues<This, T>(fn: (this: This, value: V, key: K, collection: this) => T, thisArg: This): Collection<K, T>;
/**
* Checks if there exists an item that passes a test. Identical in behavior to
* [Array.some()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/some).
* @param {Function} fn Function used to test (should return a boolean)
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {boolean}
* @example collection.some(user => user.discriminator === '0000');
*/
some(fn: (value: V, key: K, collection: this) => boolean): boolean;
some<T>(fn: (this: T, value: V, key: K, collection: this) => boolean, thisArg: T): boolean;
/**
* Checks if all items passes a test. Identical in behavior to
* [Array.every()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/every).
* @param {Function} fn Function used to test (should return a boolean)
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {boolean}
* @example collection.every(user => !user.bot);
*/
every(fn: (value: V, key: K, collection: this) => boolean): boolean;
every<T>(fn: (this: T, value: V, key: K, collection: this) => boolean, thisArg: T): boolean;
/**
* Applies a function to produce a single value. Identical in behavior to
* [Array.reduce()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/reduce).
* @param {Function} fn Function used to reduce, taking four arguments; `accumulator`, `currentValue`, `currentKey`,
* and `collection`
* @param {*} [initialValue] Starting value for the accumulator
* @returns {*}
* @example collection.reduce((acc, guild) => acc + guild.memberCount, 0);
*/
reduce<T>(fn: (accumulator: T, value: V, key: K, collection: this) => T, initialValue?: T): T;
/**
* Identical to
* [Map.forEach()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/forEach),
* but returns the collection instead of undefined.
* @param {Function} fn Function to execute for each element
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {Collection}
* @example
* collection
* .each(user => console.log(user.username))
* .filter(user => user.bot)
* .each(user => console.log(user.username));
*/
each(fn: (value: V, key: K, collection: this) => void): this;
each<T>(fn: (this: T, value: V, key: K, collection: this) => void, thisArg: T): this;
/**
* Runs a function on the collection and returns the collection.
* @param {Function} fn Function to execute
* @param {*} [thisArg] Value to use as `this` when executing function
* @returns {Collection}
* @example
* collection
* .tap(coll => console.log(coll.size))
* .filter(user => user.bot)
* .tap(coll => console.log(coll.size))
*/
tap(fn: (collection: this) => void): this;
tap<T>(fn: (this: T, collection: this) => void, thisArg: T): this;
/**
* Creates an identical shallow copy of this collection.
* @returns {Collection}
* @example const newColl = someColl.clone();
*/
clone(): this;
/**
* Combines this collection with others into a new collection. None of the source collections are modified.
* @param {...Collection} collections Collections to merge
* @returns {Collection}
* @example const newColl = someColl.concat(someOtherColl, anotherColl, ohBoyAColl);
*/
concat(...collections: Collection<K, V>[]): this;
/**
* Checks if this collection shares identical items with another.
* This is different to checking for equality using equal-signs, because
* the collections may be different objects, but contain the same data.
* @param {Collection} collection Collection to compare with
* @returns {boolean} Whether the collections have identical contents
*/
equals(collection: Collection<K, V>): boolean;
/**
* The sort method sorts the items of a collection in place and returns it.
* The sort is not necessarily stable in Node 10 or older.
* The default sort order is according to string Unicode code points.
* @param {Function} [compareFunction] Specifies a function that defines the sort order.
* If omitted, the collection is sorted according to each character's Unicode code point value,
* according to the string conversion of each element.
* @returns {Collection}
* @example collection.sort((userA, userB) => userA.createdTimestamp - userB.createdTimestamp);
*/
sort(compareFunction?: (firstValue: V, secondValue: V, firstKey: K, secondKey: K) => number): this;
/**
* The intersect method returns a new structure containing items where the keys are present in both original structures.
* @param {Collection} other The other Collection to filter against
* @returns {Collection}
*/
intersect(other: Collection<K, V>): Collection<K, V>;
/**
* The difference method returns a new structure containing items where the key is present in one of the original structures but not the other.
* @param {Collection} other The other Collection to filter against
* @returns {Collection}
*/
difference(other: Collection<K, V>): Collection<K, V>;
/**
* The sorted method sorts the items of a collection and returns it.
* The sort is not necessarily stable in Node 10 or older.
* The default sort order is according to string Unicode code points.
* @param {Function} [compareFunction] Specifies a function that defines the sort order.
* If omitted, the collection is sorted according to each character's Unicode code point value,
* according to the string conversion of each element.
* @returns {Collection}
* @example collection.sorted((userA, userB) => userA.createdTimestamp - userB.createdTimestamp);
*/
sorted(compareFunction?: (firstValue: V, secondValue: V, firstKey: K, secondKey: K) => number): this;
}
export { Collection };
export default Collection;

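A short usage sketch against the typings above (keys and values are made up; it assumes the CommonJS build exposes the class directly, which is how discord.js itself consumes it):
```javascript
// Assumption: the dist build exports the Collection class itself.
const Collection = require('@discordjs/collection');

const users = new Collection([
  ['123', { username: 'Bob', bot: false }],
  ['456', { username: 'Eve', bot: true }],
]);

console.log(users.get('123').username);      // 'Bob'
console.log(users.filter(u => !u.bot).size); // 1 (filter returns another Collection)
console.log(users.map(u => u.username));     // ['Bob', 'Eve']
console.log(users.some(u => u.bot));         // true
```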
392
node_modules/@discordjs/collection/dist/index.js generated vendored Normal file

File diff suppressed because one or more lines are too long

49
node_modules/@discordjs/collection/package.json generated vendored Normal file
View file

@ -0,0 +1,49 @@
{
"name": "@discordjs/collection",
"version": "0.1.6",
"description": "Utility data structure used in Discord.js",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"lint": "eslint src --ext .ts",
"prebuild": "npm run lint",
"build": "rimraf dist/ && tsc",
"pretest": "npm run build",
"test": "node test/index.js",
"docs": "docgen --jsdoc jsdoc.json --source src/*.ts src/**/*.ts --custom docs/index.yml --output docs/docs.json",
"docs:test": "docgen --jsdoc jsdoc.json --source src/*.ts src/**/*.ts --custom docs/index.yml"
},
"repository": {
"type": "git",
"url": "git+https://github.com/discordjs/collection.git"
},
"keywords": [
"map",
"collection",
"utility"
],
"author": "Amish Shah <amishshah.2k@gmail.com>",
"license": "Apache-2.0",
"bugs": {
"url": "https://github.com/discordjs/collection/issues"
},
"homepage": "https://github.com/discordjs/collection#readme",
"devDependencies": {
"@babel/cli": "^7.8.4",
"@babel/core": "^7.8.4",
"@babel/preset-env": "^7.8.4",
"@babel/preset-typescript": "^7.8.3",
"@types/node": "^13.7.4",
"@typescript-eslint/eslint-plugin": "^2.21.0",
"@typescript-eslint/parser": "^2.21.0",
"discord.js-docgen": "discordjs/docgen#ts-patch",
"eslint": "^6.8.0",
"eslint-config-marine": "^6.0.0",
"jsdoc-babel": "^0.5.0",
"rimraf": "^3.0.2",
"typescript": "^3.8.2"
},
"eslintConfig": {
"extends": "marine/node"
}
}

19
node_modules/@discordjs/form-data/License generated vendored Normal file
View file

@ -0,0 +1,19 @@
Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

353
node_modules/@discordjs/form-data/Readme.md generated vendored Normal file
View file

@ -0,0 +1,353 @@
# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data)
A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications.
The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd].
[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface
[![Linux Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data)
[![MacOS Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data)
[![Windows Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data)
[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/master.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master)
[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data)
## Install
```
npm install --save form-data
```
## Usage
In this example we are constructing a form with 3 fields that contain a string,
a buffer and a file stream.
``` javascript
var FormData = require('form-data');
var fs = require('fs');
var form = new FormData();
form.append('my_field', 'my value');
form.append('my_buffer', new Buffer(10));
form.append('my_file', fs.createReadStream('/foo/bar.jpg'));
```
Also you can use http-response stream:
``` javascript
var FormData = require('form-data');
var http = require('http');
var form = new FormData();
http.request('http://nodejs.org/images/logo.png', function(response) {
form.append('my_field', 'my value');
form.append('my_buffer', new Buffer(10));
form.append('my_logo', response);
});
```
Or @mikeal's [request](https://github.com/request/request) stream:
``` javascript
var FormData = require('form-data');
var request = require('request');
var form = new FormData();
form.append('my_field', 'my value');
form.append('my_buffer', new Buffer(10));
form.append('my_logo', request('http://nodejs.org/images/logo.png'));
```
In order to submit this form to a web application, call ```submit(url, [callback])``` method:
``` javascript
form.submit('http://example.org/', function(err, res) {
// res response object (http.IncomingMessage) //
res.resume();
});
```
For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods.
### Custom options
You can provide custom options, such as `maxDataSize`:
``` javascript
var FormData = require('form-data');
var form = new FormData({ maxDataSize: 20971520 });
form.append('my_field', 'my value');
form.append('my_buffer', /* something big */);
```
List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15)
### Alternative submission methods
You can use node's http client interface:
``` javascript
var http = require('http');
var request = http.request({
method: 'post',
host: 'example.org',
path: '/upload',
headers: form.getHeaders()
});
form.pipe(request);
request.on('response', function(res) {
console.log(res.statusCode);
});
```
Or if you would prefer the `'Content-Length'` header to be set for you:
``` javascript
form.submit('example.org/upload', function(err, res) {
console.log(res.statusCode);
});
```
To use custom headers and pre-known length in parts:
``` javascript
var CRLF = '\r\n';
var form = new FormData();
var options = {
header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF,
knownLength: 1
};
form.append('my_buffer', buffer, options);
form.submit('http://example.com/', function(err, res) {
if (err) throw err;
console.log('Done');
});
```
Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually:
``` javascript
someModule.stream(function(err, stdout, stderr) {
if (err) throw err;
var form = new FormData();
form.append('file', stdout, {
filename: 'unicycle.jpg', // ... or:
filepath: 'photos/toys/unicycle.jpg',
contentType: 'image/jpeg',
knownLength: 19806
});
form.submit('http://example.com/', function(err, res) {
if (err) throw err;
console.log('Done');
});
});
```
The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory).
For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter:
``` javascript
form.submit({
host: 'example.com',
path: '/probably.php?extra=params',
auth: 'username:password'
}, function(err, res) {
console.log(res.statusCode);
});
```
In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`:
``` javascript
form.submit({
host: 'example.com',
path: '/surelynot.php',
headers: {'x-test-header': 'test-header-value'}
}, function(err, res) {
console.log(res.statusCode);
});
```
### Methods
- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-).
- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-)
- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary)
- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer)
- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync)
- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-)
- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength)
- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-)
- [_String_ toString()](https://github.com/form-data/form-data#string-tostring)
#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )
Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user.
```javascript
var form = new FormData();
form.append( 'my_string', 'my value' );
form.append( 'my_integer', 1 );
form.append( 'my_boolean', true );
form.append( 'my_buffer', new Buffer(10) );
form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) )
```
You may provide a string for options, or an object.
```javascript
// Set filename by providing a string for options
form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' );
// provide an object.
form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} );
```
#### _Headers_ getHeaders( [**Headers** _userHeaders_] )
This method adds the correct `content-type` header to the provided array of `userHeaders`.
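For example (a minimal sketch; the extra header is a placeholder):
```javascript
var FormData = require('form-data');
var form = new FormData();
form.append('my_field', 'my value');
// merges the multipart content-type (including its boundary) into your own headers
var headers = form.getHeaders({'x-custom-header': 'value'});
console.log(headers['content-type']); // e.g. multipart/form-data; boundary=--------------------------...
```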
#### _String_ getBoundary()
Return the boundary of the formData. A boundary consists of 26 `-` followed by 24 numbers
for example:
```javascript
--------------------------515890814546601021194782
```
_Note: The boundary must be unique and may not appear in the data._
#### _Buffer_ getBuffer()
Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data.
```javascript
var form = new FormData();
form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) );
form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') );
axios.post( 'https://example.com/path/to/api',
form.getBuffer(),
form.getHeaders()
)
```
**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error.
#### _Integer_ getLengthSync()
Same as `getLength` but synchronous.
_Note: getLengthSync __doesn't__ calculate streams length._
#### _Integer_ getLength( **function** _callback_ )
Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated
```javascript
this.getLength(function(err, length) {
if (err) {
this._error(err);
return;
}
// add content length
request.setHeader('Content-Length', length);
...
}.bind(this));
```
#### _Boolean_ hasKnownLength()
Checks if the length of added values is known.
#### _Request_ submit( _params_, **function** _callback_ )
Submit the form to a web application.
```javascript
var form = new FormData();
form.append( 'my_string', 'Hello World' );
form.submit( 'http://example.com/', function(err, res) {
// res is the response object (http.IncomingMessage)
res.resume();
} );
```
#### _String_ toString()
Returns the form data as a string. Don't use this if you are sending files or buffers; use `getBuffer()` instead.
### Integration with other libraries
#### Request
Form submission using [request](https://github.com/request/request):
```javascript
var formData = {
my_field: 'my_value',
my_file: fs.createReadStream(__dirname + '/unicycle.jpg'),
};
request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) {
if (err) {
return console.error('upload failed:', err);
}
console.log('Upload successful! Server responded with:', body);
});
```
For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads).
#### node-fetch
You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch):
```javascript
var form = new FormData();
form.append('a', 1);
fetch('http://example.com', { method: 'POST', body: form })
.then(function(res) {
return res.json();
}).then(function(json) {
console.log(json);
});
```
#### axios
In Node.js you can post a file using [axios](https://github.com/axios/axios):
```javascript
const form = new FormData();
const stream = fs.createReadStream(PATH_TO_FILE);
form.append('image', stream);
// In a Node.js environment you need to set the boundary in the 'Content-Type' header field by calling the `getHeaders` method
const formHeaders = form.getHeaders();
axios.post('http://example.com', form, {
headers: {
...formHeaders,
},
})
.then(response => console.log(response.status))
.catch(error => console.error(error))
```
## Notes
- The ```getLengthSync()``` method DOESN'T calculate the length of streams; use the ```knownLength``` option as a workaround.
- ```getLength(cb)``` will pass an error as the first parameter of the callback if a stream length cannot be calculated (e.g. when custom streams are appended without ```knownLength```).
- ```submit``` will not add `content-length` if the form length is unknown or not calculable.
- Starting with version `2.x`, FormData has dropped support for `node@0.10.x`.
- Starting with version `3.x`, FormData has dropped support for `node@4.x`.
## License
Form-Data is released under the [MIT](License) license.

61
node_modules/@discordjs/form-data/index.d.ts generated vendored Normal file
View file

@ -0,0 +1,61 @@
// Definitions by: Carlos Ballesteros Velasco <https://github.com/soywiz>
// Leon Yu <https://github.com/leonyu>
// BendingBender <https://github.com/BendingBender>
// Maple Miao <https://github.com/mapleeit>
/// <reference types="node" />
import * as stream from 'stream';
import * as http from 'http';
export = FormData;
// Extracted because @types/node doesn't export interfaces.
interface ReadableOptions {
highWaterMark?: number;
encoding?: string;
objectMode?: boolean;
read?(this: stream.Readable, size: number): void;
destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void;
autoDestroy?: boolean;
}
interface Options extends ReadableOptions {
writable?: boolean;
readable?: boolean;
dataSize?: number;
maxDataSize?: number;
pauseStreams?: boolean;
}
declare class FormData extends stream.Readable {
constructor(options?: Options);
append(key: string, value: any, options?: FormData.AppendOptions | string): void;
getHeaders(userHeaders?: FormData.Headers): FormData.Headers;
submit(
params: string | FormData.SubmitOptions,
callback?: (error: Error | null, response: http.IncomingMessage) => void
): http.ClientRequest;
getBuffer(): Buffer;
getBoundary(): string;
getLength(callback: (err: Error | null, length: number) => void): void;
getLengthSync(): number;
hasKnownLength(): boolean;
}
declare namespace FormData {
interface Headers {
[key: string]: any;
}
interface AppendOptions {
header?: string | Headers;
knownLength?: number;
filename?: string;
filepath?: string;
contentType?: string;
}
interface SubmitOptions extends http.RequestOptions {
protocol?: 'https:' | 'http:';
}
}

2
node_modules/@discordjs/form-data/lib/browser.js generated vendored Normal file
View file

@ -0,0 +1,2 @@
/* eslint-env browser */
module.exports = typeof self == 'object' ? self.FormData : window.FormData;

497
node_modules/@discordjs/form-data/lib/form_data.js generated vendored Normal file
View file

@ -0,0 +1,497 @@
var CombinedStream = require('combined-stream');
var util = require('util');
var path = require('path');
var http = require('http');
var https = require('https');
var parseUrl = require('url').parse;
var fs = require('fs');
var Stream = require('stream').Stream;
var mime = require('mime-types');
var asynckit = require('asynckit');
var populate = require('./populate.js');
// Public API
module.exports = FormData;
// make it a Stream
util.inherits(FormData, CombinedStream);
/**
* Create readable "multipart/form-data" streams.
* Can be used to submit forms
* and file uploads to other web applications.
*
* @constructor
* @param {Object} options - Properties to be added/overridden for FormData and CombinedStream
*/
function FormData(options) {
if (!(this instanceof FormData)) {
return new FormData(options);
}
this._overheadLength = 0;
this._valueLength = 0;
this._valuesToMeasure = [];
CombinedStream.call(this);
options = options || {};
for (var option in options) {
this[option] = options[option];
}
}
FormData.LINE_BREAK = '\r\n';
FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream';
FormData.prototype.append = function(field, value, options) {
options = options || {};
// allow filename as single option
if (typeof options == 'string') {
options = {filename: options};
}
var append = CombinedStream.prototype.append.bind(this);
// all that streamy business can't handle numbers
if (typeof value == 'number') {
value = '' + value;
}
// https://github.com/felixge/node-form-data/issues/38
if (util.isArray(value)) {
// Please convert your array into string
// the way web server expects it
this._error(new Error('Arrays are not supported.'));
return;
}
var header = this._multiPartHeader(field, value, options);
var footer = this._multiPartFooter();
append(header);
append(value);
append(footer);
// pass along options.knownLength
this._trackLength(header, value, options);
};
FormData.prototype._trackLength = function(header, value, options) {
var valueLength = 0;
// used w/ getLengthSync(), when length is known.
// e.g. for streaming directly from a remote server,
// w/ a known file a size, and not wanting to wait for
// incoming file to finish to get its size.
if (options.knownLength != null) {
valueLength += +options.knownLength;
} else if (Buffer.isBuffer(value)) {
valueLength = value.length;
} else if (typeof value === 'string') {
valueLength = Buffer.byteLength(value);
}
this._valueLength += valueLength;
// @check why add CRLF? does this account for custom/multiple CRLFs?
this._overheadLength +=
Buffer.byteLength(header) +
FormData.LINE_BREAK.length;
// empty or either doesn't have path or not an http response or not a stream
if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) {
return;
}
// no need to bother with the length
if (!options.knownLength) {
this._valuesToMeasure.push(value);
}
};
FormData.prototype._lengthRetriever = function(value, callback) {
if (value.hasOwnProperty('fd')) {
// take read range into account
// `end` = Infinity > read file till the end
//
// TODO: Looks like there is bug in Node fs.createReadStream
// it doesn't respect `end` options without `start` options
// Fix it when node fixes it.
// https://github.com/joyent/node/issues/7819
if (value.end != undefined && value.end != Infinity && value.start != undefined) {
// when end specified
// no need to calculate range
// inclusive, starts with 0
callback(null, value.end + 1 - (value.start ? value.start : 0));
// not that fast snoopy
} else {
// still need to fetch file size from fs
fs.stat(value.path, function(err, stat) {
var fileSize;
if (err) {
callback(err);
return;
}
// update final size based on the range options
fileSize = stat.size - (value.start ? value.start : 0);
callback(null, fileSize);
});
}
// or http response
} else if (value.hasOwnProperty('httpVersion')) {
callback(null, +value.headers['content-length']);
// or request stream http://github.com/mikeal/request
} else if (value.hasOwnProperty('httpModule')) {
// wait till response come back
value.on('response', function(response) {
value.pause();
callback(null, +response.headers['content-length']);
});
value.resume();
// something else
} else {
callback('Unknown stream');
}
};
FormData.prototype._multiPartHeader = function(field, value, options) {
// custom header specified (as string)?
// it becomes responsible for boundary
// (e.g. to handle extra CRLFs on .NET servers)
if (typeof options.header == 'string') {
return options.header;
}
var contentDisposition = this._getContentDisposition(value, options);
var contentType = this._getContentType(value, options);
var contents = '';
var headers = {
// add custom disposition as third element or keep it two elements if not
'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []),
// if no content type. allow it to be empty array
'Content-Type': [].concat(contentType || [])
};
// allow custom headers.
if (typeof options.header == 'object') {
populate(headers, options.header);
}
var header;
for (var prop in headers) {
if (!headers.hasOwnProperty(prop)) continue;
header = headers[prop];
// skip nullish headers.
if (header == null) {
continue;
}
// convert all headers to arrays.
if (!Array.isArray(header)) {
header = [header];
}
// add non-empty headers.
if (header.length) {
contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK;
}
}
return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK;
};
FormData.prototype._getContentDisposition = function(value, options) {
var filename
, contentDisposition
;
if (typeof options.filepath === 'string') {
// custom filepath for relative paths
filename = path.normalize(options.filepath).replace(/\\/g, '/');
} else if (options.filename || value.name || value.path) {
// custom filename take precedence
// formidable and the browser add a name property
// fs- and request- streams have path property
filename = path.basename(options.filename || value.name || value.path);
} else if (value.readable && value.hasOwnProperty('httpVersion')) {
// or try http response
filename = path.basename(value.client._httpMessage.path || '');
}
if (filename) {
contentDisposition = 'filename="' + filename + '"';
}
return contentDisposition;
};
FormData.prototype._getContentType = function(value, options) {
// use custom content-type above all
var contentType = options.contentType;
// or try `name` from formidable, browser
if (!contentType && value.name) {
contentType = mime.lookup(value.name);
}
// or try `path` from fs-, request- streams
if (!contentType && value.path) {
contentType = mime.lookup(value.path);
}
// or if it's an http response
if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) {
contentType = value.headers['content-type'];
}
// or guess it from the filepath or filename
if (!contentType && (options.filepath || options.filename)) {
contentType = mime.lookup(options.filepath || options.filename);
}
// fallback to the default content type if `value` is not simple value
if (!contentType && typeof value == 'object') {
contentType = FormData.DEFAULT_CONTENT_TYPE;
}
return contentType;
};
FormData.prototype._multiPartFooter = function() {
return function(next) {
var footer = FormData.LINE_BREAK;
var lastPart = (this._streams.length === 0);
if (lastPart) {
footer += this._lastBoundary();
}
next(footer);
}.bind(this);
};
FormData.prototype._lastBoundary = function() {
return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK;
};
FormData.prototype.getHeaders = function(userHeaders) {
var header;
var formHeaders = {
'content-type': 'multipart/form-data; boundary=' + this.getBoundary()
};
for (header in userHeaders) {
if (userHeaders.hasOwnProperty(header)) {
formHeaders[header.toLowerCase()] = userHeaders[header];
}
}
return formHeaders;
};
FormData.prototype.getBoundary = function() {
if (!this._boundary) {
this._generateBoundary();
}
return this._boundary;
};
FormData.prototype.getBuffer = function() {
var dataBuffer = Buffer.alloc(0);
var boundary = this.getBoundary();
// Create the form content. Add Line breaks to the end of data.
for (var i = 0, len = this._streams.length; i < len; i++) {
if (typeof this._streams[i] !== 'function') {
// Add content to the buffer.
if(Buffer.isBuffer(this._streams[i])) {
dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]);
}else {
dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]);
}
// Add break after content.
if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) {
dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] );
}
}
}
// Add the footer and return the Buffer object.
return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] );
};
FormData.prototype._generateBoundary = function() {
// This generates a 50 character boundary similar to those used by Firefox.
// They are optimized for boyer-moore parsing.
var boundary = '--------------------------';
for (var i = 0; i < 24; i++) {
boundary += Math.floor(Math.random() * 10).toString(16);
}
this._boundary = boundary;
};
// Note: getLengthSync DOESN'T calculate streams length
// As workaround one can calculate file size manually
// and add it as knownLength option
FormData.prototype.getLengthSync = function() {
var knownLength = this._overheadLength + this._valueLength;
// Don't get confused, there are 3 "internal" streams for each keyval pair
// so it basically checks if there is any value added to the form
if (this._streams.length) {
knownLength += this._lastBoundary().length;
}
// https://github.com/form-data/form-data/issues/40
if (!this.hasKnownLength()) {
// Some async length retrievers are present
// therefore synchronous length calculation is false.
// Please use getLength(callback) to get proper length
this._error(new Error('Cannot calculate proper length in synchronous way.'));
}
return knownLength;
};
// Public API to check if length of added values is known
// https://github.com/form-data/form-data/issues/196
// https://github.com/form-data/form-data/issues/262
FormData.prototype.hasKnownLength = function() {
var hasKnownLength = true;
if (this._valuesToMeasure.length) {
hasKnownLength = false;
}
return hasKnownLength;
};
FormData.prototype.getLength = function(cb) {
var knownLength = this._overheadLength + this._valueLength;
if (this._streams.length) {
knownLength += this._lastBoundary().length;
}
if (!this._valuesToMeasure.length) {
process.nextTick(cb.bind(this, null, knownLength));
return;
}
asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) {
if (err) {
cb(err);
return;
}
values.forEach(function(length) {
knownLength += length;
});
cb(null, knownLength);
});
};
FormData.prototype.submit = function(params, cb) {
var request
, options
, defaults = {method: 'post'}
;
// parse provided url if it's string
// or treat it as options object
if (typeof params == 'string') {
params = parseUrl(params);
options = populate({
port: params.port,
path: params.pathname,
host: params.hostname,
protocol: params.protocol
}, defaults);
// use custom params
} else {
options = populate(params, defaults);
// if no port provided use default one
if (!options.port) {
options.port = options.protocol == 'https:' ? 443 : 80;
}
}
// put that good code in getHeaders to some use
options.headers = this.getHeaders(params.headers);
// https if specified, fallback to http in any other case
if (options.protocol == 'https:') {
request = https.request(options);
} else {
request = http.request(options);
}
// get content length and fire away
this.getLength(function(err, length) {
if (err && err !== 'Unknown stream') {
this._error(err);
return;
}
// add content length
if (length) {
request.setHeader('Content-Length', length);
}
this.pipe(request);
if (cb) {
var onResponse;
var callback = function (error, response) {
request.removeListener('error', callback);
request.removeListener('response', onResponse);
return cb.call(this, error, response);
};
onResponse = callback.bind(this, null);
request.on('error', callback);
request.on('response', onResponse);
}
}.bind(this));
return request;
};
FormData.prototype._error = function(err) {
if (!this.error) {
this.error = err;
this.pause();
this.emit('error', err);
}
};
FormData.prototype.toString = function () {
return '[object FormData]';
};

10
node_modules/@discordjs/form-data/lib/populate.js generated vendored Normal file
View file

@ -0,0 +1,10 @@
// populates missing values
module.exports = function(dst, src) {
Object.keys(src).forEach(function(prop)
{
dst[prop] = dst[prop] || src[prop];
});
return dst;
};

64
node_modules/@discordjs/form-data/package.json generated vendored Normal file
View file

@ -0,0 +1,64 @@
{
"author": "Felix Geisendörfer <felix@debuggable.com> (http://debuggable.com/)",
"name": "@discordjs/form-data",
"description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.",
"version": "3.0.1",
"repository": {
"type": "git",
"url": "git://github.com/form-data/form-data.git"
},
"main": "./lib/form_data",
"browser": "./lib/browser",
"typings": "./index.d.ts",
"scripts": {
"pretest": "rimraf coverage test/tmp",
"test": "istanbul cover test/run.js",
"posttest": "istanbul report lcov text",
"lint": "eslint lib/*.js test/*.js test/integration/*.js",
"report": "istanbul report lcov text",
"ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8",
"ci-test": "npm run test && npm run browser && npm run report",
"predebug": "rimraf coverage test/tmp",
"debug": "verbose=1 ./test/run.js",
"browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage",
"check": "istanbul check-coverage coverage/coverage*.json",
"files": "pkgfiles --sort=name",
"get-version": "node -e \"console.log(require('./package.json').version)\""
},
"pre-commit": [
"lint",
"ci-test",
"check"
],
"engines": {
"node": ">= 6"
},
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"devDependencies": {
"@types/node": "^12.0.10",
"browserify": "^13.1.1",
"browserify-istanbul": "^2.0.0",
"coveralls": "^3.0.4",
"cross-spawn": "^6.0.5",
"eslint": "^6.0.1",
"fake": "^0.2.2",
"far": "^0.0.7",
"formidable": "^1.0.17",
"in-publish": "^2.0.0",
"is-node-modern": "^1.0.0",
"istanbul": "^0.4.5",
"obake": "^0.1.2",
"puppeteer": "^1.19.0",
"pkgfiles": "^2.3.0",
"pre-commit": "^1.1.3",
"request": "^2.88.0",
"rimraf": "^2.7.1",
"tape": "^4.6.2",
"typescript": "^3.5.2"
},
"license": "MIT"
}

3
node_modules/@discordjs/node-pre-gyp/.eslintrc.json generated vendored Normal file
View file

@ -0,0 +1,3 @@
{
"extends": "aqua/prettier/node"
}

1
node_modules/@discordjs/node-pre-gyp/.gitattributes generated vendored Normal file
View file

@ -0,0 +1 @@
* text=auto eol=lf

View file

@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
- The use of sexualized language or imagery and unwelcome sexual attention or
advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies within all project spaces, and it also applies when
an individual is representing the project or its community in public spaces.
Examples of representing a project or community include using an official
project e-mail address, posting via an official social media account, or acting
as an appointed representative at an online or offline event. Representation of
a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at https://discord.gg/bRCvFy9. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

View file

@ -0,0 +1,91 @@
## Git Commit Message Convention
> This is adapted from [Angular's commit convention](https://github.com/conventional-changelog/conventional-changelog/tree/master/packages/conventional-changelog-angular).
#### TL;DR:
Messages must be matched by the following regex:
```js
/^(revert: )?(feat|fix|docs|style|refactor|perf|test|workflow|build|ci|chore|types|wip)(\(.+\))?: .{1,72}/;
```
#### Examples
Appears under "Features" header, `GuildMember` subheader:
```
feat(GuildMember): add 'tag' method
```
Appears under "Bug Fixes" header, `Guild` subheader, with a link to issue #28:
```
fix(Guild): handle events correctly
close #28
```
Appears under "Performance Improvements" header, and under "Breaking Changes" with the breaking change explanation:
```
perf(core): improve patching by removing 'bar' option
BREAKING CHANGE: The 'bar' option has been removed.
```
The following commit and commit `667ecc1` do not appear in the changelog if they are under the same release. If not, the revert commit appears under the "Reverts" header.
```
revert: feat(Managers): add Managers
This reverts commit 667ecc1654a317a13331b17617d973392f415f02.
```
### Full Message Format
A commit message consists of a **header**, **body** and **footer**. The header has a **type**, **scope** and **subject**:
```
<type>(<scope>): <subject>
<BLANK LINE>
<body>
<BLANK LINE>
<footer>
```
The **header** is mandatory and the **scope** of the header is optional.
### Revert
If the commit reverts a previous commit, it should begin with `revert:`, followed by the header of the reverted commit. In the body, it should say: `This reverts commit <hash>.`, where the hash is the SHA of the commit being reverted.
### Type
If the prefix is `feat`, `fix` or `perf`, it will appear in the changelog. However, if there is any [BREAKING CHANGE](#footer), the commit will always appear in the changelog.
Other prefixes are up to your discretion. Suggested prefixes are `docs`, `chore`, `style`, `refactor`, and `test` for non-changelog related tasks.
### Scope
The scope could be anything specifying the place of the commit change. For example `GuildMember`, `Guild`, `Message`, `MessageEmbed` etc...
### Subject
The subject contains a succinct description of the change:
- use the imperative, present tense: "change" not "changed" nor "changes"
- don't capitalize the first letter
- no dot (.) at the end
### Body
Just as in the **subject**, use the imperative, present tense: "change" not "changed" nor "changes".
The body should include the motivation for the change and contrast this with previous behavior.
### Footer
The footer should contain any information about **Breaking Changes** and is also the place to
reference GitHub issues that this commit **Closes**.
**Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines. The rest of the commit message is then used for this.

View file

@ -0,0 +1,39 @@
# @discordjs/opus Contributing Guide
- [Code of Conduct](https://github.com/discordjs/discord.js-next/blob/master/.github/CODE_OF_CONDUCT.md)
- [Pull Request Guidelines](#pull-request-guidelines)
- [Development Setup](#development-setup)
## Pull Request Guidelines
- Checkout a topic branch from a base branch, e.g. `master`, and merge back against that branch.
- If adding a new feature:
- Provide a convincing reason to add this feature. Ideally, you should open a suggestion issue first and have it approved before working on it.
- If fixing a bug:
- If you are resolving a special issue, add `fix/close #xxxx[,#xxxx]` (#xxxx is the issue id) in your PR body for a better release log, e.g.
```
fix(Guild): handle events correctly
close #28
```
- Provide a detailed description of the bug in the PR. Live demo preferred.
- It's OK to have multiple small commits as you work on the PR - GitHub can automatically squash them before merging.
- Commit messages must follow the [commit message convention](./COMMIT_CONVENTION.md) so that changelogs can be automatically generated. Commit messages are automatically validated before commit (by invoking [Git Hooks](https://git-scm.com/docs/githooks) via [husky](https://github.com/typicode/husky)).
## Development Setup
You will need [Node.js](http://nodejs.org) **version 12+**, and [npm](https://www.npmjs.com/).
After cloning the repo, run:
```bash
$ npm i # install the dependencies of the project
```

View file

@ -0,0 +1,22 @@
---
name: Bug report
about: Report incorrect or unexpected behaviour
title: ''
labels: 'bug'
assignees: ''
---
**Please describe the problem you are having in as much detail as possible:**
**Include a reproducible code sample here, if possible:**
```js
// Place your code here
```
**Further details:**
- @discordjs/node-pre-gyp version:
- Node.js version:
- Operating system:
- Priority this issue should have (please be realistic and elaborate if possible):

View file

@ -0,0 +1 @@
blank_issues_enabled: false

View file

@ -0,0 +1,19 @@
---
name: Feature request
about: Request a feature
title: ''
labels: 'feature request'
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the ideal solution**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View file

@ -0,0 +1,7 @@
**Please describe the changes this PR makes and why it should be merged:**
**Semantic versioning classification:**
- [ ] This PR changes the library's interface (methods or parameters added)
- [ ] This PR includes breaking changes (methods removed or renamed, parameters moved or removed)
- [ ] This PR **only** includes non-code changes, like changes to documentation, README, etc.

View file

@ -0,0 +1,8 @@
{
"printWidth": 120,
"useTabs": true,
"singleQuote": true,
"quoteProps": "as-needed",
"trailingComma": "all",
"endOfLine": "lf"
}

27
node_modules/@discordjs/node-pre-gyp/LICENSE generated vendored Normal file
View file

@ -0,0 +1,27 @@
Copyright (c), Mapbox
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of node-pre-gyp nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

369
node_modules/@discordjs/node-pre-gyp/README.md generated vendored Normal file
View file

@ -0,0 +1,369 @@
# @discordjs/node-pre-gyp ![Test](https://github.com/discordjs/node-pre-gyp/workflows/Test/badge.svg)
> node-pre-gyp makes it easy to publish and install Node.js C++ addons from binaries
`@discordjs/node-pre-gyp` stands between [npm](https://github.com/npm/npm) and [node-gyp](https://github.com/Tootallnate/node-gyp) and offers a cross-platform method of binary deployment.
# Features
- A command line tool called `node-pre-gyp` that can install your package's C++ module from a binary.
- A variety of developer targeted commands for packaging, testing, and publishing binaries.
- A JavaScript module that can dynamically require your installed binary: `require('@discordjs/node-pre-gyp').find`
For a hello world example of a module packaged with `node-pre-gyp` see <https://github.com/springmeyer/node-addon-example> and [the wiki](https://github.com/mapbox/node-pre-gyp/wiki/Modules-using-node-pre-gyp) for real world examples.
# Credits
- The module is modeled after [node-gyp](https://github.com/Tootallnate/node-gyp) by [@Tootallnate](https://github.com/Tootallnate)
- Motivation for initial development came from [@ErisDS](https://github.com/ErisDS) and the [Ghost Project](https://github.com/TryGhost/Ghost).
- Development is sponsored by [Mapbox](https://www.mapbox.com/)
# FAQ
See the [Frequently Asked Questions](https://github.com/mapbox/node-pre-gyp/wiki/FAQ).
# Usage
## Commands
View all possible commands:
node-pre-gyp --help
- clean - Remove the entire folder containing the compiled .node module
- install - Install pre-built binary for module
- reinstall - Run "clean" and "install" at once
- build - Compile the module by dispatching to node-gyp or nw-gyp
- rebuild - Run "clean" and "build" at once
- package - Pack binary into tarball
- testpackage - Test that the staged package is valid
You can also chain commands:
```bash
node-pre-gyp clean build package
```
## Options
Options include:
- `-C/--directory`: run the command in this directory
- `--build-from-source`: build from source instead of using pre-built binary
- `--update-binary`: reinstall by replacing previously installed local binary with remote binary
- `--runtime=electron`: customize the runtime: `node` and `electron` are the valid options
- `--fallback-to-build`: fallback to building from source if pre-built binary is not available
- `--target=0.4.0`: Pass the target node version to compile against
- `--target_arch=ia32`: Pass the target arch and override the host `arch`. Valid values are `ia32`, `x64`, or `arm`.
- `--target_platform=win32`: Pass the target platform and override the host `platform`. Valid values are `linux`, `darwin`, `win32`, `sunos`, `freebsd`, `openbsd`, and `aix`.
Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module.
For example: `npm install --build-from-source=myapp`. This is useful if:
- `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependency with `npm install`.
- The larger app also depends on other modules installed with `node-pre-gyp`
- You only want to trigger a source compile for `myapp` and the other modules.
# Configuring
This is a guide to configuring your module to use node-pre-gyp.
## 1) Add new entries to your `package.json`
- Add `node-pre-gyp` to `dependencies`
- Add a custom `install` script
- Declare a `binary` object
This looks like:
```json
"dependencies" : {
"@discordjs/node-pre-gyp": "0.1.x"
},
"scripts": {
"install": "node-pre-gyp install --fallback-to-build"
},
"binary": {
"module_name": "your_module",
"module_path": "./lib/binding/",
"host": "https://your_module.s3-us-west-1.amazonaws.com"
}
```
For a full example see [node-addon-examples's package.json](https://github.com/springmeyer/node-addon-example/blob/master/package.json).
Let's break this down:
- Dependencies need to list `node-pre-gyp`
- Your `scripts` section should override the `install` target with `"install": "node-pre-gyp install --fallback-to-build"`. This allows node-pre-gyp to be used instead of the default npm behavior of always source compiling with `node-gyp` directly.
- Your package.json should contain a `binary` section describing key properties you provide to allow node-pre-gyp to package optimally. They are detailed below.
### The `binary` object has three required properties
### module_name
The name of your native node module. This value must:
- Match the name passed to [the NODE_MODULE macro](http://nodejs.org/api/addons.html#addons_hello_world)
- Must be a valid C variable name (e.g. it cannot contain `-`)
- Should not include the `.node` extension.
### module_path
The location where your native module is placed after a build. This should be an empty directory without other JavaScript files. This entire directory will be packaged in the binary tarball. When installing from a remote package, this directory will be overwritten with the contents of the tarball.
Note: This property supports variables based on [Versioning](#versioning).
### host
A URL to the remote location where you've published tarball binaries (must be `https`, not `http`).
### remote_path
It **is recommended** that you customize this property. This is an extra path to use for publishing and finding remote tarballs. The default value for `remote_path` is `""` meaning that if you do not provide it then all packages will be published at the base of the `host`. It is recommended to provide a value like `./{name}/v{version}` to help organize remote packages in the case that you choose to publish multiple node addons to the same `host`.
Note: This property supports variables based on [Versioning](#versioning).
### package_name
It is **not recommended** to override this property unless you are also overriding the `remote_path`. This is the versioned name of the remote tarball containing the binary `.node` module and any supporting files you've placed inside the `module_path` directory. Unless you specify `package_name` in your `package.json` then it defaults to `{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz` which allows your binary to work across node versions, platforms, and architectures. If you are using `remote_path` that is also versioned by `./{module_name}/v{version}` then you could remove these variables from the `package_name` and just use: `{node_abi}-{platform}-{arch}.tar.gz`. Then your remote tarball will be looked up at, for example, `https://example.com/your-module/v0.1.0/node-v11-linux-x64.tar.gz`.
Avoiding the version of your module in the `package_name`, and instead only embedding it in a directory name, can be useful when you want to make a quick tag of your module that does not change any C++ code.
Note: This property supports variables based on [Versioning](#versioning).
## 2) Add a new target to binding.gyp
`node-pre-gyp` calls out to `node-gyp` to compile the module and passes variables along like [module_name](#module_name) and [module_path](#module_path).
A new target must be added to `binding.gyp` that moves the compiled `.node` module from `./build/Release/module_name.node` into the directory specified by `module_path`.
Add a target like this at the end of your `targets` list:
```json
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "<(module_name)" ],
"copies": [
{
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
"destination": "<(module_path)"
}
]
}
```
For a full example see [node-addon-example's binding.gyp](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/binding.gyp).
## 3) Dynamically require your `.node`
Inside the main js file that requires your addon module you are likely currently doing:
```js
const binding = require('../build/Release/binding.node');
```
or:
```js
const bindings = require('./bindings')
```
Change those lines to:
```js
const binary = require('@discordjs/node-pre-gyp');
const path = require('path');
const binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
const binding = require(binding_path);
```
For a full example see [node-addon-example's index.js](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/index.js#L1-L4)
## 4) Build and package your app
Now build your module from source:
```bash
npm install --build-from-source
```
The `--build-from-source` flag tells `node-pre-gyp` not to look for a remote package and instead dispatch to node-gyp to build.
`node-pre-gyp` should now also be installed as a local dependency, so the command line tool it offers can be found at `./node_modules/.bin/node-pre-gyp`.
## 5) Test
Now `npm test` should work just as it did before.
## 6) Publish the tarball
Then package your app:
./node_modules/.bin/node-pre-gyp package
Once packaged, you can also host your binaries. Doing this requires:
- You manually publish the binary created by the `package` command to an `https` endpoint
- Ensure that the `host` value points to your custom `https` endpoint.
## 7) You're done!
Now publish your module to the npm registry. Users will now be able to install your module from a binary.
What will happen is this:
1. `npm install <your package>` will pull from the npm registry
2. npm will run the `install` script which will call out to `node-pre-gyp`
3. `node-pre-gyp` will fetch the binary `.node` module and unpack it in the right place
4. Assuming that all worked, you are done
If a binary was not available for a given platform and `--fallback-to-build` was used, then `node-gyp rebuild` will be called to try to source compile the module.
## N-API Considerations
[Node-API](https://nodejs.org/api/n-api.html#n_api_node_api), which was previously known as N-API, is an ABI-stable alternative to previous technologies such as [nan](https://github.com/nodejs/nan) which are tied to a specific Node runtime engine. Node-API is Node runtime engine agnostic and guarantees modules created today will continue to run, without changes, into the future.
Using `node-pre-gyp` with Node-API projects requires a handful of additional configuration values and imposes some additional requirements.
The most significant difference is that a Node-API module can be coded to target multiple Node-API versions. Therefore, a Node-API module must declare in its `package.json` file which Node-API versions the module is designed to run against. In addition, since multiple builds may be required for a single module, path and file names must be specified in a way that avoids naming conflicts.
## The `napi_versions` array property
A Node-API module must declare in its `package.json` file the Node-API versions the module is intended to support. This is accomplished by including a `napi_versions` array property in the `binary` object. For example:
```json
"binary": {
"module_name": "your_module",
"module_path": "your_module_path",
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
"napi_versions": [1,3]
}
```
If the `napi_versions` array property is *not* present, `node-pre-gyp` operates as it always has. Including the `napi_versions` array property instructs `node-pre-gyp` that this is a Node-API module build.
When the `napi_versions` array property is present, `node-pre-gyp` fires off multiple operations, one for each of the Node-API versions in the array. In the example above, two operations are initiated, one for Node-API version 1 and a second for Node-API version 3. How this version number is communicated is described next.
## The `napi_build_version` value
For each of the Node-API module operations `node-pre-gyp` initiates, it ensures that the `napi_build_version` is set appropriately.
This value is of importance in two areas:
1. The C/C++ code which needs to know against which Node-API version it should compile.
2. `node-pre-gyp` itself which must assign appropriate path and file names to avoid collisions.
## Defining `NAPI_VERSION` for the C/C++ code
The `napi_build_version` value is communicated to the C/C++ code by adding this code to the `binding.gyp` file:
```json
"defines": [
"NAPI_VERSION=<(napi_build_version)",
]
```
This ensures that `NAPI_VERSION`, an integer value, is declared appropriately to the C/C++ code for each build.
> Note that earlier versions of this document recommended defining the symbol `NAPI_BUILD_VERSION`. `NAPI_VERSION` is preferred because it is used by the Node-API C/C++ headers to configure the specific Node-API versions being requested.
## Path and file naming requirements in `package.json`
Since `node-pre-gyp` fires off multiple operations for each request, it is essential that path and file names be created in such a way as to avoid collisions. This is accomplished by imposing additional path and file naming requirements.
Specifically, when performing Node-API builds, the `{napi_build_version}` text configuration value *must* be present in the `module_path` property. In addition, the `{napi_build_version}` text configuration value *must* be present in either the `remote_path` or `package_name` property. (No problem if it's in both.)
Here's an example:
```json
"binary": {
"module_name": "your_module",
"module_path": "./lib/binding/napi-v{napi_build_version}",
"remote_path": "./{module_name}/v{version}/{configuration}/",
"package_name": "{platform}-{arch}-napi-v{napi_build_version}.tar.gz",
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
"napi_versions": [1,3]
}
```
## Supporting both N-API and NAN builds
You may have a legacy native add-on that you wish to continue supporting for those versions of Node that do not support Node-API, as you add Node-API support for later Node versions. This can be accomplished by specifying the `node_napi_label` configuration value in the package.json `binary.package_name` property.
Placing the configuration value `node_napi_label` in the package.json `binary.package_name` property instructs `node-pre-gyp` to build all viable Node-API binaries supported by the current Node instance. If the current Node instance does not support Node-API, `node-pre-gyp` will request a traditional, non-Node-API build.
The configuration value `node_napi_label` is set by `node-pre-gyp` to the type of build created, `napi` or `node`, and the version number. For Node-API builds, the string contains the Node-API version and has values like `napi-v3`. For traditional, non-Node-API builds, the string contains the ABI version with values like `node-v46`.
Here's how the `binary` configuration above might be changed to support both Node-API and NAN builds:
```json
"binary": {
"module_name": "your_module",
"module_path": "./lib/binding/{node_napi_label}",
"remote_path": "./{module_name}/v{version}/{configuration}/",
"package_name": "{platform}-{arch}-{node_napi_label}.tar.gz",
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
"napi_versions": [1,3]
}
```
The C/C++ symbol `NAPI_VERSION` can be used to distinguish Node-API and non-Node-API builds. The value of `NAPI_VERSION` is set to the integer Node-API version for Node-API builds and is set to `0` for non-Node-API builds.
For example:
```C
#if NAPI_VERSION
// Node-API code goes here
#else
// NAN code goes here
#endif
```
## Two additional configuration values
The following two configuration values, which were implemented in previous versions of `node-pre-gyp`, continue to exist, but have been replaced by the `node_napi_label` configuration value described above.
1. `napi_version` If Node-API is supported by the currently executing Node instance, this value is the Node-API version number supported by Node. If Node-API is not supported, this value is an empty string.
2. `node_abi_napi` If the value returned for `napi_version` is non-empty, this value is `'napi'`. If the value returned for `napi_version` is empty, this value is the value returned for `node_abi`.
These values are present for use in the `binding.gyp` file and may be used as `{napi_version}` and `{node_abi_napi}` for text substitution in the `binary` properties of the `package.json` file.
# Versioning
The `binary` properties of `module_path`, `remote_path`, and `package_name` support variable substitution. The strings are evaluated by `node-pre-gyp` depending on your system and any custom build flags you passed.
- `node_abi`: The node C++ `ABI` number. This value is available in JavaScript as `process.versions.modules` as of [`>= v0.10.4 >= v0.11.7`](https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e) and in C++ as the `NODE_MODULE_VERSION` define much earlier. For versions of Node before this was available we fall back to the V8 major and minor version.
- `platform` matches node's `process.platform` like `linux`, `darwin`, and `win32` unless the user passed the `--target_platform` option to override.
- `arch` matches node's `process.arch` like `x64` or `ia32` unless the user passes the `--target_arch` option to override.
- `libc` matches `require('detect-libc').family` like `glibc` or `musl` unless the user passes the `--target_libc` option to override.
- `libc_version` matches `require('detect-libc').version`
- `configuration` - Either 'Release' or 'Debug' depending on whether `--debug` is passed during the build.
- `module_name` - the `binary.module_name` attribute from `package.json`.
- `version` - the semver `version` value for your module from `package.json` (NOTE: ignores the `semver.build` property).
- `major`, `minor`, `patch`, and `prerelease` match the individual semver values for your module's `version`
- `build` - the semver `build` value. For example it would be `this.that` if your package.json `version` was `v1.0.0+this.that`
- `prerelease` - the semver `prerelease` value. For example it would be `alpha.beta` if your package.json `version` was `v1.0.0-alpha.beta`
The options are visible in the code at <https://github.com/mapbox/node-pre-gyp/blob/612b7bca2604508d881e1187614870ba19a7f0c5/lib/util/versioning.js#L114-L127>
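As an illustration only (the `substitute` helper below is hypothetical, not the library's own API), the default `package_name` template expands from values like these:
```js
// Hypothetical substitution helper, for illustration; node-pre-gyp performs
// this expansion internally (see lib/util/versioning.js).
function substitute(template, values) {
  return template.replace(/{(\w+)}/g, (match, key) => (values[key] != null ? values[key] : match));
}
const values = {
  module_name: 'your_module',
  version: '0.1.0', // from package.json
  node_abi: 'node-v' + process.versions.modules,
  platform: process.platform, // e.g. 'linux'
  arch: process.arch // e.g. 'x64'
};
console.log(substitute('{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz', values));
// e.g. your_module-v0.1.0-node-v83-linux-x64.tar.gz
```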
# Download binary files from a mirror
S3 is broken in China for well-known reasons.
Using the `npm` config argument `--{module_name}_binary_host_mirror`, you can download binary files through a mirror; any `-` in `module_name` will be replaced with `_`.
e.g.: Install [v8-profiler](https://www.npmjs.com/package/v8-profiler) from `npm`.
```bash
$ npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
```
e.g.: Install [canvas-prebuilt](https://www.npmjs.com/package/canvas-prebuilt) from `npm`.
```bash
$ npm install canvas-prebuilt --canvas_prebuilt_binary_host_mirror=https://npm.taobao.org/mirrors/canvas-prebuilt/
```

View file

@ -0,0 +1,2 @@
#!/usr/bin/env node
require('../lib/main');

View file

@ -0,0 +1,2 @@
@echo off
node "%~dp0\node-pre-gyp" %*

46
node_modules/@discordjs/node-pre-gyp/lib/build.js generated vendored Normal file
View file

@ -0,0 +1,46 @@
module.exports = exports = build;
exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp';
const napi = require('./util/napi.js');
const compile = require('./util/compile.js');
const handle_gyp_opts = require('./util/handle_gyp_opts.js');
const configure = require('./configure.js');
function do_build(gyp, argv, callback) {
handle_gyp_opts(gyp, argv, (err, result) => {
let final_args = ['build'].concat(result.gyp).concat(result.pre);
if (result.unparsed.length > 0) {
final_args = final_args.concat(['--']).concat(result.unparsed);
}
if (!err && result.opts.napi_build_version) {
napi.swap_build_dir_in(result.opts.napi_build_version);
}
compile.run_gyp(final_args, result.opts, (err2) => {
if (result.opts.napi_build_version) {
napi.swap_build_dir_out(result.opts.napi_build_version);
}
return callback(err2);
});
});
}
function build(gyp, argv, callback) {
// Form up commands to pass to node-gyp:
// We map `node-pre-gyp build` to `node-gyp configure build` so that we do not
// trigger a clean and therefore do not pay the penalty of a full recompile
if (argv.length && argv.indexOf('rebuild') > -1) {
argv.shift(); // remove `rebuild`
// here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means
// "clean + configure + build" and triggers a full recompile
compile.run_gyp(['clean'], {}, (err3) => {
if (err3) return callback(err3);
configure(gyp, argv, (err4) => {
if (err4) return callback(err4);
return do_build(gyp, argv, callback);
});
});
} else {
return do_build(gyp, argv, callback);
}
}

28
node_modules/@discordjs/node-pre-gyp/lib/clean.js generated vendored Normal file
View file

@ -0,0 +1,28 @@
module.exports = exports = clean;
exports.usage = 'Removes the entire folder containing the compiled .node module';
const rm = require('rimraf');
const exists = require('fs').exists || require('path').exists;
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const path = require('path');
function clean(gyp, argv, callback) {
const { package_json } = gyp;
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
const to_delete = opts.module_path;
if (!to_delete) {
return callback(new Error('module_path is empty, refusing to delete'));
} else if (path.normalize(to_delete) === path.normalize(process.cwd())) {
return callback(new Error('module_path is not set, refusing to delete'));
}
exists(to_delete, (found) => {
if (found) {
if (!gyp.opts.silent_clean) console.log(`[${package_json.name}] Removing "%s"`, to_delete);
return rm(to_delete, callback);
}
return callback();
});
}

46
node_modules/@discordjs/node-pre-gyp/lib/configure.js generated vendored Normal file
View file

@ -0,0 +1,46 @@
module.exports = exports = configure;
exports.usage = 'Attempts to configure node-gyp or nw-gyp build';
const napi = require('./util/napi.js');
const compile = require('./util/compile.js');
const handle_gyp_opts = require('./util/handle_gyp_opts.js');
function configure(gyp, argv, callback) {
handle_gyp_opts(gyp, argv, (err, result) => {
let final_args = result.gyp.concat(result.pre);
// pull select node-gyp configure options out of the npm environ
const known_gyp_args = ['dist-url', 'python', 'nodedir', 'msvs_version'];
known_gyp_args.forEach((key) => {
const val = gyp.opts[key] || gyp.opts[key.replace('-', '_')];
if (val) {
final_args.push(`--${key}=${val}`);
}
});
// --ensure=false tells node-gyp to re-install node development headers
// but it is only respected by node-gyp install, so we have to call install
// as a separate step if the user passes it
if (gyp.opts.ensure === false) {
const install_args = final_args.concat(['install', '--ensure=false']);
compile.run_gyp(install_args, result.opts, (err2) => {
if (err2) return callback(err2);
if (result.unparsed.length > 0) {
final_args = final_args.concat(['--']).concat(result.unparsed);
}
compile.run_gyp(['configure'].concat(final_args), result.opts, (err3) => {
return callback(err3);
});
});
} else {
if (result.unparsed.length > 0) {
final_args = final_args.concat(['--']).concat(result.unparsed);
}
compile.run_gyp(['configure'].concat(final_args), result.opts, (err4) => {
if (!err4 && result.opts.napi_build_version) {
napi.swap_build_dir_out(result.opts.napi_build_version);
}
return callback(err4);
});
}
});
}

230
node_modules/@discordjs/node-pre-gyp/lib/install.js generated vendored Normal file
View file

@ -0,0 +1,230 @@
'use strict';
module.exports = exports = install;
exports.usage = 'Attempts to install pre-built binary for module';
const fs = require('fs');
const path = require('path');
const log = require('npmlog');
const existsAsync = fs.exists || path.exists;
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const makeDir = require('make-dir');
// for fetching binaries
const fetch = require('node-fetch');
const tar = require('tar');
let npgVersion = 'unknown';
try {
// Read own package.json to get the current node-pre-gyp version.
const ownPackageJSON = fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8');
npgVersion = JSON.parse(ownPackageJSON).version;
} catch (e) {}
function place_binary(uri, targetDir, opts, callback) {
log.http('GET', uri);
// Try getting version info from the currently running npm.
const envVersionInfo = process.env.npm_config_user_agent || `node ${process.version}`;
const sanitized = uri.replace('+', '%2B');
const requestOpts = {
uri: sanitized,
headers: {
'User-Agent': `node-pre-gyp (v${npgVersion}, ${envVersionInfo})`,
},
follow_max: 10,
};
if (opts.cafile) {
try {
requestOpts.ca = fs.readFileSync(opts.cafile);
} catch (e) {
return callback(e);
}
} else if (opts.ca) {
requestOpts.ca = opts.ca;
}
const proxyUrl = opts.proxy || process.env.http_proxy || process.env.HTTP_PROXY || process.env.npm_config_proxy;
let agent;
if (proxyUrl) {
const ProxyAgent = require('https-proxy-agent');
agent = new ProxyAgent(proxyUrl);
log.http('download', 'proxy agent configured using: "%s"', proxyUrl);
}
fetch(sanitized, { agent })
.then((res) => {
if (!res.ok) {
throw new Error(`response status ${res.status} ${res.statusText} on ${sanitized}`);
}
const dataStream = res.body;
return new Promise((resolve, reject) => {
let extractions = 0;
const countExtractions = (entry) => {
extractions += 1;
log.info('install', 'unpacking %s', entry.path);
};
dataStream.pipe(extract(targetDir, countExtractions)).on('error', (e) => {
reject(e);
});
dataStream.on('end', () => {
resolve(`extracted file count: ${extractions}`);
});
dataStream.on('error', (e) => {
reject(e);
});
});
})
.then((text) => {
log.info(text);
callback();
})
.catch((e) => {
log.error(`install ${e.message}`);
callback(e);
});
}
function extract(to, onentry) {
return tar.extract({
cwd: to,
strip: 1,
onentry,
});
}
function extract_from_local(from, targetDir, callback) {
if (!fs.existsSync(from)) {
return callback(new Error(`Cannot find file ${from}`));
}
log.info(`Found local file to extract from ${from}`);
// extract helpers
let extractCount = 0;
function countExtractions(entry) {
extractCount += 1;
log.info('install', `unpacking ${entry.path}`);
}
function afterExtract(err) {
if (err) return callback(err);
if (extractCount === 0) {
return callback(new Error('There was a fatal problem while extracting the tarball'));
}
log.info('tarball', 'done parsing tarball');
callback();
}
fs.createReadStream(from)
.pipe(extract(targetDir, countExtractions))
.on('close', afterExtract)
.on('error', afterExtract);
}
function do_build(gyp, argv, callback) {
const args = ['rebuild'].concat(argv);
gyp.todo.push({ name: 'build', args });
process.nextTick(callback);
}
function print_fallback_error(err, opts, package_json) {
const fallback_message = ' (falling back to source compile with node-gyp)';
let full_message = '';
if (err.statusCode !== undefined) {
// If we got a network response but failed to download it,
// it means remote binaries are not available, so let's try to help
// the user/developer with the info to debug why
full_message = `Pre-built binaries not found for ${package_json.name}@${package_json.version}`;
full_message += ` and ${opts.runtime}@${opts.target || process.versions.node} (${opts.node_abi} ABI, ${opts.libc})`;
full_message += fallback_message;
log.warn(`Tried to download(${err.statusCode}): ${opts.hosted_tarball}`);
log.warn(full_message);
log.http(err.message);
} else {
// If we do not have a statusCode that means an unexpected error
// happened and prevented an http response, so we output the exact error
full_message = `Pre-built binaries not installable for ${package_json.name}@${package_json.version}`;
full_message += ` and ${opts.runtime}@${opts.target || process.versions.node} (${opts.node_abi} ABI, ${opts.libc})`;
full_message += fallback_message;
log.warn(full_message);
log.warn(`Hit error ${err.message}`);
}
}
//
// install
//
function install(gyp, argv, callback) {
const { package_json } = gyp;
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
const source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source;
const update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary;
const should_do_source_build = source_build === package_json.name || source_build === true || source_build === 'true';
if (should_do_source_build) {
log.info('build', 'requesting source compile');
return do_build(gyp, argv, callback);
}
const fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build;
let should_do_fallback_build =
fallback_to_build === package_json.name || fallback_to_build === true || fallback_to_build === 'true';
// but allow override from npm
if (process.env.npm_config_argv) {
const { cooked } = JSON.parse(process.env.npm_config_argv);
const match = cooked.indexOf('--fallback-to-build');
if (match > -1 && cooked.length > match && cooked[match + 1] === 'false') {
should_do_fallback_build = false;
log.info('install', 'Build fallback disabled via npm flag: --fallback-to-build=false');
}
}
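// Hedged sketch of the flag handling above (the argv layout is an assumption
// about how npm populates npm_config_argv, not captured output): running
//   npm install some-native-module --fallback-to-build=false
// might leave process.env.npm_config_argv looking roughly like
//   {"cooked":["install","--fallback-to-build","false"], ...}
// so the scan above finds '--fallback-to-build' followed by 'false' and
// disables the source-compile fallback.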
let opts;
try {
opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
} catch (err) {
return callback(err);
}
opts.ca = gyp.opts.ca;
opts.cafile = gyp.opts.cafile;
const from = opts.hosted_tarball;
const to = opts.module_path;
const binary_module = path.join(to, `${opts.module_name}.node`);
existsAsync(binary_module, (found) => {
if (!update_binary) {
if (found) {
console.log(`[${package_json.name}] Success: "${binary_module}" already installed`);
console.log('Pass --update-binary to reinstall or --build-from-source to recompile');
return callback();
}
log.info('check', `checked for "${binary_module}" (not found)`);
}
makeDir(to)
.then(() => {
const fileName = from.startsWith('file://') && from.slice('file://'.length);
if (fileName) {
extract_from_local(fileName, to, after_place);
} else {
place_binary(from, to, opts, after_place);
}
})
.catch((err) => {
after_place(err);
});
function after_place(err) {
if (err && should_do_fallback_build) {
print_fallback_error(err, opts, package_json);
return do_build(gyp, argv, callback);
} else if (err) {
return callback(err);
}
console.log(`[${package_json.name}] Success: "${binary_module}" is installed via remote`);
return callback();
}
});
}

124
node_modules/@discordjs/node-pre-gyp/lib/main.js generated vendored Normal file
View file

@@ -0,0 +1,124 @@
'use strict';
/**
* Set the title.
*/
process.title = 'node-pre-gyp';
const node_pre_gyp = require('../');
const log = require('npmlog');
/**
* Process and execute the selected commands.
*/
const prog = new node_pre_gyp.Run({ argv: process.argv });
let completed = false;
if (prog.todo.length === 0) {
if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
console.log('v%s', prog.version);
process.exit(0);
} else if (~process.argv.indexOf('-h') || ~process.argv.indexOf('--help')) {
console.log('%s', prog.usage());
process.exit(0);
}
console.log('%s', prog.usage());
process.exit(1);
}
// if --no-color is passed
if (prog.opts && Object.hasOwnProperty.call(prog.opts, 'color') && !prog.opts.color) {
log.disableColor();
}
log.info('it worked if it ends with', 'ok');
log.verbose('cli', process.argv);
log.info('using', `${process.title}@%s`, prog.version);
log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch);
/**
* Change dir if -C/--directory was passed.
*/
const dir = prog.opts.directory;
if (dir) {
const fs = require('fs');
try {
const stat = fs.statSync(dir);
if (stat.isDirectory()) {
log.info('chdir', dir);
process.chdir(dir);
} else {
log.warn('chdir', `${dir} is not a directory`);
}
} catch (e) {
if (e.code === 'ENOENT') {
log.warn('chdir', `${dir} is not a directory`);
} else {
log.warn('chdir', 'error during chdir() "%s"', e.message);
}
}
}
function errorMessage() {
// copied from npm's lib/util/error-handler.js
const os = require('os');
log.error('System', `${os.type()} ${os.release()}`);
log.error('command', process.argv.map(JSON.stringify).join(' '));
log.error('cwd', process.cwd());
log.error('node -v', process.version);
log.error(`${process.title} -v`, `v${prog.package.version}`);
}
function run() {
const command = prog.todo.shift();
if (!command) {
// done!
completed = true;
log.info('ok');
return;
}
// set binary.host when appropriate. host determines the s3 target bucket.
const target = prog.setBinaryHostProperty(command.name);
if (target && ['install', 'publish', 'unpublish', 'info'].indexOf(command.name) >= 0) {
log.info(`using binary.host: ${prog.package_json.binary.host}`);
}
prog.commands[command.name](command.args, (err, ...args_array) => {
if (err) {
log.error(`${command.name} error`);
log.error('stack', err.stack);
errorMessage();
log.error('not ok');
console.log(err.message);
return process.exit(1);
}
// any extra callback arguments beyond err are collected via the rest parameter above
if (args_array.length) {
console.log.apply(console, args_array);
}
// now run the next command in the queue
process.nextTick(run);
});
}
process.on('exit', (code) => {
if (!completed && !code) {
log.error('Completion callback never invoked!');
errorMessage();
process.exit(6);
}
});
process.on('uncaughtException', (err) => {
log.error('UNCAUGHT EXCEPTION');
log.error('stack', err.stack);
errorMessage();
process.exit(7);
});
// start running the given commands!
run();

View file

@@ -0,0 +1,294 @@
'use strict';
/**
* Module exports.
*/
module.exports = exports;
/**
* Module dependencies.
*/
const fs = require('fs');
const path = require('path');
const nopt = require('nopt');
const log = require('npmlog');
log.disableProgress();
const napi = require('./util/napi.js');
const EE = require('events').EventEmitter;
const { inherits } = require('util');
const cli_commands = [
'clean',
'install',
'reinstall',
'build',
'rebuild',
'package',
'testpackage',
'testbinary',
'reveal',
'configure',
];
const aliases = {};
// differentiate node-pre-gyp's logs from npm's
log.heading = 'node-pre-gyp';
// this is a getter to avoid circular reference warnings with node v14.
Object.defineProperty(exports, 'find', {
get() {
return require('./pre-binding').find;
},
enumerable: true,
});
// in the following, "my_module" is using node-pre-gyp to
// prebuild and install pre-built binaries. "main_module"
// is using "my_module".
//
// "bin/node-pre-gyp" invokes Run() without a path. the
// expectation is that the working directory is the package
// root "my_module". this is true because in all cases npm is
// executing a script in the context of "my_module".
//
// "pre-binding.find()" is executed by "my_module" but in the
// context of "main_module". this is because "main_module" is
// executing and requires "my_module" which is then executing
// "pre-binding.find()" via "node-pre-gyp.find()", so the working
// directory is that of "main_module".
//
// that's why "find()" must pass the path to package.json.
//
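// Hedged usage sketch (illustrative only, not part of this module): a
// hypothetical "my_module" would typically resolve its prebuilt binary from
// its own package.json via find(), roughly like:
//   const path = require('path');
//   const nodePreGyp = require('@discordjs/node-pre-gyp');
//   const bindingPath = nodePreGyp.find(path.resolve(__dirname, './package.json'));
//   const binding = require(bindingPath);
// passing an absolute package.json path is what keeps this working even when
// the working directory is that of "main_module", as described above.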
function Run({ package_json_path = './package.json', argv }) {
this.package_json_path = package_json_path;
this.commands = {};
const self = this;
cli_commands.forEach((command) => {
self.commands[command] = function (argvx, callback) {
log.verbose('command', command, argvx);
return require(`./${command}`)(self, argvx, callback);
};
});
this.parseArgv(argv);
// this is set to true after the binary.host property was set to
// either staging_host or production_host.
this.binaryHostSet = false;
}
inherits(Run, EE);
exports.Run = Run;
const proto = Run.prototype;
/**
* Export the contents of the package.json.
*/
proto.package = require('../package.json');
/**
* nopt configuration definitions
*/
proto.configDefs = {
help: Boolean, // everywhere
arch: String, // 'configure'
debug: Boolean, // 'build'
directory: String, // bin
proxy: String, // 'install'
loglevel: String, // everywhere
};
/**
* nopt shorthands
*/
proto.shorthands = {
release: '--no-debug',
C: '--directory',
debug: '--debug',
j: '--jobs',
silent: '--loglevel=silent',
silly: '--loglevel=silly',
verbose: '--loglevel=verbose',
};
/**
* expose the command aliases for the bin file to use.
*/
proto.aliases = aliases;
/**
* Parses the given argv array and sets the 'opts', 'argv',
* 'command', and 'package_json' properties.
*/
proto.parseArgv = function parseOpts(argv) {
this.opts = nopt(this.configDefs, this.shorthands, argv);
this.argv = this.opts.argv.remain.slice();
const commands = (this.todo = []);
// create a copy of the argv array with aliases mapped
argv = this.argv.map((arg) => {
// is this an alias?
if (arg in this.aliases) {
arg = this.aliases[arg];
}
return arg;
});
// process the mapped args into "command" objects ("name" and "args" props)
argv.slice().forEach((arg) => {
if (arg in this.commands) {
const args = argv.splice(0, argv.indexOf(arg));
argv.shift();
if (commands.length > 0) {
commands[commands.length - 1].args = args;
}
commands.push({ name: arg, args: [] });
}
});
if (commands.length > 0) {
commands[commands.length - 1].args = argv.splice(0);
}
// if a directory was specified package.json is assumed to be relative
// to it.
let { package_json_path } = this;
if (this.opts.directory) {
package_json_path = path.join(this.opts.directory, package_json_path);
}
this.package_json = JSON.parse(fs.readFileSync(package_json_path));
// expand commands entries for multiple napi builds
this.todo = napi.expand_commands(this.package_json, this.opts, commands);
// support for inheriting config env variables from npm
const npm_config_prefix = 'npm_config_';
Object.keys(process.env).forEach((name) => {
if (name.indexOf(npm_config_prefix) !== 0) return;
const val = process.env[name];
if (name === `${npm_config_prefix}loglevel`) {
log.level = val;
} else {
// add the user-defined options to the config
name = name.substring(npm_config_prefix.length);
// avoid npm argv clobbering already present args,
// which avoids the problem of 'npm test' calling a
// script that runs unique npm install commands
if (name === 'argv') {
if (this.opts.argv && this.opts.argv.remain && this.opts.argv.remain.length) {
// do nothing
} else {
this.opts[name] = val;
}
} else {
this.opts[name] = val;
}
}
});
if (this.opts.loglevel) {
log.level = this.opts.loglevel;
}
log.resume();
};
/**
* allow the binary.host property to be set at execution time.
*
* for this to take effect, all of the following must be true.
* - binary is a property in package.json
* - binary.host is falsey
* - binary.staging_host is not empty
* - binary.production_host is not empty
*
* if any of the previous checks fail then the function returns an empty string
* and makes no changes to package.json's binary property.
*
*
* if command is "publish" then the default is set to "binary.staging_host"
* if command is not "publish" then the default is set to "binary.production_host"
*
* if the command-line option '--s3_host' is set to "staging" or "production" then
* "binary.host" is set to the specified "staging_host" or "production_host". if
* '--s3_host' is any other value an exception is thrown.
*
* if '--s3_host' is not present then "binary.host" is set to the default as above.
*
* this strategy was chosen so that any command other than "publish" or "unpublish" uses "production"
* as the default without requiring any command-line options but that "publish" and "unpublish" require
* '--s3_host production' to be specified in order to *really* publish (or unpublish). publishing
* to staging can be done freely without worrying about disturbing any production releases.
*/
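// Minimal sketch of the rules above, assuming a hypothetical package.json
// "binary" section (all values are illustrative, not from this repository):
//   "binary": { "host": "", "staging_host": "https://staging.example.com",
//               "production_host": "https://production.example.com" }
// then, roughly:
//   node-pre-gyp install                       -> host = production_host
//   node-pre-gyp publish                       -> host = staging_host
//   node-pre-gyp publish --s3_host=production  -> host = production_host
//   node_pre_gyp_s3_host=staging node-pre-gyp install -> host = staging_host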
proto.setBinaryHostProperty = function (command) {
if (this.binaryHostSet) {
return this.package_json.binary.host;
}
const p = this.package_json;
// don't set anything if host is present. it must be left blank to trigger this.
if (!p || !p.binary || p.binary.host) {
return '';
}
// and both staging and production must be present. errors will be reported later.
if (!p.binary.staging_host || !p.binary.production_host) {
return '';
}
let target = 'production_host';
if (command === 'publish' || command === 'unpublish') {
target = 'staging_host';
}
// the environment variable has priority over the default or the command line. if
// either the env var or the command line option are invalid throw an error.
const npg_s3_host = process.env.node_pre_gyp_s3_host;
if (npg_s3_host === 'staging' || npg_s3_host === 'production') {
target = `${npg_s3_host}_host`;
} else if (this.opts.s3_host === 'staging' || this.opts.s3_host === 'production') {
target = `${this.opts.s3_host}_host`;
} else if (this.opts.s3_host || npg_s3_host) {
throw new Error(`invalid s3_host ${this.opts.s3_host || npg_s3_host}`);
}
p.binary.host = p.binary[target];
this.binaryHostSet = true;
return p.binary.host;
};
/**
* Returns the usage instructions for node-pre-gyp.
*/
proto.usage = function usage() {
const str = [
'',
' Usage: node-pre-gyp <command> [options]',
'',
' where <command> is one of:',
cli_commands
.map((c) => {
return ` - ${c} - ${require(`./${c}`).usage}`;
})
.join('\n'),
'',
`node-pre-gyp@${this.version} ${path.resolve(__dirname, '..')}`,
`node@${process.versions.node}`,
].join('\n');
return str;
};
/**
* Version number getter.
*/
Object.defineProperty(proto, 'version', {
get() {
return this.package.version;
},
enumerable: true,
});

77
node_modules/@discordjs/node-pre-gyp/lib/package.js generated vendored Normal file
View file

@@ -0,0 +1,77 @@
module.exports = exports = _package;
exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball';
const fs = require('fs');
const path = require('path');
const log = require('npmlog');
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const existsAsync = fs.exists || path.exists;
const makeDir = require('make-dir');
const tar = require('tar');
function readdirSync(dir) {
let list = [];
const files = fs.readdirSync(dir);
files.forEach((file) => {
const stats = fs.lstatSync(path.join(dir, file));
if (stats.isDirectory()) {
list = list.concat(readdirSync(path.join(dir, file)));
} else {
list.push(path.join(dir, file));
}
});
return list;
}
function _package(gyp, argv, callback) {
const { package_json } = gyp;
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
const from = opts.module_path;
const binary_module = path.join(from, `${opts.module_name}.node`);
existsAsync(binary_module, (found) => {
if (!found) {
return callback(new Error(`Cannot package because ${binary_module} missing: run \`node-pre-gyp rebuild\` first`));
}
const tarball = opts.staged_tarball;
const filter_func = function (entry) {
const basename = path.basename(entry);
if (basename.length && basename[0] !== '.') {
console.log(`packing ${entry}`);
return true;
}
console.log(`skipping ${entry}`);
return false;
};
makeDir(path.dirname(tarball))
.then(() => {
let files = readdirSync(from);
const base = path.basename(from);
files = files.map((file) => {
return path.join(base, path.relative(from, file));
});
tar.create(
{
portable: false,
gzip: true,
filter: filter_func,
file: tarball,
cwd: path.dirname(from),
},
files,
(err2) => {
if (err2) console.error(`[${package_json.name}] ${err2.message}`);
else log.info('package', `Binary staged at "${tarball}"`);
return callback(err2);
},
);
})
.catch((err) => {
return callback(err);
});
});
}

View file

@@ -0,0 +1,32 @@
const npg = require('..');
const versioning = require('../lib/util/versioning.js');
const napi = require('../lib/util/napi.js');
const existsSync = require('fs').existsSync || require('path').existsSync;
const path = require('path');
module.exports = exports;
exports.usage = 'Finds the require path for the node-pre-gyp installed module';
exports.validate = (package_json, opts) => {
versioning.validate_config(package_json, opts);
};
exports.find = (package_json_path, opts) => {
if (!existsSync(package_json_path)) {
throw new Error(`${package_json_path} does not exist`);
}
const prog = new npg.Run({ package_json_path, argv: process.argv });
prog.setBinaryHostProperty();
const { package_json } = prog;
versioning.validate_config(package_json, opts);
let napi_build_version;
if (napi.get_napi_build_versions(package_json, opts)) {
napi_build_version = napi.get_best_napi_build_version(package_json, opts);
}
opts = opts || {};
if (!opts.module_root) opts.module_root = path.dirname(package_json_path);
const meta = versioning.evaluate(package_json, opts, napi_build_version);
return meta.module;
};

18
node_modules/@discordjs/node-pre-gyp/lib/rebuild.js generated vendored Normal file
View file

@@ -0,0 +1,18 @@
module.exports = exports = rebuild;
exports.usage = 'Runs "clean" and "build" at once';
const napi = require('./util/napi.js');
function rebuild(gyp, argv, callback) {
const { package_json } = gyp;
let commands = [
{ name: 'clean', args: [] },
{ name: 'build', args: ['rebuild'] },
];
commands = napi.expand_commands(package_json, gyp.opts, commands);
for (let i = commands.length; i !== 0; i--) {
gyp.todo.unshift(commands[i - 1]);
}
process.nextTick(callback);
}

14
node_modules/@discordjs/node-pre-gyp/lib/reinstall.js generated vendored Normal file
View file

@@ -0,0 +1,14 @@
module.exports = exports = rebuild;
exports.usage = 'Runs "clean" and "install" at once';
const napi = require('./util/napi.js');
function rebuild(gyp, argv, callback) {
const { package_json } = gyp;
let installArgs = [];
const napi_build_version = napi.get_best_napi_build_version(package_json, gyp.opts);
if (napi_build_version != null) installArgs = [napi.get_command_arg(napi_build_version)];
gyp.todo.unshift({ name: 'clean', args: [] }, { name: 'install', args: installArgs });
process.nextTick(callback);
}

30
node_modules/@discordjs/node-pre-gyp/lib/reveal.js generated vendored Normal file
View file

@@ -0,0 +1,30 @@
module.exports = exports = reveal;
exports.usage = 'Reveals data on the versioned binary';
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
function unix_paths(key, val) {
return val && val.replace ? val.replace(/\\/g, '/') : val;
}
function reveal(gyp, argv, callback) {
const { package_json } = gyp;
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
let hit = false;
// if a second arg is passed look to see
// if it is a known option
// console.log(JSON.stringify(gyp.opts,null,1))
const remain = gyp.opts.argv.remain[gyp.opts.argv.remain.length - 1];
if (remain && Object.hasOwnProperty.call(opts, remain)) {
console.log(opts[remain].replace(/\\/g, '/'));
hit = true;
}
// otherwise return all options as json
if (!hit) {
console.log(JSON.stringify(opts, unix_paths, 2));
}
return callback();
}

40
node_modules/@discordjs/node-pre-gyp/lib/testbinary.js generated vendored Normal file
View file

@@ -0,0 +1,40 @@
module.exports = exports = testbinary;
exports.usage = 'Tests that the binary.node can be required';
const log = require('npmlog');
const cp = require('child_process');
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
function testbinary(gyp, argv, callback) {
const args = [];
const options = {};
const shell_cmd = process.execPath;
const { package_json } = gyp;
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
// skip validation for runtimes we don't explicitly support (like electron)
if (opts.runtime && opts.runtime !== 'node-webkit' && opts.runtime !== 'node') {
return callback();
}
// ensure on windows that / are used for require path
const binary_module = opts.module.replace(/\\/g, '/');
if (process.arch !== opts.target_arch || process.platform !== opts.target_platform) {
let msg = 'skipping validation since host platform/arch (';
msg += `${process.platform}/${process.arch})`;
msg += ' does not match target (';
msg += `${opts.target_platform}/${opts.target_arch})`;
log.info('validate', msg);
return callback();
}
args.push('--eval');
args.push(`require('${binary_module.replace(/'/g, "\\'")}')`);
log.info('validate', `Running test command: '${shell_cmd} ${args.join(' ')}'`);
cp.execFile(shell_cmd, args, options, (err, stdout, stderr) => {
if (err) {
return callback(err, { stdout, stderr });
}
return callback();
});
}

View file

@@ -0,0 +1,44 @@
module.exports = exports = testpackage;
exports.usage = 'Tests that the staged package is valid';
const fs = require('fs');
const path = require('path');
const log = require('npmlog');
const existsAsync = fs.exists || path.exists;
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const testbinary = require('./testbinary.js');
const tar = require('tar');
const makeDir = require('make-dir');
function testpackage(gyp, argv, callback) {
const { package_json } = gyp;
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
const tarball = opts.staged_tarball;
existsAsync(tarball, (found) => {
if (!found) {
return callback(new Error(`Cannot test package because ${tarball} missing: run \`node-pre-gyp package\` first`));
}
const to = opts.module_path;
function filter_func(entry) {
log.info('install', `unpacking [${entry.path}]`);
}
makeDir(to)
.then(() => {
tar
.extract({
file: tarball,
cwd: to,
strip: 1,
onentry: filter_func,
})
.then(after_extract, callback);
})
.catch((err) => {
return callback(err);
});
});
}

File diff suppressed because it is too large

View file

@@ -0,0 +1,89 @@
module.exports = exports;
const fs = require('fs');
const path = require('path');
const win = process.platform === 'win32';
const existsSync = fs.existsSync || path.existsSync;
const cp = require('child_process');
// try to build up the complete path to node-gyp
/* priority:
- node-gyp on ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887)
- node-gyp on NODE_PATH
- node-gyp inside npm on NODE_PATH (ignore on iojs)
- node-gyp inside npm beside node exe
*/
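// Hedged example (the path shown is an assumption): npm exposes its configured
// node-gyp to lifecycle scripts through the npm_config_node_gyp environment
// variable checked first above, so a build could be pointed at a specific
// node-gyp roughly like:
//   npm_config_node_gyp=/usr/local/lib/node_modules/node-gyp/bin/node-gyp.js node-pre-gyp rebuild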
function which_node_gyp() {
let node_gyp_bin;
if (process.env.npm_config_node_gyp) {
try {
node_gyp_bin = process.env.npm_config_node_gyp;
if (existsSync(node_gyp_bin)) {
return node_gyp_bin;
}
} catch (err) {
// do nothing
}
}
try {
const node_gyp_main = require.resolve('node-gyp');
node_gyp_bin = path.join(path.dirname(path.dirname(node_gyp_main)), 'bin/node-gyp.js');
if (existsSync(node_gyp_bin)) {
return node_gyp_bin;
}
} catch (err) {
// do nothing
}
if (process.execPath.indexOf('iojs') === -1) {
try {
const npm_main = require.resolve('npm');
node_gyp_bin = path.join(path.dirname(path.dirname(npm_main)), 'node_modules/node-gyp/bin/node-gyp.js');
if (existsSync(node_gyp_bin)) {
return node_gyp_bin;
}
} catch (err) {
// do nothing
}
}
const npm_base = path.join(path.dirname(path.dirname(process.execPath)), 'lib/node_modules/npm/');
node_gyp_bin = path.join(npm_base, 'node_modules/node-gyp/bin/node-gyp.js');
if (existsSync(node_gyp_bin)) {
return node_gyp_bin;
}
}
module.exports.run_gyp = (args, opts, callback) => {
let shell_cmd = '';
const cmd_args = [];
if (opts.runtime && opts.runtime === 'node-webkit') {
shell_cmd = 'nw-gyp';
if (win) shell_cmd += '.cmd';
} else {
const node_gyp_path = which_node_gyp();
if (node_gyp_path) {
shell_cmd = process.execPath;
cmd_args.push(node_gyp_path);
} else {
shell_cmd = 'node-gyp';
if (win) shell_cmd += '.cmd';
}
}
const final_args = cmd_args.concat(args);
const cmd = cp.spawn(shell_cmd, final_args, {
cwd: undefined,
env: process.env,
stdio: [0, 1, 2],
});
cmd.on('error', (err) => {
if (err) {
return callback(new Error(`Failed to execute '${shell_cmd} ${final_args.join(' ')}' (${err})`));
}
callback(null, opts);
});
cmd.on('close', (code) => {
if (code && code !== 0) {
return callback(new Error(`Failed to execute '${shell_cmd} ${final_args.join(' ')}' (${code})`));
}
callback(null, opts);
});
};

View file

@@ -0,0 +1,99 @@
module.exports = exports = handle_gyp_opts;
const versioning = require('./versioning.js');
const napi = require('./napi.js');
/*
Here we gather node-pre-gyp generated options (from versioning) and pass them along to node-gyp.
We massage the args and options slightly to account for differences in what commands mean between
node-pre-gyp and node-gyp (e.g. see the difference between "build" and "rebuild" below)
Keep in mind: the values inside `argv` and `gyp.opts` below are different depending on whether
node-pre-gyp is called directly, or if it is called in a `run-script` phase of npm.
We also try to preserve any command line options that might have been passed to npm or node-pre-gyp.
But this is fairly difficult without passing way too much through. For example `gyp.opts` contains all
the process.env and npm pushes a lot of variables into process.env which node-pre-gyp inherits. So we have
to be very selective about what we pass through.
For example:
`npm install --build-from-source` will give:
argv == [ 'rebuild' ]
gyp.opts.argv == { remain: [ 'install' ],
cooked: [ 'install', '--fallback-to-build' ],
original: [ 'install', '--fallback-to-build' ] }
`./bin/node-pre-gyp build` will give:
argv == []
gyp.opts.argv == { remain: [ 'build' ],
cooked: [ 'build' ],
original: [ '-C', 'test/app1', 'build' ] }
*/
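// Illustrative sketch of the result object assembled below (values are
// assumptions for a hypothetical invocation, not captured output):
//   node-pre-gyp configure --debug -- -Dcustom_define=1
// might produce roughly:
//   {
//     opts: { ...versioning info from versioning.evaluate()... },
//     gyp: ['--debug'],                // remaining --options passed to node-gyp
//     pre: ['--module_name=foo', ...], // node-pre-gyp versioning options
//     unparsed: ['-Dcustom_define=1'], // everything after the bare --
//   }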
// select set of node-pre-gyp versioning info
// to share with node-gyp
const share_with_node_gyp = [
'module',
'module_name',
'module_path',
'napi_version',
'node_abi_napi',
'napi_build_version',
'node_napi_label',
];
function handle_gyp_opts(gyp, argv, callback) {
// Collect node-pre-gyp specific variables to pass to node-gyp
const node_pre_gyp_options = [];
// generate custom node-pre-gyp versioning info
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
const opts = versioning.evaluate(gyp.package_json, gyp.opts, napi_build_version);
share_with_node_gyp.forEach((key) => {
const val = opts[key];
if (val) {
node_pre_gyp_options.push(`--${key}=${val}`);
} else if (key === 'napi_build_version') {
node_pre_gyp_options.push(`--${key}=0`);
} else if (key !== 'napi_version' && key !== 'node_abi_napi')
return callback(new Error(`Option ${key} required but not found by node-pre-gyp`));
});
// Collect options that follow the special -- which disables nopt parsing
const unparsed_options = [];
let double_hyphen_found = false;
gyp.opts.argv.original.forEach((opt) => {
if (double_hyphen_found) {
unparsed_options.push(opt);
}
if (opt === '--') {
double_hyphen_found = true;
}
});
// We try to respect and pass through remaining command
// line options (like --foo=bar) to node-gyp
const { cooked } = gyp.opts.argv;
const node_gyp_options = [];
cooked.forEach((value) => {
if (value.length > 2 && value.slice(0, 2) === '--') {
const key = value.slice(2);
const val = cooked[cooked.indexOf(value) + 1];
if (val && val.indexOf('--') === -1) {
// handle '--foo=bar' or ['--foo','bar']
node_gyp_options.push(`--${key}=${val}`);
} else {
// pass through --foo
node_gyp_options.push(value);
}
}
});
const result = { opts, gyp: node_gyp_options, pre: node_pre_gyp_options, unparsed: unparsed_options };
return callback(null, result);
}

211
node_modules/@discordjs/node-pre-gyp/lib/util/napi.js generated vendored Normal file
View file

@@ -0,0 +1,211 @@
const fs = require('fs');
module.exports = exports;
const versionArray = process.version
.substr(1)
.replace(/-.*$/, '')
.split('.')
.map((item) => {
return Number(item);
});
const napi_multiple_commands = ['build', 'clean', 'configure', 'package', 'reveal', 'testbinary', 'testpackage'];
const napi_build_version_tag = 'napi_build_version=';
function pathOK(path) {
return path && (path.indexOf('{napi_build_version}') !== -1 || path.indexOf('{node_napi_label}') !== -1);
}
module.exports.get_napi_version = () => {
// returns the non-zero numeric napi version or undefined if napi is not supported.
// correctly supporting target requires an updated cross-walk
let version = process.versions.napi; // can be undefined
if (!version) {
// this code should never need to be updated
if (versionArray[0] === 9 && versionArray[1] >= 3) version = 2;
// 9.3.0+
else if (versionArray[0] === 8) version = 1; // 8.0.0+
}
return version;
};
module.exports.get_napi_version_as_string = (target) => {
// returns the napi version as a string or an empty string if napi is not supported.
const version = module.exports.get_napi_version(target);
return version ? `${version}` : '';
};
module.exports.validate_package_json = function (package_json, opts) {
// throws Error
const { binary } = package_json;
const module_path_ok = pathOK(binary.module_path);
const remote_path_ok = pathOK(binary.remote_path);
const package_name_ok = pathOK(binary.package_name);
const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts, true);
const napi_build_versions_raw = module.exports.get_napi_build_versions_raw(package_json);
if (napi_build_versions) {
napi_build_versions.forEach((napi_build_version) => {
if (!(parseInt(napi_build_version, 10) === napi_build_version && napi_build_version > 0)) {
throw new Error('All values specified in napi_versions must be positive integers.');
}
});
}
if (napi_build_versions && (!module_path_ok || (!remote_path_ok && !package_name_ok))) {
throw new Error(
'When napi_versions is specified; module_path and either remote_path or ' +
"package_name must contain the substitution string '{napi_build_version}'.",
);
}
if ((module_path_ok || remote_path_ok || package_name_ok) && !napi_build_versions_raw) {
throw new Error(
"When the substitution string '{napi_build_version}' is specified in " +
'module_path, remote_path, or package_name; napi_versions must also be specified.',
);
}
if (
napi_build_versions &&
!module.exports.get_best_napi_build_version(package_json, opts) &&
module.exports.build_napi_only(package_json)
) {
throw new Error(
`The Node-API version of this Node instance is ${module.exports.get_napi_version(
opts ? opts.target : undefined,
)}. ` +
`This module supports Node-API version(s) ${module.exports.get_napi_build_versions_raw(package_json)}. ` +
`This Node instance cannot run this module.`,
);
}
if (napi_build_versions_raw && !napi_build_versions && module.exports.build_napi_only(package_json)) {
throw new Error(
`The Node-API version of this Node instance is ${module.exports.get_napi_version(
opts ? opts.target : undefined,
)}. ` +
`This module supports Node-API version(s) ${module.exports.get_napi_build_versions_raw(package_json)}. ` +
`This Node instance cannot run this module.`,
);
}
};
module.exports.expand_commands = (package_json, opts, commands) => {
const expanded_commands = [];
const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts);
commands.forEach((command) => {
if (napi_build_versions && command.name === 'install') {
const napi_build_version = module.exports.get_best_napi_build_version(package_json, opts);
const args = napi_build_version ? [napi_build_version_tag + napi_build_version] : [];
expanded_commands.push({ name: command.name, args });
} else if (napi_build_versions && napi_multiple_commands.indexOf(command.name) !== -1) {
napi_build_versions.forEach((napi_build_version) => {
const args = command.args.slice();
args.push(napi_build_version_tag + napi_build_version);
expanded_commands.push({ name: command.name, args });
});
} else {
expanded_commands.push(command);
}
});
return expanded_commands;
};
module.exports.get_napi_build_versions = (package_json, opts, warnings) => {
// opts may be undefined
const log = require('npmlog');
let napi_build_versions = [];
const supported_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined);
// remove duplicates, verify each napi version can actually be built
if (package_json.binary && package_json.binary.napi_versions) {
package_json.binary.napi_versions.forEach((napi_version) => {
const duplicated = napi_build_versions.indexOf(napi_version) !== -1;
if (!duplicated && supported_napi_version && napi_version <= supported_napi_version) {
napi_build_versions.push(napi_version);
} else if (warnings && !duplicated && supported_napi_version) {
log.info('This Node instance does not support builds for N-API version', napi_version);
}
});
}
if (opts && opts['build-latest-napi-version-only']) {
let latest_version = 0;
napi_build_versions.forEach((napi_version) => {
if (napi_version > latest_version) latest_version = napi_version;
});
napi_build_versions = latest_version ? [latest_version] : [];
}
return napi_build_versions.length ? napi_build_versions : undefined;
};
module.exports.get_napi_build_versions_raw = (package_json) => {
const napi_build_versions = [];
// remove duplicates
if (package_json.binary && package_json.binary.napi_versions) {
package_json.binary.napi_versions.forEach((napi_version) => {
if (napi_build_versions.indexOf(napi_version) === -1) {
napi_build_versions.push(napi_version);
}
});
}
return napi_build_versions.length ? napi_build_versions : undefined;
};
module.exports.get_command_arg = (napi_build_version) => {
return napi_build_version_tag + napi_build_version;
};
module.exports.get_napi_build_version_from_command_args = (command_args) => {
for (let i = 0; i < command_args.length; i++) {
const arg = command_args[i];
if (arg.indexOf(napi_build_version_tag) === 0) {
return parseInt(arg.substr(napi_build_version_tag.length), 10);
}
}
return undefined;
};
module.exports.swap_build_dir_out = (napi_build_version) => {
if (napi_build_version) {
const rm = require('rimraf');
rm.sync(module.exports.get_build_dir(napi_build_version));
fs.renameSync('build', module.exports.get_build_dir(napi_build_version));
}
};
module.exports.swap_build_dir_in = (napi_build_version) => {
if (napi_build_version) {
const rm = require('rimraf');
rm.sync('build');
fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build');
}
};
module.exports.get_build_dir = (napi_build_version) => {
return `build-tmp-napi-v${napi_build_version}`;
};
module.exports.get_best_napi_build_version = (package_json, opts) => {
let best_napi_build_version = 0;
const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts);
if (napi_build_versions) {
const our_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined);
napi_build_versions.forEach((napi_build_version) => {
if (napi_build_version > best_napi_build_version && napi_build_version <= our_napi_version) {
best_napi_build_version = napi_build_version;
}
});
}
return best_napi_build_version === 0 ? undefined : best_napi_build_version;
};
module.exports.build_napi_only = (package_json) => {
return (
package_json.binary &&
package_json.binary.package_name &&
package_json.binary.package_name.indexOf('{node_napi_label}') === -1
);
};

View file

@@ -0,0 +1,317 @@
module.exports = exports;
const path = require('path');
const semver = require('semver');
const url = require('url');
const detect_libc = require('detect-libc');
const napi = require('./napi.js');
let abi_crosswalk;
// This is used for unit testing to provide a fake
// ABI crosswalk that emulates one that is not updated
// for the current version
if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) {
abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK);
} else {
abi_crosswalk = require('./abi_crosswalk.json');
}
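// Hedged example (the file path is an assumption): tests can point node-pre-gyp
// at a fake crosswalk to emulate an out-of-date ABI table, e.g.
//   NODE_PRE_GYP_ABI_CROSSWALK=./test/fixtures/fake_crosswalk.json node-pre-gyp reveal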
const major_versions = {};
Object.keys(abi_crosswalk).forEach((v) => {
const major = v.split('.')[0];
if (!major_versions[major]) {
major_versions[major] = v;
}
});
function get_electron_abi(runtime, target_version) {
if (!runtime) {
throw new Error('get_electron_abi requires valid runtime arg');
}
if (typeof target_version === 'undefined') {
// erroneous CLI call
throw new Error('Empty target version is not supported if electron is the target.');
}
// Electron guarantees that patch version update won't break native modules.
const sem_ver = semver.parse(target_version);
return `${runtime}-v${sem_ver.major}.${sem_ver.minor}`;
}
module.exports.get_electron_abi = get_electron_abi;
function get_node_abi(runtime, versions) {
if (!runtime) {
throw new Error('get_node_abi requires valid runtime arg');
}
if (!versions) {
throw new Error('get_node_abi requires valid process.versions object');
}
const sem_ver = semver.parse(versions.node);
if (sem_ver.major === 0 && sem_ver.minor % 2) {
// odd series
// https://github.com/mapbox/node-pre-gyp/issues/124
return `${runtime}-v${versions.node}`;
}
// process.versions.modules added in >= v0.10.4 and v0.11.7
// https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e
return versions.modules
? `${runtime}-v${Number(versions.modules)}`
: `v8-${versions.v8.split('.').slice(0, 2).join('.')}`;
}
module.exports.get_node_abi = get_node_abi;
function get_runtime_abi(runtime, target_version) {
if (!runtime) {
throw new Error('get_runtime_abi requires valid runtime arg');
}
if (runtime === 'electron') {
return get_electron_abi(runtime, target_version || process.versions.electron);
}
if (runtime !== 'node') {
throw new Error(`Unknown Runtime: '${runtime}'`);
}
if (!target_version) {
return get_node_abi(runtime, process.versions);
}
let cross_obj;
// abi_crosswalk generated with ./scripts/abi_crosswalk.js
if (abi_crosswalk[target_version]) {
cross_obj = abi_crosswalk[target_version];
} else {
const target_parts = target_version.split('.').map((i) => {
return Number(i);
});
if (target_parts.length !== 3) {
// parse failed
throw new Error(`Unknown target version: ${target_version}`);
}
/*
The below code tries to infer the last known ABI compatible version
that we have recorded in the abi_crosswalk.json when an exact match
is not possible. The reasons for this to exist are complicated:
- We support passing --target to be able to allow developers to package binaries for versions of node
that are not the same one as they are running. This might also be used in combination with the
--target_arch or --target_platform flags to also package binaries for alternative platforms
- When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node
version that is running in memory
- So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look
this info up for all versions
- But we cannot easily predict what the ABI will be for future, not-yet-released versions
- And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly
by being fully available at install time.
- So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release
need to happen for every node.js/io.js/atom-shell/etc release that might come online if
you want the `--target` flag to keep working for the latest version
- Which is impractical ^^
- Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding.
In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that
only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be
ABI compatible with v0.10.33).
TODO: use semver module instead of custom version parsing
*/
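// Worked example of the fallback below, matching the scenario described above
// (version numbers are illustrative): with --target=0.10.34 and a crosswalk
// that only knows 0.10.33, the stable-series branch walks the patch number
// down until 0.10.33 is found and reuses its ABI, on the assumption that patch
// releases within a stable series are ABI compatible.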
const major = target_parts[0];
let minor = target_parts[1];
let patch = target_parts[2];
// io.js: yeah if node.js ever releases 1.x this will break
// but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616
if (major === 1) {
// look for last release that is the same major version
// e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0
while (true) {
if (minor > 0) --minor;
if (patch > 0) --patch;
const new_iojs_target = `${String(major)}.${minor}.${patch}`;
if (abi_crosswalk[new_iojs_target]) {
cross_obj = abi_crosswalk[new_iojs_target];
console.log(`Warning: node-pre-gyp could not find an exact match for ${target_version}`);
console.log(`Warning: but node-pre-gyp successfully chose ${new_iojs_target} as an ABI compatible target`);
break;
}
if (minor === 0 && patch === 0) {
break;
}
}
} else if (major >= 2) {
// look for last release that is the same major version
if (major_versions[major]) {
cross_obj = abi_crosswalk[major_versions[major]];
console.log(`Warning: node-pre-gyp could not find an exact match for ${target_version}`);
console.log(`Warning: but node-pre-gyp successfully chose ${major_versions[major]} as an ABI compatible target`);
}
} else if (major === 0) {
// node.js
if (target_parts[1] % 2 === 0) {
// for stable/even node.js series
// look for the last release that is the same minor release
// e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0
while (--patch > 0) {
const new_node_target = `${String(major)}.${minor}.${patch}`;
if (abi_crosswalk[new_node_target]) {
cross_obj = abi_crosswalk[new_node_target];
console.log(`Warning: node-pre-gyp could not find an exact match for ${target_version}`);
console.log(`Warning: but node-pre-gyp successfully chose ${new_node_target} as an ABI compatible target`);
break;
}
}
}
}
}
if (!cross_obj) {
throw new Error(`Unsupported target version: ${target_version}`);
}
// emulate process.versions
const versions_obj = {
node: target_version,
v8: `${cross_obj.v8}.0`,
// abi_crosswalk uses 1 for node versions lacking process.versions.modules
// process.versions.modules added in >= v0.10.4 and v0.11.7
modules: cross_obj.node_abi > 1 ? cross_obj.node_abi : undefined,
};
return get_node_abi(runtime, versions_obj);
}
module.exports.get_runtime_abi = get_runtime_abi;
const required_parameters = ['module_name', 'module_path', 'host'];
function validate_config(package_json, opts) {
const msg = `${package_json.name} package.json is not node-pre-gyp ready:\n`;
const missing = [];
if (!package_json.main) {
missing.push('main');
}
if (!package_json.version) {
missing.push('version');
}
if (!package_json.name) {
missing.push('name');
}
if (!package_json.binary) {
missing.push('binary');
}
const o = package_json.binary;
if (o) {
required_parameters.forEach((p) => {
if (!o[p] || typeof o[p] !== 'string') {
missing.push(`binary.${p}`);
}
});
}
if (missing.length >= 1) {
throw new Error(`${msg}package.json must declare these properties: \n${missing.join('\n')}`);
}
if (o) {
// enforce https over http
const { protocol } = url.parse(o.host);
if (protocol === 'http:') {
throw new Error(`'host' protocol (${protocol}) is invalid - only 'https:' is accepted`);
}
}
napi.validate_package_json(package_json, opts);
}
module.exports.validate_config = validate_config;
function eval_template(template, opts) {
Object.keys(opts).forEach((key) => {
const pattern = `{${key}}`;
while (template.indexOf(pattern) > -1) {
template = template.replace(pattern, opts[key]);
}
});
return template;
}
// url.resolve needs single trailing slash
// to behave correctly, otherwise a double slash
// may end up in the url which breaks requests
// and a missing slash may prevent proper joining
function fix_slashes(pathname) {
if (pathname.slice(-1) !== '/') {
return `${pathname}/`;
}
return pathname;
}
// remove double slashes
// note: path.normalize will not work because
// it will convert forward to back slashes
function drop_double_slashes(pathname) {
return pathname.replace(/\/\//g, '/');
}
function get_process_runtime(versions) {
let runtime = 'node';
if (versions.electron) {
runtime = 'electron';
}
return runtime;
}
module.exports.get_process_runtime = get_process_runtime;
const default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
const default_remote_path = '';
module.exports.evaluate = function (package_json, options, napi_build_version) {
options = options || {};
validate_config(package_json, options); // options is a suitable substitute for opts in this case
const v = package_json.version;
const module_version = semver.parse(v);
const runtime = options.runtime || get_process_runtime(process.versions);
const opts = {
name: package_json.name,
configuration: options.debug ? 'Debug' : 'Release',
debug: options.debug,
module_name: package_json.binary.module_name,
version: module_version.version,
prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '',
build: module_version.build.length ? module_version.build.join('.') : '',
major: module_version.major,
minor: module_version.minor,
patch: module_version.patch,
runtime,
node_abi: get_runtime_abi(runtime, options.target),
node_abi_napi: napi.get_napi_version(options.target) ? 'napi' : get_runtime_abi(runtime, options.target),
napi_version: napi.get_napi_version(options.target), // non-zero numeric, undefined if unsupported
napi_build_version: napi_build_version || '',
node_napi_label: napi_build_version ? `napi-v${napi_build_version}` : get_runtime_abi(runtime, options.target),
target: options.target || '',
platform: options.target_platform || process.platform,
target_platform: options.target_platform || process.platform,
arch: options.target_arch || process.arch,
target_arch: options.target_arch || process.arch,
libc: options.target_libc || detect_libc.family || 'unknown',
libc_version: detect_libc.version || 'unknown',
module_main: package_json.main,
toolset: options.toolset || '', // address https://github.com/mapbox/node-pre-gyp/issues/119
};
// support host mirror with npm config `--{module_name}_binary_host_mirror`
// e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25
// > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
const validModuleName = opts.module_name.replace('-', '_');
const host = process.env[`npm_config_${validModuleName}_binary_host_mirror`] || package_json.binary.host;
opts.host = fix_slashes(eval_template(host, opts));
opts.module_path = eval_template(package_json.binary.module_path, opts);
// now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably
if (options.module_root) {
// resolve relative to known module root: works for pre-binding require
opts.module_path = path.join(options.module_root, opts.module_path);
} else {
// resolve relative to current working directory: works for node-pre-gyp commands
opts.module_path = path.resolve(opts.module_path);
}
opts.module = path.join(opts.module_path, `${opts.module_name}.node`);
opts.remote_path = package_json.binary.remote_path
? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path, opts)))
: default_remote_path;
const package_name = package_json.binary.package_name ? package_json.binary.package_name : default_package_name;
opts.package_name = eval_template(package_name, opts);
opts.staged_tarball = path.join('build/stage', opts.remote_path, opts.package_name);
opts.hosted_path = url.resolve(opts.host, opts.remote_path);
opts.hosted_tarball = url.resolve(opts.hosted_path, opts.package_name);
return opts;
};

45
node_modules/@discordjs/node-pre-gyp/package.json generated vendored Normal file
View file

@@ -0,0 +1,45 @@
{
"name": "@discordjs/node-pre-gyp",
"version": "0.4.2",
"description": "Node.js native addon binary install tool",
"main": "./lib/node-pre-gyp.js",
"bin": "./bin/node-pre-gyp",
"author": "Dane Springmeyer <dane@mapbox.com>",
"license": "BSD-3-Clause",
"scripts": {
"lint": "eslint lib scripts",
"lint:fix": "eslint lib scripts --fix",
"update-crosswalk": "node scripts/abi_crosswalk.js"
},
"keywords": [
"native",
"addon",
"module",
"c",
"c++",
"bindings",
"binary"
],
"repository": {
"type": "git",
"url": "https://github.com/discordjs/node-pre-gyp.git"
},
"dependencies": {
"detect-libc": "^1.0.3",
"https-proxy-agent": "^5.0.0",
"make-dir": "^3.1.0",
"node-fetch": "^2.6.5",
"nopt": "^5.0.0",
"npmlog": "^5.0.1",
"rimraf": "^3.0.2",
"semver": "^7.3.5",
"tar": "^6.1.11"
},
"devDependencies": {
"eslint": "^8.2.0",
"eslint-config-aqua": "^9.0.2",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-prettier": "^4.0.0",
"prettier": "^2.4.1"
}
}

View file

@@ -0,0 +1,87 @@
const https = require('https');
const fs = require('fs');
const semver = require('semver');
/*
usage:
node scripts/abi_crosswalk.js
*/
const cross = {};
// IO.js
// thanks to rvagg, this is so simple
// https://github.com/iojs/build/issues/94
https.get('https://iojs.org/download/release/index.json', (res) => {
if (res.statusCode !== 200) {
throw new Error(`server returned ${res.statusCode} for iojs.org`);
}
res.setEncoding('utf8');
let body = '';
res.on('data', (chunk) => {
body += chunk;
});
res.on('end', (err) => {
if (err) throw err;
const releases = JSON.parse(body);
releases.forEach((release) => {
cross[release.version.replace('v', '')] = {
node_abi: Number(release.modules),
v8: release.v8.split('.').slice(0, 2).join('.'),
};
});
});
});
https.get('https://nodejs.org/download/release/index.json', (res) => {
if (res.statusCode !== 200) {
throw new Error(`server returned ${res.statusCode} for nodejs.org`);
}
res.setEncoding('utf8');
let body = '';
res.on('data', (chunk) => {
body += chunk;
});
res.on('end', (err) => {
if (err) throw err;
const releases = JSON.parse(body);
releases.forEach((release) => {
cross[release.version.replace('v', '')] = {
node_abi: Number(release.modules),
v8: release.v8.split('.').slice(0, 2).join('.'),
};
});
});
});
const sortObjectByKey = function (obj) {
const keys = [];
const sorted_obj = {};
for (const key in obj) {
if (Object.hasOwnProperty.call(obj, key)) {
keys.push(key);
}
}
// sort keys
keys.sort((a, b) => {
if (semver.gt(a, b)) {
return 1;
}
return -1;
});
const len = keys.length;
for (let i = 0; i < len; i++) {
const key = keys[i];
sorted_obj[key] = obj[key];
}
return sorted_obj;
};
process.on('exit', (err) => {
if (err) throw err;
fs.writeFileSync('./lib/util/abi_crosswalk.json', JSON.stringify(sortObjectByKey(cross), null, 2));
});

View file

@@ -0,0 +1,28 @@
'use strict';
const semver = require('semver');
const data = require('../lib/util/abi_crosswalk.json');
const targets = {};
Object.keys(data).forEach((v) => {
const o = data[v];
let abi;
if (o.node_abi === 1) {
abi = `v8-${o.v8}`;
} else {
abi = `node-v${o.node_abi}`;
}
if (targets[abi] === undefined) {
targets[abi] = v;
} else {
const cur = targets[abi];
if (semver.gt(v, cur)) {
targets[abi] = v;
}
}
});
Object.keys(targets).forEach((k) => {
const version = targets[k];
console.log(version, data[version]);
});

3
node_modules/@discordjs/opus/.eslintrc.json generated vendored Normal file
View file

@@ -0,0 +1,3 @@
{
"extends": "aqua/prettier/node"
}

1
node_modules/@discordjs/opus/.gitattributes generated vendored Normal file
View file

@@ -0,0 +1 @@
* text=auto eol=lf

View file

@@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
- The use of sexualized language or imagery and unwelcome sexual attention or
advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies within all project spaces, and it also applies when
an individual is representing the project or its community in public spaces.
Examples of representing a project or community include using an official
project e-mail address, posting via an official social media account, or acting
as an appointed representative at an online or offline event. Representation of
a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at https://discord.gg/bRCvFy9. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

View file

@@ -0,0 +1,91 @@
## Git Commit Message Convention
> This is adapted from [Angular's commit convention](https://github.com/conventional-changelog/conventional-changelog/tree/master/packages/conventional-changelog-angular).
#### TL;DR:
Messages must be matched by the following regex:
```js
/^(revert: )?(feat|fix|docs|style|refactor|perf|test|workflow|build|ci|chore|types|wip)(\(.+\))?: .{1,72}/;
```
#### Examples
Appears under "Features" header, `GuildMember` subheader:
```
feat(GuildMember): add 'tag' method
```
Appears under "Bug Fixes" header, `Guild` subheader, with a link to issue #28:
```
fix(Guild): handle events correctly
close #28
```
Appears under "Performance Improvements" header, and under "Breaking Changes" with the breaking change explanation:
```
perf(core): improve patching by removing 'bar' option
BREAKING CHANGE: The 'bar' option has been removed.
```
The following commit and commit `667ecc1` do not appear in the changelog if they are under the same release. If not, the revert commit appears under the "Reverts" header.
```
revert: feat(Managers): add Managers
This reverts commit 667ecc1654a317a13331b17617d973392f415f02.
```
### Full Message Format
A commit message consists of a **header**, **body** and **footer**. The header has a **type**, **scope** and **subject**:
```
<type>(<scope>): <subject>
<BLANK LINE>
<body>
<BLANK LINE>
<footer>
```
The **header** is mandatory and the **scope** of the header is optional.
### Revert
If the commit reverts a previous commit, it should begin with `revert:`, followed by the header of the reverted commit. In the body, it should say: `This reverts commit <hash>.`, where the hash is the SHA of the commit being reverted.
### Type
If the prefix is `feat`, `fix` or `perf`, it will appear in the changelog. However, if there is any [BREAKING CHANGE](#footer), the commit will always appear in the changelog.
Other prefixes are up to your discretion. Suggested prefixes are `docs`, `chore`, `style`, `refactor`, and `test` for non-changelog related tasks.
### Scope
The scope could be anything specifying the place of the commit change. For example `GuildMember`, `Guild`, `Message`, `MessageEmbed` etc...
### Subject
The subject contains a succinct description of the change:
- use the imperative, present tense: "change" not "changed" nor "changes"
- don't capitalize the first letter
- no dot (.) at the end
### Body
Just as in the **subject**, use the imperative, present tense: "change" not "changed" nor "changes".
The body should include the motivation for the change and contrast this with previous behavior.
### Footer
The footer should contain any information about **Breaking Changes** and is also the place to
reference GitHub issues that this commit **Closes**.
**Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines. The rest of the commit message is then used for this.
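As an illustrative, entirely hypothetical example that combines header, body and footer (the scope and issue number are made up):
```
feat(Player): replace 'volume' option with 'gain'

Volume handling now uses a linear gain value to match the underlying
encoder, which simplifies resampling.

BREAKING CHANGE: The 'volume' option has been removed; use 'gain' instead.
Closes #123
```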

Some files were not shown because too many files have changed in this diff